Auto merge of #56258 - euclio:fs-read-write, r=euclio
use top level `fs` functions where appropriate This commit replaces many usages of `File::open` and reading or writing with `fs::read_to_string`, `fs::read` and `fs::write`. This reduces code complexity, and will improve performance for most reads, since the functions allocate the buffer to be the size of the file. I believe that this commit does not change behavior in any way, though to preserve it some matches now check the error kind so that a file that was not valid UTF-8 is still reported distinctly. Some of these cases may not actually care about the error.
This commit is contained in:
commit
0a77980796
26 changed files with 137 additions and 235 deletions
|
@ -18,7 +18,7 @@
|
||||||
|
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::BufReader;
|
use std::io::BufReader;
|
||||||
use std::io::prelude::*;
|
use std::io::prelude::*;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
@ -707,7 +707,7 @@ impl Step for CodegenBackend {
|
||||||
}
|
}
|
||||||
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
|
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
|
||||||
let codegen_backend = codegen_backend.to_str().unwrap();
|
let codegen_backend = codegen_backend.to_str().unwrap();
|
||||||
t!(t!(File::create(&stamp)).write_all(codegen_backend.as_bytes()));
|
t!(fs::write(&stamp, &codegen_backend));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -796,8 +796,7 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder,
|
||||||
|
|
||||||
for backend in builder.config.rust_codegen_backends.iter() {
|
for backend in builder.config.rust_codegen_backends.iter() {
|
||||||
let stamp = codegen_backend_stamp(builder, compiler, target, *backend);
|
let stamp = codegen_backend_stamp(builder, compiler, target, *backend);
|
||||||
let mut dylib = String::new();
|
let dylib = t!(fs::read_to_string(&stamp));
|
||||||
t!(t!(File::open(&stamp)).read_to_string(&mut dylib));
|
|
||||||
let file = Path::new(&dylib);
|
let file = Path::new(&dylib);
|
||||||
let filename = file.file_name().unwrap().to_str().unwrap();
|
let filename = file.file_name().unwrap().to_str().unwrap();
|
||||||
// change `librustc_codegen_llvm-xxxxxx.so` to `librustc_codegen_llvm-llvm.so`
|
// change `librustc_codegen_llvm-xxxxxx.so` to `librustc_codegen_llvm-llvm.so`
|
||||||
|
@ -1137,10 +1136,7 @@ pub fn run_cargo(builder: &Builder,
|
||||||
// contents (the list of files to copy) is different or if any dep's mtime
|
// contents (the list of files to copy) is different or if any dep's mtime
|
||||||
// is newer then we rewrite the stamp file.
|
// is newer then we rewrite the stamp file.
|
||||||
deps.sort();
|
deps.sort();
|
||||||
let mut stamp_contents = Vec::new();
|
let stamp_contents = fs::read(stamp);
|
||||||
if let Ok(mut f) = File::open(stamp) {
|
|
||||||
t!(f.read_to_end(&mut stamp_contents));
|
|
||||||
}
|
|
||||||
let stamp_mtime = mtime(&stamp);
|
let stamp_mtime = mtime(&stamp);
|
||||||
let mut new_contents = Vec::new();
|
let mut new_contents = Vec::new();
|
||||||
let mut max = None;
|
let mut max = None;
|
||||||
|
@ -1156,7 +1152,10 @@ pub fn run_cargo(builder: &Builder,
|
||||||
}
|
}
|
||||||
let max = max.unwrap();
|
let max = max.unwrap();
|
||||||
let max_path = max_path.unwrap();
|
let max_path = max_path.unwrap();
|
||||||
if stamp_contents == new_contents && max <= stamp_mtime {
|
let contents_equal = stamp_contents
|
||||||
|
.map(|contents| contents == new_contents)
|
||||||
|
.unwrap_or_default();
|
||||||
|
if contents_equal && max <= stamp_mtime {
|
||||||
builder.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
|
builder.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
|
||||||
stamp, max, stamp_mtime));
|
stamp, max, stamp_mtime));
|
||||||
return deps
|
return deps
|
||||||
|
@ -1166,7 +1165,7 @@ pub fn run_cargo(builder: &Builder,
|
||||||
} else {
|
} else {
|
||||||
builder.verbose(&format!("updating {:?} as deps changed", stamp));
|
builder.verbose(&format!("updating {:?} as deps changed", stamp));
|
||||||
}
|
}
|
||||||
t!(t!(File::create(stamp)).write_all(&new_contents));
|
t!(fs::write(&stamp, &new_contents));
|
||||||
deps
|
deps
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -15,8 +15,7 @@
|
||||||
|
|
||||||
use std::collections::{HashMap, HashSet};
|
use std::collections::{HashMap, HashSet};
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::prelude::*;
|
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::process;
|
use std::process;
|
||||||
use std::cmp;
|
use std::cmp;
|
||||||
|
@ -416,9 +415,7 @@ impl Config {
|
||||||
config.run_host_only = !(flags.host.is_empty() && !flags.target.is_empty());
|
config.run_host_only = !(flags.host.is_empty() && !flags.target.is_empty());
|
||||||
|
|
||||||
let toml = file.map(|file| {
|
let toml = file.map(|file| {
|
||||||
let mut f = t!(File::open(&file));
|
let contents = t!(fs::read_to_string(&file));
|
||||||
let mut contents = String::new();
|
|
||||||
t!(f.read_to_string(&mut contents));
|
|
||||||
match toml::from_str(&contents) {
|
match toml::from_str(&contents) {
|
||||||
Ok(table) => table,
|
Ok(table) => table,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
|
|
|
@ -19,8 +19,8 @@
|
||||||
//! pieces of `rustup.rs`!
|
//! pieces of `rustup.rs`!
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::{Read, Write};
|
use std::io::Write;
|
||||||
use std::path::{PathBuf, Path};
|
use std::path::{PathBuf, Path};
|
||||||
use std::process::{Command, Stdio};
|
use std::process::{Command, Stdio};
|
||||||
|
|
||||||
|
@ -1511,8 +1511,7 @@ impl Step for Extended {
|
||||||
}
|
}
|
||||||
|
|
||||||
let xform = |p: &Path| {
|
let xform = |p: &Path| {
|
||||||
let mut contents = String::new();
|
let mut contents = t!(fs::read_to_string(p));
|
||||||
t!(t!(File::open(p)).read_to_string(&mut contents));
|
|
||||||
if rls_installer.is_none() {
|
if rls_installer.is_none() {
|
||||||
contents = filter(&contents, "rls");
|
contents = filter(&contents, "rls");
|
||||||
}
|
}
|
||||||
|
@ -1523,8 +1522,8 @@ impl Step for Extended {
|
||||||
contents = filter(&contents, "rustfmt");
|
contents = filter(&contents, "rustfmt");
|
||||||
}
|
}
|
||||||
let ret = tmp.join(p.file_name().unwrap());
|
let ret = tmp.join(p.file_name().unwrap());
|
||||||
t!(t!(File::create(&ret)).write_all(contents.as_bytes()));
|
t!(fs::write(&ret, &contents));
|
||||||
return ret
|
ret
|
||||||
};
|
};
|
||||||
|
|
||||||
if target.contains("apple-darwin") {
|
if target.contains("apple-darwin") {
|
||||||
|
@ -1869,8 +1868,7 @@ impl Step for HashSign {
|
||||||
let file = builder.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
|
let file = builder.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
|
||||||
panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
|
panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
|
||||||
});
|
});
|
||||||
let mut pass = String::new();
|
let pass = t!(fs::read_to_string(&file));
|
||||||
t!(t!(File::open(&file)).read_to_string(&mut pass));
|
|
||||||
|
|
||||||
let today = output(Command::new("date").arg("+%Y-%m-%d"));
|
let today = output(Command::new("date").arg("+%Y-%m-%d"));
|
||||||
|
|
||||||
|
|
|
@ -18,8 +18,7 @@
|
||||||
//! `rustdoc`.
|
//! `rustdoc`.
|
||||||
|
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::prelude::*;
|
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::path::{PathBuf, Path};
|
use std::path::{PathBuf, Path};
|
||||||
|
|
||||||
|
@ -379,12 +378,11 @@ impl Step for Standalone {
|
||||||
let version_info = out.join("version_info.html");
|
let version_info = out.join("version_info.html");
|
||||||
|
|
||||||
if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
|
if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
|
||||||
let mut info = String::new();
|
let info = t!(fs::read_to_string(&version_input))
|
||||||
t!(t!(File::open(&version_input)).read_to_string(&mut info));
|
.replace("VERSION", &builder.rust_release())
|
||||||
let info = info.replace("VERSION", &builder.rust_release())
|
.replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
|
||||||
.replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
|
.replace("STAMP", builder.rust_info.sha().unwrap_or(""));
|
||||||
.replace("STAMP", builder.rust_info.sha().unwrap_or(""));
|
t!(fs::write(&version_info, &info));
|
||||||
t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
|
for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
|
||||||
|
|
|
@ -1067,9 +1067,8 @@ impl Build {
|
||||||
|
|
||||||
/// Returns the `a.b.c` version that the given package is at.
|
/// Returns the `a.b.c` version that the given package is at.
|
||||||
fn release_num(&self, package: &str) -> String {
|
fn release_num(&self, package: &str) -> String {
|
||||||
let mut toml = String::new();
|
|
||||||
let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
|
let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
|
||||||
t!(t!(File::open(toml_file_name)).read_to_string(&mut toml));
|
let toml = t!(fs::read_to_string(&toml_file_name));
|
||||||
for line in toml.lines() {
|
for line in toml.lines() {
|
||||||
let prefix = "version = \"";
|
let prefix = "version = \"";
|
||||||
let suffix = "\"";
|
let suffix = "\"";
|
||||||
|
@ -1151,8 +1150,7 @@ impl Build {
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut paths = Vec::new();
|
let mut paths = Vec::new();
|
||||||
let mut contents = Vec::new();
|
let contents = t!(fs::read(stamp));
|
||||||
t!(t!(File::open(stamp)).read_to_end(&mut contents));
|
|
||||||
// This is the method we use for extracting paths from the stamp file passed to us. See
|
// This is the method we use for extracting paths from the stamp file passed to us. See
|
||||||
// run_cargo for more information (in compile.rs).
|
// run_cargo for more information (in compile.rs).
|
||||||
for part in contents.split(|b| *b == 0) {
|
for part in contents.split(|b| *b == 0) {
|
||||||
|
|
|
@ -21,7 +21,6 @@
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::ffi::OsString;
|
use std::ffi::OsString;
|
||||||
use std::fs::{self, File};
|
use std::fs::{self, File};
|
||||||
use std::io::{Read, Write};
|
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
|
@ -75,8 +74,7 @@ impl Step for Llvm {
|
||||||
}
|
}
|
||||||
|
|
||||||
let rebuild_trigger = builder.src.join("src/rustllvm/llvm-rebuild-trigger");
|
let rebuild_trigger = builder.src.join("src/rustllvm/llvm-rebuild-trigger");
|
||||||
let mut rebuild_trigger_contents = String::new();
|
let rebuild_trigger_contents = t!(fs::read_to_string(&rebuild_trigger));
|
||||||
t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
|
|
||||||
|
|
||||||
let (out_dir, llvm_config_ret_dir) = if emscripten {
|
let (out_dir, llvm_config_ret_dir) = if emscripten {
|
||||||
let dir = builder.emscripten_llvm_out(target);
|
let dir = builder.emscripten_llvm_out(target);
|
||||||
|
@ -93,8 +91,7 @@ impl Step for Llvm {
|
||||||
let build_llvm_config = llvm_config_ret_dir
|
let build_llvm_config = llvm_config_ret_dir
|
||||||
.join(exe("llvm-config", &*builder.config.build));
|
.join(exe("llvm-config", &*builder.config.build));
|
||||||
if done_stamp.exists() {
|
if done_stamp.exists() {
|
||||||
let mut done_contents = String::new();
|
let done_contents = t!(fs::read_to_string(&done_stamp));
|
||||||
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
|
|
||||||
|
|
||||||
// If LLVM was already built previously and contents of the rebuild-trigger file
|
// If LLVM was already built previously and contents of the rebuild-trigger file
|
||||||
// didn't change from the previous build, then no action is required.
|
// didn't change from the previous build, then no action is required.
|
||||||
|
@ -261,7 +258,7 @@ impl Step for Llvm {
|
||||||
|
|
||||||
cfg.build();
|
cfg.build();
|
||||||
|
|
||||||
t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
|
t!(fs::write(&done_stamp, &rebuild_trigger_contents));
|
||||||
|
|
||||||
build_llvm_config
|
build_llvm_config
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,8 +21,7 @@
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::ffi::{OsString, OsStr};
|
use std::ffi::{OsString, OsStr};
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::Read;
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
|
@ -235,9 +234,7 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
|
||||||
}
|
}
|
||||||
|
|
||||||
if build.config.channel == "stable" {
|
if build.config.channel == "stable" {
|
||||||
let mut stage0 = String::new();
|
let stage0 = t!(fs::read_to_string(build.src.join("src/stage0.txt")));
|
||||||
t!(t!(File::open(build.src.join("src/stage0.txt")))
|
|
||||||
.read_to_string(&mut stage0));
|
|
||||||
if stage0.contains("\ndev:") {
|
if stage0.contains("\ndev:") {
|
||||||
panic!("bootstrapping from a dev compiler in a stable release, but \
|
panic!("bootstrapping from a dev compiler in a stable release, but \
|
||||||
should only be bootstrapping from a released compiler!");
|
should only be bootstrapping from a released compiler!");
|
||||||
|
|
|
@ -16,8 +16,7 @@
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::ffi::OsString;
|
use std::ffi::OsString;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::fs::{self, File};
|
use std::fs;
|
||||||
use std::io::Read;
|
|
||||||
use std::iter;
|
use std::iter;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
@ -1427,10 +1426,8 @@ impl Step for ErrorIndex {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool {
|
fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool {
|
||||||
match File::open(markdown) {
|
match fs::read_to_string(markdown) {
|
||||||
Ok(mut file) => {
|
Ok(contents) => {
|
||||||
let mut contents = String::new();
|
|
||||||
t!(file.read_to_string(&mut contents));
|
|
||||||
if !contents.contains("```") {
|
if !contents.contains("```") {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
|
@ -327,7 +327,8 @@ pub trait Into<T>: Sized {
|
||||||
/// An example usage for error handling:
|
/// An example usage for error handling:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// use std::io::{self, Read};
|
/// use std::fs;
|
||||||
|
/// use std::io;
|
||||||
/// use std::num;
|
/// use std::num;
|
||||||
///
|
///
|
||||||
/// enum CliError {
|
/// enum CliError {
|
||||||
|
@ -348,9 +349,7 @@ pub trait Into<T>: Sized {
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
/// fn open_and_parse_file(file_name: &str) -> Result<i32, CliError> {
|
/// fn open_and_parse_file(file_name: &str) -> Result<i32, CliError> {
|
||||||
/// let mut file = std::fs::File::open("test")?;
|
/// let mut contents = fs::read_to_string(&file_name)?;
|
||||||
/// let mut contents = String::new();
|
|
||||||
/// file.read_to_string(&mut contents)?;
|
|
||||||
/// let num: i32 = contents.trim().parse()?;
|
/// let num: i32 = contents.trim().parse()?;
|
||||||
/// Ok(num)
|
/// Ok(num)
|
||||||
/// }
|
/// }
|
||||||
|
|
|
@ -31,9 +31,8 @@ use std::borrow::Cow;
|
||||||
use std::collections::hash_map::Entry::Vacant;
|
use std::collections::hash_map::Entry::Vacant;
|
||||||
use std::collections::btree_map::BTreeMap;
|
use std::collections::btree_map::BTreeMap;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::io::prelude::*;
|
|
||||||
use std::sync::atomic::{AtomicBool, Ordering};
|
use std::sync::atomic::{AtomicBool, Ordering};
|
||||||
|
|
||||||
fn print_help_message() {
|
fn print_help_message() {
|
||||||
|
@ -268,5 +267,5 @@ fn dump_region_data_to<'a, 'gcx, 'tcx>(region_rels: &RegionRelations<'a, 'gcx, '
|
||||||
debug!("dump_region_data calling render");
|
debug!("dump_region_data calling render");
|
||||||
let mut v = Vec::new();
|
let mut v = Vec::new();
|
||||||
dot::render(&g, &mut v).unwrap();
|
dot::render(&g, &mut v).unwrap();
|
||||||
File::create(path).and_then(|mut f| f.write_all(&v))
|
fs::write(path, &v)
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,8 +22,8 @@
|
||||||
#![feature(box_syntax)]
|
#![feature(box_syntax)]
|
||||||
|
|
||||||
use std::any::Any;
|
use std::any::Any;
|
||||||
use std::io::{self, Write};
|
use std::io::Write;
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::{mpsc, Arc};
|
use std::sync::{mpsc, Arc};
|
||||||
|
|
||||||
|
@ -81,11 +81,7 @@ pub struct NoLlvmMetadataLoader;
|
||||||
|
|
||||||
impl MetadataLoader for NoLlvmMetadataLoader {
|
impl MetadataLoader for NoLlvmMetadataLoader {
|
||||||
fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<MetadataRef, String> {
|
fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<MetadataRef, String> {
|
||||||
let mut file = File::open(filename)
|
let buf = fs::read(filename).map_err(|e| format!("metadata file open err: {:?}", e))?;
|
||||||
.map_err(|e| format!("metadata file open err: {:?}", e))?;
|
|
||||||
|
|
||||||
let mut buf = Vec::new();
|
|
||||||
io::copy(&mut file, &mut buf).unwrap();
|
|
||||||
let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf);
|
let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf);
|
||||||
Ok(rustc_erase_owner!(buf.map_owner_box()))
|
Ok(rustc_erase_owner!(buf.map_owner_box()))
|
||||||
}
|
}
|
||||||
|
@ -209,8 +205,7 @@ impl CodegenBackend for MetadataOnlyCodegenBackend {
|
||||||
} else {
|
} else {
|
||||||
&ongoing_codegen.metadata.raw_data
|
&ongoing_codegen.metadata.raw_data
|
||||||
};
|
};
|
||||||
let mut file = File::create(&output_name).unwrap();
|
fs::write(&output_name, metadata).unwrap();
|
||||||
file.write_all(metadata).unwrap();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
sess.abort_if_errors();
|
sess.abort_if_errors();
|
||||||
|
|
|
@ -778,10 +778,7 @@ fn write_shared(
|
||||||
let mut themes: FxHashSet<String> = FxHashSet::default();
|
let mut themes: FxHashSet<String> = FxHashSet::default();
|
||||||
|
|
||||||
for entry in &cx.shared.themes {
|
for entry in &cx.shared.themes {
|
||||||
let mut content = Vec::with_capacity(100000);
|
let content = try_err!(fs::read(&entry), &entry);
|
||||||
|
|
||||||
let mut f = try_err!(File::open(&entry), &entry);
|
|
||||||
try_err!(f.read_to_end(&mut content), &entry);
|
|
||||||
let theme = try_none!(try_none!(entry.file_stem(), &entry).to_str(), &entry);
|
let theme = try_none!(try_none!(entry.file_stem(), &entry).to_str(), &entry);
|
||||||
let extension = try_none!(try_none!(entry.extension(), &entry).to_str(), &entry);
|
let extension = try_none!(try_none!(entry.extension(), &entry).to_str(), &entry);
|
||||||
write(cx.dst.join(format!("{}{}.{}", theme, cx.shared.resource_suffix, extension)),
|
write(cx.dst.join(format!("{}{}.{}", theme, cx.shared.resource_suffix, extension)),
|
||||||
|
@ -881,10 +878,7 @@ themePicker.onblur = handleThemeButtonsBlur;
|
||||||
if !options.enable_minification {
|
if !options.enable_minification {
|
||||||
try_err!(fs::copy(css, out), css);
|
try_err!(fs::copy(css, out), css);
|
||||||
} else {
|
} else {
|
||||||
let mut f = try_err!(File::open(css), css);
|
let buffer = try_err!(fs::read_to_string(css), css);
|
||||||
let mut buffer = String::with_capacity(1000);
|
|
||||||
|
|
||||||
try_err!(f.read_to_string(&mut buffer), css);
|
|
||||||
write_minify(out, &buffer, options.enable_minification)?;
|
write_minify(out, &buffer, options.enable_minification)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2102,8 +2096,7 @@ impl Context {
|
||||||
if !buf.is_empty() {
|
if !buf.is_empty() {
|
||||||
try_err!(this.shared.ensure_dir(&this.dst), &this.dst);
|
try_err!(this.shared.ensure_dir(&this.dst), &this.dst);
|
||||||
let joint_dst = this.dst.join("index.html");
|
let joint_dst = this.dst.join("index.html");
|
||||||
let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
|
try_err!(fs::write(&joint_dst, buf), &joint_dst);
|
||||||
try_err!(dst.write_all(&buf), &joint_dst);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let m = match item.inner {
|
let m = match item.inner {
|
||||||
|
@ -2137,8 +2130,7 @@ impl Context {
|
||||||
let file_name = &item_path(item_type, name);
|
let file_name = &item_path(item_type, name);
|
||||||
try_err!(self.shared.ensure_dir(&self.dst), &self.dst);
|
try_err!(self.shared.ensure_dir(&self.dst), &self.dst);
|
||||||
let joint_dst = self.dst.join(file_name);
|
let joint_dst = self.dst.join(file_name);
|
||||||
let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
|
try_err!(fs::write(&joint_dst, buf), &joint_dst);
|
||||||
try_err!(dst.write_all(&buf), &joint_dst);
|
|
||||||
|
|
||||||
if !self.render_redirect_pages {
|
if !self.render_redirect_pages {
|
||||||
all.append(full_path(self, &item), &item_type);
|
all.append(full_path(self, &item), &item_type);
|
||||||
|
|
|
@ -9,9 +9,8 @@
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use rustc_data_structures::fx::FxHashSet;
|
use rustc_data_structures::fx::FxHashSet;
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::hash::{Hash, Hasher};
|
use std::hash::{Hash, Hasher};
|
||||||
use std::io::Read;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
use errors::Handler;
|
use errors::Handler;
|
||||||
|
@ -278,12 +277,9 @@ pub fn get_differences(against: &CssPath, other: &CssPath, v: &mut Vec<String>)
|
||||||
pub fn test_theme_against<P: AsRef<Path>>(f: &P, against: &CssPath, diag: &Handler)
|
pub fn test_theme_against<P: AsRef<Path>>(f: &P, against: &CssPath, diag: &Handler)
|
||||||
-> (bool, Vec<String>)
|
-> (bool, Vec<String>)
|
||||||
{
|
{
|
||||||
let mut file = try_something!(File::open(f), diag, (false, Vec::new()));
|
let data = try_something!(fs::read(f), diag, (false, vec![]));
|
||||||
let mut data = Vec::with_capacity(1000);
|
|
||||||
|
|
||||||
try_something!(file.read_to_end(&mut data), diag, (false, Vec::new()));
|
|
||||||
let paths = load_css_paths(&data);
|
let paths = load_css_paths(&data);
|
||||||
let mut ret = Vec::new();
|
let mut ret = vec![];
|
||||||
get_differences(against, &paths, &mut ret);
|
get_differences(against, &paths, &mut ret);
|
||||||
(true, ret)
|
(true, ret)
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,8 +34,8 @@ use tokenstream::{TokenStream, TokenTree};
|
||||||
use visit::{self, Visitor};
|
use visit::{self, Visitor};
|
||||||
|
|
||||||
use rustc_data_structures::fx::FxHashMap;
|
use rustc_data_structures::fx::FxHashMap;
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io::Read;
|
use std::io::ErrorKind;
|
||||||
use std::{iter, mem};
|
use std::{iter, mem};
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
@ -1507,20 +1507,8 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
|
||||||
return noop_fold_attribute(at, self);
|
return noop_fold_attribute(at, self);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut buf = vec![];
|
|
||||||
let filename = self.cx.root_path.join(file.to_string());
|
let filename = self.cx.root_path.join(file.to_string());
|
||||||
|
match fs::read_to_string(&filename) {
|
||||||
match File::open(&filename).and_then(|mut f| f.read_to_end(&mut buf)) {
|
|
||||||
Ok(..) => {}
|
|
||||||
Err(e) => {
|
|
||||||
self.cx.span_err(at.span,
|
|
||||||
&format!("couldn't read {}: {}",
|
|
||||||
filename.display(),
|
|
||||||
e));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match String::from_utf8(buf) {
|
|
||||||
Ok(src) => {
|
Ok(src) => {
|
||||||
let src_interned = Symbol::intern(&src);
|
let src_interned = Symbol::intern(&src);
|
||||||
|
|
||||||
|
@ -1530,21 +1518,34 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
|
||||||
|
|
||||||
let include_info = vec![
|
let include_info = vec![
|
||||||
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
|
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
|
||||||
attr::mk_name_value_item_str(Ident::from_str("file"),
|
attr::mk_name_value_item_str(
|
||||||
dummy_spanned(file)))),
|
Ident::from_str("file"),
|
||||||
|
dummy_spanned(file),
|
||||||
|
),
|
||||||
|
)),
|
||||||
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
|
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
|
||||||
attr::mk_name_value_item_str(Ident::from_str("contents"),
|
attr::mk_name_value_item_str(
|
||||||
dummy_spanned(src_interned)))),
|
Ident::from_str("contents"),
|
||||||
|
dummy_spanned(src_interned),
|
||||||
|
),
|
||||||
|
)),
|
||||||
];
|
];
|
||||||
|
|
||||||
let include_ident = Ident::from_str("include");
|
let include_ident = Ident::from_str("include");
|
||||||
let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info);
|
let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info);
|
||||||
items.push(dummy_spanned(ast::NestedMetaItemKind::MetaItem(item)));
|
items.push(dummy_spanned(ast::NestedMetaItemKind::MetaItem(item)));
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(ref e) if e.kind() == ErrorKind::InvalidData => {
|
||||||
self.cx.span_err(at.span,
|
self.cx.span_err(
|
||||||
&format!("{} wasn't a utf-8 file",
|
at.span,
|
||||||
filename.display()));
|
&format!("{} wasn't a utf-8 file", filename.display()),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
self.cx.span_err(
|
||||||
|
at.span,
|
||||||
|
&format!("couldn't read {}: {}", filename.display(), e),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -21,8 +21,8 @@ use smallvec::SmallVec;
|
||||||
use symbol::Symbol;
|
use symbol::Symbol;
|
||||||
use tokenstream;
|
use tokenstream;
|
||||||
|
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io::prelude::*;
|
use std::io::ErrorKind;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use rustc_data_structures::sync::Lrc;
|
use rustc_data_structures::sync::Lrc;
|
||||||
|
|
||||||
|
@ -137,18 +137,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
|
||||||
None => return DummyResult::expr(sp)
|
None => return DummyResult::expr(sp)
|
||||||
};
|
};
|
||||||
let file = res_rel_file(cx, sp, file);
|
let file = res_rel_file(cx, sp, file);
|
||||||
let mut bytes = Vec::new();
|
match fs::read_to_string(&file) {
|
||||||
match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
|
|
||||||
Ok(..) => {}
|
|
||||||
Err(e) => {
|
|
||||||
cx.span_err(sp,
|
|
||||||
&format!("couldn't read {}: {}",
|
|
||||||
file.display(),
|
|
||||||
e));
|
|
||||||
return DummyResult::expr(sp);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
match String::from_utf8(bytes) {
|
|
||||||
Ok(src) => {
|
Ok(src) => {
|
||||||
let interned_src = Symbol::intern(&src);
|
let interned_src = Symbol::intern(&src);
|
||||||
|
|
||||||
|
@ -157,11 +146,13 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
|
||||||
cx.source_map().new_source_file(file.into(), src);
|
cx.source_map().new_source_file(file.into(), src);
|
||||||
|
|
||||||
base::MacEager::expr(cx.expr_str(sp, interned_src))
|
base::MacEager::expr(cx.expr_str(sp, interned_src))
|
||||||
|
},
|
||||||
|
Err(ref e) if e.kind() == ErrorKind::InvalidData => {
|
||||||
|
cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display()));
|
||||||
|
DummyResult::expr(sp)
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(e) => {
|
||||||
cx.span_err(sp,
|
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
|
||||||
&format!("{} wasn't a utf-8 file",
|
|
||||||
file.display()));
|
|
||||||
DummyResult::expr(sp)
|
DummyResult::expr(sp)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -174,22 +165,23 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
|
||||||
None => return DummyResult::expr(sp)
|
None => return DummyResult::expr(sp)
|
||||||
};
|
};
|
||||||
let file = res_rel_file(cx, sp, file);
|
let file = res_rel_file(cx, sp, file);
|
||||||
let mut bytes = Vec::new();
|
match fs::read(&file) {
|
||||||
match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
|
Ok(bytes) => {
|
||||||
Err(e) => {
|
// Add the contents to the source map if it contains UTF-8.
|
||||||
cx.span_err(sp,
|
let (contents, bytes) = match String::from_utf8(bytes) {
|
||||||
&format!("couldn't read {}: {}", file.display(), e));
|
Ok(s) => {
|
||||||
DummyResult::expr(sp)
|
let bytes = s.as_bytes().to_owned();
|
||||||
}
|
(s, bytes)
|
||||||
Ok(..) => {
|
},
|
||||||
let src = match String::from_utf8(bytes.clone()) {
|
Err(e) => (String::new(), e.into_bytes()),
|
||||||
Ok(contents) => contents,
|
|
||||||
Err(..) => "".to_string()
|
|
||||||
};
|
};
|
||||||
|
cx.source_map().new_source_file(file.into(), contents);
|
||||||
cx.source_map().new_source_file(file.into(), src);
|
|
||||||
|
|
||||||
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
|
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
|
||||||
|
DummyResult::expr(sp)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -31,7 +31,7 @@ use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::{self, Read};
|
use std::io;
|
||||||
use errors::SourceMapper;
|
use errors::SourceMapper;
|
||||||
|
|
||||||
/// Return the span itself if it doesn't come from a macro expansion,
|
/// Return the span itself if it doesn't come from a macro expansion,
|
||||||
|
@ -96,9 +96,7 @@ impl FileLoader for RealFileLoader {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn read_file(&self, path: &Path) -> io::Result<String> {
|
fn read_file(&self, path: &Path) -> io::Result<String> {
|
||||||
let mut src = String::new();
|
fs::read_to_string(path)
|
||||||
fs::File::open(path)?.read_to_string(&mut src)?;
|
|
||||||
Ok(src)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -606,7 +606,7 @@ impl Builder {
|
||||||
|
|
||||||
let filename = path.file_name().unwrap().to_str().unwrap();
|
let filename = path.file_name().unwrap().to_str().unwrap();
|
||||||
let sha256 = self.output.join(format!("{}.sha256", filename));
|
let sha256 = self.output.join(format!("{}.sha256", filename));
|
||||||
t!(t!(File::create(&sha256)).write_all(&sha.stdout));
|
t!(fs::write(&sha256, &sha.stdout));
|
||||||
|
|
||||||
let stdout = String::from_utf8_lossy(&sha.stdout);
|
let stdout = String::from_utf8_lossy(&sha.stdout);
|
||||||
stdout.split_whitespace().next().unwrap().to_string()
|
stdout.split_whitespace().next().unwrap().to_string()
|
||||||
|
@ -643,7 +643,7 @@ impl Builder {
|
||||||
|
|
||||||
fn write(&self, contents: &str, channel_name: &str, suffix: &str) {
|
fn write(&self, contents: &str, channel_name: &str, suffix: &str) {
|
||||||
let dst = self.output.join(format!("channel-rust-{}{}", channel_name, suffix));
|
let dst = self.output.join(format!("channel-rust-{}{}", channel_name, suffix));
|
||||||
t!(t!(File::create(&dst)).write_all(contents.as_bytes()));
|
t!(fs::write(&dst, contents));
|
||||||
self.hash(&dst);
|
self.hash(&dst);
|
||||||
self.sign(&dst);
|
self.sign(&dst);
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,8 +11,7 @@
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io::Write;
|
|
||||||
|
|
||||||
struct Test {
|
struct Test {
|
||||||
repo: &'static str,
|
repo: &'static str,
|
||||||
|
@ -91,10 +90,7 @@ fn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {
|
||||||
println!("testing {}", test.repo);
|
println!("testing {}", test.repo);
|
||||||
let dir = clone_repo(test, out_dir);
|
let dir = clone_repo(test, out_dir);
|
||||||
if let Some(lockfile) = test.lock {
|
if let Some(lockfile) = test.lock {
|
||||||
File::create(&dir.join("Cargo.lock"))
|
fs::write(&dir.join("Cargo.lock"), lockfile).unwrap();
|
||||||
.expect("")
|
|
||||||
.write_all(lockfile.as_bytes())
|
|
||||||
.expect("");
|
|
||||||
}
|
}
|
||||||
if !run_cargo_test(cargo, &dir, test.packages) {
|
if !run_cargo_test(cargo, &dir, test.packages) {
|
||||||
panic!("tests failed for {}", test.repo);
|
panic!("tests failed for {}", test.repo);
|
||||||
|
|
|
@ -38,7 +38,7 @@ use getopts::Options;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::ffi::OsString;
|
use std::ffi::OsString;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::{self, Read};
|
use std::io::{self, ErrorKind};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use test::ColorConfig;
|
use test::ColorConfig;
|
||||||
|
@ -686,13 +686,11 @@ fn up_to_date(
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let stamp_name = stamp(config, testpaths, revision);
|
let stamp_name = stamp(config, testpaths, revision);
|
||||||
// Check hash.
|
// Check hash.
|
||||||
let mut f = match fs::File::open(&stamp_name) {
|
let contents = match fs::read_to_string(&stamp_name) {
|
||||||
Ok(f) => f,
|
Ok(f) => f,
|
||||||
|
Err(ref e) if e.kind() == ErrorKind::InvalidData => panic!("Can't read stamp contents"),
|
||||||
Err(_) => return true,
|
Err(_) => return true,
|
||||||
};
|
};
|
||||||
let mut contents = String::new();
|
|
||||||
f.read_to_string(&mut contents)
|
|
||||||
.expect("Can't read stamp contents");
|
|
||||||
let expected_hash = runtest::compute_stamp_hash(config);
|
let expected_hash = runtest::compute_stamp_hash(config);
|
||||||
if contents != expected_hash {
|
if contents != expected_hash {
|
||||||
return true;
|
return true;
|
||||||
|
|
|
@ -458,11 +458,7 @@ impl<'test> TestCx<'test> {
|
||||||
None => 2,
|
None => 2,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut src = String::new();
|
let src = fs::read_to_string(&self.testpaths.file).unwrap();
|
||||||
File::open(&self.testpaths.file)
|
|
||||||
.unwrap()
|
|
||||||
.read_to_string(&mut src)
|
|
||||||
.unwrap();
|
|
||||||
let mut srcs = vec![src];
|
let mut srcs = vec![src];
|
||||||
|
|
||||||
let mut round = 0;
|
let mut round = 0;
|
||||||
|
@ -500,12 +496,7 @@ impl<'test> TestCx<'test> {
|
||||||
let mut expected = match self.props.pp_exact {
|
let mut expected = match self.props.pp_exact {
|
||||||
Some(ref file) => {
|
Some(ref file) => {
|
||||||
let filepath = self.testpaths.file.parent().unwrap().join(file);
|
let filepath = self.testpaths.file.parent().unwrap().join(file);
|
||||||
let mut s = String::new();
|
fs::read_to_string(&filepath).unwrap()
|
||||||
File::open(&filepath)
|
|
||||||
.unwrap()
|
|
||||||
.read_to_string(&mut s)
|
|
||||||
.unwrap();
|
|
||||||
s
|
|
||||||
}
|
}
|
||||||
None => srcs[srcs.len() - 2].clone(),
|
None => srcs[srcs.len() - 2].clone(),
|
||||||
};
|
};
|
||||||
|
@ -1949,10 +1940,7 @@ impl<'test> TestCx<'test> {
|
||||||
|
|
||||||
fn dump_output_file(&self, out: &str, extension: &str) {
|
fn dump_output_file(&self, out: &str, extension: &str) {
|
||||||
let outfile = self.make_out_name(extension);
|
let outfile = self.make_out_name(extension);
|
||||||
File::create(&outfile)
|
fs::write(&outfile, out).unwrap();
|
||||||
.unwrap()
|
|
||||||
.write_all(out.as_bytes())
|
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a filename for output with the given extension. Example:
|
/// Create a filename for output with the given extension. Example:
|
||||||
|
@ -2149,11 +2137,7 @@ impl<'test> TestCx<'test> {
|
||||||
path: &P,
|
path: &P,
|
||||||
mut other_files: Option<&mut Vec<String>>,
|
mut other_files: Option<&mut Vec<String>>,
|
||||||
) -> Vec<usize> {
|
) -> Vec<usize> {
|
||||||
let mut file =
|
let content = fs::read_to_string(&path).unwrap();
|
||||||
fs::File::open(path).expect("markdown_test_output_check_entry File::open failed");
|
|
||||||
let mut content = String::new();
|
|
||||||
file.read_to_string(&mut content)
|
|
||||||
.expect("markdown_test_output_check_entry read_to_string failed");
|
|
||||||
let mut ignore = false;
|
let mut ignore = false;
|
||||||
content
|
content
|
||||||
.lines()
|
.lines()
|
||||||
|
@ -2826,11 +2810,7 @@ impl<'test> TestCx<'test> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_mir_dump(&self) {
|
fn check_mir_dump(&self) {
|
||||||
let mut test_file_contents = String::new();
|
let test_file_contents = fs::read_to_string(&self.testpaths.file).unwrap();
|
||||||
fs::File::open(self.testpaths.file.clone())
|
|
||||||
.unwrap()
|
|
||||||
.read_to_string(&mut test_file_contents)
|
|
||||||
.unwrap();
|
|
||||||
if let Some(idx) = test_file_contents.find("// END RUST SOURCE") {
|
if let Some(idx) = test_file_contents.find("// END RUST SOURCE") {
|
||||||
let (_, tests_text) = test_file_contents.split_at(idx + "// END_RUST SOURCE".len());
|
let (_, tests_text) = test_file_contents.split_at(idx + "// END_RUST SOURCE".len());
|
||||||
let tests_text_str = String::from(tests_text);
|
let tests_text_str = String::from(tests_text);
|
||||||
|
@ -2894,9 +2874,7 @@ impl<'test> TestCx<'test> {
|
||||||
}
|
}
|
||||||
self.check_mir_test_timestamp(test_name, &output_file);
|
self.check_mir_test_timestamp(test_name, &output_file);
|
||||||
|
|
||||||
let mut dumped_file = fs::File::open(output_file.clone()).unwrap();
|
let dumped_string = fs::read_to_string(&output_file).unwrap();
|
||||||
let mut dumped_string = String::new();
|
|
||||||
dumped_file.read_to_string(&mut dumped_string).unwrap();
|
|
||||||
let mut dumped_lines = dumped_string
|
let mut dumped_lines = dumped_string
|
||||||
.lines()
|
.lines()
|
||||||
.map(|l| nocomment_mir_line(l))
|
.map(|l| nocomment_mir_line(l))
|
||||||
|
@ -3108,19 +3086,13 @@ impl<'test> TestCx<'test> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
|
fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
|
||||||
let mut result = String::new();
|
fs::read_to_string(path).map_err(|err| {
|
||||||
match File::open(path).and_then(|mut f| f.read_to_string(&mut result)) {
|
format!("failed to load expected output from `{}`: {}", path.display(), err)
|
||||||
Ok(_) => Ok(result),
|
})
|
||||||
Err(e) => Err(format!(
|
|
||||||
"failed to load expected output from `{}`: {}",
|
|
||||||
path.display(),
|
|
||||||
e
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn delete_file(&self, file: &PathBuf) {
|
fn delete_file(&self, file: &PathBuf) {
|
||||||
if let Err(e) = ::std::fs::remove_file(file) {
|
if let Err(e) = fs::remove_file(file) {
|
||||||
self.fatal(&format!(
|
self.fatal(&format!(
|
||||||
"failed to delete `{}`: {}",
|
"failed to delete `{}`: {}",
|
||||||
file.display(),
|
file.display(),
|
||||||
|
@ -3182,16 +3154,13 @@ impl<'test> TestCx<'test> {
|
||||||
for output_file in &files {
|
for output_file in &files {
|
||||||
if actual.is_empty() {
|
if actual.is_empty() {
|
||||||
self.delete_file(output_file);
|
self.delete_file(output_file);
|
||||||
} else {
|
} else if let Err(err) = fs::write(&output_file, &actual) {
|
||||||
match File::create(&output_file).and_then(|mut f| f.write_all(actual.as_bytes())) {
|
self.fatal(&format!(
|
||||||
Ok(()) => {}
|
"failed to write {} to `{}`: {}",
|
||||||
Err(e) => self.fatal(&format!(
|
kind,
|
||||||
"failed to write {} to `{}`: {}",
|
output_file.display(),
|
||||||
kind,
|
err,
|
||||||
output_file.display(),
|
));
|
||||||
e
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3243,9 +3212,8 @@ impl<'test> TestCx<'test> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_stamp(&self) {
|
fn create_stamp(&self) {
|
||||||
let mut f = File::create(::stamp(&self.config, self.testpaths, self.revision)).unwrap();
|
let stamp = ::stamp(&self.config, self.testpaths, self.revision);
|
||||||
f.write_all(compute_stamp_hash(&self.config).as_bytes())
|
fs::write(&stamp, compute_stamp_hash(&self.config)).unwrap();
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,8 +18,8 @@ extern crate serialize as rustc_serialize;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::error::Error;
|
use std::error::Error;
|
||||||
use std::fs::{read_dir, File};
|
use std::fs::{self, read_dir, File};
|
||||||
use std::io::{Read, Write};
|
use std::io::Write;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
|
@ -210,8 +210,7 @@ fn load_all_errors(metadata_dir: &Path) -> Result<ErrorMetadataMap, Box<dyn Erro
|
||||||
for entry in read_dir(metadata_dir)? {
|
for entry in read_dir(metadata_dir)? {
|
||||||
let path = entry?.path();
|
let path = entry?.path();
|
||||||
|
|
||||||
let mut metadata_str = String::new();
|
let metadata_str = fs::read_to_string(&path)?;
|
||||||
File::open(&path).and_then(|mut f| f.read_to_string(&mut metadata_str))?;
|
|
||||||
|
|
||||||
let some_errors: ErrorMetadataMap = json::decode(&metadata_str)?;
|
let some_errors: ErrorMetadataMap = json::decode(&metadata_str)?;
|
||||||
|
|
||||||
|
|
|
@ -24,13 +24,10 @@ pub fn check(_path: &Path, _bad: &mut bool) {}
|
||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
pub fn check(path: &Path, bad: &mut bool) {
|
pub fn check(path: &Path, bad: &mut bool) {
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::Read;
|
|
||||||
use std::process::{Command, Stdio};
|
use std::process::{Command, Stdio};
|
||||||
use std::os::unix::prelude::*;
|
use std::os::unix::prelude::*;
|
||||||
|
|
||||||
if let Ok(mut file) = fs::File::open("/proc/version") {
|
if let Ok(contents) = fs::read_to_string("/proc/version") {
|
||||||
let mut contents = String::new();
|
|
||||||
file.read_to_string(&mut contents).unwrap();
|
|
||||||
// Probably on Windows Linux Subsystem or Docker via VirtualBox,
|
// Probably on Windows Linux Subsystem or Docker via VirtualBox,
|
||||||
// all files will be marked as executable, so skip checking.
|
// all files will be marked as executable, so skip checking.
|
||||||
if contents.contains("Microsoft") || contents.contains("boot2docker") {
|
if contents.contains("Microsoft") || contents.contains("boot2docker") {
|
||||||
|
|
|
@ -15,8 +15,7 @@
|
||||||
//! `extern crate` declarations. This should help us keep the DAG correctly
|
//! `extern crate` declarations. This should help us keep the DAG correctly
|
||||||
//! structured through various refactorings to prune out unnecessary edges.
|
//! structured through various refactorings to prune out unnecessary edges.
|
||||||
|
|
||||||
use std::io::prelude::*;
|
use std::fs;
|
||||||
use std::fs::File;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
pub fn check(path: &Path, bad: &mut bool) {
|
pub fn check(path: &Path, bad: &mut bool) {
|
||||||
|
@ -41,10 +40,8 @@ pub fn check(path: &Path, bad: &mut bool) {
|
||||||
// Verify that the dependencies in Cargo.toml at `tomlfile` are sync'd with the
|
// Verify that the dependencies in Cargo.toml at `tomlfile` are sync'd with the
|
||||||
// `extern crate` annotations in the lib.rs at `libfile`.
|
// `extern crate` annotations in the lib.rs at `libfile`.
|
||||||
fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
|
fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
|
||||||
let mut toml = String::new();
|
let toml = t!(fs::read_to_string(&tomlfile));
|
||||||
let mut librs = String::new();
|
let librs = t!(fs::read_to_string(&libfile));
|
||||||
t!(t!(File::open(tomlfile)).read_to_string(&mut toml));
|
|
||||||
t!(t!(File::open(libfile)).read_to_string(&mut librs));
|
|
||||||
|
|
||||||
if toml.contains("name = \"bootstrap\"") {
|
if toml.contains("name = \"bootstrap\"") {
|
||||||
return
|
return
|
||||||
|
|
|
@ -11,8 +11,7 @@
|
||||||
//! Check license of third-party deps by inspecting vendor
|
//! Check license of third-party deps by inspecting vendor
|
||||||
|
|
||||||
use std::collections::{BTreeSet, HashSet, HashMap};
|
use std::collections::{BTreeSet, HashSet, HashMap};
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io::Read;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
|
@ -262,8 +261,7 @@ fn check_license(path: &Path) -> bool {
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
panic!("{} does not exist", path.display());
|
panic!("{} does not exist", path.display());
|
||||||
}
|
}
|
||||||
let mut contents = String::new();
|
let contents = t!(fs::read_to_string(&path));
|
||||||
t!(t!(File::open(path)).read_to_string(&mut contents));
|
|
||||||
|
|
||||||
let mut found_license = false;
|
let mut found_license = false;
|
||||||
for line in contents.lines() {
|
for line in contents.lines() {
|
||||||
|
|
|
@ -10,8 +10,7 @@
|
||||||
|
|
||||||
// ! Check for external package sources. Allow only vendorable packages.
|
// ! Check for external package sources. Allow only vendorable packages.
|
||||||
|
|
||||||
use std::fs::File;
|
use std::fs;
|
||||||
use std::io::Read;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
/// List of whitelisted sources for packages
|
/// List of whitelisted sources for packages
|
||||||
|
@ -25,8 +24,7 @@ pub fn check(path: &Path, bad: &mut bool) {
|
||||||
let path = path.join("../Cargo.lock");
|
let path = path.join("../Cargo.lock");
|
||||||
|
|
||||||
// open and read the whole file
|
// open and read the whole file
|
||||||
let mut cargo_lock = String::new();
|
let cargo_lock = t!(fs::read_to_string(&path));
|
||||||
t!(t!(File::open(path)).read_to_string(&mut cargo_lock));
|
|
||||||
|
|
||||||
// process each line
|
// process each line
|
||||||
for line in cargo_lock.lines() {
|
for line in cargo_lock.lines() {
|
||||||
|
|
|
@ -20,7 +20,7 @@
|
||||||
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::fs::File;
|
use std::fs::{self, File};
|
||||||
use std::io::prelude::*;
|
use std::io::prelude::*;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
|
@ -183,9 +183,7 @@ fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn collect_lang_features(base_src_path: &Path, bad: &mut bool) -> Features {
|
pub fn collect_lang_features(base_src_path: &Path, bad: &mut bool) -> Features {
|
||||||
let mut contents = String::new();
|
let contents = t!(fs::read_to_string(base_src_path.join("libsyntax/feature_gate.rs")));
|
||||||
let path = base_src_path.join("libsyntax/feature_gate.rs");
|
|
||||||
t!(t!(File::open(path)).read_to_string(&mut contents));
|
|
||||||
|
|
||||||
// we allow rustc-internal features to omit a tracking issue.
|
// we allow rustc-internal features to omit a tracking issue.
|
||||||
// these features must be marked with `// rustc internal` in its own group.
|
// these features must be marked with `// rustc internal` in its own group.
|
||||||
|
|
Loading…
Add table
Reference in a new issue