Auto merge of #124222 - GuillaumeGomez:rollup-ws1zju7, r=GuillaumeGomez
Rollup of 4 pull requests

Successful merges:

 - #124069 (enable clippy for bootstrap on CI PRs (in `mingw-check` image))
 - #124089 (Fix watchOS and visionOS for pread64 and pwrite64 calls)
 - #124184 (Suggest using `unsigned_abs` in `abs` documentation)
 - #124198 (Flip spans for precise capturing syntax not capturing a ty/const param, and for implicit captures of lifetime params)

r? `@ghost`
`@rustbot` modify labels: rollup
commit aa31bad26b
34 changed files with 303 additions and 246 deletions
@@ -351,7 +351,7 @@ hir_analysis_param_in_ty_of_assoc_const_binding =
 *[normal] the {$param_def_kind} `{$param_name}` is defined here
 }

-hir_analysis_param_not_captured = `impl Trait` must mention all {$kind} parameters in scope
+hir_analysis_param_not_captured = `impl Trait` must mention all {$kind} parameters in scope in `use<...>`
 .label = {$kind} parameter is implicitly captured by this `impl Trait`
 .note = currently, all {$kind} parameters are required to be mentioned in the precise captures list

@@ -405,6 +405,10 @@ hir_analysis_self_in_impl_self =
 `Self` is not valid in the self type of an impl block
 .note = replace `Self` with a different type

+hir_analysis_self_ty_not_captured = `impl Trait` must mention the `Self` type of the trait in `use<...>`
+.label = `Self` type parameter is implicitly captured by this `impl Trait`
+.note = currently, all type parameters are required to be mentioned in the precise captures list
+
 hir_analysis_simd_ffi_highly_experimental = use of SIMD type{$snip} in FFI is highly experimental and may result in invalid code
 .help = add `#![feature(simd_ffi)]` to the crate attributes to enable

@@ -580,10 +580,11 @@ fn check_opaque_precise_captures<'tcx>(tcx: TyCtxt<'tcx>, opaque_def_id: LocalDe

 match param.kind {
 ty::GenericParamDefKind::Lifetime => {
+let use_span = tcx.def_span(param.def_id);
+let opaque_span = tcx.def_span(opaque_def_id);
 // Check if the lifetime param was captured but isn't named in the precise captures list.
 if variances[param.index as usize] == ty::Invariant {
-let param_span = if let DefKind::OpaqueTy =
-tcx.def_kind(tcx.parent(param.def_id))
+if let DefKind::OpaqueTy = tcx.def_kind(tcx.parent(param.def_id))
 && let ty::ReEarlyParam(ty::EarlyParamRegion { def_id, .. })
 | ty::ReLateParam(ty::LateParamRegion {
 bound_region: ty::BoundRegionKind::BrNamed(def_id, _),
@@ -591,26 +592,40 @@ fn check_opaque_precise_captures<'tcx>(tcx: TyCtxt<'tcx>, opaque_def_id: LocalDe
 }) = *tcx
 .map_opaque_lifetime_to_parent_lifetime(param.def_id.expect_local())
 {
-Some(tcx.def_span(def_id))
+tcx.dcx().emit_err(errors::LifetimeNotCaptured {
+opaque_span,
+use_span,
+param_span: tcx.def_span(def_id),
+});
 } else {
-None
-};
-// FIXME(precise_capturing): Structured suggestion for this would be useful
-tcx.dcx().emit_err(errors::LifetimeNotCaptured {
-use_span: tcx.def_span(param.def_id),
-param_span,
-opaque_span: tcx.def_span(opaque_def_id),
-});
+// If the `use_span` is actually just the param itself, then we must
+// have not duplicated the lifetime but captured the original.
+// The "effective" `use_span` will be the span of the opaque itself,
+// and the param span will be the def span of the param.
+tcx.dcx().emit_err(errors::LifetimeNotCaptured {
+opaque_span,
+use_span: opaque_span,
+param_span: use_span,
+});
+}
 continue;
 }
 }
 ty::GenericParamDefKind::Type { .. } => {
-// FIXME(precise_capturing): Structured suggestion for this would be useful
-tcx.dcx().emit_err(errors::ParamNotCaptured {
-param_span: tcx.def_span(param.def_id),
-opaque_span: tcx.def_span(opaque_def_id),
-kind: "type",
-});
+if matches!(tcx.def_kind(param.def_id), DefKind::Trait | DefKind::TraitAlias) {
+// FIXME(precise_capturing): Structured suggestion for this would be useful
+tcx.dcx().emit_err(errors::SelfTyNotCaptured {
+trait_span: tcx.def_span(param.def_id),
+opaque_span: tcx.def_span(opaque_def_id),
+});
+} else {
+// FIXME(precise_capturing): Structured suggestion for this would be useful
+tcx.dcx().emit_err(errors::ParamNotCaptured {
+param_span: tcx.def_span(param.def_id),
+opaque_span: tcx.def_span(opaque_def_id),
+kind: "type",
+});
+}
 }
 ty::GenericParamDefKind::Const { .. } => {
 // FIXME(precise_capturing): Structured suggestion for this would be useful

@@ -6,19 +6,29 @@ use rustc_span::{Span, Symbol};
 #[note]
 pub struct ParamNotCaptured {
 #[primary_span]
-pub param_span: Span,
-#[label]
 pub opaque_span: Span,
+#[label]
+pub param_span: Span,
 pub kind: &'static str,
 }

+#[derive(Diagnostic)]
+#[diag(hir_analysis_self_ty_not_captured)]
+#[note]
+pub struct SelfTyNotCaptured {
+#[primary_span]
+pub opaque_span: Span,
+#[label]
+pub trait_span: Span,
+}
+
 #[derive(Diagnostic)]
 #[diag(hir_analysis_lifetime_not_captured)]
 pub struct LifetimeNotCaptured {
 #[primary_span]
 pub use_span: Span,
 #[label(hir_analysis_param_label)]
-pub param_span: Option<Span>,
+pub param_span: Span,
 #[label]
 pub opaque_span: Span,
 }

@@ -3199,7 +3199,8 @@ macro_rules! int_impl {
 /// that code in debug mode will trigger a panic on this case and
 /// optimized code will return
 #[doc = concat!("`", stringify!($SelfT), "::MIN`")]
-/// without a panic.
+/// without a panic. If you do not want this behavior consider
+/// using [`unsigned_abs`](Self::unsigned_abs) instead.
 ///
 /// # Examples
 ///

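For context, the corner case this documentation change describes can be reproduced with a few lines of standalone Rust (illustrative only, not part of the commit):

    fn main() {
        // `i32::MIN` has no positive counterpart in `i32`, so `abs` overflows:
        // it panics in debug builds and yields `i32::MIN` again when overflow
        // checks are disabled.
        let x = i32::MIN;
        assert_eq!(x.checked_abs(), None);

        // `unsigned_abs` avoids the problem by returning the magnitude as `u32`.
        assert_eq!(x.unsigned_abs(), 2_147_483_648_u32);
        assert_eq!((-5_i32).unsigned_abs(), 5_u32);
    }
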
@@ -45,13 +45,9 @@ const READ_LIMIT: usize = libc::ssize_t::MAX as usize;
 #[cfg(any(
 target_os = "dragonfly",
 target_os = "freebsd",
-target_os = "ios",
-target_os = "tvos",
-target_os = "macos",
 target_os = "netbsd",
 target_os = "openbsd",
-target_os = "watchos",
-target_os = "visionos",
+target_vendor = "apple",
 ))]
 const fn max_iov() -> usize {
 libc::IOV_MAX as usize

@@ -72,17 +68,13 @@ const fn max_iov() -> usize {
 target_os = "dragonfly",
 target_os = "emscripten",
 target_os = "freebsd",
-target_os = "ios",
-target_os = "tvos",
 target_os = "linux",
-target_os = "macos",
 target_os = "netbsd",
 target_os = "nto",
 target_os = "openbsd",
 target_os = "horizon",
 target_os = "vita",
-target_os = "watchos",
-target_os = "visionos",
+target_vendor = "apple",
 )))]
 const fn max_iov() -> usize {
 16 // The minimum value required by POSIX.

@@ -201,13 +193,10 @@ impl FileDesc {
 target_os = "fuchsia",
 target_os = "hurd",
 target_os = "illumos",
-target_os = "ios",
-target_os = "tvos",
 target_os = "linux",
-target_os = "macos",
 target_os = "netbsd",
 target_os = "openbsd",
-target_os = "watchos",
+target_vendor = "apple",
 )))]
 pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> {
 io::default_read_vectored(|b| self.read_at(b, offset), bufs)

@@ -241,15 +230,7 @@ impl FileDesc {
 Ok(ret as usize)
 }

-// We support old MacOS and iOS versions that do not have `preadv`. There is
-// no `syscall` possible in these platform.
-#[cfg(any(
-all(target_os = "android", target_pointer_width = "32"),
-target_os = "ios", // ios 14.0
-target_os = "tvos", // tvos 14.0
-target_os = "macos", // macos 11.0
-target_os = "watchos", // watchos 7.0
-))]
+#[cfg(all(target_os = "android", target_pointer_width = "32"))]
 pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> {
 super::weak::weak!(fn preadv64(libc::c_int, *const libc::iovec, libc::c_int, off64_t) -> isize);

@@ -269,6 +250,35 @@ impl FileDesc {
 }
 }

+// We support old MacOS, iOS, watchOS, tvOS and visionOS. `preadv` was added in the following
+// Apple OS versions:
+// ios 14.0
+// tvos 14.0
+// macos 11.0
+// watchos 7.0
+//
+// These versions may be newer than the minimum supported versions of OS's we support so we must
+// use "weak" linking.
+#[cfg(target_vendor = "apple")]
+pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> {
+super::weak::weak!(fn preadv(libc::c_int, *const libc::iovec, libc::c_int, off64_t) -> isize);
+
+match preadv.get() {
+Some(preadv) => {
+let ret = cvt(unsafe {
+preadv(
+self.as_raw_fd(),
+bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec,
+cmp::min(bufs.len(), max_iov()) as libc::c_int,
+offset as _,
+)
+})?;
+Ok(ret as usize)
+}
+None => io::default_read_vectored(|b| self.read_at(b, offset), bufs),
+}
+}
+
 pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
 let ret = cvt(unsafe {
 libc::write(

@@ -360,13 +370,10 @@ impl FileDesc {
 target_os = "fuchsia",
 target_os = "hurd",
 target_os = "illumos",
-target_os = "ios",
-target_os = "tvos",
 target_os = "linux",
-target_os = "macos",
 target_os = "netbsd",
 target_os = "openbsd",
-target_os = "watchos",
+target_vendor = "apple",
 )))]
 pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize> {
 io::default_write_vectored(|b| self.write_at(b, offset), bufs)

@@ -400,15 +407,7 @@ impl FileDesc {
 Ok(ret as usize)
 }

-// We support old MacOS and iOS versions that do not have `pwritev`. There is
-// no `syscall` possible in these platform.
-#[cfg(any(
-all(target_os = "android", target_pointer_width = "32"),
-target_os = "ios", // ios 14.0
-target_os = "tvos", // tvos 14.0
-target_os = "macos", // macos 11.0
-target_os = "watchos", // watchos 7.0
-))]
+#[cfg(all(target_os = "android", target_pointer_width = "32"))]
 pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize> {
 super::weak::weak!(fn pwritev64(libc::c_int, *const libc::iovec, libc::c_int, off64_t) -> isize);

@@ -428,6 +427,35 @@ impl FileDesc {
 }
 }

+// We support old MacOS, iOS, watchOS, tvOS and visionOS. `pwritev` was added in the following
+// Apple OS versions:
+// ios 14.0
+// tvos 14.0
+// macos 11.0
+// watchos 7.0
+//
+// These versions may be newer than the minimum supported versions of OS's we support so we must
+// use "weak" linking.
+#[cfg(target_vendor = "apple")]
+pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize> {
+super::weak::weak!(fn pwritev(libc::c_int, *const libc::iovec, libc::c_int, off64_t) -> isize);
+
+match pwritev.get() {
+Some(pwritev) => {
+let ret = cvt(unsafe {
+pwritev(
+self.as_raw_fd(),
+bufs.as_ptr() as *const libc::iovec,
+cmp::min(bufs.len(), max_iov()) as libc::c_int,
+offset as _,
+)
+})?;
+Ok(ret as usize)
+}
+None => io::default_write_vectored(|b| self.write_at(b, offset), bufs),
+}
+}
+
 #[cfg(not(any(
 target_env = "newlib",
 target_os = "solaris",

@@ -28,7 +28,7 @@ use crate::ptr;
 use crate::sync::atomic::{self, AtomicPtr, Ordering};

 // We can use true weak linkage on ELF targets.
-#[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "tvos")))]
+#[cfg(all(unix, not(target_vendor = "apple")))]
 pub(crate) macro weak {
 (fn $name:ident($($t:ty),*) -> $ret:ty) => (
 let ref $name: ExternWeak<unsafe extern "C" fn($($t),*) -> $ret> = {

@@ -43,7 +43,7 @@ pub(crate) macro weak {
 }

 // On non-ELF targets, use the dlsym approximation of weak linkage.
-#[cfg(any(target_os = "macos", target_os = "ios", target_os = "tvos"))]
+#[cfg(target_vendor = "apple")]
 pub(crate) use self::dlsym as weak;

 pub(crate) struct ExternWeak<F: Copy> {

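The "dlsym approximation of weak linkage" mentioned above resolves the symbol at run time instead of relying on ELF weak symbols. A rough standalone sketch of the idea, assuming the `libc` crate (illustrative only; the real macro in std caches the lookup and is more careful):

    use std::ffi::CString;

    // Function pointer type of the optional libc symbol we want.
    type Preadv =
        unsafe extern "C" fn(libc::c_int, *const libc::iovec, libc::c_int, libc::off_t) -> isize;

    /// Look up `preadv` at run time; returns `None` on OS versions that lack it.
    fn find_preadv() -> Option<Preadv> {
        let name = CString::new("preadv").unwrap();
        // SAFETY: dlsym only inspects the symbol table; RTLD_DEFAULT searches the
        // images that are already loaded (libSystem on Apple platforms).
        let sym = unsafe { libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) };
        if sym.is_null() {
            None
        } else {
            // SAFETY: if the symbol exists it has exactly this C signature.
            Some(unsafe { std::mem::transmute::<*mut libc::c_void, Preadv>(sym) })
        }
    }
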
@@ -37,6 +37,7 @@ fn main() {

 build_lock = fd_lock::RwLock::new(t!(fs::OpenOptions::new()
 .write(true)
+.truncate(true)
 .create(true)
 .open(&lock_path)));
 _build_lock_guard = match build_lock.try_write() {

@@ -143,8 +144,8 @@ fn check_version(config: &Config) -> Option<String> {
 // then use the one from the config.toml. This way we never show the same warnings
 // more than once.
 if let Ok(t) = fs::read_to_string(&warned_id_path) {
-let last_warned_id =
-usize::from_str(&t).expect(&format!("{} is corrupted.", warned_id_path.display()));
+let last_warned_id = usize::from_str(&t)
+.unwrap_or_else(|_| panic!("{} is corrupted.", warned_id_path.display()));

 // We only use the last_warned_id if it exists in `CONFIG_CHANGE_HISTORY`.
 // Otherwise, we may retrieve all the changes if it's not the highest value.

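The second hunk is one of the clippy fixes enabled by #124069 (likely clippy's `expect_fun_call` lint): `expect(&format!(..))` builds the panic message even on the success path, while `unwrap_or_else(|_| panic!(..))` formats it only on failure. A minimal standalone illustration of the pattern (hypothetical helper, not part of the commit):

    use std::str::FromStr;

    // Parse a previously-warned change id, panicking with a lazily built message.
    fn last_warned_id(text: &str, path: &str) -> usize {
        usize::from_str(text.trim()).unwrap_or_else(|_| panic!("{path} is corrupted."))
    }

    fn main() {
        assert_eq!(last_warned_id("42\n", "bootstrap-warned-change-id"), 42);
    }
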
@@ -181,39 +181,33 @@ fn rm_rf(path: &Path) {
 }
 Ok(metadata) => {
 if metadata.file_type().is_file() || metadata.file_type().is_symlink() {
-do_op(path, "remove file", |p| {
-fs::remove_file(p).or_else(|e| {
-// Work around the fact that we cannot
-// delete an executable while it runs on Windows.
-#[cfg(windows)]
+do_op(path, "remove file", |p| match fs::remove_file(p) {
+#[cfg(windows)]
+Err(e)
 if e.kind() == std::io::ErrorKind::PermissionDenied
 && p.file_name().and_then(std::ffi::OsStr::to_str)
-== Some("bootstrap.exe")
-{
-eprintln!("WARNING: failed to delete '{}'.", p.display());
-return Ok(());
-}
-Err(e)
-})
+== Some("bootstrap.exe") =>
+{
+eprintln!("WARNING: failed to delete '{}'.", p.display());
+Ok(())
+}
+r => r,
 });

 return;
 }

 for file in t!(fs::read_dir(path)) {
 rm_rf(&t!(file).path());
 }
-do_op(path, "remove dir", |p| {
-fs::remove_dir(p).or_else(|e| {
-// Check for dir not empty on Windows
-// FIXME: Once `ErrorKind::DirectoryNotEmpty` is stabilized,
-// match on `e.kind()` instead.
-#[cfg(windows)]
-if e.raw_os_error() == Some(145) {
-return Ok(());
-}
-
-Err(e)
-})
+do_op(path, "remove dir", |p| match fs::remove_dir(p) {
+// Check for dir not empty on Windows
+// FIXME: Once `ErrorKind::DirectoryNotEmpty` is stabilized,
+// match on `e.kind()` instead.
+#[cfg(windows)]
+Err(e) if e.raw_os_error() == Some(145) => Ok(()),
+r => r,
 });
 }
 };

@@ -228,14 +222,14 @@ where
 // On windows we can't remove a readonly file, and git will often clone files as readonly.
 // As a result, we have some special logic to remove readonly files on windows.
 // This is also the reason that we can't use things like fs::remove_dir_all().
-Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => {
+#[cfg(windows)]
+Err(ref e) if e.kind() == ErrorKind::PermissionDenied => {
 let m = t!(path.symlink_metadata());
 let mut p = m.permissions();
 p.set_readonly(false);
 t!(fs::set_permissions(path, p));
 f(path).unwrap_or_else(|e| {
 // Delete symlinked directories on Windows
 #[cfg(windows)]
 if m.file_type().is_symlink() && path.is_dir() && fs::remove_dir(path).is_ok() {
 return;
 }

@@ -1139,7 +1139,7 @@ pub fn rustc_cargo_env(
 // busting caches (e.g. like #71152).
 if builder.config.llvm_enabled(target) {
 let building_is_expensive =
-crate::core::build_steps::llvm::prebuilt_llvm_config(builder, target).is_err();
+crate::core::build_steps::llvm::prebuilt_llvm_config(builder, target).should_build();
 // `top_stage == stage` might be false for `check --stage 1`, if we are building the stage 1 compiler
 let can_skip_build = builder.kind == Kind::Check && builder.top_stage == stage;
 let should_skip_build = building_is_expensive && can_skip_build;

@@ -1282,11 +1282,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
 if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) {
 let mut needs_codegen_backend_config = true;
 for backend in run.builder.config.codegen_backends(run.target) {
-if path
-.path
-.to_str()
-.unwrap()
-.ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + &backend))
+if path.path.to_str().unwrap().ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + backend))
 {
 needs_codegen_backend_config = false;
 }

@@ -1846,7 +1842,7 @@ impl Step for Assemble {
 extra_features: vec![],
 });
 let tool_exe = exe("llvm-bitcode-linker", target_compiler.host);
-builder.copy_link(&src_path, &libdir_bin.join(&tool_exe));
+builder.copy_link(&src_path, &libdir_bin.join(tool_exe));
 }

 // Ensure that `libLLVM.so` ends up in the newly build compiler directory,

@@ -107,7 +107,7 @@ impl Step for JsonDocs {
 builder.top_stage,
 host,
 builder,
-DocumentationFormat::JSON,
+DocumentationFormat::Json,
 ));

 let dest = "share/doc/rust/json";

@@ -1131,7 +1131,7 @@ impl Step for Rls {
 let rls = builder.ensure(tool::Rls { compiler, target, extra_features: Vec::new() });

 let mut tarball = Tarball::new(builder, "rls", &target.triple);
-tarball.set_overlay(OverlayKind::RLS);
+tarball.set_overlay(OverlayKind::Rls);
 tarball.is_preview(true);
 tarball.add_file(rls, "bin", 0o755);
 tarball.add_legal_and_readme_to("share/doc/rls");

@@ -2059,7 +2059,7 @@ fn install_llvm_file(
 if install_symlink {
 // For download-ci-llvm, also install the symlink, to match what LLVM does. Using a
 // symlink is fine here, as this is not a rustup component.
-builder.copy_link(&source, &full_dest);
+builder.copy_link(source, &full_dest);
 } else {
 // Otherwise, replace the symlink with an equivalent linker script. This is used when
 // projects like miri link against librustc_driver.so. We don't use a symlink, as

@@ -2076,7 +2076,7 @@ fn install_llvm_file(
 }
 }
 } else {
-builder.install(&source, destination, 0o644);
+builder.install(source, destination, 0o644);
 }
 }

@@ -2121,7 +2121,7 @@ fn maybe_install_llvm(
 builder.install(&llvm_dylib_path, dst_libdir, 0o644);
 }
 !builder.config.dry_run()
-} else if let Ok(llvm::LlvmResult { llvm_config, .. }) =
+} else if let llvm::LlvmBuildStatus::AlreadyBuilt(llvm::LlvmResult { llvm_config, .. }) =
 llvm::prebuilt_llvm_config(builder, target)
 {
 let mut cmd = Command::new(llvm_config);

@@ -2202,7 +2202,7 @@ impl Step for LlvmTools {
 builder.ensure(crate::core::build_steps::llvm::Llvm { target });

 let mut tarball = Tarball::new(builder, "llvm-tools", &target.triple);
-tarball.set_overlay(OverlayKind::LLVM);
+tarball.set_overlay(OverlayKind::Llvm);
 tarball.is_preview(true);

 if builder.config.llvm_tools_enabled {

@@ -2305,7 +2305,7 @@ impl Step for RustDev {
 }

 let mut tarball = Tarball::new(builder, "rust-dev", &target.triple);
-tarball.set_overlay(OverlayKind::LLVM);
+tarball.set_overlay(OverlayKind::Llvm);
 // LLVM requires a shared object symlink to exist on some platforms.
 tarball.permit_symlinks(true);

@@ -567,9 +567,9 @@ impl Step for Std {
 stage: run.builder.top_stage,
 target: run.target,
 format: if run.builder.config.cmd.json() {
-DocumentationFormat::JSON
+DocumentationFormat::Json
 } else {
-DocumentationFormat::HTML
+DocumentationFormat::Html
 },
 crates: run.make_run_crates(Alias::Library),
 });

@@ -583,13 +583,13 @@ impl Step for Std {
 let stage = self.stage;
 let target = self.target;
 let out = match self.format {
-DocumentationFormat::HTML => builder.doc_out(target),
-DocumentationFormat::JSON => builder.json_doc_out(target),
+DocumentationFormat::Html => builder.doc_out(target),
+DocumentationFormat::Json => builder.json_doc_out(target),
 };

 t!(fs::create_dir_all(&out));

-if self.format == DocumentationFormat::HTML {
+if self.format == DocumentationFormat::Html {
 builder.ensure(SharedAssets { target: self.target });
 }

@@ -600,10 +600,10 @@ impl Step for Std {
 .into_string()
 .expect("non-utf8 paths are unsupported");
 let mut extra_args = match self.format {
-DocumentationFormat::HTML => {
+DocumentationFormat::Html => {
 vec!["--markdown-css", "rust.css", "--markdown-no-toc", "--index-page", &index_page]
 }
-DocumentationFormat::JSON => vec!["--output-format", "json"],
+DocumentationFormat::Json => vec!["--output-format", "json"],
 };

 if !builder.config.docs_minification {

@@ -613,7 +613,7 @@ impl Step for Std {
 doc_std(builder, self.format, stage, target, &out, &extra_args, &self.crates);

 // Don't open if the format is json
-if let DocumentationFormat::JSON = self.format {
+if let DocumentationFormat::Json = self.format {
 return;
 }

@@ -646,15 +646,15 @@ const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "tes

 #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
 pub enum DocumentationFormat {
-HTML,
-JSON,
+Html,
+Json,
 }

 impl DocumentationFormat {
 fn as_str(&self) -> &str {
 match self {
-DocumentationFormat::HTML => "HTML",
-DocumentationFormat::JSON => "JSON",
+DocumentationFormat::Html => "HTML",
+DocumentationFormat::Json => "JSON",
 }
 }
 }

@@ -678,7 +678,7 @@ fn doc_std(

 let compiler = builder.compiler(stage, builder.config.build);

-let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" };
+let target_doc_dir_name = if format == DocumentationFormat::Json { "json-doc" } else { "doc" };
 let target_dir =
 builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name);

@@ -225,12 +225,12 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
 Some(first) => {
 let find_shortcut_candidates = |p: &PathBuf| {
 let mut candidates = Vec::new();
-for candidate in WalkBuilder::new(src.clone()).max_depth(Some(3)).build() {
-if let Ok(entry) = candidate {
-if let Some(dir_name) = p.file_name() {
-if entry.path().is_dir() && entry.file_name() == dir_name {
-candidates.push(entry.into_path());
-}
+for entry in
+WalkBuilder::new(src.clone()).max_depth(Some(3)).build().map_while(Result::ok)
+{
+if let Some(dir_name) = p.file_name() {
+if entry.path().is_dir() && entry.file_name() == dir_name {
+candidates.push(entry.into_path());
+}
 }
 }

@@ -130,7 +130,7 @@ fn prepare_dir(destdir_env: &Option<PathBuf>, mut path: PathBuf) -> String {
 // https://www.gnu.org/prep/standards/html_node/DESTDIR.html
 if let Some(destdir) = destdir_env {
 let without_destdir = path.clone();
-path = destdir.clone();
+path.clone_from(destdir);
 // Custom .join() which ignores disk roots.
 for part in without_destdir.components() {
 if let Component::Normal(s) = part {

@@ -42,6 +42,20 @@ pub struct Meta {
 root: String,
 }

+pub enum LlvmBuildStatus {
+AlreadyBuilt(LlvmResult),
+ShouldBuild(Meta),
+}
+
+impl LlvmBuildStatus {
+pub fn should_build(&self) -> bool {
+match self {
+LlvmBuildStatus::AlreadyBuilt(_) => false,
+LlvmBuildStatus::ShouldBuild(_) => true,
+}
+}
+}
+
 // Linker flags to pass to LLVM's CMake invocation.
 #[derive(Debug, Clone, Default)]
 struct LdFlags {

@@ -72,10 +86,7 @@ impl LdFlags {
 ///
 /// This will return the llvm-config if it can get it (but it will not build it
 /// if not).
-pub fn prebuilt_llvm_config(
-builder: &Builder<'_>,
-target: TargetSelection,
-) -> Result<LlvmResult, Meta> {
+pub fn prebuilt_llvm_config(builder: &Builder<'_>, target: TargetSelection) -> LlvmBuildStatus {
 // If we have llvm submodule initialized already, sync it.
 builder.update_existing_submodule(&Path::new("src").join("llvm-project"));

@@ -93,7 +104,7 @@ pub fn prebuilt_llvm_config(
 llvm_cmake_dir.push("lib");
 llvm_cmake_dir.push("cmake");
 llvm_cmake_dir.push("llvm");
-return Ok(LlvmResult { llvm_config, llvm_cmake_dir });
+return LlvmBuildStatus::AlreadyBuilt(LlvmResult { llvm_config, llvm_cmake_dir });
 }
 }

@@ -131,10 +142,10 @@ pub fn prebuilt_llvm_config(
 stamp.path.display()
 ));
 }
-return Ok(res);
+return LlvmBuildStatus::AlreadyBuilt(res);
 }

-Err(Meta { stamp, res, out_dir, root: root.into() })
+LlvmBuildStatus::ShouldBuild(Meta { stamp, res, out_dir, root: root.into() })
 }

 /// This retrieves the LLVM sha we *want* to use, according to git history.

@@ -282,8 +293,8 @@ impl Step for Llvm {

 // If LLVM has already been built or been downloaded through download-ci-llvm, we avoid building it again.
 let Meta { stamp, res, out_dir, root } = match prebuilt_llvm_config(builder, target) {
-Ok(p) => return p,
-Err(m) => m,
+LlvmBuildStatus::AlreadyBuilt(p) => return p,
+LlvmBuildStatus::ShouldBuild(m) => m,
 };

 if builder.llvm_link_shared() && target.is_windows() {

@@ -93,10 +93,9 @@ impl FromStr for Profile {
 Ok(Profile::Tools)
 }
 "none" => Ok(Profile::None),
-"llvm" | "codegen" => Err(format!(
-"the \"llvm\" and \"codegen\" profiles have been removed,\
+"llvm" | "codegen" => Err("the \"llvm\" and \"codegen\" profiles have been removed,\
 use \"compiler\" instead which has the same functionality"
-)),
+.to_string()),
 _ => Err(format!("unknown profile: '{s}'")),
 }
 }

@@ -474,10 +473,10 @@ impl Step for Hook {

 // install a git hook to automatically run tidy, if they want
 fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
-let git = t!(config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| {
+let git = config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| {
 assert!(output.status.success(), "failed to run `git`");
 PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
-}));
+})?;
 let hooks_dir = git.join("hooks");
 let dst = hooks_dir.join("pre-push");
 if dst.exists() {

@@ -923,7 +923,7 @@ impl Step for RustdocJSStd {
 builder.top_stage,
 self.target,
 builder,
-DocumentationFormat::HTML,
+DocumentationFormat::Html,
 ));
 let _guard = builder.msg(
 Kind::Test,

@@ -1389,10 +1389,9 @@ impl Step for RunMakeSupport {
 builder.run(&mut cargo);

 let lib_name = "librun_make_support.rlib";
-let lib = builder.tools_dir(self.compiler).join(&lib_name);
+let lib = builder.tools_dir(self.compiler).join(lib_name);

-let cargo_out =
-builder.cargo_out(self.compiler, Mode::ToolStd, self.target).join(&lib_name);
+let cargo_out = builder.cargo_out(self.compiler, Mode::ToolStd, self.target).join(lib_name);
 builder.copy_link(&cargo_out, &lib);
 lib
 }

@@ -2483,6 +2482,7 @@ impl Step for CrateLibrustc {
 /// Given a `cargo test` subcommand, add the appropriate flags and run it.
 ///
 /// Returns whether the test succeeded.
+#[allow(clippy::too_many_arguments)] // FIXME: reduce the number of args and remove this.
 fn run_cargo_test<'a>(
 cargo: impl Into<Command>,
 libtest_args: &[&str],

@@ -135,6 +135,7 @@ impl Step for ToolBuild {
 }
 }

+#[allow(clippy::too_many_arguments)] // FIXME: reduce the number of args and remove this.
 pub fn prepare_tool_cargo(
 builder: &Builder<'_>,
 compiler: Compiler,

@@ -545,7 +546,7 @@ impl Step for Cargo {
 }

 fn run(self, builder: &Builder<'_>) -> PathBuf {
-let cargo_bin_path = builder.ensure(ToolBuild {
+builder.ensure(ToolBuild {
 compiler: self.compiler,
 target: self.target,
 tool: "cargo",

@@ -554,8 +555,7 @@ impl Step for Cargo {
 source_type: SourceType::Submodule,
 extra_features: Vec::new(),
 allow_features: "",
-});
-cargo_bin_path
+})
 }
 }

@@ -573,7 +573,7 @@ impl Step for LldWrapper {
 }

 fn run(self, builder: &Builder<'_>) -> PathBuf {
-let src_exe = builder.ensure(ToolBuild {
+builder.ensure(ToolBuild {
 compiler: self.compiler,
 target: self.target,
 tool: "lld-wrapper",

@@ -582,9 +582,7 @@ impl Step for LldWrapper {
 source_type: SourceType::InTree,
 extra_features: Vec::new(),
 allow_features: "",
-});
-
-src_exe
+})
 }
 }

@@ -245,8 +245,12 @@ impl Builder<'_> {
 // Ensure the parent directory always exists
 t!(std::fs::create_dir_all(parent));
 }
-let mut file =
-t!(fs::OpenOptions::new().create(true).write(true).read(true).open(path));
+let mut file = t!(fs::OpenOptions::new()
+.create(true)
+.truncate(false)
+.write(true)
+.read(true)
+.open(path));

 serde_json::from_reader(&mut file).unwrap_or_default()
 } else {

@@ -278,8 +282,12 @@ impl Builder<'_> {
 // Ensure the parent directory always exists
 t!(std::fs::create_dir_all(parent));
 }
-let mut file =
-t!(fs::OpenOptions::new().create(true).read(true).write(true).open(path));
+let mut file = t!(fs::OpenOptions::new()
+.create(true)
+.truncate(false)
+.read(true)
+.write(true)
+.open(path));

 let mut current_toolstates: HashMap<Box<str>, ToolState> =
 serde_json::from_reader(&mut file).unwrap_or_default();

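Both hunks above address the same clippy complaint (most likely `suspicious_open_options`): opening with `create(true)` and `write(true)` leaves truncation behavior implicit. A standalone sketch of the fixed pattern (file path here is arbitrary, for illustration only):

    use std::fs::{File, OpenOptions};
    use std::io;

    fn open_state_file(path: &str) -> io::Result<File> {
        // Spell out `truncate(false)` so it is explicit that existing contents are kept.
        OpenOptions::new().create(true).read(true).write(true).truncate(false).open(path)
    }

    fn main() {
        let _ = open_state_file("/tmp/toolstate-example.json");
    }
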
@@ -132,8 +132,7 @@ impl RunConfig<'_> {
 Alias::Compiler => self.builder.in_tree_crates("rustc-main", Some(self.target)),
 };

-let crate_names = crates.into_iter().map(|krate| krate.name.to_string()).collect();
-crate_names
+crates.into_iter().map(|krate| krate.name.to_string()).collect()
 }
 }

@@ -323,16 +322,13 @@ const PATH_REMAP: &[(&str, &[&str])] = &[
 fn remap_paths(paths: &mut Vec<&Path>) {
 let mut remove = vec![];
 let mut add = vec![];
-for (i, path) in paths
-.iter()
-.enumerate()
-.filter_map(|(i, path)| if let Some(s) = path.to_str() { Some((i, s)) } else { None })
+for (i, path) in paths.iter().enumerate().filter_map(|(i, path)| path.to_str().map(|s| (i, s)))
 {
 for &(search, replace) in PATH_REMAP {
 // Remove leading and trailing slashes so `tests/` and `tests` are equivalent
 if path.trim_matches(std::path::is_separator) == search {
 remove.push(i);
-add.extend(replace.into_iter().map(Path::new));
+add.extend(replace.iter().map(Path::new));
 break;
 }
 }

@@ -1318,7 +1314,7 @@ impl<'a> Builder<'a> {
 } else if let Some(subcmd) = cmd.strip_prefix("miri") {
 // Command must be "miri-X".
 let subcmd = subcmd
-.strip_prefix("-")
+.strip_prefix('-')
 .unwrap_or_else(|| panic!("expected `miri-$subcommand`, but got {}", cmd));
 cargo = self.cargo_miri_cmd(compiler);
 cargo.arg("miri").arg(subcmd);

@@ -1438,7 +1434,7 @@ impl<'a> Builder<'a> {
 // rustc_llvm. But if LLVM is stale, that'll be a tiny amount
 // of work comparatively, and we'd likely need to rebuild it anyway,
 // so that's okay.
-if crate::core::build_steps::llvm::prebuilt_llvm_config(self, target).is_err() {
+if crate::core::build_steps::llvm::prebuilt_llvm_config(self, target).should_build() {
 cargo.env("RUST_CHECK", "1");
 }
 }

@@ -1540,7 +1536,7 @@ impl<'a> Builder<'a> {
 // `rustflags` without `cargo` making it required.
 rustflags.arg("-Zunstable-options");
 for (restricted_mode, name, values) in EXTRA_CHECK_CFGS {
-if *restricted_mode == None || *restricted_mode == Some(mode) {
+if restricted_mode.is_none() || *restricted_mode == Some(mode) {
 rustflags.arg(&check_cfg_arg(name, *values));
 }
 }

@@ -2216,22 +2212,18 @@ impl<'a> Builder<'a> {
 let file = File::open(src.join(".gitmodules")).unwrap();

 let mut submodules_paths = vec![];
-for line in BufReader::new(file).lines() {
-if let Ok(line) = line {
-let line = line.trim();
-
-if line.starts_with("path") {
-let actual_path =
-line.split(' ').last().expect("Couldn't get value of path");
-submodules_paths.push(actual_path.to_owned());
-}
+for line in BufReader::new(file).lines().map_while(Result::ok) {
+let line = line.trim();
+if line.starts_with("path") {
+let actual_path = line.split(' ').last().expect("Couldn't get value of path");
+submodules_paths.push(actual_path.to_owned());
+}
 }

 submodules_paths
 };

-&SUBMODULES_PATHS.get_or_init(|| init_submodules_paths(&self.src))
+SUBMODULES_PATHS.get_or_init(|| init_submodules_paths(&self.src))
 }

 /// Ensure that a given step is built *only if it's supposed to be built by default*, returning

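The `.gitmodules` hunk above swaps a `for`/`if let Ok` loop for `lines().map_while(Result::ok)`, which is what recent clippy suggests for iterating over `BufRead::lines()`. A standalone sketch of the same parsing approach (the sample input is made up for illustration):

    use std::io::{BufRead, BufReader, Cursor};

    fn main() {
        let gitmodules = Cursor::new("path = src/llvm-project\nurl = ...\npath = src/doc/book\n");
        let paths: Vec<String> = BufReader::new(gitmodules)
            .lines()
            // Stop at the first I/O error instead of silently skipping it.
            .map_while(Result::ok)
            .filter(|line| line.trim().starts_with("path"))
            .map(|line| line.split(' ').last().expect("Couldn't get value of path").to_owned())
            .collect();
        assert_eq!(paths, ["src/llvm-project", "src/doc/book"]);
    }
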
@@ -75,7 +75,7 @@ macro_rules! doc_std {
 $stage,
 TargetSelection::from_user(stringify!($target)),
 &builder,
-DocumentationFormat::HTML,
+DocumentationFormat::Html,
 )
 }};
 }

@@ -1217,10 +1217,7 @@ impl Config {
 .and_then(|table: toml::Value| TomlConfig::deserialize(table))
 .unwrap_or_else(|err| {
 if let Ok(Some(changes)) = toml::from_str(&contents)
-.and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table))
-.and_then(|change_id| {
-Ok(change_id.inner.map(|id| crate::find_recent_config_change_ids(id)))
-})
+.and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table)).map(|change_id| change_id.inner.map(crate::find_recent_config_change_ids))
 {
 if !changes.is_empty() {
 println!(

@@ -1799,17 +1796,17 @@ impl Config {
 if let Some(v) = link_shared {
 config.llvm_link_shared.set(Some(v));
 }
-config.llvm_targets = targets.clone();
-config.llvm_experimental_targets = experimental_targets.clone();
+config.llvm_targets.clone_from(&targets);
+config.llvm_experimental_targets.clone_from(&experimental_targets);
 config.llvm_link_jobs = link_jobs;
-config.llvm_version_suffix = version_suffix.clone();
-config.llvm_clang_cl = clang_cl.clone();
+config.llvm_version_suffix.clone_from(&version_suffix);
+config.llvm_clang_cl.clone_from(&clang_cl);

-config.llvm_cflags = cflags.clone();
-config.llvm_cxxflags = cxxflags.clone();
-config.llvm_ldflags = ldflags.clone();
+config.llvm_cflags.clone_from(&cflags);
+config.llvm_cxxflags.clone_from(&cxxflags);
+config.llvm_ldflags.clone_from(&ldflags);
 set(&mut config.llvm_use_libcxx, use_libcxx);
-config.llvm_use_linker = use_linker.clone();
+config.llvm_use_linker.clone_from(&use_linker);
 config.llvm_allow_old_toolchain = allow_old_toolchain.unwrap_or(false);
 config.llvm_polly = polly.unwrap_or(false);
 config.llvm_clang = clang.unwrap_or(false);

@@ -1,4 +1,5 @@
-pub(crate) mod config;
+#[allow(clippy::module_inception)]
+mod config;
 pub(crate) mod flags;
 #[cfg(test)]
 mod tests;

@@ -76,6 +76,7 @@ const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];

 /// Extra --check-cfg to add when building
 /// (Mode restriction, config name, config values (if any))
+#[allow(clippy::type_complexity)] // It's fine for hard-coded list and type is explained above.
 const EXTRA_CHECK_CFGS: &[(Option<Mode>, &str, Option<&[&'static str]>)] = &[
 (None, "bootstrap", None),
 (Some(Mode::Rustc), "parallel_compiler", None),

@@ -1625,10 +1626,7 @@ impl Build {
 /// Returns `true` if unstable features should be enabled for the compiler
 /// we're building.
 fn unstable_features(&self) -> bool {
-match &self.config.channel[..] {
-"stable" | "beta" => false,
-"nightly" | _ => true,
-}
+!matches!(&self.config.channel[..], "stable" | "beta")
 }

 /// Returns a Vec of all the dependencies of the given root crate,

@@ -41,9 +41,7 @@ fn cc2ar(cc: &Path, target: TargetSelection) -> Option<PathBuf> {
 Some(PathBuf::from(ar))
 } else if target.is_msvc() {
 None
-} else if target.contains("musl") {
-Some(PathBuf::from("ar"))
-} else if target.contains("openbsd") {
+} else if target.contains("musl") || target.contains("openbsd") {
+Some(PathBuf::from("ar"))
 } else if target.contains("vxworks") {
 Some(PathBuf::from("wr-ar"))

@@ -549,7 +549,12 @@ pub fn hex_encode<T>(input: T) -> String
 where
 T: AsRef<[u8]>,
 {
-input.as_ref().iter().map(|x| format!("{:02x}", x)).collect()
+use std::fmt::Write;
+
+input.as_ref().iter().fold(String::with_capacity(input.as_ref().len() * 2), |mut acc, &byte| {
+write!(&mut acc, "{:02x}", byte).expect("Failed to write byte to the hex String.");
+acc
+})
 }

 /// Create a `--check-cfg` argument invocation for a given name

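The old and new bodies of `hex_encode` produce identical output; the fold just avoids allocating one intermediate `String` per byte (this is likely clippy's `format_collect` lint). The same technique as a standalone function plus a usage check (names here are local to the example):

    use std::fmt::Write;

    fn hex_encode<T: AsRef<[u8]>>(input: T) -> String {
        input.as_ref().iter().fold(String::with_capacity(input.as_ref().len() * 2), |mut acc, &byte| {
            // Writing into a `String` cannot fail, so the expect never fires.
            write!(&mut acc, "{byte:02x}").expect("writing to a String cannot fail");
            acc
        })
    }

    fn main() {
        assert_eq!(hex_encode([0xde_u8, 0xad, 0xbe, 0xef]), "deadbeef");
    }
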
@@ -11,37 +11,35 @@ pub unsafe fn setup(build: &mut crate::Build) {
 }
 }

-/// Job management on Windows for bootstrapping
-///
-/// Most of the time when you're running a build system (e.g., make) you expect
-/// Ctrl-C or abnormal termination to actually terminate the entire tree of
-/// process in play, not just the one at the top. This currently works "by
-/// default" on Unix platforms because Ctrl-C actually sends a signal to the
-/// *process group* rather than the parent process, so everything will get torn
-/// down. On Windows, however, this does not happen and Ctrl-C just kills the
-/// parent process.
-///
-/// To achieve the same semantics on Windows we use Job Objects to ensure that
-/// all processes die at the same time. Job objects have a mode of operation
-/// where when all handles to the object are closed it causes all child
-/// processes associated with the object to be terminated immediately.
-/// Conveniently whenever a process in the job object spawns a new process the
-/// child will be associated with the job object as well. This means if we add
-/// ourselves to the job object we create then everything will get torn down!
-///
-/// Unfortunately most of the time the build system is actually called from a
-/// python wrapper (which manages things like building the build system) so this
-/// all doesn't quite cut it so far. To go the last mile we duplicate the job
-/// object handle into our parent process (a python process probably) and then
-/// close our own handle. This means that the only handle to the job object
-/// resides in the parent python process, so when python dies the whole build
-/// system dies (as one would probably expect!).
-///
-/// Note that this is a Windows specific module as none of this logic is required on Unix.
 #[cfg(windows)]
 mod for_windows {
+//! Job management on Windows for bootstrapping
+//!
+//! Most of the time when you're running a build system (e.g., make) you expect
+//! Ctrl-C or abnormal termination to actually terminate the entire tree of
+//! process in play, not just the one at the top. This currently works "by
+//! default" on Unix platforms because Ctrl-C actually sends a signal to the
+//! *process group* rather than the parent process, so everything will get torn
+//! down. On Windows, however, this does not happen and Ctrl-C just kills the
+//! parent process.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+//!
+//! Unfortunately most of the time the build system is actually called from a
+//! python wrapper (which manages things like building the build system) so this
+//! all doesn't quite cut it so far. To go the last mile we duplicate the job
+//! object handle into our parent process (a python process probably) and then
+//! close our own handle. This means that the only handle to the job object
+//! resides in the parent python process, so when python dies the whole build
+//! system dies (as one would probably expect!).
+//!
+//! Note that this module has a #[cfg(windows)] above it as none of this logic
+//! is required on Unix.
+
 use crate::Build;
 use std::env;
 use std::ffi::c_void;

@@ -239,7 +239,7 @@ impl<'a> Renderer<'a> {
 suite.filtered_out,
 time = match suite.exec_time {
 Some(t) => format!("; finished in {:.2?}", Duration::from_secs_f64(t)),
-None => format!(""),
+None => String::new(),
 }
 );
 }

@@ -18,13 +18,13 @@ use crate::utils::helpers::t;
 #[derive(Copy, Clone)]
 pub(crate) enum OverlayKind {
 Rust,
-LLVM,
+Llvm,
 Cargo,
 Clippy,
 Miri,
 Rustfmt,
 RustDemangler,
-RLS,
+Rls,
 RustAnalyzer,
 RustcCodegenCranelift,
 LlvmBitcodeLinker,

@@ -34,7 +34,7 @@ impl OverlayKind {
 fn legal_and_readme(&self) -> &[&str] {
 match self {
 OverlayKind::Rust => &["COPYRIGHT", "LICENSE-APACHE", "LICENSE-MIT", "README.md"],
-OverlayKind::LLVM => {
+OverlayKind::Llvm => {
 &["src/llvm-project/llvm/LICENSE.TXT", "src/llvm-project/llvm/README.txt"]
 }
 OverlayKind::Cargo => &[

@@ -61,7 +61,7 @@ impl OverlayKind {
 OverlayKind::RustDemangler => {
 &["src/tools/rust-demangler/README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 }
-OverlayKind::RLS => &["src/tools/rls/README.md", "LICENSE-APACHE", "LICENSE-MIT"],
+OverlayKind::Rls => &["src/tools/rls/README.md", "LICENSE-APACHE", "LICENSE-MIT"],
 OverlayKind::RustAnalyzer => &[
 "src/tools/rust-analyzer/README.md",
 "src/tools/rust-analyzer/LICENSE-APACHE",

@@ -84,7 +84,7 @@ impl OverlayKind {
 fn version(&self, builder: &Builder<'_>) -> String {
 match self {
 OverlayKind::Rust => builder.rust_version(),
-OverlayKind::LLVM => builder.rust_version(),
+OverlayKind::Llvm => builder.rust_version(),
 OverlayKind::RustDemangler => builder.release_num("rust-demangler"),
 OverlayKind::Cargo => {
 builder.cargo_info.version(builder, &builder.release_num("cargo"))

@@ -96,7 +96,7 @@ impl OverlayKind {
 OverlayKind::Rustfmt => {
 builder.rustfmt_info.version(builder, &builder.release_num("rustfmt"))
 }
-OverlayKind::RLS => builder.release(&builder.release_num("rls")),
+OverlayKind::Rls => builder.release(&builder.release_num("rls")),
 OverlayKind::RustAnalyzer => builder
 .rust_analyzer_info
 .version(builder, &builder.release_num("rust-analyzer/crates/rust-analyzer")),

@@ -357,7 +357,7 @@ impl<'a> Tarball<'a> {
 &self.builder.config.dist_compression_profile
 };

-cmd.args(&["--compression-profile", compression_profile]);
+cmd.args(["--compression-profile", compression_profile]);
 self.builder.run(&mut cmd);

 // Ensure there are no symbolic links in the tarball. In particular,

@@ -48,6 +48,7 @@ ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
 # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs.
 python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
 python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
+python3 ../x.py clippy bootstrap -Dwarnings && \
 python3 ../x.py clippy compiler library -Aclippy::all -Dclippy::correctness && \
 python3 ../x.py build --stage 0 src/tools/build-manifest && \
 python3 ../x.py test --stage 0 src/tools/compiletest && \

@@ -31,8 +31,8 @@ impl<'a> W<'a> {

 // But also make sure that we error here...
 impl<'a> W<'a> {
-//~^ ERROR `impl Trait` captures lifetime parameter, but it is not mentioned in `use<...>` precise captures list
 fn bad2() -> impl use<> Into<<Self as Tr>::Assoc> {}
+//~^ ERROR `impl Trait` captures lifetime parameter, but it is not mentioned in `use<...>` precise captures list
 }

 fn main() {}

@@ -16,13 +16,12 @@ LL | fn bad1() -> impl use<> Into<<W<'a> as Tr>::Assoc> {}
 | -------------------^^---------------- lifetime captured due to being mentioned in the bounds of the `impl Trait`

 error: `impl Trait` captures lifetime parameter, but it is not mentioned in `use<...>` precise captures list
---> $DIR/capture-parent-arg.rs:33:6
+--> $DIR/capture-parent-arg.rs:34:18
 |
 LL | impl<'a> W<'a> {
-| ^^
 LL |
+| -- this lifetime parameter is captured
 LL | fn bad2() -> impl use<> Into<<Self as Tr>::Assoc> {}
-| ------------------------------------ lifetime captured due to being mentioned in the bounds of the `impl Trait`
+| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ lifetime captured due to being mentioned in the bounds of the `impl Trait`

 error: aborting due to 2 previous errors; 1 warning emitted

@@ -7,11 +7,13 @@ LL | #![feature(precise_capturing)]
 = note: see issue #123432 <https://github.com/rust-lang/rust/issues/123432> for more information
 = note: `#[warn(incomplete_features)]` on by default

-error: `impl Trait` must mention all const parameters in scope
---> $DIR/forgot-to-capture-const.rs:4:13
+error: `impl Trait` must mention all const parameters in scope in `use<...>`
+--> $DIR/forgot-to-capture-const.rs:4:34
 |
 LL | fn constant<const C: usize>() -> impl use<> Sized {}
-| ^^^^^^^^^^^^^^ ---------------- const parameter is implicitly captured by this `impl Trait`
+| -------------- ^^^^^^^^^^^^^^^^
+| |
+| const parameter is implicitly captured by this `impl Trait`
 |
 = note: currently, all const parameters are required to be mentioned in the precise captures list

@@ -5,8 +5,8 @@ fn type_param<T>() -> impl use<> Sized {}
 //~^ ERROR `impl Trait` must mention all type parameters in scope

 trait Foo {
-//~^ ERROR `impl Trait` must mention all type parameters in scope
 fn bar() -> impl use<> Sized;
+//~^ ERROR `impl Trait` must mention the `Self` type of the trait
 }

 fn main() {}

@@ -7,22 +7,23 @@ LL | #![feature(precise_capturing)]
 = note: see issue #123432 <https://github.com/rust-lang/rust/issues/123432> for more information
 = note: `#[warn(incomplete_features)]` on by default

-error: `impl Trait` must mention all type parameters in scope
---> $DIR/forgot-to-capture-type.rs:4:15
+error: `impl Trait` must mention all type parameters in scope in `use<...>`
+--> $DIR/forgot-to-capture-type.rs:4:23
 |
 LL | fn type_param<T>() -> impl use<> Sized {}
-| ^ ---------------- type parameter is implicitly captured by this `impl Trait`
+| - ^^^^^^^^^^^^^^^^
+| |
+| type parameter is implicitly captured by this `impl Trait`
 |
 = note: currently, all type parameters are required to be mentioned in the precise captures list

-error: `impl Trait` must mention all type parameters in scope
---> $DIR/forgot-to-capture-type.rs:7:1
+error: `impl Trait` must mention the `Self` type of the trait in `use<...>`
+--> $DIR/forgot-to-capture-type.rs:8:17
 |
 LL | trait Foo {
-| ^^^^^^^^^
-LL |
+| --------- `Self` type parameter is implicitly captured by this `impl Trait`
 LL | fn bar() -> impl use<> Sized;
-| ---------------- type parameter is implicitly captured by this `impl Trait`
+| ^^^^^^^^^^^^^^^^
 |
 = note: currently, all type parameters are required to be mentioned in the precise captures list
