Auto merge of #121400 - fmease:rollup-8m29g7a, r=fmease

Rollup of 8 pull requests. Successful merges:

- #121044 (Support async trait bounds in macros)
- #121175 (match lowering: test one or pattern at a time)
- #121340 (bootstrap: apply most of clippy's suggestions)
- #121347 (compiletest: support auxiliaries with auxiliaries)
- #121359 (miscellaneous type system improvements)
- #121366 (Remove `diagnostic_builder.rs`)
- #121379 (Remove an `unchecked_error_guaranteed` call.)
- #121396 (make it possible for outside crates to inspect a mir::ConstValue with the interpreter)

r? `@ghost`
`@rustbot` modify labels: rollup

commit f8131a48a4
69 changed files with 1199 additions and 970 deletions
@@ -881,9 +881,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
                     &item.vis,
                     errors::VisibilityNotPermittedNote::TraitImpl,
                 );
-                // njn: use Dummy here
-                if let TyKind::Err(_) = self_ty.kind {
-                    this.dcx().emit_err(errors::ObsoleteAuto { span: item.span });
+                if let TyKind::Dummy = self_ty.kind {
+                    // Abort immediately otherwise the `TyKind::Dummy` will reach HIR lowering,
+                    // which isn't allowed. Not a problem for this obscure, obsolete syntax.
+                    this.dcx().emit_fatal(errors::ObsoleteAuto { span: item.span });
                 }
                 if let (&Unsafe::Yes(span), &ImplPolarity::Negative(sp)) = (unsafety, polarity)
                 {
@@ -3,10 +3,11 @@ use either::{Left, Right};
 use rustc_hir::def::DefKind;
 use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
 use rustc_middle::mir::{self, ConstAlloc, ConstValue};
+use rustc_middle::query::TyCtxtAt;
 use rustc_middle::traits::Reveal;
 use rustc_middle::ty::layout::LayoutOf;
 use rustc_middle::ty::print::with_no_trimmed_paths;
-use rustc_middle::ty::{self, TyCtxt};
+use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_span::def_id::LocalDefId;
 use rustc_span::Span;
 use rustc_target::abi::{self, Abi};
@@ -87,13 +88,16 @@ fn eval_body_using_ecx<'mir, 'tcx>(
 }
 
 /// The `InterpCx` is only meant to be used to do field and index projections into constants for
-/// `simd_shuffle` and const patterns in match arms. It never performs alignment checks.
+/// `simd_shuffle` and const patterns in match arms.
+///
+/// This should *not* be used to do any actual interpretation. In particular, alignment checks are
+/// turned off!
 ///
 /// The function containing the `match` that is currently being analyzed may have generic bounds
 /// that inform us about the generic bounds of the constant. E.g., using an associated constant
 /// of a function's generic parameter will require knowledge about the bounds on the generic
 /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
-pub(crate) fn mk_eval_cx<'mir, 'tcx>(
+pub(crate) fn mk_eval_cx_to_read_const_val<'mir, 'tcx>(
     tcx: TyCtxt<'tcx>,
     root_span: Span,
     param_env: ty::ParamEnv<'tcx>,
@@ -108,6 +112,19 @@ pub(crate) fn mk_eval_cx<'mir, 'tcx>(
     )
 }
 
+/// Create an interpreter context to inspect the given `ConstValue`.
+/// Returns both the context and an `OpTy` that represents the constant.
+pub fn mk_eval_cx_for_const_val<'mir, 'tcx>(
+    tcx: TyCtxtAt<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
+    val: mir::ConstValue<'tcx>,
+    ty: Ty<'tcx>,
+) -> Option<(CompileTimeEvalContext<'mir, 'tcx>, OpTy<'tcx>)> {
+    let ecx = mk_eval_cx_to_read_const_val(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No);
+    let op = ecx.const_val_to_op(val, ty, None).ok()?;
+    Some((ecx, op))
+}
+
 /// This function converts an interpreter value into a MIR constant.
 ///
 /// The `for_diagnostics` flag turns the usual rules for returning `ConstValue::Scalar` into a
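The hunk above is the new public entry point from #121396. A rough sketch of how an outside crate might drive it follows (hedged: this assumes a `rustc_private` build and the `rustc_const_eval::const_eval` re-export path, and `inspect_const` plus the chosen projections are illustrative, not code from the commit; only the signature shown in the diff is relied on):

// Hedged sketch only; not part of the commit.
use rustc_const_eval::const_eval::mk_eval_cx_for_const_val;
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::Span;

fn inspect_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    span: Span,
    param_env: ty::ParamEnv<'tcx>,
    val: mir::ConstValue<'tcx>,
    ty: Ty<'tcx>,
) {
    // Build a read-only interpreter context plus an operand for the constant.
    let Some((ecx, op)) = mk_eval_cx_for_const_val(tcx.at(span), param_env, val, ty) else {
        return;
    };
    // Project into the constant with the usual `InterpCx` helpers, e.g. field 0.
    if let Ok(field) = ecx.project_field(&op, 0) {
        // Read a scalar out of the projected operand; errors are ignored here.
        let _ = ecx.read_scalar(&field);
    }
}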
@@ -203,7 +220,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
     let def_id = cid.instance.def.def_id();
     let is_static = tcx.is_static(def_id);
     // This is just accessing an already computed constant, so no need to check alignment here.
-    let ecx = mk_eval_cx(
+    let ecx = mk_eval_cx_to_read_const_val(
         tcx,
         tcx.def_span(key.value.instance.def_id()),
         key.param_env,
@@ -47,8 +47,7 @@ pub(crate) fn try_destructure_mir_constant_for_user_output<'tcx>(
     ty: Ty<'tcx>,
 ) -> Option<mir::DestructuredConstant<'tcx>> {
     let param_env = ty::ParamEnv::reveal_all();
-    let ecx = mk_eval_cx(tcx.tcx, tcx.span, param_env, CanAccessMutGlobal::No);
-    let op = ecx.const_val_to_op(val, ty, None).ok()?;
+    let (ecx, op) = mk_eval_cx_for_const_val(tcx, param_env, val, ty)?;
 
     // We go to `usize` as we cannot allocate anything bigger anyway.
     let (field_count, variant, down) = match ty.kind() {
@@ -5,7 +5,7 @@ use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
 use rustc_span::DUMMY_SP;
 use rustc_target::abi::{Abi, VariantIdx};
 
-use super::eval_queries::{mk_eval_cx, op_to_const};
+use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
 use super::machine::CompileTimeEvalContext;
 use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
 use crate::const_eval::CanAccessMutGlobal;
@@ -223,7 +223,7 @@ pub(crate) fn eval_to_valtree<'tcx>(
     let const_alloc = tcx.eval_to_allocation_raw(param_env.and(cid))?;
 
     // FIXME Need to provide a span to `eval_to_valtree`
-    let ecx = mk_eval_cx(
+    let ecx = mk_eval_cx_to_read_const_val(
         tcx,
         DUMMY_SP,
         param_env,
@@ -287,7 +287,8 @@ pub fn valtree_to_const_value<'tcx>(
                 }
             }
             ty::Ref(_, inner_ty, _) => {
-                let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
+                let mut ecx =
+                    mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
                 let imm = valtree_to_ref(&mut ecx, valtree, *inner_ty);
                 let imm = ImmTy::from_immediate(imm, tcx.layout_of(param_env_ty).unwrap());
                 op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
@@ -314,7 +315,8 @@ pub fn valtree_to_const_value<'tcx>(
                 bug!("could not find non-ZST field during in {layout:#?}");
             }
 
-            let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
+            let mut ecx =
+                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
 
             // Need to create a place for this valtree.
             let place = create_valtree_place(&mut ecx, layout, valtree);
@@ -6,7 +6,7 @@ use rustc_middle::ty::layout::LayoutOf;
 use rustc_span::symbol::Symbol;
 use rustc_type_ir::Mutability;
 
-use crate::const_eval::{mk_eval_cx, CanAccessMutGlobal, CompileTimeEvalContext};
+use crate::const_eval::{mk_eval_cx_to_read_const_val, CanAccessMutGlobal, CompileTimeEvalContext};
 use crate::interpret::*;
 
 /// Allocate a `const core::panic::Location` with the provided filename and line/column numbers.
@@ -57,7 +57,12 @@ pub(crate) fn const_caller_location_provider(
     col: u32,
 ) -> mir::ConstValue<'_> {
     trace!("const_caller_location: {}:{}:{}", file, line, col);
-    let mut ecx = mk_eval_cx(tcx.tcx, tcx.span, ty::ParamEnv::reveal_all(), CanAccessMutGlobal::No);
+    let mut ecx = mk_eval_cx_to_read_const_val(
+        tcx.tcx,
+        tcx.span,
+        ty::ParamEnv::reveal_all(),
+        CanAccessMutGlobal::No,
+    );
 
     let loc_place = alloc_caller_location(&mut ecx, file, line, col);
     if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
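The provider above allocates a `const core::panic::Location`. The caller-location mechanism it models is ordinary stable Rust, so a tiny standalone illustration (names invented here, unrelated to the commit) looks like this:

#[track_caller]
fn where_am_i() -> &'static core::panic::Location<'static> {
    // Reports the location of the *caller*, because of `#[track_caller]`.
    core::panic::Location::caller()
}

fn main() {
    let loc = where_am_i();
    println!("{}:{}:{}", loc.file(), loc.line(), loc.column());
}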
@@ -1,19 +1,22 @@
 use crate::snippet::Style;
 use crate::{
-    CodeSuggestion, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, ErrCode, Level,
-    MultiSpan, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
+    CodeSuggestion, DiagCtxt, DiagnosticMessage, ErrCode, ErrorGuaranteed, ExplicitBug, Level,
+    MultiSpan, StashKey, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
 };
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_error_messages::fluent_value_from_str_list_sep_by_and;
 use rustc_error_messages::FluentValue;
 use rustc_lint_defs::{Applicability, LintExpectationId};
+use rustc_span::source_map::Spanned;
 use rustc_span::symbol::Symbol;
 use rustc_span::{Span, DUMMY_SP};
 use std::borrow::Cow;
 use std::fmt::{self, Debug};
 use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
 use std::ops::{Deref, DerefMut};
-use std::panic::Location;
+use std::panic;
+use std::thread::panicking;
 
 /// Error type for `Diagnostic`'s `suggestions` field, indicating that
 /// `.disable_suggestions()` was called on the `Diagnostic`.
@@ -40,6 +43,86 @@ pub enum DiagnosticArgValue {
     StrListSepByAnd(Vec<Cow<'static, str>>),
 }
 
+/// Trait for types that `DiagnosticBuilder::emit` can return as a "guarantee"
+/// (or "proof") token that the emission happened.
+pub trait EmissionGuarantee: Sized {
+    /// This exists so that bugs and fatal errors can both result in `!` (an
+    /// abort) when emitted, but have different aborting behaviour.
+    type EmitResult = Self;
+
+    /// Implementation of `DiagnosticBuilder::emit`, fully controlled by each
+    /// `impl` of `EmissionGuarantee`, to make it impossible to create a value
+    /// of `Self::EmitResult` without actually performing the emission.
+    #[track_caller]
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult;
+}
+
+impl EmissionGuarantee for ErrorGuaranteed {
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
+        db.emit_producing_error_guaranteed()
+    }
+}
+
+impl EmissionGuarantee for () {
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
+        db.emit_producing_nothing();
+    }
+}
+
+/// Marker type which enables implementation of `create_bug` and `emit_bug` functions for
+/// bug diagnostics.
+#[derive(Copy, Clone)]
+pub struct BugAbort;
+
+impl EmissionGuarantee for BugAbort {
+    type EmitResult = !;
+
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
+        db.emit_producing_nothing();
+        panic::panic_any(ExplicitBug);
+    }
+}
+
+/// Marker type which enables implementation of `create_fatal` and `emit_fatal` functions for
+/// fatal diagnostics.
+#[derive(Copy, Clone)]
+pub struct FatalAbort;
+
+impl EmissionGuarantee for FatalAbort {
+    type EmitResult = !;
+
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
+        db.emit_producing_nothing();
+        crate::FatalError.raise()
+    }
+}
+
+impl EmissionGuarantee for rustc_span::fatal_error::FatalError {
+    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
+        db.emit_producing_nothing();
+        rustc_span::fatal_error::FatalError
+    }
+}
+
+/// Trait implemented by error types. This is rarely implemented manually. Instead, use
+/// `#[derive(Diagnostic)]` -- see [rustc_macros::Diagnostic].
+#[rustc_diagnostic_item = "IntoDiagnostic"]
+pub trait IntoDiagnostic<'a, G: EmissionGuarantee = ErrorGuaranteed> {
+    /// Write out as a diagnostic out of `DiagCtxt`.
+    #[must_use]
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G>;
+}
+
+impl<'a, T, G> IntoDiagnostic<'a, G> for Spanned<T>
+where
+    T: IntoDiagnostic<'a, G>,
+    G: EmissionGuarantee,
+{
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G> {
+        self.node.into_diagnostic(dcx, level).with_span(self.span)
+    }
+}
+
 /// Converts a value of a type into a `DiagnosticArg` (typically a field of an `IntoDiagnostic`
 /// struct). Implemented as a custom trait rather than `From` so that it is implemented on the type
 /// being converted rather than on `DiagnosticArgValue`, which enables types from other `rustc_*`
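The block added above moves the `EmissionGuarantee` machinery into `diagnostic.rs`: the marker type chosen for `G` decides what `emit` returns. Below is a self-contained, simplified analogue of that pattern, deliberately not rustc's real types (`Infallible` stands in for the unstable `!` type, and all names are invented), which compiles and runs as a plain program:

use std::convert::Infallible;
use std::marker::PhantomData;
use std::process;

// A "proof" token: holding one means an error was really emitted.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed(());

// Stand-in for rustc's DiagCtxt.
struct Emitter;

impl Emitter {
    fn emit(&self, level: &str, msg: &str) {
        eprintln!("{level}: {msg}");
    }
}

// Stand-in for `DiagnosticBuilder<'_, G>`.
struct Builder<'a, G: EmissionGuarantee> {
    emitter: &'a Emitter,
    msg: String,
    _marker: PhantomData<G>,
}

trait EmissionGuarantee: Sized {
    type EmitResult;
    // Only this impl can mint a `Self::EmitResult`, so emission must happen.
    fn emit_producing_guarantee(b: Builder<'_, Self>) -> Self::EmitResult;
}

impl<'a, G: EmissionGuarantee> Builder<'a, G> {
    fn new(emitter: &'a Emitter, msg: impl Into<String>) -> Self {
        Builder { emitter, msg: msg.into(), _marker: PhantomData }
    }
    fn emit(self) -> G::EmitResult {
        G::emit_producing_guarantee(self)
    }
}

// Errors hand back a proof token.
impl EmissionGuarantee for ErrorGuaranteed {
    type EmitResult = ErrorGuaranteed;
    fn emit_producing_guarantee(b: Builder<'_, Self>) -> Self::EmitResult {
        b.emitter.emit("error", &b.msg);
        ErrorGuaranteed(())
    }
}

// Warnings return nothing.
struct Warning;
impl EmissionGuarantee for Warning {
    type EmitResult = ();
    fn emit_producing_guarantee(b: Builder<'_, Self>) -> Self::EmitResult {
        b.emitter.emit("warning", &b.msg);
    }
}

// Fatal errors never return (rustc uses `!`; `Infallible` plays that role here).
struct FatalAbort;
impl EmissionGuarantee for FatalAbort {
    type EmitResult = Infallible;
    fn emit_producing_guarantee(b: Builder<'_, Self>) -> Self::EmitResult {
        b.emitter.emit("fatal", &b.msg);
        process::exit(1)
    }
}

fn main() {
    let dcx = Emitter;
    let _guar: ErrorGuaranteed = Builder::<ErrorGuaranteed>::new(&dcx, "mismatched types").emit();
    Builder::<Warning>::new(&dcx, "unused variable").emit();
}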
@@ -98,36 +181,6 @@ pub trait DecorateLint<'a, G: EmissionGuarantee> {
     fn msg(&self) -> DiagnosticMessage;
 }
 
-/// The main part of a diagnostic. Note that `DiagnosticBuilder`, which wraps
-/// this type, is used for most operations, and should be used instead whenever
-/// possible. This type should only be used when `DiagnosticBuilder`'s lifetime
-/// causes difficulties, e.g. when storing diagnostics within `DiagCtxt`.
-#[must_use]
-#[derive(Clone, Debug, Encodable, Decodable)]
-pub struct Diagnostic {
-    // NOTE(eddyb) this is private to disallow arbitrary after-the-fact changes,
-    // outside of what methods in this crate themselves allow.
-    pub(crate) level: Level,
-
-    pub messages: Vec<(DiagnosticMessage, Style)>,
-    pub code: Option<ErrCode>,
-    pub span: MultiSpan,
-    pub children: Vec<SubDiagnostic>,
-    pub suggestions: Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
-    args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>,
-
-    /// This is not used for highlighting or rendering any error message. Rather, it can be used
-    /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of
-    /// `span` if there is one. Otherwise, it is `DUMMY_SP`.
-    pub sort_span: Span,
-
-    pub is_lint: Option<IsLint>,
-
-    /// With `-Ztrack_diagnostics` enabled,
-    /// we print where in rustc this error was emitted.
-    pub(crate) emitted_at: DiagnosticLocation,
-}
-
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub struct DiagnosticLocation {
     file: Cow<'static, str>,
@@ -138,7 +191,7 @@ pub struct DiagnosticLocation {
 impl DiagnosticLocation {
     #[track_caller]
     fn caller() -> Self {
-        let loc = Location::caller();
+        let loc = panic::Location::caller();
         DiagnosticLocation { file: loc.file().into(), line: loc.line(), col: loc.column() }
     }
 }
@@ -157,15 +210,6 @@ pub struct IsLint {
     has_future_breakage: bool,
 }
 
-/// A "sub"-diagnostic attached to a parent diagnostic.
-/// For example, a note attached to an error.
-#[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)]
-pub struct SubDiagnostic {
-    pub level: Level,
-    pub messages: Vec<(DiagnosticMessage, Style)>,
-    pub span: MultiSpan,
-}
-
 #[derive(Debug, PartialEq, Eq)]
 pub struct DiagnosticStyledString(pub Vec<StringPart>);
 
@@ -215,6 +259,36 @@ impl StringPart {
     }
 }
 
+/// The main part of a diagnostic. Note that `DiagnosticBuilder`, which wraps
+/// this type, is used for most operations, and should be used instead whenever
+/// possible. This type should only be used when `DiagnosticBuilder`'s lifetime
+/// causes difficulties, e.g. when storing diagnostics within `DiagCtxt`.
+#[must_use]
+#[derive(Clone, Debug, Encodable, Decodable)]
+pub struct Diagnostic {
+    // NOTE(eddyb) this is private to disallow arbitrary after-the-fact changes,
+    // outside of what methods in this crate themselves allow.
+    pub(crate) level: Level,
+
+    pub messages: Vec<(DiagnosticMessage, Style)>,
+    pub code: Option<ErrCode>,
+    pub span: MultiSpan,
+    pub children: Vec<SubDiagnostic>,
+    pub suggestions: Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
+    args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>,
+
+    /// This is not used for highlighting or rendering any error message. Rather, it can be used
+    /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of
+    /// `span` if there is one. Otherwise, it is `DUMMY_SP`.
+    pub sort_span: Span,
+
+    pub is_lint: Option<IsLint>,
+
+    /// With `-Ztrack_diagnostics` enabled,
+    /// we print where in rustc this error was emitted.
+    pub(crate) emitted_at: DiagnosticLocation,
+}
+
 impl Diagnostic {
     #[track_caller]
     pub fn new<M: Into<DiagnosticMessage>>(level: Level, message: M) -> Self {
@@ -336,6 +410,118 @@ impl Diagnostic {
     pub fn replace_args(&mut self, args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>) {
         self.args = args;
     }
+
+    /// Fields used for Hash, and PartialEq trait.
+    fn keys(
+        &self,
+    ) -> (
+        &Level,
+        &[(DiagnosticMessage, Style)],
+        &Option<ErrCode>,
+        &MultiSpan,
+        &[SubDiagnostic],
+        &Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
+        Vec<(&DiagnosticArgName, &DiagnosticArgValue)>,
+        &Option<IsLint>,
+    ) {
+        (
+            &self.level,
+            &self.messages,
+            &self.code,
+            &self.span,
+            &self.children,
+            &self.suggestions,
+            self.args().collect(),
+            // omit self.sort_span
+            &self.is_lint,
+            // omit self.emitted_at
+        )
+    }
+}
+
+impl Hash for Diagnostic {
+    fn hash<H>(&self, state: &mut H)
+    where
+        H: Hasher,
+    {
+        self.keys().hash(state);
+    }
+}
+
+impl PartialEq for Diagnostic {
+    fn eq(&self, other: &Self) -> bool {
+        self.keys() == other.keys()
+    }
+}
+
+/// A "sub"-diagnostic attached to a parent diagnostic.
+/// For example, a note attached to an error.
+#[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)]
+pub struct SubDiagnostic {
+    pub level: Level,
+    pub messages: Vec<(DiagnosticMessage, Style)>,
+    pub span: MultiSpan,
+}
+
+/// Used for emitting structured error messages and other diagnostic information.
+/// Wraps a `Diagnostic`, adding some useful things.
+/// - The `dcx` field, allowing it to (a) emit itself, and (b) do a drop check
+///   that it has been emitted or cancelled.
+/// - The `EmissionGuarantee`, which determines the type returned from `emit`.
+///
+/// Each constructed `DiagnosticBuilder` must be consumed by a function such as
+/// `emit`, `cancel`, `delay_as_bug`, or `into_diagnostic`. A panic occurrs if a
+/// `DiagnosticBuilder` is dropped without being consumed by one of these
+/// functions.
+///
+/// If there is some state in a downstream crate you would like to
+/// access in the methods of `DiagnosticBuilder` here, consider
+/// extending `DiagCtxtFlags`.
+#[must_use]
+pub struct DiagnosticBuilder<'a, G: EmissionGuarantee = ErrorGuaranteed> {
+    pub dcx: &'a DiagCtxt,
+
+    /// Why the `Option`? It is always `Some` until the `DiagnosticBuilder` is
+    /// consumed via `emit`, `cancel`, etc. At that point it is consumed and
+    /// replaced with `None`. Then `drop` checks that it is `None`; if not, it
+    /// panics because a diagnostic was built but not used.
+    ///
+    /// Why the Box? `Diagnostic` is a large type, and `DiagnosticBuilder` is
+    /// often used as a return value, especially within the frequently-used
+    /// `PResult` type. In theory, return value optimization (RVO) should avoid
+    /// unnecessary copying. In practice, it does not (at the time of writing).
+    diag: Option<Box<Diagnostic>>,
+
+    _marker: PhantomData<G>,
+}
+
+// Cloning a `DiagnosticBuilder` is a recipe for a diagnostic being emitted
+// twice, which would be bad.
+impl<G> !Clone for DiagnosticBuilder<'_, G> {}
+
+rustc_data_structures::static_assert_size!(
+    DiagnosticBuilder<'_, ()>,
+    2 * std::mem::size_of::<usize>()
+);
+
+impl<G: EmissionGuarantee> Deref for DiagnosticBuilder<'_, G> {
+    type Target = Diagnostic;
+
+    fn deref(&self) -> &Diagnostic {
+        self.diag.as_ref().unwrap()
+    }
+}
+
+impl<G: EmissionGuarantee> DerefMut for DiagnosticBuilder<'_, G> {
+    fn deref_mut(&mut self) -> &mut Diagnostic {
+        self.diag.as_mut().unwrap()
+    }
+}
+
+impl<G: EmissionGuarantee> Debug for DiagnosticBuilder<'_, G> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.diag.fmt(f)
+    }
 }
 
 /// `DiagnosticBuilder` impls many `&mut self -> &mut Self` methods. Each one
@@ -382,6 +568,20 @@ macro_rules! with_fn {
 }
 
 impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
+    #[rustc_lint_diagnostics]
+    #[track_caller]
+    pub fn new<M: Into<DiagnosticMessage>>(dcx: &'a DiagCtxt, level: Level, message: M) -> Self {
+        Self::new_diagnostic(dcx, Diagnostic::new(level, message))
+    }
+
+    /// Creates a new `DiagnosticBuilder` with an already constructed
+    /// diagnostic.
+    #[track_caller]
+    pub(crate) fn new_diagnostic(dcx: &'a DiagCtxt, diag: Diagnostic) -> Self {
+        debug!("Created new diagnostic");
+        Self { dcx, diag: Some(Box::new(diag)), _marker: PhantomData }
+    }
+
     /// Delay emission of this diagnostic as a bug.
     ///
     /// This can be useful in contexts where an error indicates a bug but
@@ -1040,48 +1240,112 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
         let sub = SubDiagnostic { level, messages, span };
         self.children.push(sub);
     }
-}
-
-impl Diagnostic {
-    /// Fields used for Hash, and PartialEq trait
-    fn keys(
-        &self,
-    ) -> (
-        &Level,
-        &[(DiagnosticMessage, Style)],
-        &Option<ErrCode>,
-        &MultiSpan,
-        &[SubDiagnostic],
-        &Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
-        Vec<(&DiagnosticArgName, &DiagnosticArgValue)>,
-        &Option<IsLint>,
-    ) {
-        (
-            &self.level,
-            &self.messages,
-            &self.code,
-            &self.span,
-            &self.children,
-            &self.suggestions,
-            self.args().collect(),
-            // omit self.sort_span
-            &self.is_lint,
-            // omit self.emitted_at
-        )
+
+    /// Takes the diagnostic. For use by methods that consume the
+    /// DiagnosticBuilder: `emit`, `cancel`, etc. Afterwards, `drop` is the
+    /// only code that will be run on `self`.
+    fn take_diag(&mut self) -> Diagnostic {
+        Box::into_inner(self.diag.take().unwrap())
+    }
+
+    /// Most `emit_producing_guarantee` functions use this as a starting point.
+    fn emit_producing_nothing(mut self) {
+        let diag = self.take_diag();
+        self.dcx.emit_diagnostic(diag);
+    }
+
+    /// `ErrorGuaranteed::emit_producing_guarantee` uses this.
+    fn emit_producing_error_guaranteed(mut self) -> ErrorGuaranteed {
+        let diag = self.take_diag();
+
+        // The only error levels that produce `ErrorGuaranteed` are
+        // `Error` and `DelayedBug`. But `DelayedBug` should never occur here
+        // because delayed bugs have their level changed to `Bug` when they are
+        // actually printed, so they produce an ICE.
+        //
+        // (Also, even though `level` isn't `pub`, the whole `Diagnostic` could
+        // be overwritten with a new one thanks to `DerefMut`. So this assert
+        // protects against that, too.)
+        assert!(
+            matches!(diag.level, Level::Error | Level::DelayedBug),
+            "invalid diagnostic level ({:?})",
+            diag.level,
+        );
+
+        let guar = self.dcx.emit_diagnostic(diag);
+        guar.unwrap()
+    }
+
+    /// Emit and consume the diagnostic.
+    #[track_caller]
+    pub fn emit(self) -> G::EmitResult {
+        G::emit_producing_guarantee(self)
+    }
+
+    /// Emit the diagnostic unless `delay` is true,
+    /// in which case the emission will be delayed as a bug.
+    ///
+    /// See `emit` and `delay_as_bug` for details.
+    #[track_caller]
+    pub fn emit_unless(mut self, delay: bool) -> G::EmitResult {
+        if delay {
+            self.downgrade_to_delayed_bug();
+        }
+        self.emit()
+    }
+
+    /// Cancel and consume the diagnostic. (A diagnostic must either be emitted or
+    /// cancelled or it will panic when dropped).
+    pub fn cancel(mut self) {
+        self.diag = None;
+        drop(self);
+    }
+
+    /// Stashes diagnostic for possible later improvement in a different,
+    /// later stage of the compiler. The diagnostic can be accessed with
+    /// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`].
+    pub fn stash(mut self, span: Span, key: StashKey) {
+        self.dcx.stash_diagnostic(span, key, self.take_diag());
+    }
+
+    /// Delay emission of this diagnostic as a bug.
+    ///
+    /// This can be useful in contexts where an error indicates a bug but
+    /// typically this only happens when other compilation errors have already
+    /// happened. In those cases this can be used to defer emission of this
+    /// diagnostic as a bug in the compiler only if no other errors have been
+    /// emitted.
+    ///
+    /// In the meantime, though, callsites are required to deal with the "bug"
+    /// locally in whichever way makes the most sense.
+    #[track_caller]
+    pub fn delay_as_bug(mut self) -> G::EmitResult {
+        self.downgrade_to_delayed_bug();
+        self.emit()
     }
 }
 
-impl Hash for Diagnostic {
-    fn hash<H>(&self, state: &mut H)
-    where
-        H: Hasher,
-    {
-        self.keys().hash(state);
+/// Destructor bomb: every `DiagnosticBuilder` must be consumed (emitted,
+/// cancelled, etc.) or we emit a bug.
+impl<G: EmissionGuarantee> Drop for DiagnosticBuilder<'_, G> {
+    fn drop(&mut self) {
+        match self.diag.take() {
+            Some(diag) if !panicking() => {
+                self.dcx.emit_diagnostic(Diagnostic::new(
+                    Level::Bug,
+                    DiagnosticMessage::from("the following error was constructed but not emitted"),
+                ));
+                self.dcx.emit_diagnostic(*diag);
+                panic!("error was constructed but not emitted");
+            }
+            _ => {}
+        }
     }
 }
 
-impl PartialEq for Diagnostic {
-    fn eq(&self, other: &Self) -> bool {
-        self.keys() == other.keys()
-    }
+#[macro_export]
+macro_rules! struct_span_code_err {
+    ($dcx:expr, $span:expr, $code:expr, $($message:tt)*) => ({
+        $dcx.struct_span_err($span, format!($($message)*)).with_code($code)
+    })
 }
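With the builder methods and the `struct_span_code_err!` macro now all living in `diagnostic.rs`, typical call sites look roughly like the sketch below (hedged: this assumes a `rustc_private` environment with a `DiagCtxt` and a `Span` in hand, `codes::E0308` is just an arbitrary example code, and only method and macro names visible in this diff are relied on; it is not a standalone program):

use rustc_errors::{codes::E0308, struct_span_code_err, DiagCtxt, DiagnosticBuilder, ErrorGuaranteed, Level};
use rustc_span::Span;

fn report_examples(dcx: &DiagCtxt, span: Span) {
    // Build, attach a span, and consume with `emit`, getting back the proof token.
    let _guar: ErrorGuaranteed =
        DiagnosticBuilder::<ErrorGuaranteed>::new(dcx, Level::Error, "mismatched types")
            .with_span(span)
            .emit();

    // The macro kept by this commit wraps `DiagCtxt::struct_span_err` + `with_code`.
    struct_span_code_err!(dcx, span, E0308, "mismatched types").emit();

    // Every builder must be consumed somehow; `cancel` (or `delay_as_bug`) also counts.
    DiagnosticBuilder::<ErrorGuaranteed>::new(dcx, Level::Error, "never mind").cancel();
}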
@@ -1,282 +0,0 @@
-use crate::{
-    DiagCtxt, Diagnostic, DiagnosticMessage, ErrorGuaranteed, ExplicitBug, Level, StashKey,
-};
-use rustc_span::source_map::Spanned;
-use rustc_span::Span;
-use std::fmt::{self, Debug};
-use std::marker::PhantomData;
-use std::ops::{Deref, DerefMut};
-use std::panic;
-use std::thread::panicking;
-
-/// Trait implemented by error types. This is rarely implemented manually. Instead, use
-/// `#[derive(Diagnostic)]` -- see [rustc_macros::Diagnostic].
-#[rustc_diagnostic_item = "IntoDiagnostic"]
-pub trait IntoDiagnostic<'a, G: EmissionGuarantee = ErrorGuaranteed> {
-    /// Write out as a diagnostic out of `DiagCtxt`.
-    #[must_use]
-    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G>;
-}
-
-impl<'a, T, G> IntoDiagnostic<'a, G> for Spanned<T>
-where
-    T: IntoDiagnostic<'a, G>,
-    G: EmissionGuarantee,
-{
-    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a, G> {
-        self.node.into_diagnostic(dcx, level).with_span(self.span)
-    }
-}
-
-/// Used for emitting structured error messages and other diagnostic information.
-/// Wraps a `Diagnostic`, adding some useful things.
-/// - The `dcx` field, allowing it to (a) emit itself, and (b) do a drop check
-///   that it has been emitted or cancelled.
-/// - The `EmissionGuarantee`, which determines the type returned from `emit`.
-///
-/// Each constructed `DiagnosticBuilder` must be consumed by a function such as
-/// `emit`, `cancel`, `delay_as_bug`, or `into_diagnostic`. A panic occurrs if a
-/// `DiagnosticBuilder` is dropped without being consumed by one of these
-/// functions.
-///
-/// If there is some state in a downstream crate you would like to
-/// access in the methods of `DiagnosticBuilder` here, consider
-/// extending `DiagCtxtFlags`.
-#[must_use]
-pub struct DiagnosticBuilder<'a, G: EmissionGuarantee = ErrorGuaranteed> {
-    pub dcx: &'a DiagCtxt,
-
-    /// Why the `Option`? It is always `Some` until the `DiagnosticBuilder` is
-    /// consumed via `emit`, `cancel`, etc. At that point it is consumed and
-    /// replaced with `None`. Then `drop` checks that it is `None`; if not, it
-    /// panics because a diagnostic was built but not used.
-    ///
-    /// Why the Box? `Diagnostic` is a large type, and `DiagnosticBuilder` is
-    /// often used as a return value, especially within the frequently-used
-    /// `PResult` type. In theory, return value optimization (RVO) should avoid
-    /// unnecessary copying. In practice, it does not (at the time of writing).
-    // FIXME(nnethercote) Make private once this moves to diagnostic.rs.
-    pub(crate) diag: Option<Box<Diagnostic>>,
-
-    // FIXME(nnethercote) Make private once this moves to diagnostic.rs.
-    pub(crate) _marker: PhantomData<G>,
-}
-
-// Cloning a `DiagnosticBuilder` is a recipe for a diagnostic being emitted
-// twice, which would be bad.
-impl<G> !Clone for DiagnosticBuilder<'_, G> {}
-
-rustc_data_structures::static_assert_size!(
-    DiagnosticBuilder<'_, ()>,
-    2 * std::mem::size_of::<usize>()
-);
-
-/// Trait for types that `DiagnosticBuilder::emit` can return as a "guarantee"
-/// (or "proof") token that the emission happened.
-pub trait EmissionGuarantee: Sized {
-    /// This exists so that bugs and fatal errors can both result in `!` (an
-    /// abort) when emitted, but have different aborting behaviour.
-    type EmitResult = Self;
-
-    /// Implementation of `DiagnosticBuilder::emit`, fully controlled by each
-    /// `impl` of `EmissionGuarantee`, to make it impossible to create a value
-    /// of `Self::EmitResult` without actually performing the emission.
-    #[track_caller]
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult;
-}
-
-impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
-    /// Takes the diagnostic. For use by methods that consume the
-    /// DiagnosticBuilder: `emit`, `cancel`, etc. Afterwards, `drop` is the
-    /// only code that will be run on `self`.
-    // FIXME(nnethercote) Make private once this moves to diagnostic.rs.
-    pub(crate) fn take_diag(&mut self) -> Diagnostic {
-        Box::into_inner(self.diag.take().unwrap())
-    }
-
-    /// Most `emit_producing_guarantee` functions use this as a starting point.
-    // FIXME(nnethercote) Make private once this moves to diagnostic.rs.
-    pub(crate) fn emit_producing_nothing(mut self) {
-        let diag = self.take_diag();
-        self.dcx.emit_diagnostic(diag);
-    }
-
-    /// `ErrorGuaranteed::emit_producing_guarantee` uses this.
-    // FIXME(nnethercote) Make private once this moves to diagnostic.rs.
-    pub(crate) fn emit_producing_error_guaranteed(mut self) -> ErrorGuaranteed {
-        let diag = self.take_diag();
-
-        // The only error levels that produce `ErrorGuaranteed` are
-        // `Error` and `DelayedBug`. But `DelayedBug` should never occur here
-        // because delayed bugs have their level changed to `Bug` when they are
-        // actually printed, so they produce an ICE.
-        //
-        // (Also, even though `level` isn't `pub`, the whole `Diagnostic` could
-        // be overwritten with a new one thanks to `DerefMut`. So this assert
-        // protects against that, too.)
-        assert!(
-            matches!(diag.level, Level::Error | Level::DelayedBug),
-            "invalid diagnostic level ({:?})",
-            diag.level,
-        );
-
-        let guar = self.dcx.emit_diagnostic(diag);
-        guar.unwrap()
-    }
-}
-
-impl EmissionGuarantee for ErrorGuaranteed {
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
-        db.emit_producing_error_guaranteed()
-    }
-}
-
-impl EmissionGuarantee for () {
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
-        db.emit_producing_nothing();
-    }
-}
-
-/// Marker type which enables implementation of `create_bug` and `emit_bug` functions for
-/// bug diagnostics.
-#[derive(Copy, Clone)]
-pub struct BugAbort;
-
-impl EmissionGuarantee for BugAbort {
-    type EmitResult = !;
-
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
-        db.emit_producing_nothing();
-        panic::panic_any(ExplicitBug);
-    }
-}
-
-/// Marker type which enables implementation of `create_fatal` and `emit_fatal` functions for
-/// fatal diagnostics.
-#[derive(Copy, Clone)]
-pub struct FatalAbort;
-
-impl EmissionGuarantee for FatalAbort {
-    type EmitResult = !;
-
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
-        db.emit_producing_nothing();
-        crate::FatalError.raise()
-    }
-}
-
-impl EmissionGuarantee for rustc_span::fatal_error::FatalError {
-    fn emit_producing_guarantee(db: DiagnosticBuilder<'_, Self>) -> Self::EmitResult {
-        db.emit_producing_nothing();
-        rustc_span::fatal_error::FatalError
-    }
-}
-
-impl<G: EmissionGuarantee> Deref for DiagnosticBuilder<'_, G> {
-    type Target = Diagnostic;
-
-    fn deref(&self) -> &Diagnostic {
-        self.diag.as_ref().unwrap()
-    }
-}
-
-impl<G: EmissionGuarantee> DerefMut for DiagnosticBuilder<'_, G> {
-    fn deref_mut(&mut self) -> &mut Diagnostic {
-        self.diag.as_mut().unwrap()
-    }
-}
-
-impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
-    #[rustc_lint_diagnostics]
-    #[track_caller]
-    pub fn new<M: Into<DiagnosticMessage>>(dcx: &'a DiagCtxt, level: Level, message: M) -> Self {
-        Self::new_diagnostic(dcx, Diagnostic::new(level, message))
-    }
-
-    /// Creates a new `DiagnosticBuilder` with an already constructed
-    /// diagnostic.
-    #[track_caller]
-    pub(crate) fn new_diagnostic(dcx: &'a DiagCtxt, diag: Diagnostic) -> Self {
-        debug!("Created new diagnostic");
-        Self { dcx, diag: Some(Box::new(diag)), _marker: PhantomData }
-    }
-
-    /// Emit and consume the diagnostic.
-    #[track_caller]
-    pub fn emit(self) -> G::EmitResult {
-        G::emit_producing_guarantee(self)
-    }
-
-    /// Emit the diagnostic unless `delay` is true,
-    /// in which case the emission will be delayed as a bug.
-    ///
-    /// See `emit` and `delay_as_bug` for details.
-    #[track_caller]
-    pub fn emit_unless(mut self, delay: bool) -> G::EmitResult {
-        if delay {
-            self.downgrade_to_delayed_bug();
-        }
-        self.emit()
-    }
-
-    /// Cancel and consume the diagnostic. (A diagnostic must either be emitted or
-    /// cancelled or it will panic when dropped).
-    pub fn cancel(mut self) {
-        self.diag = None;
-        drop(self);
-    }
-
-    /// Stashes diagnostic for possible later improvement in a different,
-    /// later stage of the compiler. The diagnostic can be accessed with
-    /// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`].
-    pub fn stash(mut self, span: Span, key: StashKey) {
-        self.dcx.stash_diagnostic(span, key, self.take_diag());
-    }
-
-    /// Delay emission of this diagnostic as a bug.
-    ///
-    /// This can be useful in contexts where an error indicates a bug but
-    /// typically this only happens when other compilation errors have already
-    /// happened. In those cases this can be used to defer emission of this
-    /// diagnostic as a bug in the compiler only if no other errors have been
-    /// emitted.
-    ///
-    /// In the meantime, though, callsites are required to deal with the "bug"
-    /// locally in whichever way makes the most sense.
-    #[track_caller]
-    pub fn delay_as_bug(mut self) -> G::EmitResult {
-        self.downgrade_to_delayed_bug();
-        self.emit()
-    }
-}
-
-impl<G: EmissionGuarantee> Debug for DiagnosticBuilder<'_, G> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.diag.fmt(f)
-    }
-}
-
-/// Destructor bomb: every `DiagnosticBuilder` must be consumed (emitted,
-/// cancelled, etc.) or we emit a bug.
-impl<G: EmissionGuarantee> Drop for DiagnosticBuilder<'_, G> {
-    fn drop(&mut self) {
-        match self.diag.take() {
-            Some(diag) if !panicking() => {
-                self.dcx.emit_diagnostic(Diagnostic::new(
-                    Level::Bug,
-                    DiagnosticMessage::from("the following error was constructed but not emitted"),
-                ));
-                self.dcx.emit_diagnostic(*diag);
-                panic!("error was constructed but not emitted");
-            }
-            _ => {}
-        }
-    }
-}
-
-#[macro_export]
-macro_rules! struct_span_code_err {
-    ($dcx:expr, $span:expr, $code:expr, $($message:tt)*) => ({
-        $dcx.struct_span_err($span, format!($($message)*)).with_code($code)
-    })
-}
@@ -37,12 +37,9 @@ extern crate self as rustc_errors;
 pub use codes::*;
 pub use diagnostic::{
-    AddToDiagnostic, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName,
-    DiagnosticArgValue, DiagnosticStyledString, IntoDiagnosticArg, StringPart, SubDiagnostic,
-    SubdiagnosticMessageOp,
-};
-pub use diagnostic_builder::{
-    BugAbort, DiagnosticBuilder, EmissionGuarantee, FatalAbort, IntoDiagnostic,
+    AddToDiagnostic, BugAbort, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgName,
+    DiagnosticArgValue, DiagnosticBuilder, DiagnosticStyledString, EmissionGuarantee, FatalAbort,
+    IntoDiagnostic, IntoDiagnosticArg, StringPart, SubDiagnostic, SubdiagnosticMessageOp,
 };
 pub use diagnostic_impls::{
     DiagnosticArgFromDisplay, DiagnosticSymbolList, ExpectedLifetimeParameter,
@@ -87,7 +84,6 @@ use Level::*;
 pub mod annotate_snippet_emitter_writer;
 pub mod codes;
 mod diagnostic;
-mod diagnostic_builder;
 mod diagnostic_impls;
 pub mod emitter;
 pub mod error;
@@ -194,7 +194,7 @@ impl<'tcx> InferCtxt<'tcx> {
                 ty::ConstKind::Infer(InferConst::Var(b_vid)),
             ) => {
                 self.inner.borrow_mut().const_unification_table().union(a_vid, b_vid);
-                return Ok(a);
+                Ok(a)
             }
 
             (
@@ -202,7 +202,7 @@ impl<'tcx> InferCtxt<'tcx> {
                 ty::ConstKind::Infer(InferConst::EffectVar(b_vid)),
             ) => {
                 self.inner.borrow_mut().effect_unification_table().union(a_vid, b_vid);
-                return Ok(a);
+                Ok(a)
             }
 
             // All other cases of inference with other variables are errors.
@@ -220,19 +220,21 @@ impl<'tcx> InferCtxt<'tcx> {
             }
 
             (ty::ConstKind::Infer(InferConst::Var(vid)), _) => {
-                return self.instantiate_const_var(vid, b);
+                self.instantiate_const_var(relation, relation.a_is_expected(), vid, b)?;
+                Ok(b)
             }
 
             (_, ty::ConstKind::Infer(InferConst::Var(vid))) => {
-                return self.instantiate_const_var(vid, a);
+                self.instantiate_const_var(relation, !relation.a_is_expected(), vid, a)?;
+                Ok(a)
             }
 
             (ty::ConstKind::Infer(InferConst::EffectVar(vid)), _) => {
-                return Ok(self.unify_effect_variable(vid, b));
+                Ok(self.unify_effect_variable(vid, b))
             }
 
             (_, ty::ConstKind::Infer(InferConst::EffectVar(vid))) => {
-                return Ok(self.unify_effect_variable(vid, a));
+                Ok(self.unify_effect_variable(vid, a))
             }
 
             (ty::ConstKind::Unevaluated(..), _) | (_, ty::ConstKind::Unevaluated(..))
@@ -240,7 +242,7 @@ impl<'tcx> InferCtxt<'tcx> {
             {
                 let (a, b) = if relation.a_is_expected() { (a, b) } else { (b, a) };
 
-                relation.register_predicates([ty::Binder::dummy(if self.next_trait_solver() {
+                relation.register_predicates([if self.next_trait_solver() {
                     ty::PredicateKind::AliasRelate(
                         a.into(),
                         b.into(),
@@ -248,14 +250,12 @@ impl<'tcx> InferCtxt<'tcx> {
                     )
                 } else {
                     ty::PredicateKind::ConstEquate(a, b)
-                })]);
+                }]);
 
-                return Ok(b);
+                Ok(b)
             }
-            _ => {}
+            _ => ty::relate::structurally_relate_consts(relation, a, b),
         }
-
-        ty::relate::structurally_relate_consts(relation, a, b)
     }
 
     fn unify_integral_variable(
@@ -22,7 +22,7 @@ impl<'tcx> InferCtxt<'tcx> {
     /// subtyping could occur. This also does the occurs checks, detecting whether
     /// instantiating `target_vid` would result in a cyclic type. We eagerly error
     /// in this case.
-    #[instrument(skip(self, relation, target_is_expected), level = "debug")]
+    #[instrument(level = "debug", skip(self, relation, target_is_expected))]
     pub(super) fn instantiate_ty_var<R: ObligationEmittingRelation<'tcx>>(
         &self,
         relation: &mut R,
@@ -158,26 +158,22 @@ impl<'tcx> InferCtxt<'tcx> {
     /// As `3 + 4` contains `N` in its args, this must not succeed.
     ///
    /// See `tests/ui/const-generics/occurs-check/` for more examples where this is relevant.
-    #[instrument(level = "debug", skip(self))]
-    pub(super) fn instantiate_const_var(
+    #[instrument(level = "debug", skip(self, relation))]
+    pub(super) fn instantiate_const_var<R: ObligationEmittingRelation<'tcx>>(
         &self,
+        relation: &mut R,
+        target_is_expected: bool,
         target_vid: ty::ConstVid,
         source_ct: ty::Const<'tcx>,
-    ) -> RelateResult<'tcx, ty::Const<'tcx>> {
-        let span = match self.inner.borrow_mut().const_unification_table().probe_value(target_vid) {
-            ConstVariableValue::Known { value } => {
-                bug!("instantiating a known const var: {target_vid:?} {value} {source_ct}")
-            }
-            ConstVariableValue::Unknown { origin, universe: _ } => origin.span,
-        };
+    ) -> RelateResult<'tcx, ()> {
         // FIXME(generic_const_exprs): Occurs check failures for unevaluated
         // constants and generic expressions are not yet handled correctly.
         let Generalization { value_may_be_infer: generalized_ct, has_unconstrained_ty_var } =
-            self.generalize(span, target_vid, ty::Variance::Invariant, source_ct)?;
+            self.generalize(relation.span(), target_vid, ty::Variance::Invariant, source_ct)?;
 
         debug_assert!(!generalized_ct.is_ct_infer());
         if has_unconstrained_ty_var {
-            span_bug!(span, "unconstrained ty var when generalizing `{source_ct:?}`");
+            bug!("unconstrained ty var when generalizing `{source_ct:?}`");
         }
 
         self.inner
@@ -185,9 +181,25 @@ impl<'tcx> InferCtxt<'tcx> {
             .const_unification_table()
             .union_value(target_vid, ConstVariableValue::Known { value: generalized_ct });
 
-        // FIXME(generic_const_exprs): We have to make sure we actually equate
-        // `generalized_ct` and `source_ct` here.
-        Ok(generalized_ct)
+        // HACK: make sure that we `a_is_expected` continues to be
+        // correct when relating the generalized type with the source.
+        if target_is_expected == relation.a_is_expected() {
+            relation.relate_with_variance(
+                ty::Variance::Invariant,
+                ty::VarianceDiagInfo::default(),
+                generalized_ct,
+                source_ct,
+            )?;
+        } else {
+            relation.relate_with_variance(
+                ty::Variance::Invariant,
+                ty::VarianceDiagInfo::default(),
+                source_ct,
+                generalized_ct,
+            )?;
+        }
+
+        Ok(())
     }
 
     /// Attempts to generalize `source_term` for the type variable `target_vid`.
@@ -287,6 +299,49 @@ impl<'tcx> Generalizer<'_, 'tcx> {
             ty::TermKind::Const(ct) => TypeError::CyclicConst(ct),
         }
     }
+
+    /// An occurs check failure inside of an alias does not mean
+    /// that the types definitely don't unify. We may be able
+    /// to normalize the alias after all.
+    ///
+    /// We handle this by lazily equating the alias and generalizing
+    /// it to an inference variable.
+    ///
+    /// This is incomplete and will hopefully soon get fixed by #119106.
+    fn generalize_alias_ty(
+        &mut self,
+        alias: ty::AliasTy<'tcx>,
+    ) -> Result<Ty<'tcx>, TypeError<'tcx>> {
+        let is_nested_alias = mem::replace(&mut self.in_alias, true);
+        let result = match self.relate(alias, alias) {
+            Ok(alias) => Ok(alias.to_ty(self.tcx())),
+            Err(e) => {
+                if is_nested_alias {
+                    return Err(e);
+                } else {
+                    let mut visitor = MaxUniverse::new();
+                    alias.visit_with(&mut visitor);
+                    let infer_replacement_is_complete =
+                        self.for_universe.can_name(visitor.max_universe())
+                            && !alias.has_escaping_bound_vars();
+                    if !infer_replacement_is_complete {
+                        warn!("may incompletely handle alias type: {alias:?}");
+                    }
+
+                    debug!("generalization failure in alias");
+                    Ok(self.infcx.next_ty_var_in_universe(
+                        TypeVariableOrigin {
+                            kind: TypeVariableOriginKind::MiscVariable,
+                            span: self.span,
+                        },
+                        self.for_universe,
+                    ))
+                }
+            }
+        };
+        self.in_alias = is_nested_alias;
+        result
+    }
 }
 
 impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
@@ -433,43 +488,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
                 }
             }

-            ty::Alias(kind, data) => {
-                // An occurs check failure inside of an alias does not mean
-                // that the types definitely don't unify. We may be able
-                // to normalize the alias after all.
-                //
-                // We handle this by lazily equating the alias and generalizing
-                // it to an inference variable.
-                let is_nested_alias = mem::replace(&mut self.in_alias, true);
-                let result = match self.relate(data, data) {
-                    Ok(data) => Ok(Ty::new_alias(self.tcx(), kind, data)),
-                    Err(e) => {
-                        if is_nested_alias {
-                            return Err(e);
-                        } else {
-                            let mut visitor = MaxUniverse::new();
-                            t.visit_with(&mut visitor);
-                            let infer_replacement_is_complete =
-                                self.for_universe.can_name(visitor.max_universe())
-                                    && !t.has_escaping_bound_vars();
-                            if !infer_replacement_is_complete {
-                                warn!("may incompletely handle alias type: {t:?}");
-                            }
-
-                            debug!("generalization failure in alias");
-                            Ok(self.infcx.next_ty_var_in_universe(
-                                TypeVariableOrigin {
-                                    kind: TypeVariableOriginKind::MiscVariable,
-                                    span: self.span,
-                                },
-                                self.for_universe,
-                            ))
-                        }
-                    }
-                };
-                self.in_alias = is_nested_alias;
-                result
-            }
+            ty::Alias(_, data) => self.generalize_alias_ty(data),

             _ => relate::structurally_relate_tys(self, t, t),
         }?;
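The doc comment on `generalize_alias_ty` is easiest to see with a concrete program. In the sketch below (plain stable Rust; treating it as the exact shape the generalizer sees is my assumption), the element variable of `v` ends up equated with a projection that mentions that same variable, which only works because the alias can be normalized away first:

trait Proj {
    type Out;
}
impl<T> Proj for T {
    type Out = u32;
}

fn proj_of<T: Proj>(_: &T) -> <T as Proj>::Out
where
    <T as Proj>::Out: Default,
{
    Default::default()
}

fn main() {
    let mut v = Vec::new(); // v: Vec<?e>
    let e = proj_of(&v);    // e: <Vec<?e> as Proj>::Out
    v.push(e);              // needs ?e == <Vec<?e> as Proj>::Out; a naive occurs
                            // check would balk, but the alias normalizes to u32
    assert_eq!(v, vec![0u32]);
}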
@@ -1052,7 +1052,7 @@ struct Ascription<'tcx> {
     variance: ty::Variance,
 }

-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub(crate) struct MatchPair<'pat, 'tcx> {
     // This place...
     place: PlaceBuilder<'tcx>,
@@ -1408,51 +1408,66 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         span: Span,
         scrutinee_span: Span,
         candidates: &mut [&mut Candidate<'_, 'tcx>],
-        block: BasicBlock,
+        start_block: BasicBlock,
         otherwise_block: BasicBlock,
         fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         let (first_candidate, remaining_candidates) = candidates.split_first_mut().unwrap();
-
-        // All of the or-patterns have been sorted to the end, so if the first
-        // pattern is an or-pattern we only have or-patterns.
-        match first_candidate.match_pairs[0].pattern.kind {
-            PatKind::Or { .. } => (),
-            _ => {
-                self.test_candidates(
-                    span,
-                    scrutinee_span,
-                    candidates,
-                    block,
-                    otherwise_block,
-                    fake_borrows,
-                );
-                return;
-            }
+        assert!(first_candidate.subcandidates.is_empty());
+        if !matches!(first_candidate.match_pairs[0].pattern.kind, PatKind::Or { .. }) {
+            self.test_candidates(
+                span,
+                scrutinee_span,
+                candidates,
+                start_block,
+                otherwise_block,
+                fake_borrows,
+            );
+            return;
         }

         let match_pairs = mem::take(&mut first_candidate.match_pairs);
-        first_candidate.pre_binding_block = Some(block);
+        let (first_match_pair, remaining_match_pairs) = match_pairs.split_first().unwrap();
+        let PatKind::Or { ref pats } = &first_match_pair.pattern.kind else { unreachable!() };

         let remainder_start = self.cfg.start_new_block();
-        for match_pair in match_pairs {
-            let PatKind::Or { ref pats } = &match_pair.pattern.kind else {
-                bug!("Or-patterns should have been sorted to the end");
-            };
-            let or_span = match_pair.pattern.span;
-
+        let or_span = first_match_pair.pattern.span;
+        // Test the alternatives of this or-pattern.
+        self.test_or_pattern(
+            first_candidate,
+            start_block,
+            remainder_start,
+            pats,
+            or_span,
+            &first_match_pair.place,
+            fake_borrows,
+        );
+
+        if !remaining_match_pairs.is_empty() {
+            // If more match pairs remain, test them after each subcandidate.
+            // We could add them to the or-candidates before the call to `test_or_pattern` but this
+            // would make it impossible to detect simplifiable or-patterns. That would guarantee
+            // exponentially large CFGs for cases like `(1 | 2, 3 | 4, ...)`.
             first_candidate.visit_leaves(|leaf_candidate| {
-                self.test_or_pattern(
-                    leaf_candidate,
-                    remainder_start,
-                    pats,
-                    or_span,
-                    &match_pair.place,
+                assert!(leaf_candidate.match_pairs.is_empty());
+                leaf_candidate.match_pairs.extend(remaining_match_pairs.iter().cloned());
+                let or_start = leaf_candidate.pre_binding_block.unwrap();
+                // In a case like `(a | b, c | d)`, if `a` succeeds and `c | d` fails, we know `(b,
+                // c | d)` will fail too. If there is no guard, we skip testing of `b` by branching
+                // directly to `remainder_start`. If there is a guard, we have to try `(b, c | d)`.
+                let or_otherwise = leaf_candidate.otherwise_block.unwrap_or(remainder_start);
+                self.test_candidates_with_or(
+                    span,
+                    scrutinee_span,
+                    &mut [leaf_candidate],
+                    or_start,
+                    or_otherwise,
                     fake_borrows,
                 );
             });
         }

+        // Test the remaining candidates.
         self.match_candidates(
             span,
             scrutinee_span,
@@ -1460,17 +1475,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             otherwise_block,
             remaining_candidates,
             fake_borrows,
-        )
+        );
     }

     #[instrument(
-        skip(self, otherwise, or_span, place, fake_borrows, candidate, pats),
+        skip(self, start_block, otherwise_block, or_span, place, fake_borrows, candidate, pats),
         level = "debug"
     )]
     fn test_or_pattern<'pat>(
         &mut self,
         candidate: &mut Candidate<'pat, 'tcx>,
-        otherwise: BasicBlock,
+        start_block: BasicBlock,
+        otherwise_block: BasicBlock,
         pats: &'pat [Box<Pat<'tcx>>],
         or_span: Span,
         place: &PlaceBuilder<'tcx>,

@@ -1482,16 +1498,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             .map(|pat| Candidate::new(place.clone(), pat, candidate.has_guard, self))
             .collect();
         let mut or_candidate_refs: Vec<_> = or_candidates.iter_mut().collect();
-        let otherwise = if let Some(otherwise_block) = candidate.otherwise_block {
-            otherwise_block
-        } else {
-            otherwise
-        };
         self.match_candidates(
             or_span,
             or_span,
-            candidate.pre_binding_block.unwrap(),
-            otherwise,
+            start_block,
+            otherwise_block,
             &mut or_candidate_refs,
             fake_borrows,
         );
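A surface-level illustration of what `test_candidates_with_or` now lowers one step at a time (whether this particular match hits the `remaining_match_pairs` path is an assumption; the program itself is plain Rust):

fn classify(pair: (u8, u8)) -> &'static str {
    match pair {
        // One candidate with two or-patterns: the new code tests `1 | 2`
        // first and only then tests `3 | 4` in each resulting subcandidate,
        // instead of expanding every combination up front.
        (1 | 2, 3 | 4) => "both small",
        _ => "other",
    }
}

fn main() {
    assert_eq!(classify((2, 3)), "both small");
    assert_eq!(classify((2, 9)), "other");
}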
@@ -27,6 +27,8 @@ parse_async_bound_modifier_in_2015 = `async` trait bounds are only allowed in Ru
 parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015
     .label = to use `async fn`, switch to Rust 2018 or later

+parse_async_impl = `async` trait implementations are unsupported
+
 parse_async_move_block_in_2015 = `async move` blocks are only allowed in Rust 2018 or later

 parse_async_move_order_incorrect = the order of `move` and `async` is incorrect
@@ -2975,3 +2975,10 @@ pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
 #[derive(Diagnostic)]
 #[diag(parse_invalid_offset_of)]
 pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_async_impl)]
+pub(crate) struct AsyncImpl {
+    #[primary_span]
+    pub span: Span,
+}
@@ -562,6 +562,15 @@ impl<'a> Parser<'a> {
             self.sess.gated_spans.gate(sym::const_trait_impl, span);
         }

+        // Parse stray `impl async Trait`
+        if (self.token.uninterpolated_span().at_least_rust_2018()
+            && self.token.is_keyword(kw::Async))
+            || self.is_kw_followed_by_ident(kw::Async)
+        {
+            self.bump();
+            self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span });
+        }
+
         let polarity = self.parse_polarity();

         // Parse both types and traits as a type, then reinterpret if necessary.
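The recovery added above fires on input like the following (deliberately invalid Rust, shown only as the sort of test input that now gets the dedicated `parse_async_impl` diagnostic instead of a more confusing parse error):

trait Greet {}
struct S;

// error: `async` trait implementations are unsupported
impl async Greet for S {}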
@@ -592,22 +601,10 @@ impl<'a> Parser<'a> {
             // We need to report this error after `cfg` expansion for compatibility reasons
             self.bump(); // `..`, do not add it to expected tokens

-            // FIXME(nnethercote): AST validation later detects this
-            // `TyKind::Err` and emits an errors. So why the unchecked
-            // ErrorGuaranteed?
-            // - A `span_delayed_bug` doesn't work here, because rustfmt can
-            //   hit this path but then not hit the follow-up path in the AST
-            //   validator that issues the error, which results in ICEs.
-            // - `TyKind::Dummy` doesn't work, because it ends up reaching HIR
-            //   lowering, which results in ICEs. Changing `TyKind::Dummy` to
-            //   `TyKind::Err` during AST validation might fix that, but that's
-            //   not possible because AST validation doesn't allow mutability.
-            //
-            // #121072 will hopefully remove all this special handling of the
-            // obsolete `impl Trait for ..` and then this can go away.
-            #[allow(deprecated)]
-            let guar = rustc_errors::ErrorGuaranteed::unchecked_error_guaranteed();
-            Some(self.mk_ty(self.prev_token.span, TyKind::Err(guar)))
+            // AST validation later detects this `TyKind::Dummy` and emits an
+            // error. (#121072 will hopefully remove all this special handling
+            // of the obsolete `impl Trait for ..` and then this can go away.)
+            Some(self.mk_ty(self.prev_token.span, TyKind::Dummy))
         } else if has_for || self.token.can_begin_type() {
             Some(self.parse_ty()?)
         } else {
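For reference, the branch above is only reachable from the long-obsolete opt-out impl syntax, roughly as below (again intentionally invalid code, kept only to show what now parses to `TyKind::Dummy` before AST validation aborts with `ObsoleteAuto`):

#![feature(auto_traits)]

auto trait Scary {}

// obsolete syntax: rejected during AST validation
impl Scary for .. {}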
@@ -778,9 +778,10 @@ impl<'a> Parser<'a> {
             || self.check(&token::Not)
             || self.check(&token::Question)
             || self.check(&token::Tilde)
-            || self.check_keyword(kw::Const)
             || self.check_keyword(kw::For)
             || self.check(&token::OpenDelim(Delimiter::Parenthesis))
+            || self.check_keyword(kw::Const)
+            || self.check_keyword(kw::Async)
     }

     /// Parses a bound according to the grammar:
@@ -882,11 +883,13 @@ impl<'a> Parser<'a> {
             BoundConstness::Never
         };

-        let asyncness = if self.token.span.at_least_rust_2018() && self.eat_keyword(kw::Async) {
+        let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
+            && self.eat_keyword(kw::Async)
+        {
             self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
         } else if self.may_recover()
-            && self.token.span.is_rust_2015()
+            && self.token.uninterpolated_span().is_rust_2015()
             && self.is_kw_followed_by_ident(kw::Async)
         {
             self.bump(); // eat `async`
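The `uninterpolated_span` calls matter when the `async` keyword reaches the parser through macro expansion. A nightly-only sketch of the bound being parsed here (the feature gate name comes from the `sym::async_closure` gate in the hunk; everything else in the snippet is an assumption):

#![feature(async_closure)]

// `async Fn(u32)` is now recognized as the start of a bound, including when
// the `async` token originates from a macro.
fn takes_async_callback<F: async Fn(u32)>(f: F) -> F {
    f
}

fn main() {}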
@@ -85,25 +85,16 @@ impl<'tcx> NormalizationFolder<'_, 'tcx> {
             ),
         );

-        // Do not emit an error if normalization is known to fail but instead
-        // keep the projection unnormalized. This is the case for projections
-        // with a `T: Trait` where-clause and opaque types outside of the defining
-        // scope.
-        let result = if infcx.predicate_may_hold(&obligation) {
-            self.fulfill_cx.register_predicate_obligation(infcx, obligation);
-            let errors = self.fulfill_cx.select_all_or_error(infcx);
-            if !errors.is_empty() {
-                return Err(errors);
-            }
-            let ty = infcx.resolve_vars_if_possible(new_infer_ty);
-
-            // Alias is guaranteed to be fully structurally resolved,
-            // so we can super fold here.
-            ty.try_super_fold_with(self)?
-        } else {
-            alias_ty.try_super_fold_with(self)?
-        };
-
+        self.fulfill_cx.register_predicate_obligation(infcx, obligation);
+        let errors = self.fulfill_cx.select_all_or_error(infcx);
+        if !errors.is_empty() {
+            return Err(errors);
+        }
+
+        // Alias is guaranteed to be fully structurally resolved,
+        // so we can super fold here.
+        let ty = infcx.resolve_vars_if_possible(new_infer_ty);
+        let result = ty.try_super_fold_with(self)?;
         self.depth -= 1;
         Ok(result)
     }
@@ -178,6 +169,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
         Ok(t)
     }

+    #[instrument(level = "debug", skip(self), ret)]
     fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
         let infcx = self.at.infcx;
         debug_assert_eq!(ty, infcx.shallow_resolve(ty));
@@ -204,6 +196,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
         }
     }

+    #[instrument(level = "debug", skip(self), ret)]
     fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result<ty::Const<'tcx>, Self::Error> {
         let infcx = self.at.infcx;
         debug_assert_eq!(ct, infcx.shallow_resolve(ct));
@@ -0,0 +1,25 @@
+use crate::solve::EvalCtxt;
+use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
+use rustc_middle::ty;
+
+impl<'tcx> EvalCtxt<'_, 'tcx> {
+    #[instrument(level = "debug", skip(self), ret)]
+    pub(super) fn normalize_anon_const(
+        &mut self,
+        goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
+    ) -> QueryResult<'tcx> {
+        if let Some(normalized_const) = self.try_const_eval_resolve(
+            goal.param_env,
+            ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args),
+            self.tcx()
+                .type_of(goal.predicate.alias.def_id)
+                .no_bound_vars()
+                .expect("const ty should not rely on other generics"),
+        ) {
+            self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?;
+            self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
+        } else {
+            self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)
+        }
+    }
+}
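The goals handled by `normalize_anon_const` come from anonymous constants in types. A plain-Rust example of such a constant (whether this exact program is routed through the new solver path depends on `-Znext-solver`, which is an assumption here):

// The array length `2 + 1` is a `DefKind::AnonConst`: an unevaluated
// constant that has to be normalized (const-evaluated) to `3`.
fn extend(xs: [u8; 2]) -> [u8; 2 + 1] {
    let mut out = [0u8; 2 + 1];
    out[..2].copy_from_slice(&xs);
    out
}

fn main() {
    assert_eq!(extend([1, 2]), [1, 2, 0]);
}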
@@ -18,8 +18,9 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_middle::ty::{ToPredicate, TypeVisitableExt};
 use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP};

+mod anon_const;
 mod inherent;
-mod opaques;
+mod opaque_types;
 mod weak_types;

 impl<'tcx> EvalCtxt<'_, 'tcx> {
@@ -31,34 +32,34 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
         let def_id = goal.predicate.def_id();
         match self.tcx().def_kind(def_id) {
             DefKind::AssocTy | DefKind::AssocConst => {
-                // To only compute normalization once for each projection we only
-                // assemble normalization candidates if the expected term is an
-                // unconstrained inference variable.
-                //
-                // Why: For better cache hits, since if we have an unconstrained RHS then
-                // there are only as many cache keys as there are (canonicalized) alias
-                // types in each normalizes-to goal. This also weakens inference in a
-                // forwards-compatible way so we don't use the value of the RHS term to
-                // affect candidate assembly for projections.
-                //
-                // E.g. for `<T as Trait>::Assoc == u32` we recursively compute the goal
-                // `exists<U> <T as Trait>::Assoc == U` and then take the resulting type for
-                // `U` and equate it with `u32`. This means that we don't need a separate
-                // projection cache in the solver, since we're piggybacking off of regular
-                // goal caching.
-                if self.term_is_fully_unconstrained(goal) {
-                    match self.tcx().associated_item(def_id).container {
-                        ty::AssocItemContainer::TraitContainer => {
+                match self.tcx().associated_item(def_id).container {
+                    ty::AssocItemContainer::TraitContainer => {
+                        // To only compute normalization once for each projection we only
+                        // assemble normalization candidates if the expected term is an
+                        // unconstrained inference variable.
+                        //
+                        // Why: For better cache hits, since if we have an unconstrained RHS then
+                        // there are only as many cache keys as there are (canonicalized) alias
+                        // types in each normalizes-to goal. This also weakens inference in a
+                        // forwards-compatible way so we don't use the value of the RHS term to
+                        // affect candidate assembly for projections.
+                        //
+                        // E.g. for `<T as Trait>::Assoc == u32` we recursively compute the goal
+                        // `exists<U> <T as Trait>::Assoc == U` and then take the resulting type for
+                        // `U` and equate it with `u32`. This means that we don't need a separate
+                        // projection cache in the solver, since we're piggybacking off of regular
+                        // goal caching.
+                        if self.term_is_fully_unconstrained(goal) {
                             let candidates = self.assemble_and_evaluate_candidates(goal);
                             self.merge_candidates(candidates)
-                        }
-                        ty::AssocItemContainer::ImplContainer => {
-                            self.normalize_inherent_associated_type(goal)
+                        } else {
+                            self.set_normalizes_to_hack_goal(goal);
+                            self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
                         }
                     }
-                } else {
-                    self.set_normalizes_to_hack_goal(goal);
-                    self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
+                    ty::AssocItemContainer::ImplContainer => {
+                        self.normalize_inherent_associated_type(goal)
+                    }
                 }
             }
             DefKind::AnonConst => self.normalize_anon_const(goal),
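The `ImplContainer` arm above is reached for inherent associated items rather than trait items. A nightly sketch (it assumes the `inherent_associated_types` feature; whether `-Znext-solver` is needed to hit this exact arm is also an assumption):

#![feature(inherent_associated_types)]
#![allow(incomplete_features)]

struct Meters;

impl Meters {
    // `Meters::Unit` lives in an impl container, not a trait.
    type Unit = f64;
}

fn to_unit(x: f64) -> Meters::Unit {
    x
}

fn main() {
    assert_eq!(to_unit(1.5), 1.5);
}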
@@ -67,26 +68,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
             kind => bug!("unknown DefKind {} in projection goal: {goal:#?}", kind.descr(def_id)),
         }
     }
-
-    #[instrument(level = "debug", skip(self), ret)]
-    fn normalize_anon_const(
-        &mut self,
-        goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
-    ) -> QueryResult<'tcx> {
-        if let Some(normalized_const) = self.try_const_eval_resolve(
-            goal.param_env,
-            ty::UnevaluatedConst::new(goal.predicate.alias.def_id, goal.predicate.alias.args),
-            self.tcx()
-                .type_of(goal.predicate.alias.def_id)
-                .no_bound_vars()
-                .expect("const ty should not rely on other generics"),
-        ) {
-            self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?;
-            self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
-        } else {
-            self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)
-        }
-    }
 }

 impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
@@ -39,14 +39,14 @@ fn main() {
         .open(&lock_path)));
     _build_lock_guard = match build_lock.try_write() {
         Ok(mut lock) => {
-            t!(lock.write(&process::id().to_string().as_ref()));
+            t!(lock.write(process::id().to_string().as_ref()));
             lock
         }
         err => {
             drop(err);
             println!("WARNING: build directory locked by process {pid}, waiting for lock");
             let mut lock = t!(build_lock.write());
-            t!(lock.write(&process::id().to_string().as_ref()));
+            t!(lock.write(process::id().to_string().as_ref()));
             lock
         }
     };
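Most of the bootstrap hunks that follow apply the same clippy suggestions (`needless_borrow` and friends): when a function takes `impl AsRef<...>`, passing the owned value directly is enough. A small stand-alone illustration (the file name and helper are made up for the example):

use std::fs;
use std::io;
use std::path::PathBuf;

fn write_stamp(stamp: PathBuf, contents: String) -> io::Result<()> {
    // previously written as `fs::write(&stamp, &contents)`; the borrows are
    // redundant because `fs::write` takes `AsRef<Path>` and `AsRef<[u8]>`.
    fs::write(stamp, contents)
}

fn main() -> io::Result<()> {
    write_stamp(PathBuf::from("stamp.txt"), String::from("ok"))
}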
@@ -113,14 +113,14 @@ fn main() {
                 continue;
             }

-            let file = t!(fs::File::open(&entry.path()));
+            let file = t!(fs::File::open(entry.path()));

             // To ensure deterministic results we must sort the dump lines.
             // This is necessary because the order of rustc invocations different
             // almost all the time.
             let mut lines: Vec<String> = t!(BufReader::new(&file).lines().collect());
             lines.sort_by_key(|t| t.to_lowercase());
-            let mut file = t!(OpenOptions::new().write(true).truncate(true).open(&entry.path()));
+            let mut file = t!(OpenOptions::new().write(true).truncate(true).open(entry.path()));
             t!(file.write_all(lines.join("\n").as_bytes()));
         }
     }

@@ -156,7 +156,7 @@ fn check_version(config: &Config) -> Option<String> {
         msg.push_str("There have been changes to x.py since you last updated:\n");

         for change in changes {
-            msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary));
+            msg.push_str(&format!(" [{}] {}\n", change.severity, change.summary));
             msg.push_str(&format!(
                 " - PR Link https://github.com/rust-lang/rust/pull/{}\n",
                 change.change_id

@@ -276,7 +276,7 @@ fn main() {
             dur.as_secs(),
             dur.subsec_millis(),
             if rusage_data.is_some() { " " } else { "" },
-            rusage_data.unwrap_or(String::new()),
+            rusage_data.unwrap_or_default(),
         );
     }
 }

@@ -440,5 +440,5 @@ fn format_rusage_data(_child: Child) -> Option<String> {
         ));
     }

-    return Some(init_str);
+    Some(init_str)
 }

@@ -18,9 +18,9 @@ fn main() {

     // Invoke sccache with said compiler
     let sccache_path = env::var_os("SCCACHE_PATH").unwrap();
-    let mut cmd = Command::new(&sccache_path);
+    let mut cmd = Command::new(sccache_path);
     cmd.arg(compiler.path());
-    for &(ref k, ref v) in compiler.env() {
+    for (k, v) in compiler.env() {
         cmd.env(k, v);
     }
     for arg in env::args().skip(1) {

@@ -34,7 +34,7 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
         &builder.config.cmd
     {
         // disable the most spammy clippy lints
-        let ignored_lints = vec![
+        let ignored_lints = [
            "many_single_char_names", // there are a lot in stdarch
            "collapsible_if",
            "type_complexity",

@@ -150,7 +150,7 @@ impl Step for Std {
         if compiler.stage == 0 {
             let libdir = builder.sysroot_libdir(compiler, target);
             let hostdir = builder.sysroot_libdir(compiler, compiler.host);
-            add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
+            add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
         }
         drop(_guard);

@@ -301,7 +301,7 @@ impl Step for Rustc {

         let libdir = builder.sysroot_libdir(compiler, target);
         let hostdir = builder.sysroot_libdir(compiler, compiler.host);
-        add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
+        add_to_sysroot(builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
     }
 }

@@ -353,7 +353,7 @@ impl Step for CodegenBackend {
             .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml")));
         rustc_cargo_env(builder, &mut cargo, target, compiler.stage);

-        let _guard = builder.msg_check(&backend, target);
+        let _guard = builder.msg_check(backend, target);

         run_cargo(
             builder,

@@ -107,8 +107,8 @@ impl Std {
     ) -> Vec<(PathBuf, DependencyType)> {
         let mut deps = Vec::new();
         if !self.is_for_mir_opt_tests {
-            deps.extend(copy_third_party_objects(builder, &compiler, target));
-            deps.extend(copy_self_contained_objects(builder, &compiler, target));
+            deps.extend(copy_third_party_objects(builder, compiler, target));
+            deps.extend(copy_self_contained_objects(builder, compiler, target));
         }
         deps
     }

@@ -186,7 +186,7 @@ impl Step for Std {

         // Profiler information requires LLVM's compiler-rt
         if builder.config.profiler {
-            builder.update_submodule(&Path::new("src/llvm-project"));
+            builder.update_submodule(Path::new("src/llvm-project"));
         }

         let mut target_deps = builder.ensure(StartupObjects { compiler, target });

@@ -271,7 +271,7 @@ impl Step for Std {
         if target.is_synthetic() {
             cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1");
         }
-        for rustflag in self.extra_rust_args.into_iter() {
+        for rustflag in self.extra_rust_args.iter() {
             cargo.rustflag(rustflag);
         }

@@ -333,7 +333,7 @@ fn copy_third_party_objects(
         // The sanitizers are only copied in stage1 or above,
         // to avoid creating dependency on LLVM.
         target_deps.extend(
-            copy_sanitizers(builder, &compiler, target)
+            copy_sanitizers(builder, compiler, target)
                 .into_iter()
                 .map(|d| (d, DependencyType::Target)),
         );

@@ -487,7 +487,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car

         // for no-std targets we only compile a few no_std crates
         cargo
-            .args(&["-p", "alloc"])
+            .args(["-p", "alloc"])
             .arg("--manifest-path")
             .arg(builder.src.join("library/alloc/Cargo.toml"))
             .arg("--features")

@@ -626,20 +626,20 @@ impl Step for StdLink {
             .build
             .config
             .initial_rustc
-            .starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin"))
+            .starts_with(builder.out.join(compiler.host.triple).join("stage0/bin"))
         {
             // Copy bin files from stage0/bin to stage0-sysroot/bin
-            let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot");
+            let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot");

             let host = compiler.host.triple;
-            let stage0_bin_dir = builder.out.join(&host).join("stage0/bin");
+            let stage0_bin_dir = builder.out.join(host).join("stage0/bin");
             let sysroot_bin_dir = sysroot.join("bin");
             t!(fs::create_dir_all(&sysroot_bin_dir));
             builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir);

             // Copy all *.so files from stage0/lib to stage0-sysroot/lib
-            let stage0_lib_dir = builder.out.join(&host).join("stage0/lib");
-            if let Ok(files) = fs::read_dir(&stage0_lib_dir) {
+            let stage0_lib_dir = builder.out.join(host).join("stage0/lib");
+            if let Ok(files) = fs::read_dir(stage0_lib_dir) {
                 for file in files {
                     let file = t!(file);
                     let path = file.path();

@@ -654,9 +654,9 @@ impl Step for StdLink {
             t!(fs::create_dir_all(&sysroot_codegen_backends));
             let stage0_codegen_backends = builder
                 .out
-                .join(&host)
+                .join(host)
                 .join("stage0/lib/rustlib")
-                .join(&host)
+                .join(host)
                 .join("codegen-backends");
             if stage0_codegen_backends.exists() {
                 builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends);

@@ -1179,7 +1179,7 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
     // The config can also specify its own llvm linker flags.
     if let Some(ref s) = builder.config.llvm_ldflags {
         if !llvm_linker_flags.is_empty() {
-            llvm_linker_flags.push_str(" ");
+            llvm_linker_flags.push(' ');
         }
         llvm_linker_flags.push_str(s);
     }

@@ -1270,7 +1270,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
     for path_set in &run.paths {
         needs_codegen_cfg = match path_set {
             PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)),
-            PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run),
+            PathSet::Suite(suite) => is_codegen_cfg_needed(suite, run),
         }
     }
     needs_codegen_cfg

@@ -1279,7 +1279,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
 pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_";

 fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
-    if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) {
+    if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) {
         let mut needs_codegen_backend_config = true;
         for &backend in run.builder.config.codegen_backends(run.target) {
             if path

@@ -1300,7 +1300,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
         }
     }

-    return false;
+    false
 }

 impl Step for CodegenBackend {

@@ -1393,7 +1393,7 @@ impl Step for CodegenBackend {
         }
         let stamp = codegen_backend_stamp(builder, compiler, target, backend);
         let codegen_backend = codegen_backend.to_str().unwrap();
-        t!(fs::write(&stamp, &codegen_backend));
+        t!(fs::write(stamp, codegen_backend));
     }
 }

@@ -1441,7 +1441,7 @@ fn copy_codegen_backends_to_sysroot(
             let dot = filename.find('.').unwrap();
             format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..])
         };
-        builder.copy(&file, &dst.join(target_filename));
+        builder.copy(file, &dst.join(target_filename));
     }
 }

@@ -1519,7 +1519,7 @@ impl Step for Sysroot {
     /// 1-3.
     fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
         let compiler = self.compiler;
-        let host_dir = builder.out.join(&compiler.host.triple);
+        let host_dir = builder.out.join(compiler.host.triple);

         let sysroot_dir = |stage| {
             if stage == 0 {

@@ -1578,7 +1578,7 @@ impl Step for Sysroot {
         let mut add_filtered_files = |suffix, contents| {
             for path in contents {
                 let path = Path::new(&path);
-                if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
+                if path.parent().map_or(false, |parent| parent.ends_with(suffix)) {
                     filtered_files.push(path.file_name().unwrap().to_owned());
                 }
             }

@@ -1802,7 +1802,7 @@ impl Step for Assemble {
         if let Some(lld_install) = lld_install {
             let src_exe = exe("lld", target_compiler.host);
             let dst_exe = exe("rust-lld", target_compiler.host);
-            builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe));
+            builder.copy(&lld_install.join("bin").join(src_exe), &libdir_bin.join(dst_exe));
             let self_contained_lld_dir = libdir_bin.join("gcc-ld");
             t!(fs::create_dir_all(&self_contained_lld_dir));
             let lld_wrapper_exe = builder.ensure(crate::core::build_steps::tool::LldWrapper {

@@ -1850,7 +1850,7 @@ impl Step for Assemble {
         let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host);
         let rustc = out_dir.join(exe("rustc-main", host));
         let bindir = sysroot.join("bin");
-        t!(fs::create_dir_all(&bindir));
+        t!(fs::create_dir_all(bindir));
         let compiler = builder.rustc(target_compiler);
         builder.copy(&rustc, &compiler);

@@ -1869,9 +1869,9 @@ pub fn add_to_sysroot(
     stamp: &Path,
 ) {
     let self_contained_dst = &sysroot_dst.join("self-contained");
-    t!(fs::create_dir_all(&sysroot_dst));
-    t!(fs::create_dir_all(&sysroot_host_dst));
-    t!(fs::create_dir_all(&self_contained_dst));
+    t!(fs::create_dir_all(sysroot_dst));
+    t!(fs::create_dir_all(sysroot_host_dst));
+    t!(fs::create_dir_all(self_contained_dst));
     for (path, dependency_type) in builder.read_stamp_file(stamp) {
         let dst = match dependency_type {
             DependencyType::Host => sysroot_host_dst,

@@ -2009,14 +2009,14 @@ pub fn run_cargo(
             .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata())))
             .collect::<Vec<_>>();
         for (prefix, extension, expected_len) in toplevel {
-            let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| {
+            let candidates = contents.iter().filter(|&(_, filename, meta)| {
                 meta.len() == expected_len
                     && filename
                         .strip_prefix(&prefix[..])
                         .map(|s| s.starts_with('-') && s.ends_with(&extension[..]))
                         .unwrap_or(false)
             });
-            let max = candidates.max_by_key(|&&(_, _, ref metadata)| {
+            let max = candidates.max_by_key(|&(_, _, metadata)| {
                 metadata.modified().expect("mtime should be available on all relevant OSes")
             });
             let path_to_add = match max {

@@ -2045,7 +2045,7 @@ pub fn run_cargo(
             new_contents.extend(dep.to_str().unwrap().as_bytes());
             new_contents.extend(b"\0");
         }
-        t!(fs::write(&stamp, &new_contents));
+        t!(fs::write(stamp, &new_contents));
         deps.into_iter().map(|(d, _)| d).collect()
     }

@ -78,7 +78,7 @@ impl Step for Docs {
|
||||||
let mut tarball = Tarball::new(builder, "rust-docs", &host.triple);
|
let mut tarball = Tarball::new(builder, "rust-docs", &host.triple);
|
||||||
tarball.set_product_name("Rust Documentation");
|
tarball.set_product_name("Rust Documentation");
|
||||||
tarball.add_bulk_dir(&builder.doc_out(host), dest);
|
tarball.add_bulk_dir(&builder.doc_out(host), dest);
|
||||||
tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644);
|
tarball.add_file(builder.src.join("src/doc/robots.txt"), dest, 0o644);
|
||||||
Some(tarball.generate())
|
Some(tarball.generate())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -342,7 +342,7 @@ impl Step for Mingw {
|
||||||
// thrown away (this contains the runtime DLLs included in the rustc package
|
// thrown away (this contains the runtime DLLs included in the rustc package
|
||||||
// above) and the second argument is where to place all the MinGW components
|
// above) and the second argument is where to place all the MinGW components
|
||||||
// (which is what we want).
|
// (which is what we want).
|
||||||
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder);
|
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, builder);
|
||||||
|
|
||||||
Some(tarball.generate())
|
Some(tarball.generate())
|
||||||
}
|
}
|
||||||
|
@ -658,7 +658,7 @@ impl Step for Std {
|
||||||
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
|
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
|
||||||
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
|
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
|
||||||
verify_uefi_rlib_format(builder, target, &stamp);
|
verify_uefi_rlib_format(builder, target, &stamp);
|
||||||
copy_target_libs(builder, target, &tarball.image_dir(), &stamp);
|
copy_target_libs(builder, target, tarball.image_dir(), &stamp);
|
||||||
|
|
||||||
Some(tarball.generate())
|
Some(tarball.generate())
|
||||||
}
|
}
|
||||||
|
@ -734,7 +734,7 @@ impl Step for Analysis {
|
||||||
const DEFAULT: bool = true;
|
const DEFAULT: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "analysis");
|
let default = should_build_extended_tool(run.builder, "analysis");
|
||||||
run.alias("rust-analysis").default_condition(default)
|
run.alias("rust-analysis").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -890,7 +890,7 @@ impl Step for Src {
|
||||||
/// Creates the `rust-src` installer component
|
/// Creates the `rust-src` installer component
|
||||||
fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
|
fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
|
||||||
if !builder.config.dry_run() {
|
if !builder.config.dry_run() {
|
||||||
builder.update_submodule(&Path::new("src/llvm-project"));
|
builder.update_submodule(Path::new("src/llvm-project"));
|
||||||
}
|
}
|
||||||
|
|
||||||
let tarball = Tarball::new_targetless(builder, "rust-src");
|
let tarball = Tarball::new_targetless(builder, "rust-src");
|
||||||
|
@ -976,7 +976,7 @@ impl Step for PlainSourceTarball {
|
||||||
];
|
];
|
||||||
let src_dirs = ["src", "compiler", "library", "tests"];
|
let src_dirs = ["src", "compiler", "library", "tests"];
|
||||||
|
|
||||||
copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src);
|
copy_src_dirs(builder, &builder.src, &src_dirs, &[], plain_dst_src);
|
||||||
|
|
||||||
// Copy the files normally
|
// Copy the files normally
|
||||||
for item in &src_files {
|
for item in &src_files {
|
||||||
|
@ -986,8 +986,8 @@ impl Step for PlainSourceTarball {
|
||||||
// Create the version file
|
// Create the version file
|
||||||
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
|
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
|
||||||
if let Some(info) = builder.rust_info().info() {
|
if let Some(info) = builder.rust_info().info() {
|
||||||
channel::write_commit_hash_file(&plain_dst_src, &info.sha);
|
channel::write_commit_hash_file(plain_dst_src, &info.sha);
|
||||||
channel::write_commit_info_file(&plain_dst_src, info);
|
channel::write_commit_info_file(plain_dst_src, info);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we're building from git or tarball sources, we need to vendor
|
// If we're building from git or tarball sources, we need to vendor
|
||||||
|
@ -1014,7 +1014,7 @@ impl Step for PlainSourceTarball {
|
||||||
// Will read the libstd Cargo.toml
|
// Will read the libstd Cargo.toml
|
||||||
// which uses the unstable `public-dependency` feature.
|
// which uses the unstable `public-dependency` feature.
|
||||||
.env("RUSTC_BOOTSTRAP", "1")
|
.env("RUSTC_BOOTSTRAP", "1")
|
||||||
.current_dir(&plain_dst_src);
|
.current_dir(plain_dst_src);
|
||||||
|
|
||||||
let config = if !builder.config.dry_run() {
|
let config = if !builder.config.dry_run() {
|
||||||
t!(String::from_utf8(t!(cmd.output()).stdout))
|
t!(String::from_utf8(t!(cmd.output()).stdout))
|
||||||
|
@ -1043,7 +1043,7 @@ impl Step for Cargo {
|
||||||
const ONLY_HOSTS: bool = true;
|
const ONLY_HOSTS: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "cargo");
|
let default = should_build_extended_tool(run.builder, "cargo");
|
||||||
run.alias("cargo").default_condition(default)
|
run.alias("cargo").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1070,7 +1070,7 @@ impl Step for Cargo {
|
||||||
let mut tarball = Tarball::new(builder, "cargo", &target.triple);
|
let mut tarball = Tarball::new(builder, "cargo", &target.triple);
|
||||||
tarball.set_overlay(OverlayKind::Cargo);
|
tarball.set_overlay(OverlayKind::Cargo);
|
||||||
|
|
||||||
tarball.add_file(&cargo, "bin", 0o755);
|
tarball.add_file(cargo, "bin", 0o755);
|
||||||
tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644);
|
tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644);
|
||||||
tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo");
|
tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo");
|
||||||
tarball.add_dir(etc.join("man"), "share/man/man1");
|
tarball.add_dir(etc.join("man"), "share/man/man1");
|
||||||
|
@ -1092,7 +1092,7 @@ impl Step for Rls {
|
||||||
const DEFAULT: bool = true;
|
const DEFAULT: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "rls");
|
let default = should_build_extended_tool(run.builder, "rls");
|
||||||
run.alias("rls").default_condition(default)
|
run.alias("rls").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1134,7 +1134,7 @@ impl Step for RustAnalyzer {
|
||||||
const ONLY_HOSTS: bool = true;
|
const ONLY_HOSTS: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "rust-analyzer");
|
let default = should_build_extended_tool(run.builder, "rust-analyzer");
|
||||||
run.alias("rust-analyzer").default_condition(default)
|
run.alias("rust-analyzer").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1176,7 +1176,7 @@ impl Step for Clippy {
|
||||||
const ONLY_HOSTS: bool = true;
|
const ONLY_HOSTS: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "clippy");
|
let default = should_build_extended_tool(run.builder, "clippy");
|
||||||
run.alias("clippy").default_condition(default)
|
run.alias("clippy").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1224,7 +1224,7 @@ impl Step for Miri {
|
||||||
const ONLY_HOSTS: bool = true;
|
const ONLY_HOSTS: bool = true;
|
||||||
|
|
||||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||||
let default = should_build_extended_tool(&run.builder, "miri");
|
let default = should_build_extended_tool(run.builder, "miri");
|
||||||
run.alias("miri").default_condition(default)
|
run.alias("miri").default_condition(default)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1337,12 +1337,12 @@ impl Step for CodegenBackend {
|
||||||
let src = builder.sysroot(compiler);
|
let src = builder.sysroot(compiler);
|
||||||
let backends_src = builder.sysroot_codegen_backends(compiler);
|
let backends_src = builder.sysroot_codegen_backends(compiler);
|
||||||
let backends_rel = backends_src
|
let backends_rel = backends_src
|
||||||
.strip_prefix(&src)
|
.strip_prefix(src)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.strip_prefix(builder.sysroot_libdir_relative(compiler))
|
.strip_prefix(builder.sysroot_libdir_relative(compiler))
|
||||||
.unwrap();
|
.unwrap();
|
||||||
// Don't use custom libdir here because ^lib/ will be resolved again with installer
|
// Don't use custom libdir here because ^lib/ will be resolved again with installer
|
||||||
let backends_dst = PathBuf::from("lib").join(&backends_rel);
|
let backends_dst = PathBuf::from("lib").join(backends_rel);
|
||||||
|
|
||||||
let backend_name = format!("rustc_codegen_{}", backend);
|
let backend_name = format!("rustc_codegen_{}", backend);
|
||||||
let mut found_backend = false;
|
let mut found_backend = false;
|
||||||
@@ -1371,7 +1371,7 @@ impl Step for Rustfmt {
 const ONLY_HOSTS: bool = true;

 fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-let default = should_build_extended_tool(&run.builder, "rustfmt");
+let default = should_build_extended_tool(run.builder, "rustfmt");
 run.alias("rustfmt").default_condition(default)
 }

@@ -1454,7 +1454,7 @@ impl Step for RustDemangler {
 let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple);
 tarball.set_overlay(OverlayKind::RustDemangler);
 tarball.is_preview(true);
-tarball.add_file(&rust_demangler, "bin", 0o755);
+tarball.add_file(rust_demangler, "bin", 0o755);
 tarball.add_legal_and_readme_to("share/doc/rust-demangler");
 Some(tarball.generate())
 }
@@ -1609,7 +1609,7 @@ impl Step for Extended {
 let prepare = |name: &str| {
 builder.create_dir(&pkg.join(name));
 builder.cp_r(
-&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)),
+&work.join(format!("{}-{}", pkgname(builder, name), target.triple)),
 &pkg.join(name),
 );
 builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
@@ -1673,7 +1673,7 @@ impl Step for Extended {
 name.to_string()
 };
 builder.cp_r(
-&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
+&work.join(format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
 &exe.join(name),
 );
 builder.remove(&exe.join(name).join("manifest.in"));
@@ -1707,7 +1707,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rustc")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("RustcGroup")
 .arg("-dr")
@@ -1723,7 +1723,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-docs")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("DocsGroup")
 .arg("-dr")
@@ -1741,7 +1741,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("cargo")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("CargoGroup")
 .arg("-dr")
@@ -1758,7 +1758,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-std")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("StdGroup")
 .arg("-dr")
@@ -1774,7 +1774,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-analyzer")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("RustAnalyzerGroup")
 .arg("-dr")
@@ -1793,7 +1793,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("clippy")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("ClippyGroup")
 .arg("-dr")
@@ -1812,7 +1812,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-demangler")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("RustDemanglerGroup")
 .arg("-dr")
@@ -1831,7 +1831,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("miri")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("MiriGroup")
 .arg("-dr")
@@ -1849,7 +1849,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-analysis")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("AnalysisGroup")
 .arg("-dr")
@@ -1867,7 +1867,7 @@ impl Step for Extended {
 .current_dir(&exe)
 .arg("dir")
 .arg("rust-mingw")
-.args(&heat_flags)
+.args(heat_flags)
 .arg("-cg")
 .arg("GccGroup")
 .arg("-dr")
@@ -1890,10 +1890,10 @@ impl Step for Extended {
 .arg("-dStdDir=rust-std")
 .arg("-dAnalysisDir=rust-analysis")
 .arg("-arch")
-.arg(&arch)
+.arg(arch)
 .arg("-out")
 .arg(&output)
-.arg(&input);
+.arg(input);
 add_env(builder, &mut cmd, target);

 if built_tools.contains("clippy") {
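The repeated `.args(&heat_flags)` → `.args(heat_flags)` edits in the hunks above work because `Command::args` accepts any `IntoIterator<Item: AsRef<OsStr>>`, so a slice or array can be passed without an extra borrow. A hedged sketch; the flag values below are placeholders, not the real `heat_flags`:

    use std::process::Command;

    fn main() {
        let heat_flags = ["-nologo", "-v"]; // placeholder flags
        // An array already implements `IntoIterator<Item = &str>`, and `&str`
        // implements `AsRef<OsStr>`, so no `&` is needed on `heat_flags`.
        let mut cmd = Command::new("echo");
        cmd.args(heat_flags);
        println!("{cmd:?}");
    }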
@@ -2026,7 +2026,7 @@ fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) {
 return;
 }

-builder.install(&source, destination, 0o644);
+builder.install(source, destination, 0o644);
 }

 /// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking.
@@ -2123,7 +2123,7 @@ impl Step for LlvmTools {
 const DEFAULT: bool = true;

 fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-let default = should_build_extended_tool(&run.builder, "llvm-tools");
+let default = should_build_extended_tool(run.builder, "llvm-tools");
 // FIXME: allow using the names of the tools themselves?
 run.alias("llvm-tools").default_condition(default)
 }
@@ -2231,12 +2231,12 @@ impl Step for RustDev {
 tarball.add_file(lld_path, "bin", 0o755);
 }

-tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755);
+tarball.add_file(builder.llvm_filecheck(target), "bin", 0o755);

 // Copy the include directory as well; needed mostly to build
 // librustc_llvm properly (e.g., llvm-config.h is in here). But also
 // just broadly useful to be able to link against the bundled LLVM.
-tarball.add_dir(&builder.llvm_out(target).join("include"), "include");
+tarball.add_dir(builder.llvm_out(target).join("include"), "include");

 // Copy libLLVM.so to the target lib dir as well, so the RPATH like
 // `$ORIGIN/../lib` can find it. It may also be used as a dependency
@@ -2312,7 +2312,7 @@ impl Step for BuildManifest {
 let build_manifest = builder.tool_exe(Tool::BuildManifest);

 let tarball = Tarball::new(builder, "build-manifest", &self.target.triple);
-tarball.add_file(&build_manifest, "bin", 0o755);
+tarball.add_file(build_manifest, "bin", 0o755);
 tarball.generate()
 }
 }
@@ -151,7 +151,7 @@ impl<P: Step> Step for RustbookSrc<P> {
 builder.info(&format!("Rustbook ({target}) - {name}"));
 let _ = fs::remove_dir_all(&out);

-builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out));
+builder.run(rustbook_cmd.arg("build").arg(src).arg("-d").arg(out));
 }

 if self.parent.is_some() {
@@ -384,7 +384,7 @@ impl Step for Standalone {
 // with no particular explicit doc requested (e.g. library/core).
 if builder.paths.is_empty() || builder.was_invoked_explicitly::<Self>(Kind::Doc) {
 let index = out.join("index.html");
-builder.open_in_browser(&index);
+builder.open_in_browser(index);
 }
 }
 }
@@ -517,7 +517,7 @@ impl Step for SharedAssets {
 .replace("VERSION", &builder.rust_release())
 .replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or(""))
 .replace("STAMP", builder.rust_info().sha().unwrap_or(""));
-t!(fs::write(&version_info, &info));
+t!(fs::write(&version_info, info));
 }

 builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css"));
@@ -714,11 +714,11 @@ fn doc_std(
 }

 let description =
-format!("library{} in {} format", crate_description(&requested_crates), format.as_str());
+format!("library{} in {} format", crate_description(requested_crates), format.as_str());
-let _guard = builder.msg_doc(compiler, &description, target);
+let _guard = builder.msg_doc(compiler, description, target);

 builder.run(&mut cargo.into());
-builder.cp_r(&out_dir, &out);
+builder.cp_r(&out_dir, out);
 }

 #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@@ -781,7 +781,7 @@ impl Step for Rustc {
 let _guard = builder.msg_sysroot_tool(
 Kind::Doc,
 stage,
-&format!("compiler{}", crate_description(&self.crates)),
+format!("compiler{}", crate_description(&self.crates)),
 compiler.host,
 target,
 );
@@ -819,7 +819,7 @@ impl Step for Rustc {
 // Create all crate output directories first to make sure rustdoc uses
 // relative links.
 // FIXME: Cargo should probably do this itself.
-let dir_name = krate.replace("-", "_");
+let dir_name = krate.replace('-', "_");
 t!(fs::create_dir_all(out_dir.join(&*dir_name)));
 cargo.arg("-p").arg(krate);
 if to_open.is_none() {
@@ -844,7 +844,7 @@ impl Step for Rustc {
 if !builder.config.dry_run() {
 // Sanity check on linked compiler crates
 for krate in &*self.crates {
-let dir_name = krate.replace("-", "_");
+let dir_name = krate.replace('-', "_");
 // Making sure the directory exists and is not empty.
 assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some());
 }
@@ -1160,7 +1160,7 @@ impl Step for RustcBook {
 cmd.arg(&out_listing);
 cmd.arg("--rustc");
 cmd.arg(&rustc);
-cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg());
+cmd.arg("--rustc-target").arg(self.target.rustc_target_arg());
 if builder.is_verbose() {
 cmd.arg("--verbose");
 }
@@ -11,7 +11,7 @@ use std::process::{Command, Stdio};
 use std::sync::mpsc::SyncSender;

 fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool {
-let mut cmd = Command::new(&rustfmt);
+let mut cmd = Command::new(rustfmt);
 // avoid the submodule config paths from coming into play,
 // we only allow a single global config for the workspace for now
 cmd.arg("--config-path").arg(&src.canonicalize().unwrap());
@@ -162,7 +162,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
 // against anything like `compiler/rustc_foo/src/foo.rs`,
 // preventing the latter from being formatted.
 untracked_count += 1;
-fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path);
+fmt_override.add(&format!("!/{untracked_path}")).expect(untracked_path);
 }
 // Only check modified files locally to speed up runtime.
 // We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through;
@@ -221,7 +221,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
 assert!(rustfmt_path.exists(), "{}", rustfmt_path.display());
 let src = build.src.clone();
 let (tx, rx): (SyncSender<PathBuf>, _) = std::sync::mpsc::sync_channel(128);
-let walker = match paths.get(0) {
+let walker = match paths.first() {
 Some(first) => {
 let find_shortcut_candidates = |p: &PathBuf| {
 let mut candidates = Vec::new();
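The `paths.get(0)` → `paths.first()` change just above is clippy's `get_first` suggestion; the two calls are equivalent and `first()` simply states the intent. Tiny illustration:

    fn main() {
        let paths = ["compiler", "library"];
        // `first()` and `get(0)` both return `Option<&T>`, for empty and
        // non-empty slices alike; they are interchangeable.
        assert_eq!(paths.first(), paths.get(0));
    }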
@@ -24,7 +24,7 @@ const SHELL: &str = "sh";
 // We have to run a few shell scripts, which choke quite a bit on both `\`
 // characters and on `C:\` paths, so normalize both of them away.
 fn sanitize_sh(path: &Path) -> String {
-let path = path.to_str().unwrap().replace("\\", "/");
+let path = path.to_str().unwrap().replace('\\', "/");
 return change_drive(unc_to_lfs(&path)).unwrap_or(path);

 fn unc_to_lfs(s: &str) -> &str {
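`replace("\\", "/")` → `replace('\\', "/")` here, like the later `split(',')` and `find('{')` edits, swaps a one-character `&str` pattern for a `char` pattern, which clippy's `single_char_pattern` prefers and which skips the substring-search path. Sketch:

    fn main() {
        let path = r"C:\rust\build";
        // A `char` pattern and a one-byte `&str` pattern give identical
        // results; the `char` form is the one clippy suggests.
        assert_eq!(path.replace('\\', "/"), path.replace("\\", "/"));
    }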
@@ -44,7 +44,7 @@ fn sanitize_sh(path: &Path) -> String {
 }
 }

-fn is_dir_writable_for_user(dir: &PathBuf) -> bool {
+fn is_dir_writable_for_user(dir: &Path) -> bool {
 let tmp = dir.join(".tmp");
 match fs::create_dir_all(&tmp) {
 Ok(_) => {
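Changing the parameter from `&PathBuf` to `&Path` is clippy's `ptr_arg` fix: a `&Path` accepts both borrowed and owned paths and promises less about the caller's storage. A small sketch with a hypothetical helper, not code from the patch:

    use std::path::{Path, PathBuf};

    // Hypothetical helper: `&Path` instead of `&PathBuf` keeps the signature
    // flexible without any extra cost.
    fn describe(dir: &Path) -> String {
        format!("checking {}", dir.display())
    }

    fn main() {
        let owned = PathBuf::from("/tmp");
        println!("{}", describe(&owned)); // &PathBuf coerces to &Path
        println!("{}", describe(Path::new("/var")));
    }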
@@ -110,7 +110,7 @@ pub fn prebuilt_llvm_config(
 let smart_stamp_hash = STAMP_HASH_MEMO.get_or_init(|| {
 generate_smart_stamp_hash(
 &builder.config.src.join("src/llvm-project"),
-&builder.in_tree_llvm_info.sha().unwrap_or_default(),
+builder.in_tree_llvm_info.sha().unwrap_or_default(),
 )
 });

@@ -289,7 +289,7 @@ impl Step for Llvm {

 let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target);
 t!(stamp.remove());
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 t!(fs::create_dir_all(&out_dir));

 // https://llvm.org/docs/CMake.html
@@ -355,7 +355,7 @@ impl Step for Llvm {
 cfg.define("LLVM_BUILD_RUNTIME", "No");
 }
 if let Some(path) = builder.config.llvm_profile_use.as_ref() {
-cfg.define("LLVM_PROFDATA_FILE", &path);
+cfg.define("LLVM_PROFDATA_FILE", path);
 }

 // Disable zstd to avoid a dependency on libzstd.so.
@@ -643,7 +643,7 @@ fn configure_cmake(

 let sanitize_cc = |cc: &Path| {
 if target.is_msvc() {
-OsString::from(cc.to_str().unwrap().replace("\\", "/"))
+OsString::from(cc.to_str().unwrap().replace('\\', "/"))
 } else {
 cc.as_os_str().to_owned()
 }
@@ -808,10 +808,10 @@ fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmak
 // Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365
 fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
 let kind = if host == target { "HOST" } else { "TARGET" };
-let target_u = target.replace("-", "_");
+let target_u = target.replace('-', "_");
-env::var_os(&format!("{var_base}_{target}"))
+env::var_os(format!("{var_base}_{target}"))
-.or_else(|| env::var_os(&format!("{}_{}", var_base, target_u)))
+.or_else(|| env::var_os(format!("{}_{}", var_base, target_u)))
-.or_else(|| env::var_os(&format!("{}_{}", kind, var_base)))
+.or_else(|| env::var_os(format!("{}_{}", kind, var_base)))
 .or_else(|| env::var_os(var_base))
 }

@@ -862,7 +862,7 @@ impl Step for Lld {
 }

 let _guard = builder.msg_unstaged(Kind::Build, "LLD", target);
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 t!(fs::create_dir_all(&out_dir));

 let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
@@ -986,7 +986,7 @@ impl Step for Sanitizers {

 let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target);
 t!(stamp.remove());
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);

 let mut cfg = cmake::Config::new(&compiler_rt_dir);
 cfg.profile("Release");
@@ -1051,7 +1051,7 @@ fn supported_sanitizers(
 .map(move |c| SanitizerRuntime {
 cmake_target: format!("clang_rt.{}_{}_dynamic", c, os),
 path: out_dir
-.join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
+.join(format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
 name: format!("librustc-{}_rt.{}.dylib", channel, c),
 })
 .collect()
@@ -1062,7 +1062,7 @@ fn supported_sanitizers(
 .iter()
 .map(move |c| SanitizerRuntime {
 cmake_target: format!("clang_rt.{}-{}", c, arch),
-path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
+path: out_dir.join(format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
 name: format!("librustc-{}_rt.{}.a", channel, c),
 })
 .collect()
@@ -1165,7 +1165,7 @@ impl Step for CrtBeginEnd {

 /// Build crtbegin.o/crtend.o for musl target.
 fn run(self, builder: &Builder<'_>) -> Self::Output {
-builder.update_submodule(&Path::new("src/llvm-project"));
+builder.update_submodule(Path::new("src/llvm-project"));

 let out_dir = builder.native_dir(self.target).join("crt");

@@ -1233,7 +1233,7 @@ impl Step for Libunwind {

 /// Build libunwind.a
 fn run(self, builder: &Builder<'_>) -> Self::Output {
-builder.update_submodule(&Path::new("src/llvm-project"));
+builder.update_submodule(Path::new("src/llvm-project"));

 if builder.config.dry_run() {
 return PathBuf::new();
@@ -23,7 +23,7 @@ impl Step for ExpandYamlAnchors {
 fn run(self, builder: &Builder<'_>) {
 builder.info("Expanding YAML anchors in the GitHub Actions configuration");
 builder.run_delaying_failure(
-&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
+builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
 );
 }

@@ -8,7 +8,7 @@ use std::env::consts::EXE_SUFFIX;
 use std::fmt::Write as _;
 use std::fs::File;
 use std::io::Write;
-use std::path::{Path, PathBuf, MAIN_SEPARATOR};
+use std::path::{Path, PathBuf, MAIN_SEPARATOR_STR};
 use std::process::Command;
 use std::str::FromStr;
 use std::{fmt, fs, io};
@@ -257,8 +257,7 @@ impl Step for Link {
 return;
 }
 let stage_path =
-["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
+["build", config.build.rustc_target_arg(), "stage1"].join(MAIN_SEPARATOR_STR);

 if !rustup_installed() {
 eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
 } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() {
@@ -276,7 +275,7 @@ fn rustup_installed() -> bool {
 }

 fn stage_dir_exists(stage_path: &str) -> bool {
-match fs::create_dir(&stage_path) {
+match fs::create_dir(stage_path) {
 Ok(_) => true,
 Err(_) => Path::new(&stage_path).exists(),
 }
@@ -294,7 +293,7 @@ fn attempt_toolchain_link(stage_path: &str) {
 return;
 }

-if try_link_toolchain(&stage_path) {
+if try_link_toolchain(stage_path) {
 println!(
 "Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain"
 );
@@ -310,7 +309,7 @@ fn attempt_toolchain_link(stage_path: &str) {

 fn toolchain_is_linked() -> bool {
 match Command::new("rustup")
-.args(&["toolchain", "list"])
+.args(["toolchain", "list"])
 .stdout(std::process::Stdio::piped())
 .output()
 {
@@ -337,7 +336,7 @@ fn toolchain_is_linked() -> bool {
 fn try_link_toolchain(stage_path: &str) -> bool {
 Command::new("rustup")
 .stdout(std::process::Stdio::null())
-.args(&["toolchain", "link", "stage1", &stage_path])
+.args(["toolchain", "link", "stage1", stage_path])
 .output()
 .map_or(false, |output| output.status.success())
 }
@@ -366,7 +365,7 @@ fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool {
 return false;
 }

-return true;
+true
 }

 // Used to get the path for `Subcommand::Setup`
@@ -469,13 +468,13 @@ impl Step for Hook {
 if config.dry_run() {
 return;
 }
-t!(install_git_hook_maybe(&config));
+t!(install_git_hook_maybe(config));
 }
 }

 // install a git hook to automatically run tidy, if they want
 fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
-let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
+let git = t!(config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| {
 assert!(output.status.success(), "failed to run `git`");
 PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
 }));
@@ -541,7 +540,7 @@ impl Step for Vscode {
 if config.dry_run() {
 return;
 }
-while !t!(create_vscode_settings_maybe(&config)) {}
+while !t!(create_vscode_settings_maybe(config)) {}
 }
 }

@@ -608,7 +607,7 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<bool> {
 }
 _ => "Created",
 };
-fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?;
+fs::write(&vscode_settings, RUST_ANALYZER_SETTINGS)?;
 println!("{verb} `.vscode/settings.json`");
 } else {
 println!("\n{RUST_ANALYZER_SETTINGS}");
@@ -36,7 +36,7 @@ pub fn suggest(builder: &Builder<'_>, run: bool) {
 // this code expects one suggestion per line in the following format:
 // <x_subcommand> {some number of flags} [optional stage number]
 let cmd = sections.next().unwrap();
-let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten();
+let stage = sections.next_back().and_then(|s| str::parse(s).ok());
 let paths: Vec<PathBuf> = sections.map(|p| PathBuf::from_str(p).unwrap()).collect();

 (cmd, stage, paths)
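The `map(..).flatten()` → `and_then(..)` rewrite above is the `Option` form of clippy's `map_flatten`: on an `Option`, the two spellings are exactly equivalent. Sketch:

    fn main() {
        let sections = ["build", "1"];
        // `map(|s| s.parse().ok()).flatten()` and `and_then(|s| s.parse().ok())`
        // produce the same `Option`; `and_then` says it in one step.
        let stage: Option<u32> = sections.last().and_then(|s| s.parse().ok());
        assert_eq!(stage, Some(1));
    }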
@@ -79,7 +79,7 @@ fn create_synthetic_target(

 customize(spec_map);

-std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
+std::fs::write(&path, serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
 let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap());
 crate::utils::cc_detect::find_target(builder, target);

@@ -156,7 +156,7 @@ You can skip linkcheck with --skip src/tools/linkchecker"
 // Run the linkchecker.
 let _guard =
 builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host);
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc")));
 }

@@ -253,15 +253,15 @@ impl Step for Cargotest {
 let out_dir = builder.out.join("ct");
 t!(fs::create_dir_all(&out_dir));

-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 let mut cmd = builder.tool_cmd(Tool::CargoTest);
-let mut cmd = cmd
+let cmd = cmd
 .arg(&cargo)
 .arg(&out_dir)
 .args(builder.config.test_args())
 .env("RUSTC", builder.rustc(compiler))
 .env("RUSTDOC", builder.rustdoc(compiler));
-add_rustdoc_cargo_linker_args(&mut cmd, builder, compiler.host, LldThreads::No);
+add_rustdoc_cargo_linker_args(cmd, builder, compiler.host, LldThreads::No);
 builder.run_delaying_failure(cmd);
 }
 }
@@ -322,7 +322,7 @@ impl Step for Cargo {
 builder,
 );

-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 add_flags_and_try_run_tests(builder, &mut cargo);
 }
 }
@@ -474,7 +474,7 @@ impl Step for RustDemangler {
 );

 let dir = testdir(builder, compiler.host);
-t!(fs::create_dir_all(&dir));
+t!(fs::create_dir_all(dir));

 cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler);
 cargo.add_rustc_lib_path(builder);
@@ -525,7 +525,7 @@ impl Miri {
 // Tell `cargo miri setup` where to find the sources.
 cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
 // Tell it where to find Miri.
-cargo.env("MIRI", &miri);
+cargo.env("MIRI", miri);
 // Tell it where to put the sysroot.
 cargo.env("MIRI_SYSROOT", &miri_sysroot);
 // Debug things.
@@ -637,7 +637,7 @@ impl Step for Miri {
 // does not understand the flags added by `add_flags_and_try_run_test`.
 let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
 {
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 builder.run(&mut cargo);
 }

@@ -649,11 +649,11 @@ impl Step for Miri {
 // `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible
 cargo.env_remove("RUSTC_BLESS");
 // Optimizations can change error locations and remove UB so don't run `fail` tests.
-cargo.args(&["tests/pass", "tests/panic"]);
+cargo.args(["tests/pass", "tests/panic"]);

 let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
 {
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 builder.run(&mut cargo);
 }
 }
@@ -693,7 +693,7 @@ impl Step for Miri {

 let mut cargo = Command::from(cargo);
 {
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 builder.run(&mut cargo);
 }
 }
@@ -946,7 +946,7 @@ impl Step for RustdocJSNotStd {
 }

 fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String> {
-let mut command = Command::new(&npm);
+let mut command = Command::new(npm);
 command.arg("list").arg("--parseable").arg("--long").arg("--depth=0");
 if global {
 command.arg("--global");
@@ -954,7 +954,7 @@ fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String>
 let lines = command
 .output()
 .map(|output| String::from_utf8_lossy(&output.stdout).into_owned())
-.unwrap_or(String::new());
+.unwrap_or_default();
 lines
 .lines()
 .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
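`unwrap_or(String::new())` → `unwrap_or_default()` (clippy's `unwrap_or_default`) drops the explicitly constructed fallback; the default is only built when it is actually needed. Sketch:

    fn main() {
        let output: Result<String, ()> = Err(());
        // `unwrap_or_default()` falls back to `String::default()` (an empty
        // string) without spelling out `String::new()` at the call site.
        let lines = output.unwrap_or_default();
        assert!(lines.is_empty());
    }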
@@ -1048,7 +1048,7 @@ impl Step for RustdocGUI {
 cmd.arg("--npm").arg(npm);
 }

-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 let _guard = builder.msg_sysroot_tool(
 Kind::Test,
 self.compiler.stage,
@@ -1096,7 +1096,7 @@ impl Step for Tidy {
 cmd.arg(format!("--extra-checks={s}"));
 }
 let mut args = std::env::args_os();
-if let Some(_) = args.find(|arg| arg == OsStr::new("--")) {
+if args.any(|arg| arg == OsStr::new("--")) {
 cmd.arg("--");
 cmd.args(args);
 }
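`if let Some(_) = args.find(..)` → `args.any(..)` is the kind of rewrite clippy's `search_is_some` family produces: when only the yes/no answer is used, `any` is the direct form. Sketch:

    fn main() {
        let args = ["x", "--", "test"];
        // `any` answers "is there a match?" directly, instead of producing
        // the matching element with `find` and then discarding it.
        assert!(args.iter().any(|a| *a == "--"));
    }

Both `find` and `any` short-circuit just past the first match, so the follow-up `cmd.args(args)` still forwards only what comes after `--`.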
@@ -1116,7 +1116,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to
 );
 crate::exit!(1);
 }
-crate::core::build_steps::format::format(&builder, !builder.config.cmd.bless(), &[]);
+crate::core::build_steps::format::format(builder, !builder.config.cmd.bless(), &[]);
 }

 builder.info("tidy check");
@@ -1171,7 +1171,7 @@ impl Step for ExpandYamlAnchors {
 }
 builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded");
 builder.run_delaying_failure(
-&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
+builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
 );
 }

@@ -1759,7 +1759,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the

 for exclude in &builder.config.skip {
 cmd.arg("--skip");
-cmd.arg(&exclude);
+cmd.arg(exclude);
 }

 // Get paths from cmd args
@@ -1780,7 +1780,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
 // so the correct filters are passed to libtest
 if cfg!(windows) {
 let test_args_win: Vec<String> =
-test_args.iter().map(|s| s.replace("/", "\\")).collect();
+test_args.iter().map(|s| s.replace('/', "\\")).collect();
 cmd.args(&test_args_win);
 } else {
 cmd.args(&test_args);
@@ -1900,7 +1900,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
 // Note that if we encounter `PATH` we make sure to append to our own `PATH`
 // rather than stomp over it.
 if !builder.config.dry_run() && target.is_msvc() {
-for &(ref k, ref v) in builder.cc.borrow()[&target].env() {
+for (k, v) in builder.cc.borrow()[&target].env() {
 if k != "PATH" {
 cmd.env(k, v);
 }
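`for &(ref k, ref v) in ..` → `for (k, v) in ..` leans on match ergonomics (likely clippy's `needless_borrowed_reference`-style cleanup): binding a plain tuple pattern against an iterator of `&(K, V)` already yields references. Sketch:

    fn main() {
        let env = vec![("PATH".to_string(), "/usr/bin".to_string())];
        // Iterating `&env` yields `&(String, String)`; the `(k, v)` pattern
        // binds `k` and `v` as `&String` without any `ref` or explicit `&(..)`.
        for (k, v) in &env {
            println!("{k}={v}");
        }
    }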
@@ -1996,7 +1996,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
 let _group = builder.msg(
 Kind::Test,
 compiler.stage,
-&format!("compiletest suite={suite} mode={mode}"),
+format!("compiletest suite={suite} mode={mode}"),
 compiler.host,
 target,
 );
@@ -2022,7 +2022,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
 "Check compiletest suite={} mode={} compare_mode={} ({} -> {})",
 suite, mode, compare_mode, &compiler.host, target
 ));
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 try_run_tests(builder, &mut cmd, false);
 }
 }
@@ -2094,7 +2094,7 @@ impl BookTest {
 compiler.host,
 compiler.host,
 );
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) {
 ToolState::TestPass
 } else {
@@ -2111,12 +2111,12 @@ impl BookTest {
 builder.ensure(compile::Std::new(compiler, host));

 let _guard =
-builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host);
+builder.msg(Kind::Test, compiler.stage, format!("book {}", self.name), host, host);

 // Do a breadth-first traversal of the `src/doc` directory and just run
 // tests for all files that end in `*.md`
 let mut stack = vec![builder.src.join(self.path)];
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 let mut files = Vec::new();
 while let Some(p) = stack.pop() {
 if p.is_dir() {
@@ -2227,7 +2227,7 @@ impl Step for ErrorIndex {

 let guard =
 builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host);
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 builder.run_quiet(&mut tool);
 drop(guard);
 // The tests themselves need to link to std, so make sure it is
@@ -2315,11 +2315,8 @@ impl Step for CrateLibrustc {
 let builder = run.builder;
 let host = run.build_triple();
 let compiler = builder.compiler_for(builder.top_stage, host, host);
-let crates = run
-.paths
-.iter()
-.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
-.collect();
+let crates =
+run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();

 builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
 }
@@ -2351,7 +2348,7 @@ fn run_cargo_test<'a>(
 ) -> bool {
 let mut cargo =
 prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder);
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);
 let _group = description.into().and_then(|what| {
 builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target)
 });
@@ -2406,7 +2403,7 @@ fn prepare_cargo_test(
 if krate.has_lib {
 cargo.arg("--lib");
 }
-cargo.args(&["--bins", "--examples", "--tests", "--benches"]);
+cargo.args(["--bins", "--examples", "--tests", "--benches"]);
 }
 DocTests::Yes => {}
 }
@@ -2468,11 +2465,8 @@ impl Step for Crate {
 let builder = run.builder;
 let host = run.build_triple();
 let compiler = builder.compiler_for(builder.top_stage, host, host);
-let crates = run
-.paths
-.iter()
-.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
-.collect();
+let crates =
+run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();

 builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates });
 }
@@ -2844,11 +2838,11 @@ impl Step for Bootstrap {
 let compiler = builder.compiler(0, host);
 let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host);

-let mut check_bootstrap = Command::new(&builder.python());
+let mut check_bootstrap = Command::new(builder.python());
 check_bootstrap
 .args(["-m", "unittest", "bootstrap_test.py"])
 .env("BUILD_DIR", &builder.out)
-.env("BUILD_PLATFORM", &builder.build.build.triple)
+.env("BUILD_PLATFORM", builder.build.build.triple)
 .current_dir(builder.src.join("src/bootstrap/"));
 // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible.
 // Use `python -m unittest` manually if you want to pass arguments.
@@ -3171,7 +3165,7 @@ impl Step for CodegenCranelift {
 &compiler.host,
 target
 ));
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);

 // FIXME handle vendoring for source tarballs before removing the --skip-test below
 let download_dir = builder.out.join("cg_clif_download");
@@ -3300,7 +3294,7 @@ impl Step for CodegenGCC {
 &compiler.host,
 target
 ));
-let _time = helpers::timeit(&builder);
+let _time = helpers::timeit(builder);

 // FIXME: Uncomment the `prepare` command below once vendoring is implemented.
 /*
@@ -819,7 +819,7 @@ impl<'a> Builder<'a> {
 if compiler.host.is_msvc() {
 let curpaths = env::var_os("PATH").unwrap_or_default();
 let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
-for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() {
+for (k, v) in self.cc.borrow()[&compiler.host].env() {
 if k != "PATH" {
 continue;
 }
@@ -346,7 +346,7 @@ fn prepare_toolstate_config(token: &str) {

 let credential = format!("https://{token}:x-oauth-basic@github.com\n",);
 let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials");
-t!(fs::write(&git_credential_path, credential));
+t!(fs::write(git_credential_path, credential));
 }

 /// Reads the latest toolstate from the toolstate repo.
@@ -389,7 +389,7 @@ fn commit_toolstate_change(current_toolstate: &ToolstateData) {
 // Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo.
 // This does *not* change the "current toolstate"; that only happens post-landing
 // via `src/ci/docker/publish_toolstate.sh`.
-publish_test_results(&current_toolstate);
+publish_test_results(current_toolstate);

 // `git commit` failing means nothing to commit.
 let status = t!(Command::new("git")
@@ -290,7 +290,7 @@ impl PathSet {

 const PATH_REMAP: &[(&str, &str)] = &[("rust-analyzer-proc-macro-srv", "proc-macro-srv-cli")];

-fn remap_paths(paths: &mut Vec<&Path>) {
+fn remap_paths(paths: &mut [&Path]) {
 for path in paths.iter_mut() {
 for &(search, replace) in PATH_REMAP {
 if path.to_str() == Some(search) {
@@ -329,7 +329,7 @@ impl StepDescription {
 }

 fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool {
-if builder.config.skip.iter().any(|e| pathset.has(&e, builder.kind)) {
+if builder.config.skip.iter().any(|e| pathset.has(e, builder.kind)) {
 if !matches!(builder.config.dry_run, DryRun::SelfCheck) {
 println!("Skipping {pathset:?} because it is excluded");
 }
@@ -369,8 +369,7 @@ impl StepDescription {
 }

 // strip CurDir prefix if present
-let mut paths: Vec<_> =
-paths.into_iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect();
+let mut paths: Vec<_> = paths.iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect();

 remap_paths(&mut paths);

@@ -378,7 +377,7 @@ impl StepDescription {
 // (This is separate from the loop below to avoid having to handle multiple paths in `is_suite_path` somehow.)
 paths.retain(|path| {
 for (desc, should_run) in v.iter().zip(&should_runs) {
-if let Some(suite) = should_run.is_suite_path(&path) {
+if let Some(suite) = should_run.is_suite_path(path) {
 desc.maybe_run(builder, vec![suite.clone()]);
 return false;
 }
@@ -537,7 +536,7 @@ impl<'a> ShouldRun<'a> {
 .iter()
 .map(|p| {
 // assert only if `p` isn't submodule
-if submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_none() {
+if !submodules_paths.iter().any(|sm_p| p.contains(sm_p)) {
 assert!(
 self.builder.src.join(p).exists(),
 "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}",
@@ -1208,7 +1207,7 @@ impl<'a> Builder<'a> {
 }

 pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command {
-let mut cmd = Command::new(&self.bootstrap_out.join("rustdoc"));
+let mut cmd = Command::new(self.bootstrap_out.join("rustdoc"));
 cmd.env("RUSTC_STAGE", compiler.stage.to_string())
 .env("RUSTC_SYSROOT", self.sysroot(compiler))
 // Note that this is *not* the sysroot_libdir because rustdoc must be linked
@@ -1351,7 +1350,7 @@ impl<'a> Builder<'a> {

 // See comment in rustc_llvm/build.rs for why this is necessary, largely llvm-config
 // needs to not accidentally link to libLLVM in stage0/lib.
-cargo.env("REAL_LIBRARY_PATH_VAR", &helpers::dylib_path_var());
+cargo.env("REAL_LIBRARY_PATH_VAR", helpers::dylib_path_var());
 if let Some(e) = env::var_os(helpers::dylib_path_var()) {
 cargo.env("REAL_LIBRARY_PATH", e);
 }
@@ -1620,8 +1619,8 @@ impl<'a> Builder<'a> {
 .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
 .env("RUSTC_REAL", self.rustc(compiler))
 .env("RUSTC_STAGE", stage.to_string())
-.env("RUSTC_SYSROOT", &sysroot)
+.env("RUSTC_SYSROOT", sysroot)
-.env("RUSTC_LIBDIR", &libdir)
+.env("RUSTC_LIBDIR", libdir)
 .env("RUSTDOC", self.bootstrap_out.join("rustdoc"))
 .env(
 "RUSTDOC_REAL",
@@ -1754,7 +1753,7 @@ impl<'a> Builder<'a> {
 cargo.env("RUSTC_BOOTSTRAP", "1");

 if self.config.dump_bootstrap_shims {
-prepare_behaviour_dump_dir(&self.build);
+prepare_behaviour_dump_dir(self.build);

 cargo
 .env("DUMP_BOOTSTRAP_SHIMS", self.build.out.join("bootstrap-shims-dump"))
@@ -1793,7 +1792,7 @@ impl<'a> Builder<'a> {
 // platform-specific environment variable as a workaround.
 if mode == Mode::ToolRustc || mode == Mode::Codegen {
 if let Some(llvm_config) = self.llvm_config(target) {
-let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir"));
+let llvm_libdir = output(Command::new(llvm_config).arg("--libdir"));
 add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo);
 }
 }
@@ -2080,7 +2079,7 @@ impl<'a> Builder<'a> {

 if self.config.print_step_timings && !self.config.dry_run() {
 let step_string = format!("{step:?}");
-let brace_index = step_string.find("{").unwrap_or(0);
+let brace_index = step_string.find('{').unwrap_or(0);
 let type_string = type_name::<S>();
 println!(
 "[TIMING] {} {} -- {}.{:03}",
@@ -2429,7 +2428,7 @@ impl Cargo {
 _ => s.display().to_string(),
 }
 };
-let triple_underscored = target.triple.replace("-", "_");
+let triple_underscored = target.triple.replace('-', "_");
 let cc = ccacheify(&builder.cc(target));
 self.command.env(format!("CC_{triple_underscored}"), &cc);

@@ -468,7 +468,7 @@ pub struct TargetSelectionList(Vec<TargetSelection>);

 pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {
 Ok(TargetSelectionList(
-s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(),
+s.split(',').filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(),
 ))
 }

@@ -963,10 +963,10 @@ impl<'de> serde::de::Visitor<'de> for OptimizeVisitor {
 where
 E: serde::de::Error,
 {
-if ["s", "z"].iter().find(|x| **x == value).is_some() {
+if matches!(value, "s" | "z") {
 Ok(RustOptimize::String(value.to_string()))
 } else {
-Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom)
+Err(serde::de::Error::custom(format_optimize_error_msg(value)))
 }
 }

|
@ -977,7 +977,7 @@ impl<'de> serde::de::Visitor<'de> for OptimizeVisitor {
|
||||||
if matches!(value, 0..=3) {
|
if matches!(value, 0..=3) {
|
||||||
Ok(RustOptimize::Int(value as u8))
|
Ok(RustOptimize::Int(value as u8))
|
||||||
} else {
|
} else {
|
||||||
Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom)
|
Err(serde::de::Error::custom(format_optimize_error_msg(value)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
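The `visit_str` change above replaces a slice search with `matches!`. A self-contained sketch of the two forms, with invented function names:

```rust
fn accepts_old(value: &str) -> bool {
    // Old shape: linear search through a slice, then `is_some()`.
    ["s", "z"].iter().find(|x| **x == value).is_some()
}

fn accepts_new(value: &str) -> bool {
    // `matches!` expands to a `match` returning `bool`; no slice, no iterator.
    matches!(value, "s" | "z")
}

fn main() {
    for v in ["s", "z", "3"] {
        assert_eq!(accepts_old(v), accepts_new(v));
    }
}
```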
@@ -1144,41 +1144,44 @@ define_config! {

 impl Config {
     pub fn default_opts() -> Config {
-        let mut config = Config::default();
-        config.bypass_bootstrap_lock = false;
-        config.llvm_optimize = true;
-        config.ninja_in_file = true;
-        config.llvm_static_stdcpp = false;
-        config.backtrace = true;
-        config.rust_optimize = RustOptimize::Bool(true);
-        config.rust_optimize_tests = true;
-        config.submodules = None;
-        config.docs = true;
-        config.docs_minification = true;
-        config.rust_rpath = true;
-        config.rust_strip = false;
-        config.channel = "dev".to_string();
-        config.codegen_tests = true;
-        config.rust_dist_src = true;
-        config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")];
-        config.deny_warnings = true;
-        config.bindir = "bin".into();
-        config.dist_include_mingw_linker = true;
-        config.dist_compression_profile = "fast".into();
-        config.rustc_parallel = true;
+        Config {
+            bypass_bootstrap_lock: false,
+            llvm_optimize: true,
+            ninja_in_file: true,
+            llvm_static_stdcpp: false,
+            backtrace: true,
+            rust_optimize: RustOptimize::Bool(true),
+            rust_optimize_tests: true,
+            submodules: None,
+            docs: true,
+            docs_minification: true,
+            rust_rpath: true,
+            rust_strip: false,
+            channel: "dev".to_string(),
+            codegen_tests: true,
+            rust_dist_src: true,
+            rust_codegen_backends: vec![INTERNER.intern_str("llvm")],
+            deny_warnings: true,
+            bindir: "bin".into(),
+            dist_include_mingw_linker: true,
+            dist_compression_profile: "fast".into(),
+            rustc_parallel: true,

-        config.stdout_is_tty = std::io::stdout().is_terminal();
-        config.stderr_is_tty = std::io::stderr().is_terminal();
+            stdout_is_tty: std::io::stdout().is_terminal(),
+            stderr_is_tty: std::io::stderr().is_terminal(),

             // set by build.rs
-        config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
+            build: TargetSelection::from_user(env!("BUILD_TRIPLE")),

-        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        // Undo `src/bootstrap`
-        config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned();
-        config.out = PathBuf::from("build");
+            src: {
+                let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+                // Undo `src/bootstrap`
+                manifest_dir.parent().unwrap().parent().unwrap().to_owned()
+            },
+            out: PathBuf::from("build"),

-        config
+            ..Default::default()
+        }
     }
 }

 pub fn parse(args: &[String]) -> Config {
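The `default_opts` rewrite above builds the value with one struct literal plus `..Default::default()` instead of mutating a `Config::default()` field by field. A trimmed, hypothetical example of the same pattern (type and field names invented):

```rust
// `Opts` and its fields are invented, purely to illustrate the idiom.
#[derive(Default, Debug, PartialEq)]
struct Opts {
    docs: bool,
    channel: String,
    codegen_tests: bool,
    verbose: u8,
}

fn field_by_field() -> Opts {
    let mut opts = Opts::default();
    opts.docs = true;
    opts.channel = "dev".to_string();
    opts.codegen_tests = true;
    opts
}

fn struct_literal() -> Opts {
    Opts {
        docs: true,
        channel: "dev".to_string(),
        codegen_tests: true,
        // Every field not named above keeps its `Default` value.
        ..Default::default()
    }
}

fn main() {
    assert_eq!(field_by_field(), struct_literal());
}
```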
@@ -1204,7 +1207,7 @@ impl Config {
     }

     pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config {
-        let mut flags = Flags::parse(&args);
+        let mut flags = Flags::parse(args);
         let mut config = Config::default_opts();

         // Set flags.
@@ -1252,7 +1255,7 @@ impl Config {
             // Bootstrap is quite bad at handling /? in front of paths
             let src = match s.strip_prefix("\\\\?\\") {
                 Some(p) => PathBuf::from(p),
-                None => PathBuf::from(git_root),
+                None => git_root,
             };
             // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when,
             // for example, the build directory is inside of another unrelated git directory.
@@ -1278,7 +1281,7 @@ impl Config {
                 .to_path_buf();
         }

-        let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json")));
+        let stage0_json = t!(std::fs::read(config.src.join("src").join("stage0.json")));

         config.stage0_metadata = t!(serde_json::from_slice::<Stage0Metadata>(&stage0_json));

@@ -1324,8 +1327,7 @@ impl Config {
         let mut override_toml = TomlConfig::default();
         for option in flags.set.iter() {
             fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
-                toml::from_str(&option)
-                    .and_then(|table: toml::Value| TomlConfig::deserialize(table))
+                toml::from_str(option).and_then(|table: toml::Value| TomlConfig::deserialize(table))
             }

             let mut err = match get_table(option) {
@@ -1337,7 +1339,7 @@ impl Config {
             };
             // We want to be able to set string values without quotes,
             // like in `configure.py`. Try adding quotes around the right hand side
-            if let Some((key, value)) = option.split_once("=") {
+            if let Some((key, value)) = option.split_once('=') {
                 if !value.contains('"') {
                     match get_table(&format!(r#"{key}="{value}""#)) {
                         Ok(v) => {
@@ -1660,7 +1662,7 @@ impl Config {
             llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind"));

         if let Some(ref backends) = codegen_backends {
-            let available_backends = vec!["llvm", "cranelift", "gcc"];
+            let available_backends = ["llvm", "cranelift", "gcc"];

             config.rust_codegen_backends = backends.iter().map(|s| {
                 if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
@@ -1808,7 +1810,7 @@ impl Config {
             let mut target = Target::from_triple(&triple);

             if let Some(ref s) = cfg.llvm_config {
-                if config.download_rustc_commit.is_some() && triple == &*config.build.triple {
+                if config.download_rustc_commit.is_some() && triple == *config.build.triple {
                     panic!(
                         "setting llvm_config for the host is incompatible with download-rustc"
                     );
@@ -1847,7 +1849,7 @@ impl Config {
             target.rpath = cfg.rpath;

             if let Some(ref backends) = cfg.codegen_backends {
-                let available_backends = vec!["llvm", "cranelift", "gcc"];
+                let available_backends = ["llvm", "cranelift", "gcc"];

                 target.codegen_backends = Some(backends.iter().map(|s| {
                     if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
@@ -1874,7 +1876,7 @@ impl Config {
         let build_target = config
             .target_config
             .entry(config.build)
-            .or_insert_with(|| Target::from_triple(&triple));
+            .or_insert_with(|| Target::from_triple(triple));

         check_ci_llvm!(build_target.llvm_config);
         check_ci_llvm!(build_target.llvm_filecheck);
@@ -2208,7 +2210,7 @@ impl Config {
     }

     pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool {
-        self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers)
+        self.target_config.get(&target).and_then(|t| t.sanitizers).unwrap_or(self.sanitizers)
     }

     pub fn needs_sanitizer_runtime_built(&self, target: TargetSelection) -> bool {
@@ -2243,7 +2245,7 @@ impl Config {
     }

     pub fn rpath_enabled(&self, target: TargetSelection) -> bool {
-        self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath)
+        self.target_config.get(&target).and_then(|t| t.rpath).unwrap_or(self.rust_rpath)
     }

     pub fn llvm_enabled(&self, target: TargetSelection) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<Interned<String>> {
|
pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<Interned<String>> {
|
||||||
self.codegen_backends(target).get(0).cloned()
|
self.codegen_backends(target).first().cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn git_config(&self) -> GitConfig<'_> {
|
pub fn git_config(&self) -> GitConfig<'_> {
|
||||||
|
@ -2303,9 +2305,9 @@ impl Config {
|
||||||
.next()
|
.next()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.to_owned();
|
.to_owned();
|
||||||
let rustc_version = Version::parse(&rustc_output.trim()).unwrap();
|
let rustc_version = Version::parse(rustc_output.trim()).unwrap();
|
||||||
let source_version =
|
let source_version =
|
||||||
Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim())
|
Version::parse(fs::read_to_string(self.src.join("src/version")).unwrap().trim())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
if !(source_version == rustc_version
|
if !(source_version == rustc_version
|
||||||
|| (source_version.major == rustc_version.major
|
|| (source_version.major == rustc_version.major
|
||||||
|
@ -2333,7 +2335,7 @@ impl Config {
|
||||||
};
|
};
|
||||||
|
|
||||||
// Handle running from a directory other than the top level
|
// Handle running from a directory other than the top level
|
||||||
let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"]));
|
let top_level = output(self.git().args(["rev-parse", "--show-toplevel"]));
|
||||||
let top_level = top_level.trim_end();
|
let top_level = top_level.trim_end();
|
||||||
let compiler = format!("{top_level}/compiler/");
|
let compiler = format!("{top_level}/compiler/");
|
||||||
let library = format!("{top_level}/library/");
|
let library = format!("{top_level}/library/");
|
||||||
|
@ -2344,7 +2346,7 @@ impl Config {
|
||||||
self.git()
|
self.git()
|
||||||
.arg("rev-list")
|
.arg("rev-list")
|
||||||
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
|
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
|
||||||
.args(&["-n1", "--first-parent", "HEAD"]),
|
.args(["-n1", "--first-parent", "HEAD"]),
|
||||||
);
|
);
|
||||||
let commit = merge_base.trim_end();
|
let commit = merge_base.trim_end();
|
||||||
if commit.is_empty() {
|
if commit.is_empty() {
|
||||||
|
@ -2358,7 +2360,7 @@ impl Config {
|
||||||
// Warn if there were changes to the compiler or standard library since the ancestor commit.
|
// Warn if there were changes to the compiler or standard library since the ancestor commit.
|
||||||
let has_changes = !t!(self
|
let has_changes = !t!(self
|
||||||
.git()
|
.git()
|
||||||
.args(&["diff-index", "--quiet", &commit, "--", &compiler, &library])
|
.args(["diff-index", "--quiet", commit, "--", &compiler, &library])
|
||||||
.status())
|
.status())
|
||||||
.success();
|
.success();
|
||||||
if has_changes {
|
if has_changes {
|
||||||
|
@ -2397,7 +2399,7 @@ impl Config {
|
||||||
// there are some untracked changes in the the given paths.
|
// there are some untracked changes in the the given paths.
|
||||||
false
|
false
|
||||||
} else {
|
} else {
|
||||||
llvm::is_ci_llvm_available(&self, asserts)
|
llvm::is_ci_llvm_available(self, asserts)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
match download_ci_llvm {
|
match download_ci_llvm {
|
||||||
|
@ -2406,7 +2408,7 @@ impl Config {
|
||||||
// FIXME: "if-available" is deprecated. Remove this block later (around mid 2024)
|
// FIXME: "if-available" is deprecated. Remove this block later (around mid 2024)
|
||||||
// to not break builds between the recent-to-old checkouts.
|
// to not break builds between the recent-to-old checkouts.
|
||||||
Some(StringOrBool::String(s)) if s == "if-available" => {
|
Some(StringOrBool::String(s)) if s == "if-available" => {
|
||||||
llvm::is_ci_llvm_available(&self, asserts)
|
llvm::is_ci_llvm_available(self, asserts)
|
||||||
}
|
}
|
||||||
Some(StringOrBool::String(s)) if s == "if-unchanged" => if_unchanged(),
|
Some(StringOrBool::String(s)) if s == "if-unchanged" => if_unchanged(),
|
||||||
Some(StringOrBool::String(other)) => {
|
Some(StringOrBool::String(other)) => {
|
||||||
|
@ -2424,7 +2426,7 @@ impl Config {
|
||||||
if_unchanged: bool,
|
if_unchanged: bool,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
// Handle running from a directory other than the top level
|
// Handle running from a directory other than the top level
|
||||||
let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"]));
|
let top_level = output(self.git().args(["rev-parse", "--show-toplevel"]));
|
||||||
let top_level = top_level.trim_end();
|
let top_level = top_level.trim_end();
|
||||||
|
|
||||||
// Look for a version to compare to based on the current commit.
|
// Look for a version to compare to based on the current commit.
|
||||||
|
@ -2433,7 +2435,7 @@ impl Config {
|
||||||
self.git()
|
self.git()
|
||||||
.arg("rev-list")
|
.arg("rev-list")
|
||||||
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
|
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
|
||||||
.args(&["-n1", "--first-parent", "HEAD"]),
|
.args(["-n1", "--first-parent", "HEAD"]),
|
||||||
);
|
);
|
||||||
let commit = merge_base.trim_end();
|
let commit = merge_base.trim_end();
|
||||||
if commit.is_empty() {
|
if commit.is_empty() {
|
||||||
|
@ -2446,7 +2448,7 @@ impl Config {
|
||||||
|
|
||||||
// Warn if there were changes to the compiler or standard library since the ancestor commit.
|
// Warn if there were changes to the compiler or standard library since the ancestor commit.
|
||||||
let mut git = self.git();
|
let mut git = self.git();
|
||||||
git.args(&["diff-index", "--quiet", &commit, "--"]);
|
git.args(["diff-index", "--quiet", commit, "--"]);
|
||||||
|
|
||||||
for path in modified_paths {
|
for path in modified_paths {
|
||||||
git.arg(format!("{top_level}/{path}"));
|
git.arg(format!("{top_level}/{path}"));
|
||||||
|
|
|
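Many hunks in this rollup, including the two `git rev-list` calls above, pass an array to `Command::args` instead of a borrowed slice. Both forms compile, since `args` takes any `IntoIterator` of `AsRef<OsStr>` items and arrays implement `IntoIterator` by value (since Rust 1.53); a minimal sketch:

```rust
use std::process::Command;

fn main() {
    // Old shape: a needless `&` in front of the array literal.
    let mut with_borrow = Command::new("git");
    with_borrow.args(&["rev-parse", "HEAD"]);

    // New shape: pass the array directly.
    let mut without_borrow = Command::new("git");
    without_borrow.args(["rev-parse", "HEAD"]);
}
```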
@@ -159,7 +159,7 @@ impl Config {
         ";
         nix_build_succeeded = try_run(
             self,
-            Command::new("nix-build").args(&[
+            Command::new("nix-build").args([
                 Path::new("-E"),
                 Path::new(NIX_EXPR),
                 Path::new("-o"),
@@ -188,7 +188,7 @@ impl Config {
             let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
             // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
             let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
-            patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
+            patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]);
         }

         let _ = try_run(self, patchelf.arg(fname));
@@ -218,7 +218,7 @@ impl Config {
         println!("downloading {url}");
         // Try curl. If that fails and we are on windows, fallback to PowerShell.
         let mut curl = Command::new("curl");
-        curl.args(&[
+        curl.args([
             "-y",
             "30",
             "-Y",
@@ -242,7 +242,7 @@ impl Config {
             if self.build.contains("windows-msvc") {
                 eprintln!("Fallback to PowerShell");
                 for _ in 0..3 {
-                    if try_run(self, Command::new("PowerShell.exe").args(&[
+                    if try_run(self, Command::new("PowerShell.exe").args([
                         "/nologo",
                         "-Command",
                         "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
@@ -388,7 +388,7 @@ impl Config {
         let bin_root = self.out.join(host.triple).join("stage0");
         let clippy_stamp = bin_root.join(".clippy-stamp");
         let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host));
-        if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, &date) {
+        if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) {
             return cargo_clippy;
         }

@@ -421,14 +421,14 @@ impl Config {
             DownloadSource::Dist,
             format!("rustfmt-{version}-{build}.tar.xz", build = host.triple),
             "rustfmt-preview",
-            &date,
+            date,
             "rustfmt",
         );
         self.download_component(
             DownloadSource::Dist,
             format!("rustc-{version}-{build}.tar.xz", build = host.triple),
             "rustc",
-            &date,
+            date,
             "rustfmt",
         );

@@ -665,7 +665,7 @@ download-rustc = false
         }
         let llvm_root = self.ci_llvm_root();
         let llvm_stamp = llvm_root.join(".llvm-stamp");
-        let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository());
+        let llvm_sha = detect_llvm_sha(self, self.rust_info.is_managed_git_subrepository());
         let key = format!("{}{}", llvm_sha, self.llvm_assertions);
         if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() {
             self.download_ci_llvm(&llvm_sha);
@@ -685,11 +685,11 @@ download-rustc = false
         // rebuild.
         let now = filetime::FileTime::from_system_time(std::time::SystemTime::now());
         let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build));
-        t!(filetime::set_file_times(&llvm_config, now, now));
+        t!(filetime::set_file_times(llvm_config, now, now));

         if self.should_fix_bins_and_dylibs() {
             let llvm_lib = llvm_root.join("lib");
-            for entry in t!(fs::read_dir(&llvm_lib)) {
+            for entry in t!(fs::read_dir(llvm_lib)) {
                 let lib = t!(entry).path();
                 if lib.extension().map_or(false, |ext| ext == "so") {
                     self.fix_bin_or_dylib(&lib);
@@ -467,7 +467,7 @@ impl Build {
         }

         // Make a symbolic link so we can use a consistent directory in the documentation.
-        let build_triple = build.out.join(&build.build.triple);
+        let build_triple = build.out.join(build.build.triple);
         t!(fs::create_dir_all(&build_triple));
         let host = build.out.join("host");
         if host.is_symlink() {
@@ -491,7 +491,7 @@ impl Build {
     ///
     /// `relative_path` should be relative to the root of the git repository, not an absolute path.
     pub(crate) fn update_submodule(&self, relative_path: &Path) {
-        if !self.config.submodules(&self.rust_info()) {
+        if !self.config.submodules(self.rust_info()) {
             return;
         }

@@ -507,11 +507,11 @@ impl Build {

         // check_submodule
         let checked_out_hash =
-            output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path));
+            output(Command::new("git").args(["rev-parse", "HEAD"]).current_dir(&absolute_path));
         // update_submodules
         let recorded = output(
             Command::new("git")
-                .args(&["ls-tree", "HEAD"])
+                .args(["ls-tree", "HEAD"])
                 .arg(relative_path)
                 .current_dir(&self.config.src),
         );
@@ -529,7 +529,7 @@ impl Build {
             println!("Updating submodule {}", relative_path.display());
             self.run(
                 Command::new("git")
-                    .args(&["submodule", "-q", "sync"])
+                    .args(["submodule", "-q", "sync"])
                     .arg(relative_path)
                     .current_dir(&self.config.src),
             );
@@ -560,7 +560,7 @@ impl Build {
                 let branch = branch.strip_prefix("heads/").unwrap_or(&branch);
                 git.arg("-c").arg(format!("branch.{branch}.remote=origin"));
             }
-            git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
+            git.args(["submodule", "update", "--init", "--recursive", "--depth=1"]);
             if progress {
                 git.arg("--progress");
             }
@@ -577,7 +577,7 @@ impl Build {
         let has_local_modifications = !self.run_cmd(
             BootstrapCommand::from(
                 Command::new("git")
-                    .args(&["diff-index", "--quiet", "HEAD"])
+                    .args(["diff-index", "--quiet", "HEAD"])
                     .current_dir(&absolute_path),
             )
             .allow_failure()
@@ -587,14 +587,14 @@ impl Build {
             }),
         );
         if has_local_modifications {
-            self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
+            self.run(Command::new("git").args(["stash", "push"]).current_dir(&absolute_path));
         }

-        self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
-        self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path));
+        self.run(Command::new("git").args(["reset", "-q", "--hard"]).current_dir(&absolute_path));
+        self.run(Command::new("git").args(["clean", "-qdfx"]).current_dir(&absolute_path));

         if has_local_modifications {
-            self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path));
+            self.run(Command::new("git").args(["stash", "pop"]).current_dir(absolute_path));
         }
     }

@@ -602,20 +602,20 @@ impl Build {
     /// This avoids contributors checking in a submodule change by accident.
     pub fn update_existing_submodules(&self) {
         // Avoid running git when there isn't a git checkout.
-        if !self.config.submodules(&self.rust_info()) {
+        if !self.config.submodules(self.rust_info()) {
             return;
         }
         let output = output(
             self.config
                 .git()
-                .args(&["config", "--file"])
+                .args(["config", "--file"])
                 .arg(&self.config.src.join(".gitmodules"))
-                .args(&["--get-regexp", "path"]),
+                .args(["--get-regexp", "path"]),
         );
         for line in output.lines() {
             // Look for `submodule.$name.path = $path`
             // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
-            let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
+            let submodule = Path::new(line.split_once(' ').unwrap().1);
             // Don't update the submodule unless it's already been cloned.
             if GitInfo::new(false, submodule).is_managed_git_subrepository() {
                 self.update_submodule(submodule);
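The `update_existing_submodules` hunk above swaps `splitn(2, ' ').nth(1)` for `split_once(' ')`. A small demonstration using the sample line quoted in the code comment:

```rust
fn main() {
    let line = "submodule.src/rust-installer.path src/tools/rust-installer";
    // Both yield the text after the first space; `split_once` says so directly
    // instead of building an iterator just to take its second element.
    let via_splitn = line.splitn(2, ' ').nth(1).unwrap();
    let via_split_once = line.split_once(' ').unwrap().1;
    assert_eq!(via_splitn, via_split_once);
}
```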
@@ -630,26 +630,26 @@ impl Build {
         }

         // Download rustfmt early so that it can be used in rust-analyzer configs.
-        let _ = &builder::Builder::new(&self).initial_rustfmt();
+        let _ = &builder::Builder::new(self).initial_rustfmt();

         // hardcoded subcommands
         match &self.config.cmd {
             Subcommand::Format { check } => {
                 return core::build_steps::format::format(
-                    &builder::Builder::new(&self),
+                    &builder::Builder::new(self),
                     *check,
                     &self.config.paths,
                 );
             }
             Subcommand::Suggest { run } => {
-                return core::build_steps::suggest::suggest(&builder::Builder::new(&self), *run);
+                return core::build_steps::suggest::suggest(&builder::Builder::new(self), *run);
             }
             _ => (),
         }

         {
-            let builder = builder::Builder::new(&self);
-            if let Some(path) = builder.paths.get(0) {
+            let builder = builder::Builder::new(self);
+            if let Some(path) = builder.paths.first() {
                 if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") {
                     return;
                 }
@@ -659,14 +659,14 @@ impl Build {
         if !self.config.dry_run() {
             {
                 self.config.dry_run = DryRun::SelfCheck;
-                let builder = builder::Builder::new(&self);
+                let builder = builder::Builder::new(self);
                 builder.execute_cli();
             }
             self.config.dry_run = DryRun::Disabled;
-            let builder = builder::Builder::new(&self);
+            let builder = builder::Builder::new(self);
             builder.execute_cli();
         } else {
-            let builder = builder::Builder::new(&self);
+            let builder = builder::Builder::new(self);
             builder.execute_cli();
         }

@@ -936,7 +936,7 @@ impl Build {
         static SYSROOT_CACHE: OnceLock<PathBuf> = OnceLock::new();
         SYSROOT_CACHE.get_or_init(|| {
             let mut rustc = Command::new(&self.initial_rustc);
-            rustc.args(&["--print", "sysroot"]);
+            rustc.args(["--print", "sysroot"]);
             output(&mut rustc).trim().into()
         })
     }
@@ -1162,7 +1162,7 @@ impl Build {
     fn group(&self, msg: &str) -> Option<gha::Group> {
         match self.config.dry_run {
             DryRun::SelfCheck => None,
-            DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)),
+            DryRun::Disabled | DryRun::UserSelected => Some(gha::group(msg)),
         }
     }

@@ -1322,7 +1322,7 @@ impl Build {
             .target_config
             .get(&target)
             .and_then(|t| t.musl_root.as_ref())
-            .or_else(|| self.config.musl_root.as_ref())
+            .or(self.config.musl_root.as_ref())
             .map(|p| &**p)
     }

@@ -1511,11 +1511,11 @@ impl Build {

     /// Returns the `a.b.c` version that the given package is at.
     fn release_num(&self, package: &str) -> String {
-        let toml_file_name = self.src.join(&format!("src/tools/{package}/Cargo.toml"));
-        let toml = t!(fs::read_to_string(&toml_file_name));
+        let toml_file_name = self.src.join(format!("src/tools/{package}/Cargo.toml"));
+        let toml = t!(fs::read_to_string(toml_file_name));
         for line in toml.lines() {
             if let Some(stripped) =
-                line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
+                line.strip_prefix("version = \"").and_then(|s| s.strip_suffix('"'))
             {
                 return stripped.to_owned();
             }
@@ -1618,7 +1618,7 @@ impl Build {
         if src == dst {
             return;
         }
-        let _ = fs::remove_file(&dst);
+        let _ = fs::remove_file(dst);
         let metadata = t!(src.symlink_metadata());
         let mut src = src.to_path_buf();
         if metadata.file_type().is_symlink() {
@@ -1908,7 +1908,7 @@ pub fn prepare_behaviour_dump_dir(build: &Build) {

     let dump_path = build.out.join("bootstrap-shims-dump");

-    let initialized = INITIALIZED.get().unwrap_or_else(|| &false);
+    let initialized = INITIALIZED.get().unwrap_or(&false);
     if !initialized {
         // clear old dumps
         if dump_path.exists() {

@@ -39,8 +39,7 @@ pub(crate) fn maybe_dump(dump_name: String, cmd: &Command) {
     if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") {
         let dump_file = format!("{dump_dir}/{dump_name}");

-        let mut file =
-            OpenOptions::new().create(true).write(true).append(true).open(&dump_file).unwrap();
+        let mut file = OpenOptions::new().create(true).append(true).open(dump_file).unwrap();

         let cmd_dump = format!("{:?}\n", cmd);
         let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}");

@@ -64,7 +64,7 @@ unsafe impl<T> Sync for Interned<T> {}

 impl fmt::Display for Interned<String> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let s: &str = &*self;
+        let s: &str = self;
         f.write_str(s)
     }
 }
@@ -74,7 +74,7 @@ where
     Self: Deref<Target = U>,
 {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let s: &U = &*self;
+        let s: &U = self;
         f.write_fmt(format_args!("{s:?}"))
     }
 }
@@ -132,7 +132,7 @@ impl<T: Hash + Clone + Eq> TyIntern<T> {
         B: Eq + Hash + ToOwned<Owned = T> + ?Sized,
         T: Borrow<B>,
     {
-        if let Some(i) = self.set.get(&item) {
+        if let Some(i) = self.set.get(item) {
             return *i;
         }
         let item = item.to_owned();
@@ -233,7 +233,7 @@ impl Cache {
         let type_id = TypeId::of::<S>();
         let stepcache = cache
             .entry(type_id)
-            .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
+            .or_insert_with(|| Box::<HashMap<S, S::Output>>::default())
             .downcast_mut::<HashMap<S, S::Output>>()
             .expect("invalid type mapped");
         assert!(!stepcache.contains_key(&step), "processing {step:?} a second time");
@@ -245,7 +245,7 @@ impl Cache {
         let type_id = TypeId::of::<S>();
         let stepcache = cache
             .entry(type_id)
-            .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
+            .or_insert_with(|| Box::<HashMap<S, S::Output>>::default())
             .downcast_mut::<HashMap<S, S::Output>>()
             .expect("invalid type mapped");
         stepcache.get(step).cloned()

@@ -35,7 +35,7 @@ use crate::{Build, CLang, GitRepo};
 // try to infer the archiver path from the C compiler path.
 // In the future this logic should be replaced by calling into the `cc` crate.
 fn cc2ar(cc: &Path, target: TargetSelection) -> Option<PathBuf> {
-    if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) {
+    if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace('-', "_"))) {
         Some(PathBuf::from(ar))
     } else if let Some(ar) = env::var_os("AR") {
         Some(PathBuf::from(ar))
@@ -172,11 +172,9 @@ fn default_compiler(
         // When compiling for android we may have the NDK configured in the
         // config.toml in which case we look there. Otherwise the default
         // compiler already takes into account the triple in question.
-        t if t.contains("android") => build
-            .config
-            .android_ndk
-            .as_ref()
-            .map(|ndk| ndk_compiler(compiler, &*target.triple, ndk)),
+        t if t.contains("android") => {
+            build.config.android_ndk.as_ref().map(|ndk| ndk_compiler(compiler, &target.triple, ndk))
+        }

         // The default gcc version from OpenBSD may be too old, try using egcc,
         // which is a gcc version from ports, if this is the case.
@@ -230,7 +228,7 @@ fn default_compiler(
 }

 pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf {
-    let mut triple_iter = triple.split("-");
+    let mut triple_iter = triple.split('-');
     let triple_translated = if let Some(arch) = triple_iter.next() {
         let arch_new = match arch {
             "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a",

@@ -2,6 +2,8 @@
 //! with the goal of keeping developers synchronized with important modifications in
 //! the bootstrap.

+use std::fmt::Display;
+
 #[cfg(test)]
 mod tests;

@@ -24,11 +26,11 @@ pub enum ChangeSeverity {
     Warning,
 }

-impl ToString for ChangeSeverity {
-    fn to_string(&self) -> String {
+impl Display for ChangeSeverity {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            ChangeSeverity::Info => "INFO".to_string(),
-            ChangeSeverity::Warning => "WARNING".to_string(),
+            ChangeSeverity::Info => write!(f, "INFO"),
+            ChangeSeverity::Warning => write!(f, "WARNING"),
         }
     }
 }
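The `ChangeSeverity` hunk above moves from a manual `ToString` impl to `Display`: implementing `Display` gives `.to_string()` for free via the blanket `impl<T: Display> ToString for T`, and the type then also works with `format!` and `println!`. A trimmed stand-in (the enum here mirrors, but is not, the real one):

```rust
use std::fmt;

// Invented stand-in, only to illustrate the pattern.
enum Severity {
    Info,
    Warning,
}

impl fmt::Display for Severity {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Severity::Info => write!(f, "INFO"),
            Severity::Warning => write!(f, "WARNING"),
        }
    }
}

fn main() {
    // `.to_string()` comes from the blanket `impl<T: Display> ToString for T`.
    assert_eq!(Severity::Warning.to_string(), "WARNING");
    println!("{}", Severity::Info);
}
```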
@ -40,7 +42,7 @@ pub fn find_recent_config_change_ids(current_id: usize) -> Vec<ChangeInfo> {
|
||||||
// older one); otherwise, return the full list (assuming the user provided
|
// older one); otherwise, return the full list (assuming the user provided
|
||||||
// the incorrect change-id by accident).
|
// the incorrect change-id by accident).
|
||||||
if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id) {
|
if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id) {
|
||||||
if ¤t_id > &config.change_id {
|
if current_id > config.change_id {
|
||||||
return Vec::new();
|
return Vec::new();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -97,7 +97,7 @@ impl GitInfo {
|
||||||
|
|
||||||
pub fn version(&self, build: &Build, num: &str) -> String {
|
pub fn version(&self, build: &Build, num: &str) -> String {
|
||||||
let mut version = build.release(num);
|
let mut version = build.release(num);
|
||||||
if let Some(ref inner) = self.info() {
|
if let Some(inner) = self.info() {
|
||||||
version.push_str(" (");
|
version.push_str(" (");
|
||||||
version.push_str(&inner.short_sha);
|
version.push_str(&inner.short_sha);
|
||||||
version.push(' ');
|
version.push(' ');
|
||||||
|
@ -150,7 +150,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
|
||||||
/// root.
|
/// root.
|
||||||
pub fn write_commit_info_file(root: &Path, info: &Info) {
|
pub fn write_commit_info_file(root: &Path, info: &Info) {
|
||||||
let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date);
|
let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date);
|
||||||
t!(fs::write(root.join("git-commit-info"), &commit_info));
|
t!(fs::write(root.join("git-commit-info"), commit_info));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write the commit hash to the `git-commit-hash` file given the project root.
|
/// Write the commit hash to the `git-commit-hash` file given the project root.
|
||||||
|
|
|
@ -425,7 +425,7 @@ pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf {
|
||||||
// Similar to how LLVM does it, to find clang's library runtime directory:
|
// Similar to how LLVM does it, to find clang's library runtime directory:
|
||||||
// - we ask `clang-cl` to locate the `clang_rt.builtins` lib.
|
// - we ask `clang-cl` to locate the `clang_rt.builtins` lib.
|
||||||
let mut builtins_locator = Command::new(clang_cl_path);
|
let mut builtins_locator = Command::new(clang_cl_path);
|
||||||
builtins_locator.args(&["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]);
|
builtins_locator.args(["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]);
|
||||||
|
|
||||||
let clang_rt_builtins = output(&mut builtins_locator);
|
let clang_rt_builtins = output(&mut builtins_locator);
|
||||||
let clang_rt_builtins = Path::new(clang_rt_builtins.trim());
|
let clang_rt_builtins = Path::new(clang_rt_builtins.trim());
|
||||||
|
@ -475,7 +475,7 @@ pub fn dir_is_empty(dir: &Path) -> bool {
|
||||||
/// the "y" part from the string.
|
/// the "y" part from the string.
|
||||||
pub fn extract_beta_rev(version: &str) -> Option<String> {
|
pub fn extract_beta_rev(version: &str) -> Option<String> {
|
||||||
let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
|
let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
|
||||||
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));
|
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| s[..p].to_string()));
|
||||||
|
|
||||||
count
|
count
|
||||||
}
|
}
|
||||||
|
@ -559,11 +559,10 @@ pub fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String {
|
||||||
// ',values("tvos","watchos")' or '' (nothing) when there are no values.
|
// ',values("tvos","watchos")' or '' (nothing) when there are no values.
|
||||||
let next = match values {
|
let next = match values {
|
||||||
Some(values) => {
|
Some(values) => {
|
||||||
let mut tmp =
|
let mut tmp = values.iter().flat_map(|val| [",", "\"", val, "\""]).collect::<String>();
|
||||||
values.iter().map(|val| [",", "\"", val, "\""]).flatten().collect::<String>();
|
|
||||||
|
|
||||||
tmp.insert_str(1, "values(");
|
tmp.insert_str(1, "values(");
|
||||||
tmp.push_str(")");
|
tmp.push(')');
|
||||||
tmp
|
tmp
|
||||||
}
|
}
|
||||||
None => "".to_string(),
|
None => "".to_string(),
|
||||||
|
|
|
@ -15,10 +15,10 @@ use termcolor::{Color, ColorSpec, WriteColor};
|
||||||
const TERSE_TESTS_PER_LINE: usize = 88;
|
const TERSE_TESTS_PER_LINE: usize = 88;
|
||||||
|
|
||||||
pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
|
pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
|
||||||
if cmd.get_args().position(|arg| arg == "--").is_none() {
|
if !cmd.get_args().any(|arg| arg == "--") {
|
||||||
cmd.arg("--");
|
cmd.arg("--");
|
||||||
}
|
}
|
||||||
cmd.args(&["-Z", "unstable-options", "--format", "json"]);
|
cmd.args(["-Z", "unstable-options", "--format", "json"]);
|
||||||
|
|
||||||
try_run_tests(builder, cmd, false)
|
try_run_tests(builder, cmd, false)
|
||||||
}
|
}
|
||||||
|
@ -303,19 +303,19 @@ impl Outcome<'_> {
|
||||||
fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
|
fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
|
||||||
match self {
|
match self {
|
||||||
Outcome::Ok => {
|
Outcome::Ok => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
|
||||||
write!(writer, ".")?;
|
write!(writer, ".")?;
|
||||||
}
|
}
|
||||||
Outcome::BenchOk => {
|
Outcome::BenchOk => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?;
|
||||||
write!(writer, "b")?;
|
write!(writer, "b")?;
|
||||||
}
|
}
|
||||||
Outcome::Failed => {
|
Outcome::Failed => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
|
||||||
write!(writer, "F")?;
|
write!(writer, "F")?;
|
||||||
}
|
}
|
||||||
Outcome::Ignored { .. } => {
|
Outcome::Ignored { .. } => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
|
||||||
write!(writer, "i")?;
|
write!(writer, "i")?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -325,19 +325,19 @@ impl Outcome<'_> {
|
||||||
fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
|
fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
|
||||||
match self {
|
match self {
|
||||||
Outcome::Ok => {
|
Outcome::Ok => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
|
||||||
write!(writer, "ok")?;
|
write!(writer, "ok")?;
|
||||||
}
|
}
|
||||||
Outcome::BenchOk => {
|
Outcome::BenchOk => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?;
|
||||||
write!(writer, "benchmarked")?;
|
write!(writer, "benchmarked")?;
|
||||||
}
|
}
|
||||||
Outcome::Failed => {
|
Outcome::Failed => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
|
||||||
write!(writer, "FAILED")?;
|
write!(writer, "FAILED")?;
|
||||||
}
|
}
|
||||||
Outcome::Ignored { reason } => {
|
Outcome::Ignored { reason } => {
|
||||||
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
|
writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
|
||||||
write!(writer, "ignored")?;
|
write!(writer, "ignored")?;
|
||||||
if let Some(reason) = reason {
|
if let Some(reason) = reason {
|
||||||
write!(writer, ", {reason}")?;
|
write!(writer, ", {reason}")?;
|
||||||
|
|
|
@ -226,8 +226,7 @@ impl<'a> Tarball<'a> {
|
||||||
if self.include_target_in_component_name {
|
if self.include_target_in_component_name {
|
||||||
component_name.push('-');
|
component_name.push('-');
|
||||||
component_name.push_str(
|
component_name.push_str(
|
||||||
&self
|
self.target
|
||||||
.target
|
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.expect("include_target_in_component_name used in a targetless tarball"),
|
.expect("include_target_in_component_name used in a targetless tarball"),
|
||||||
);
|
);
|
||||||
|
@ -326,7 +325,7 @@ impl<'a> Tarball<'a> {
|
||||||
assert!(!formats.is_empty(), "dist.compression-formats can't be empty");
|
assert!(!formats.is_empty(), "dist.compression-formats can't be empty");
|
||||||
cmd.arg("--compression-formats").arg(formats.join(","));
|
cmd.arg("--compression-formats").arg(formats.join(","));
|
||||||
}
|
}
|
||||||
cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]);
|
cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]);
|
||||||
self.builder.run(&mut cmd);
|
self.builder.run(&mut cmd);
|
||||||
|
|
||||||
// Ensure there are no symbolic links in the tarball. In particular,
|
// Ensure there are no symbolic links in the tarball. In particular,
|
||||||
|
@ -347,7 +346,7 @@ impl<'a> Tarball<'a> {
|
||||||
.config
|
.config
|
||||||
.dist_compression_formats
|
.dist_compression_formats
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.and_then(|formats| formats.get(0))
|
.and_then(|formats| formats.first())
|
||||||
.map(|s| s.as_str())
|
.map(|s| s.as_str())
|
||||||
.unwrap_or("gz");
|
.unwrap_or("gz");
|
||||||
|
|
||||||
|
|
|
@ -1936,7 +1936,7 @@ impl<'test> TestCx<'test> {
|
||||||
fn document(&self, out_dir: &Path) -> ProcRes {
|
fn document(&self, out_dir: &Path) -> ProcRes {
|
||||||
if self.props.build_aux_docs {
|
if self.props.build_aux_docs {
|
||||||
for rel_ab in &self.props.aux_builds {
|
for rel_ab in &self.props.aux_builds {
|
||||||
let aux_testpaths = self.compute_aux_test_paths(rel_ab);
|
let aux_testpaths = self.compute_aux_test_paths(&self.testpaths, rel_ab);
|
||||||
let aux_props =
|
let aux_props =
|
||||||
self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config);
|
self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config);
|
||||||
let aux_cx = TestCx {
|
let aux_cx = TestCx {
|
||||||
|
@ -2092,24 +2092,18 @@ impl<'test> TestCx<'test> {
|
||||||
proc_res
|
proc_res
|
||||||
}
|
}
|
||||||
|
|
||||||
/// For each `aux-build: foo/bar` annotation, we check to find the
|
/// For each `aux-build: foo/bar` annotation, we check to find the file in an `auxiliary`
|
||||||
/// file in an `auxiliary` directory relative to the test itself.
|
/// directory relative to the test itself (not any intermediate auxiliaries).
|
||||||
fn compute_aux_test_paths(&self, rel_ab: &str) -> TestPaths {
|
fn compute_aux_test_paths(&self, of: &TestPaths, rel_ab: &str) -> TestPaths {
|
||||||
let test_ab = self
|
let test_ab =
|
||||||
.testpaths
|
of.file.parent().expect("test file path has no parent").join("auxiliary").join(rel_ab);
|
||||||
.file
|
|
||||||
.parent()
|
|
||||||
.expect("test file path has no parent")
|
|
||||||
.join("auxiliary")
|
|
||||||
.join(rel_ab);
|
|
||||||
if !test_ab.exists() {
|
if !test_ab.exists() {
|
||||||
self.fatal(&format!("aux-build `{}` source not found", test_ab.display()))
|
self.fatal(&format!("aux-build `{}` source not found", test_ab.display()))
|
||||||
}
|
}
|
||||||
|
|
||||||
TestPaths {
|
TestPaths {
|
||||||
file: test_ab,
|
file: test_ab,
|
||||||
relative_dir: self
|
relative_dir: of
|
||||||
-                .testpaths
                 .relative_dir
                 .join(self.output_testname_unique())
                 .join("auxiliary")

@@ -2135,7 +2129,7 @@ impl<'test> TestCx<'test> {
         self.config.target.contains("vxworks") && !self.is_vxworks_pure_static()
     }
 
-    fn build_all_auxiliary(&self, rustc: &mut Command) -> PathBuf {
+    fn aux_output_dir(&self) -> PathBuf {
         let aux_dir = self.aux_output_dir_name();
 
         if !self.props.aux_builds.is_empty() {
@@ -2143,22 +2137,26 @@ impl<'test> TestCx<'test> {
             create_dir_all(&aux_dir).unwrap();
         }
 
+        aux_dir
+    }
+
+    fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Path, rustc: &mut Command) {
         for rel_ab in &self.props.aux_builds {
-            self.build_auxiliary(rel_ab, &aux_dir);
+            self.build_auxiliary(of, rel_ab, &aux_dir);
         }
 
         for (aux_name, aux_path) in &self.props.aux_crates {
-            let is_dylib = self.build_auxiliary(&aux_path, &aux_dir);
+            let is_dylib = self.build_auxiliary(of, &aux_path, &aux_dir);
             let lib_name =
                 get_lib_name(&aux_path.trim_end_matches(".rs").replace('-', "_"), is_dylib);
             rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir.display(), lib_name));
         }
-
-        aux_dir
     }
 
     fn compose_and_run_compiler(&self, mut rustc: Command, input: Option<String>) -> ProcRes {
-        let aux_dir = self.build_all_auxiliary(&mut rustc);
+        let aux_dir = self.aux_output_dir();
+        self.build_all_auxiliary(&self.testpaths, &aux_dir, &mut rustc);
 
         self.props.unset_rustc_env.iter().fold(&mut rustc, Command::env_remove);
         rustc.envs(self.props.rustc_env.clone());
         self.compose_and_run(
@@ -2172,10 +2170,10 @@ impl<'test> TestCx<'test> {
     /// Builds an aux dependency.
     ///
     /// Returns whether or not it is a dylib.
-    fn build_auxiliary(&self, source_path: &str, aux_dir: &Path) -> bool {
-        let aux_testpaths = self.compute_aux_test_paths(source_path);
+    fn build_auxiliary(&self, of: &TestPaths, source_path: &str, aux_dir: &Path) -> bool {
+        let aux_testpaths = self.compute_aux_test_paths(of, source_path);
         let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config);
-        let aux_output = TargetLocation::ThisDirectory(self.aux_output_dir_name());
+        let aux_output = TargetLocation::ThisDirectory(aux_dir.to_path_buf());
         let aux_cx = TestCx {
             config: self.config,
             props: &aux_props,
@@ -2193,6 +2191,7 @@ impl<'test> TestCx<'test> {
             LinkToAux::No,
             Vec::new(),
         );
+        aux_cx.build_all_auxiliary(of, aux_dir, &mut aux_rustc);
 
         for key in &aux_props.unset_rustc_env {
             aux_rustc.env_remove(key);
@@ -3034,7 +3033,8 @@ impl<'test> TestCx<'test> {
             LinkToAux::Yes,
             Vec::new(),
         );
-        new_rustdoc.build_all_auxiliary(&mut rustc);
+        let aux_dir = new_rustdoc.aux_output_dir();
+        new_rustdoc.build_all_auxiliary(&new_rustdoc.testpaths, &aux_dir, &mut rustc);
 
         let proc_res = new_rustdoc.document(&compare_dir);
         if !proc_res.status.success() {

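Taken together, the compiletest hunks above split the old build_all_auxiliary into three cooperating pieces: aux_output_dir() creates the shared "auxiliary" directory once, build_all_auxiliary(of, aux_dir, rustc) walks the aux-build/aux-crate dependencies of a given test, and build_auxiliary() now calls aux_cx.build_all_auxiliary(...) so that an auxiliary's own auxiliaries are built first, into the same directory. The sketch below is a minimal, self-contained model of that control flow, not compiletest code: the Test struct, the dependency table, and the logged rustc command line are illustrative assumptions.

use std::collections::HashMap;
use std::path::{Path, PathBuf};

struct Test {
    name: &'static str,
    aux: Vec<&'static str>,
}

fn aux_output_dir(root: &Path) -> PathBuf {
    // One shared `auxiliary` directory per test, mirroring `aux_output_dir()` above.
    root.join("auxiliary")
}

fn build_all_auxiliary(of: &Test, tests: &HashMap<&str, Test>, aux_dir: &Path, log: &mut Vec<String>) {
    // Mirrors the new `build_all_auxiliary(of, aux_dir, rustc)`: build every
    // auxiliary of `of` into the shared directory.
    for dep in &of.aux {
        build_auxiliary(&tests[dep], tests, aux_dir, log);
    }
}

fn build_auxiliary(aux: &Test, tests: &HashMap<&str, Test>, aux_dir: &Path, log: &mut Vec<String>) {
    // Like the patched `build_auxiliary`: first recurse into the auxiliary's own
    // auxiliaries (the new `aux_cx.build_all_auxiliary(of, aux_dir, ...)` call),
    // then "compile" the auxiliary itself.
    build_all_auxiliary(aux, tests, aux_dir, log);
    log.push(format!("rustc {} --out-dir {}", aux.name, aux_dir.display()));
}

fn main() {
    let tests: HashMap<&str, Test> = HashMap::from([
        ("aux_aux_bar.rs", Test { name: "aux_aux_bar.rs", aux: vec![] }),
        ("aux_aux_foo.rs", Test { name: "aux_aux_foo.rs", aux: vec!["aux_aux_bar.rs"] }),
        ("aux-aux.rs", Test { name: "aux-aux.rs", aux: vec!["aux_aux_foo.rs", "aux_aux_bar.rs"] }),
    ]);

    let aux_dir = aux_output_dir(Path::new("build/test"));
    let mut log = Vec::new();
    build_all_auxiliary(&tests["aux-aux.rs"], &tests, &aux_dir, &mut log);

    // Each auxiliary's dependencies are emitted before the auxiliary itself;
    // deduplicating the repeated bar build is out of scope for this sketch.
    for line in &log {
        println!("{line}");
    }
}
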
@@ -8,7 +8,7 @@ LL | arg: NotIntoDiagnosticArg,
    | ^^^^^^^^^^^^^^^^^^^^ the trait `IntoDiagnosticArg` is not implemented for `NotIntoDiagnosticArg`
    |
    = help: normalized in stderr
-note: required by a bound in `rustc_errors::diagnostic::<impl DiagnosticBuilder<'a, G>>::arg`
+note: required by a bound in `DiagnosticBuilder::<'a, G>::arg`
   --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC
    = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -22,7 +22,7 @@ LL | arg: NotIntoDiagnosticArg,
    | ^^^^^^^^^^^^^^^^^^^^ the trait `IntoDiagnosticArg` is not implemented for `NotIntoDiagnosticArg`
    |
    = help: normalized in stderr
-note: required by a bound in `rustc_errors::diagnostic::<impl DiagnosticBuilder<'a, G>>::arg`
+note: required by a bound in `DiagnosticBuilder::<'a, G>::arg`
   --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC
    = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -628,7 +628,7 @@ LL | other: Hello,
    | ^^^^^ the trait `IntoDiagnosticArg` is not implemented for `Hello`
    |
    = help: normalized in stderr
-note: required by a bound in `rustc_errors::diagnostic::<impl DiagnosticBuilder<'a, G>>::arg`
+note: required by a bound in `DiagnosticBuilder::<'a, G>::arg`
   --> $COMPILER_DIR/rustc_errors/src/diagnostic.rs:LL:CC
    = note: this error originates in the macro `with_fn` (in Nightly builds, run with -Z macro-backtrace for more info)
 

@@ -3,6 +3,10 @@
 struct F;
 
 impl async Fn<()> for F {}
-//~^ ERROR expected type, found keyword `async`
+//~^ ERROR `async` trait implementations are unsupported
+//~| ERROR the precise format of `Fn`-family traits' type parameters is subject to change
+//~| ERROR manual implementations of `Fn` are experimental
+//~| ERROR expected a `FnMut()` closure, found `F`
+//~| ERROR not all trait items implemented, missing: `call`
 
 fn main() {}

@@ -1,8 +1,47 @@
-error: expected type, found keyword `async`
+error: `async` trait implementations are unsupported
   --> $DIR/impl-header.rs:5:6
    |
 LL | impl async Fn<()> for F {}
-   |      ^^^^^ expected type
+   |      ^^^^^
 
-error: aborting due to 1 previous error
+error[E0658]: the precise format of `Fn`-family traits' type parameters is subject to change
+  --> $DIR/impl-header.rs:5:12
+   |
+LL | impl async Fn<()> for F {}
+   |            ^^^^^^
+   |
+   = note: see issue #29625 <https://github.com/rust-lang/rust/issues/29625> for more information
+   = help: add `#![feature(unboxed_closures)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0183]: manual implementations of `Fn` are experimental
+  --> $DIR/impl-header.rs:5:12
+   |
+LL | impl async Fn<()> for F {}
+   |            ^^^^^^ manual implementations of `Fn` are experimental
+   |
+   = help: add `#![feature(unboxed_closures)]` to the crate attributes to enable
+
+error[E0277]: expected a `FnMut()` closure, found `F`
+  --> $DIR/impl-header.rs:5:23
+   |
+LL | impl async Fn<()> for F {}
+   |                       ^ expected an `FnMut()` closure, found `F`
+   |
+   = help: the trait `FnMut<()>` is not implemented for `F`
+   = note: wrap the `F` in a closure with no arguments: `|| { /* code */ }`
+note: required by a bound in `Fn`
+  --> $SRC_DIR/core/src/ops/function.rs:LL:COL
+
+error[E0046]: not all trait items implemented, missing: `call`
+  --> $DIR/impl-header.rs:5:1
+   |
+LL | impl async Fn<()> for F {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^ missing `call` in implementation
+   |
+   = help: implement the missing item: `fn call(&self, _: ()) -> <Self as FnOnce<()>>::Output { todo!() }`
+
+error: aborting due to 5 previous errors
+
+Some errors have detailed explanations: E0046, E0183, E0277, E0658.
+For more information about an error, try `rustc --explain E0046`.

@@ -0,0 +1,21 @@
+// Demonstrates and records a theoretical regressions / breaking changes caused by the
+// introduction of async trait bounds.
+
+// Setting the edition to 2018 since we don't regress `demo! { dyn async }` in Rust <2018.
+//@ edition:2018
+
+macro_rules! demo {
+    ($ty:ty) => { compile_error!("ty"); };
+    //~^ ERROR ty
+    //~| ERROR ty
+    (impl $c:ident Trait) => {};
+    (dyn $c:ident Trait) => {};
+}
+
+demo! { impl async Trait }
+//~^ ERROR async closures are unstable
+
+demo! { dyn async Trait }
+//~^ ERROR async closures are unstable
+
+fn main() {}

@@ -0,0 +1,47 @@
+error: ty
+  --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:8:19
+   |
+LL |     ($ty:ty) => { compile_error!("ty"); };
+   |                   ^^^^^^^^^^^^^^^^^^^^
+...
+LL | demo! { impl async Trait }
+   | -------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: ty
+  --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:8:19
+   |
+LL |     ($ty:ty) => { compile_error!("ty"); };
+   |                   ^^^^^^^^^^^^^^^^^^^^
+...
+LL | demo! { dyn async Trait }
+   | ------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error[E0658]: async closures are unstable
+  --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:15:14
+   |
+LL | demo! { impl async Trait }
+   |              ^^^^^
+   |
+   = note: see issue #62290 <https://github.com/rust-lang/rust/issues/62290> for more information
+   = help: add `#![feature(async_closure)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = help: to use an async block, remove the `||`: `async {`
+
+error[E0658]: async closures are unstable
+  --> $DIR/mbe-async-trait-bound-theoretical-regression.rs:18:13
+   |
+LL | demo! { dyn async Trait }
+   |             ^^^^^
+   |
+   = note: see issue #62290 <https://github.com/rust-lang/rust/issues/62290> for more information
+   = help: add `#![feature(async_closure)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = help: to use an async block, remove the `||`: `async {`
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0658`.

tests/ui/async-await/async-fn/trait-bounds-in-macro.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+//@ edition: 2021
+
+macro_rules! x {
+    ($x:item) => {}
+}
+
+x! {
+    async fn foo() -> impl async Fn() { }
+    //~^ ERROR async closures are unstable
+}
+
+fn main() {}

tests/ui/async-await/async-fn/trait-bounds-in-macro.stderr (new file, 14 lines)
@@ -0,0 +1,14 @@
+error[E0658]: async closures are unstable
+  --> $DIR/trait-bounds-in-macro.rs:8:28
+   |
+LL |     async fn foo() -> impl async Fn() { }
+   |                            ^^^^^
+   |
+   = note: see issue #62290 <https://github.com/rust-lang/rust/issues/62290> for more information
+   = help: add `#![feature(async_closure)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = help: to use an async block, remove the `||`: `async {`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0658`.

@@ -1,11 +1,11 @@
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
 error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())`
   --> $DIR/associated-type.rs:31:1
    |

@@ -1,11 +1,11 @@
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Projection, AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) })
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, RePlaceholder(!2_BoundRegion { var: 0, kind: BrNamed(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), 'a) })], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit) }
 error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), _)>` for type `for<'a> fn(&'a (), _)`
   --> $DIR/associated-type.rs:31:1
    |

tests/ui/compiletest-self-test/aux-aux.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
+//@ aux-crate: aux_aux_foo=aux_aux_foo.rs
+//@ aux-crate: aux_aux_bar=aux_aux_bar.rs
+//@ edition: 2021
+//@ compile-flags: --crate-type lib
+//@ check-pass
+
+use aux_aux_foo::Bar as IndirectBar;
+use aux_aux_bar::Bar as DirectBar;
+
+fn foo(x: IndirectBar) {}
+
+fn main() {
+    foo(DirectBar);
+}

tests/ui/compiletest-self-test/auxiliary/aux_aux_bar.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+//@ edition: 2021
+
+pub struct Bar;

tests/ui/compiletest-self-test/auxiliary/aux_aux_foo.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+//@ aux-crate: aux_aux_bar=aux_aux_bar.rs
+//@ edition: 2021
+
+pub use aux_aux_bar::Bar;

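For a rough picture of what these directives turn into, the sketch below re-applies the --extern formatting from the build_all_auxiliary hunk near the top of this diff to the two aux-crate entries of aux-aux.rs. The output directory and the .rlib file names are assumptions; the real values come from aux_output_dir() and get_lib_name, which are not reproduced here.

use std::path::Path;

fn main() {
    // Hypothetical aux output directory; the real one comes from `aux_output_dir()`.
    let aux_dir = Path::new("$TEST_BUILD_DIR/aux-aux/auxiliary");

    // `aux-crate: aux_aux_foo=aux_aux_foo.rs` and `aux-crate: aux_aux_bar=aux_aux_bar.rs`
    // from `aux-aux.rs`; the library names assume plain rlibs are produced.
    for (aux_name, lib_name) in [("aux_aux_foo", "libaux_aux_foo.rlib"), ("aux_aux_bar", "libaux_aux_bar.rlib")] {
        // Same format string as in the compiletest hunk above.
        let flag = format!("{}={}/{}", aux_name, aux_dir.display(), lib_name);
        println!("--extern {flag}");
    }
}
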
@@ -1,4 +1,4 @@
-//@ check-pass
+// This is just `mbe-async-trait-bound-theoretical-regression.rs` in practice.
 
 //@ edition:2021
 // for the `impl` + keyword test
@@ -11,5 +11,7 @@ macro_rules! impl_primitive {
 }
 
 impl_primitive!(impl async);
+//~^ ERROR expected identifier, found `<eof>`
+//~| ERROR async closures are unstable
 
 fn main() {}

tests/ui/parser/bad-recover-kw-after-impl.stderr (new file, 23 lines)
@@ -0,0 +1,23 @@
+error: expected identifier, found `<eof>`
+  --> $DIR/bad-recover-kw-after-impl.rs:13:22
+   |
+LL |     ($ty:ty) => {
+   |      ------ while parsing argument for this `ty` macro fragment
+...
+LL | impl_primitive!(impl async);
+   |                      ^^^^^ expected identifier
+
+error[E0658]: async closures are unstable
+  --> $DIR/bad-recover-kw-after-impl.rs:13:22
+   |
+LL | impl_primitive!(impl async);
+   |                      ^^^^^
+   |
+   = note: see issue #62290 <https://github.com/rust-lang/rust/issues/62290> for more information
+   = help: add `#![feature(async_closure)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = help: to use an async block, remove the `||`: `async {`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0658`.

@@ -8,7 +8,7 @@ fn foo2(_: &dyn (Drop + AsRef<str>)) {} //~ ERROR incorrect parentheses around t
 fn foo2_no_space(_: &dyn(Drop + AsRef<str>)) {} //~ ERROR incorrect parentheses around trait bounds
 
 fn foo3(_: &dyn {Drop + AsRef<str>}) {} //~ ERROR expected parameter name, found `{`
-//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{`
+//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `async`, `const`, `for`, `~`, lifetime, or path, found `{`
 //~| ERROR at least one trait is required for an object type
 
 fn foo4(_: &dyn <Drop + AsRef<str>>) {} //~ ERROR expected identifier, found `<`
@@ -34,11 +34,11 @@ error: expected parameter name, found `{`
 LL | fn foo3(_: &dyn {Drop + AsRef<str>}) {}
    |                 ^ expected parameter name
 
-error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{`
+error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `async`, `const`, `for`, `~`, lifetime, or path, found `{`
   --> $DIR/trait-object-delimiters.rs:10:17
    |
 LL | fn foo3(_: &dyn {Drop + AsRef<str>}) {}
-   |                -^ expected one of 11 possible tokens
+   |                -^ expected one of 12 possible tokens
    |                |
    |                help: missing `,`
 

@@ -6,15 +6,16 @@
 
 macro_rules! demo {
     ($ty:ty) => { compile_error!("ty"); };
-    (impl $c:ident) => {};
-    (dyn $c:ident) => {};
+    //~^ ERROR ty
+    //~| ERROR ty
+    (impl $c:ident Trait) => {};
+    (dyn $c:ident Trait) => {};
 }
 
-demo! { impl const }
-//~^ ERROR expected identifier, found `<eof>`
+demo! { impl const Trait }
+//~^ ERROR const trait impls are experimental
 
-demo! { dyn const }
+demo! { dyn const Trait }
 //~^ ERROR const trait impls are experimental
-//~| ERROR expected identifier, found `<eof>`
 
 fn main() {}

@@ -1,31 +1,45 @@
-error: expected identifier, found `<eof>`
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:13:14
+error: ty
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:8:19
    |
 LL |     ($ty:ty) => { compile_error!("ty"); };
-   |      ------ while parsing argument for this `ty` macro fragment
+   |                   ^^^^^^^^^^^^^^^^^^^^
 ...
-LL | demo! { impl const }
-   |              ^^^^^ expected identifier
+LL | demo! { impl const Trait }
+   | -------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error: expected identifier, found `<eof>`
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13
+error: ty
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:8:19
    |
 LL |     ($ty:ty) => { compile_error!("ty"); };
-   |      ------ while parsing argument for this `ty` macro fragment
+   |                   ^^^^^^^^^^^^^^^^^^^^
 ...
-LL | demo! { dyn const }
-   |             ^^^^^ expected identifier
+LL | demo! { dyn const Trait }
+   | ------------------------- in this macro invocation
+   |
+   = note: this error originates in the macro `demo` (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0658]: const trait impls are experimental
-  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:15:14
+   |
+LL | demo! { impl const Trait }
+   |              ^^^^^
+   |
+   = note: see issue #67792 <https://github.com/rust-lang/rust/issues/67792> for more information
+   = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0658]: const trait impls are experimental
+  --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:18:13
    |
-LL | demo! { dyn const }
+LL | demo! { dyn const Trait }
    |             ^^^^^
    |
    = note: see issue #67792 <https://github.com/rust-lang/rust/issues/67792> for more information
    = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
 
 For more information about this error, try `rustc --explain E0658`.

@@ -13,14 +13,14 @@ LL | #![feature(lazy_type_alias)]
 = note: see issue #112792 <https://github.com/rust-lang/rust/issues/112792> for more information
 = note: `#[warn(incomplete_features)]` on by default
 
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
-WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: Alias(Weak, AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) })
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [ReBound(DebruijnIndex(0), BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
+WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [RePlaceholder(!1_BoundRegion { var: 0, kind: BrNamed(DefId(0:15 ~ issue_118950_root_region[d54f]::{impl#1}::'a), 'a) }), ?1t], def_id: DefId(0:8 ~ issue_118950_root_region[d54f]::Assoc) }
 error[E0119]: conflicting implementations of trait `Overlap<fn(_)>` for type `fn(_)`
   --> $DIR/issue-118950-root-region.rs:19:1
    |