Auto merge of #134125 - fmease:rollup-u38o3ob, r=fmease

Rollup of 11 pull requests

Successful merges:

- #133478 (jsondocck: Parse, don't validate commands.)
- #133967 ([AIX] Pass -bnoipath when adding rust upstream dynamic crates)
- #133970 ([AIX] Replace sa_sigaction with sa_union.__su_sigaction for AIX)
- #133980 ([AIX] Remove option "-n" from AIX "ln" command)
- #134008 (Make `Copy` unsafe to implement for ADTs with `unsafe` fields)
- #134017 (Don't use `AsyncFnOnce::CallOnceFuture` bounds for signature deduction)
- #134023 (handle cygwin environment in `install::sanitize_sh`)
- #134041 (Use SourceMap to load debugger visualizer files)
- #134065 (Move `write_graphviz_results`)
- #134106 (Add compiler-maintainers who requested to be on review rotation)
- #134123 (bootstrap: Forward cargo JSON output to stdout, not stderr)

Failed merges:

- #134120 (Remove Felix from ping groups and review rotation)

r? `@ghost`
`@rustbot` modify labels: rollup

commit 33c245b9e9

31 changed files with 604 additions and 536 deletions
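A note on #134008 before the diff: `Copy` itself stays a safe trait, but implementing it for an ADT that declares `unsafe` fields now requires an `unsafe impl`, mirroring how unsafe traits already work. The sketch below is not taken from this commit; it only illustrates the shape of that requirement using the stable unsafe trait `Send`, with an invented type, and does not show the unstable `unsafe_fields` syntax itself.

```rust
// Minimal sketch, not from this commit: with ordinary unsafe traits, the implementer
// asserts an invariant the compiler can't check by writing `unsafe impl`. After
// #134008, `Copy` demands the same `unsafe impl` form *only* when the implementing
// ADT has `unsafe` fields (an unstable feature not used in this snippet).
struct RawHandle(*mut u8);

// SAFETY: illustrative only; the claim is that the raw pointer is never used unsafely
// across threads.
unsafe impl Send for RawHandle {}

fn main() {}
```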
@@ -55,26 +55,26 @@ impl<T: ?Sized> LegacyReceiver for &mut T {}
 impl<T: ?Sized> LegacyReceiver for Box<T> {}
 
 #[lang = "copy"]
-pub unsafe trait Copy {}
+pub trait Copy {}
 
-unsafe impl Copy for bool {}
+impl Copy for bool {}
-unsafe impl Copy for u8 {}
+impl Copy for u8 {}
-unsafe impl Copy for u16 {}
+impl Copy for u16 {}
-unsafe impl Copy for u32 {}
+impl Copy for u32 {}
-unsafe impl Copy for u64 {}
+impl Copy for u64 {}
-unsafe impl Copy for u128 {}
+impl Copy for u128 {}
-unsafe impl Copy for usize {}
+impl Copy for usize {}
-unsafe impl Copy for i8 {}
+impl Copy for i8 {}
-unsafe impl Copy for i16 {}
+impl Copy for i16 {}
-unsafe impl Copy for i32 {}
+impl Copy for i32 {}
-unsafe impl Copy for isize {}
+impl Copy for isize {}
-unsafe impl Copy for f32 {}
+impl Copy for f32 {}
-unsafe impl Copy for f64 {}
+impl Copy for f64 {}
-unsafe impl Copy for char {}
+impl Copy for char {}
-unsafe impl<'a, T: ?Sized> Copy for &'a T {}
+impl<'a, T: ?Sized> Copy for &'a T {}
-unsafe impl<T: ?Sized> Copy for *const T {}
+impl<T: ?Sized> Copy for *const T {}
-unsafe impl<T: ?Sized> Copy for *mut T {}
+impl<T: ?Sized> Copy for *mut T {}
-unsafe impl<T: Copy> Copy for Option<T> {}
+impl<T: Copy> Copy for Option<T> {}
 
 #[lang = "sync"]
 pub unsafe trait Sync {}
 
@@ -52,24 +52,24 @@ impl<T: ?Sized> LegacyReceiver for &mut T {}
 impl<T: ?Sized, A: Allocator> LegacyReceiver for Box<T, A> {}
 
 #[lang = "copy"]
-pub unsafe trait Copy {}
+pub trait Copy {}
 
-unsafe impl Copy for bool {}
+impl Copy for bool {}
-unsafe impl Copy for u8 {}
+impl Copy for u8 {}
-unsafe impl Copy for u16 {}
+impl Copy for u16 {}
-unsafe impl Copy for u32 {}
+impl Copy for u32 {}
-unsafe impl Copy for u64 {}
+impl Copy for u64 {}
-unsafe impl Copy for usize {}
+impl Copy for usize {}
-unsafe impl Copy for i8 {}
+impl Copy for i8 {}
-unsafe impl Copy for i16 {}
+impl Copy for i16 {}
-unsafe impl Copy for i32 {}
+impl Copy for i32 {}
-unsafe impl Copy for isize {}
+impl Copy for isize {}
-unsafe impl Copy for f32 {}
+impl Copy for f32 {}
-unsafe impl Copy for f64 {}
+impl Copy for f64 {}
-unsafe impl Copy for char {}
+impl Copy for char {}
-unsafe impl<'a, T: ?Sized> Copy for &'a T {}
+impl<'a, T: ?Sized> Copy for &'a T {}
-unsafe impl<T: ?Sized> Copy for *const T {}
+impl<T: ?Sized> Copy for *const T {}
-unsafe impl<T: ?Sized> Copy for *mut T {}
+impl<T: ?Sized> Copy for *mut T {}
 
 #[lang = "sync"]
 pub unsafe trait Sync {}
 
@@ -2745,6 +2745,15 @@ fn add_upstream_rust_crates(
         .find(|(ty, _)| *ty == crate_type)
         .expect("failed to find crate type in dependency format list");
 
+    if sess.target.is_like_aix {
+        // Unlike ELF linkers, AIX doesn't feature `DT_SONAME` to override
+        // the dependency name when outputing a shared library. Thus, `ld` will
+        // use the full path to shared libraries as the dependency if passed it
+        // by default unless `noipath` is passed.
+        // https://www.ibm.com/docs/en/aix/7.3?topic=l-ld-command.
+        cmd.link_or_cc_arg("-bnoipath");
+    }
+
     for &cnum in &codegen_results.crate_info.used_crates {
         // We may not pass all crates through to the linker. Some crates may appear statically in
         // an existing dylib, meaning we'll pick up all the symbols from the dylib.
@@ -103,7 +103,7 @@ fn visit_implementation_of_copy(checker: &Checker<'_>) -> Result<(), ErrorGuaran
     }
 
     let cause = traits::ObligationCause::misc(DUMMY_SP, impl_did);
-    match type_allowed_to_implement_copy(tcx, param_env, self_type, cause) {
+    match type_allowed_to_implement_copy(tcx, param_env, self_type, cause, impl_header.safety) {
        Ok(()) => Ok(()),
        Err(CopyImplementationError::InfringingFields(fields)) => {
            let span = tcx.hir().expect_item(impl_did).expect_impl().self_ty.span;
@@ -123,6 +123,12 @@ fn visit_implementation_of_copy(checker: &Checker<'_>) -> Result<(), ErrorGuaran
            let span = tcx.hir().expect_item(impl_did).expect_impl().self_ty.span;
            Err(tcx.dcx().emit_err(errors::CopyImplOnTypeWithDtor { span }))
        }
+       Err(CopyImplementationError::HasUnsafeFields) => {
+           let span = tcx.hir().expect_item(impl_did).expect_impl().self_ty.span;
+           Err(tcx
+               .dcx()
+               .span_delayed_bug(span, format!("cannot implement `Copy` for `{}`", self_type)))
+       }
    }
 }
 
@@ -3,7 +3,7 @@
 
 use rustc_errors::codes::*;
 use rustc_errors::struct_span_code_err;
-use rustc_hir::Safety;
+use rustc_hir::{LangItem, Safety};
 use rustc_middle::ty::ImplPolarity::*;
 use rustc_middle::ty::print::PrintTraitRefExt as _;
 use rustc_middle::ty::{ImplTraitHeader, TraitDef, TyCtxt};
@@ -20,7 +20,19 @@ pub(super) fn check_item(
        tcx.generics_of(def_id).own_params.iter().find(|p| p.pure_wrt_drop).map(|_| "may_dangle");
    let trait_ref = trait_header.trait_ref.instantiate_identity();
 
-   match (trait_def.safety, unsafe_attr, trait_header.safety, trait_header.polarity) {
+   let is_copy = tcx.is_lang_item(trait_def.def_id, LangItem::Copy);
+   let trait_def_safety = if is_copy {
+       // If `Self` has unsafe fields, `Copy` is unsafe to implement.
+       if trait_header.trait_ref.skip_binder().self_ty().has_unsafe_fields() {
+           rustc_hir::Safety::Unsafe
+       } else {
+           rustc_hir::Safety::Safe
+       }
+   } else {
+       trait_def.safety
+   };
+
+   match (trait_def_safety, unsafe_attr, trait_header.safety, trait_header.polarity) {
        (Safety::Safe, None, Safety::Unsafe, Positive | Reservation) => {
            let span = tcx.def_span(def_id);
            return Err(struct_span_code_err!(
@@ -48,12 +60,22 @@ pub(super) fn check_item(
                "the trait `{}` requires an `unsafe impl` declaration",
                trait_ref.print_trait_sugared()
            )
-           .with_note(format!(
-               "the trait `{}` enforces invariants that the compiler can't check. \
-                Review the trait documentation and make sure this implementation \
-                upholds those invariants before adding the `unsafe` keyword",
-               trait_ref.print_trait_sugared()
-           ))
+           .with_note(if is_copy {
+               format!(
+                   "the trait `{}` cannot be safely implemented for `{}` \
+                    because it has unsafe fields. Review the invariants \
+                    of those fields before adding an `unsafe impl`",
+                   trait_ref.print_trait_sugared(),
+                   trait_ref.self_ty(),
+               )
+           } else {
+               format!(
+                   "the trait `{}` enforces invariants that the compiler can't check. \
+                    Review the trait documentation and make sure this implementation \
+                    upholds those invariants before adding the `unsafe` keyword",
+                   trait_ref.print_trait_sugared()
+               )
+           })
            .with_span_suggestion_verbose(
                span.shrink_to_lo(),
                "add `unsafe` to this trait implementation",
@@ -454,20 +454,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        closure_kind: hir::ClosureKind,
        projection: ty::PolyProjectionPredicate<'tcx>,
    ) -> Option<ExpectedSig<'tcx>> {
-       let tcx = self.tcx;
-
-       let trait_def_id = projection.trait_def_id(tcx);
+       let def_id = projection.projection_def_id();
 
        // For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
        // for closures and async closures, respectively.
        match closure_kind {
-           hir::ClosureKind::Closure
-               if self.tcx.fn_trait_kind_from_def_id(trait_def_id).is_some() =>
-           {
+           hir::ClosureKind::Closure if self.tcx.is_lang_item(def_id, LangItem::FnOnceOutput) => {
                self.extract_sig_from_projection(cause_span, projection)
            }
            hir::ClosureKind::CoroutineClosure(hir::CoroutineDesugaring::Async)
-               if self.tcx.async_fn_trait_kind_from_def_id(trait_def_id).is_some() =>
+               if self.tcx.is_lang_item(def_id, LangItem::AsyncFnOnceOutput) =>
            {
                self.extract_sig_from_projection(cause_span, projection)
            }
@@ -475,7 +471,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            // `F: FnOnce() -> Fut, Fut: Future<Output = T>` style bound. Let's still
            // guide inference here, since it's beneficial for the user.
            hir::ClosureKind::CoroutineClosure(hir::CoroutineDesugaring::Async)
-               if self.tcx.fn_trait_kind_from_def_id(trait_def_id).is_some() =>
+               if self.tcx.is_lang_item(def_id, LangItem::FnOnceOutput) =>
            {
                self.extract_sig_from_projection_and_future_bound(cause_span, projection)
            }
@@ -625,6 +625,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingCopyImplementations {
                cx.param_env,
                ty,
                traits::ObligationCause::misc(item.span, item.owner_id.def_id),
+               hir::Safety::Safe,
            )
            .is_ok()
        {
@@ -980,11 +980,7 @@ impl<'tcx> rustc_type_ir::inherent::Ty<TyCtxt<'tcx>> for Ty<'tcx> {
    }
 
    fn has_unsafe_fields(self) -> bool {
-       if let ty::Adt(adt_def, ..) = self.kind() {
-           adt_def.all_fields().any(|x| x.safety == hir::Safety::Unsafe)
-       } else {
-           false
-       }
+       Ty::has_unsafe_fields(self)
    }
 }
 
@@ -1288,6 +1288,15 @@ impl<'tcx> Ty<'tcx> {
        }
    }
 
+   /// Checks whether this type is an ADT that has unsafe fields.
+   pub fn has_unsafe_fields(self) -> bool {
+       if let ty::Adt(adt_def, ..) = self.kind() {
+           adt_def.all_fields().any(|x| x.safety == hir::Safety::Unsafe)
+       } else {
+           false
+       }
+   }
+
    /// Get morphology of the async drop glue, needed for types which do not
    /// use async drop. To get async drop glue morphology for a definition see
    /// [`TyCtxt::async_drop_glue_morphology`]. Used for `AsyncDestruct::Destructor`
@@ -2,19 +2,186 @@
 
 use std::borrow::Cow;
 use std::cell::RefCell;
+use std::ffi::OsString;
+use std::path::PathBuf;
 use std::sync::OnceLock;
 use std::{io, ops, str};
 
 use regex::Regex;
-use rustc_graphviz as dot;
+use rustc_hir::def_id::DefId;
 use rustc_index::bit_set::BitSet;
-use rustc_middle::mir::{self, BasicBlock, Body, Location, graphviz_safe_def_name};
+use rustc_middle::mir::{
+    self, BasicBlock, Body, Location, create_dump_file, dump_enabled, graphviz_safe_def_name,
+    traversal,
+};
+use rustc_middle::ty::TyCtxt;
+use rustc_middle::ty::print::with_no_trimmed_paths;
+use rustc_span::symbol::{Symbol, sym};
+use tracing::debug;
+use {rustc_ast as ast, rustc_graphviz as dot};
 
 use super::fmt::{DebugDiffWithAdapter, DebugWithAdapter, DebugWithContext};
 use super::{Analysis, CallReturnPlaces, Direction, Results, ResultsCursor, ResultsVisitor};
+use crate::errors::{
+    DuplicateValuesFor, PathMustEndInFilename, RequiresAnArgument, UnknownFormatter,
+};
+
+/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
+/// `rustc_mir` attributes and `-Z dump-mir-dataflow`. The `Result` in and the `Results` out are
+/// the same.
+pub(super) fn write_graphviz_results<'tcx, A>(
+    tcx: TyCtxt<'tcx>,
+    body: &Body<'tcx>,
+    results: &mut Results<'tcx, A>,
+    pass_name: Option<&'static str>,
+) -> std::io::Result<()>
+where
+    A: Analysis<'tcx>,
+    A::Domain: DebugWithContext<A>,
+{
+    use std::fs;
+    use std::io::Write;
+
+    let def_id = body.source.def_id();
+    let Ok(attrs) = RustcMirAttrs::parse(tcx, def_id) else {
+        // Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
+        return Ok(());
+    };
+
+    let file = try {
+        match attrs.output_path(A::NAME) {
+            Some(path) => {
+                debug!("printing dataflow results for {:?} to {}", def_id, path.display());
+                if let Some(parent) = path.parent() {
+                    fs::create_dir_all(parent)?;
+                }
+                fs::File::create_buffered(&path)?
+            }
+
+            None if dump_enabled(tcx, A::NAME, def_id) => {
+                create_dump_file(tcx, "dot", false, A::NAME, &pass_name.unwrap_or("-----"), body)?
+            }
+
+            _ => return Ok(()),
+        }
+    };
+    let mut file = match file {
+        Ok(f) => f,
+        Err(e) => return Err(e),
+    };
+
+    let style = match attrs.formatter {
+        Some(sym::two_phase) => OutputStyle::BeforeAndAfter,
+        _ => OutputStyle::AfterOnly,
+    };
+
+    let mut buf = Vec::new();
+
+    let graphviz = Formatter::new(body, results, style);
+    let mut render_opts =
+        vec![dot::RenderOption::Fontname(tcx.sess.opts.unstable_opts.graphviz_font.clone())];
+    if tcx.sess.opts.unstable_opts.graphviz_dark_mode {
+        render_opts.push(dot::RenderOption::DarkTheme);
+    }
+    let r = with_no_trimmed_paths!(dot::render_opts(&graphviz, &mut buf, &render_opts));
+
+    let lhs = try {
+        r?;
+        file.write_all(&buf)?;
+    };
+
+    lhs
+}
+
+#[derive(Default)]
+struct RustcMirAttrs {
+    basename_and_suffix: Option<PathBuf>,
+    formatter: Option<Symbol>,
+}
+
+impl RustcMirAttrs {
+    fn parse(tcx: TyCtxt<'_>, def_id: DefId) -> Result<Self, ()> {
+        let mut result = Ok(());
+        let mut ret = RustcMirAttrs::default();
+
+        let rustc_mir_attrs = tcx
+            .get_attrs(def_id, sym::rustc_mir)
+            .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
+
+        for attr in rustc_mir_attrs {
+            let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) {
+                Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
+                    let path = PathBuf::from(s.to_string());
+                    match path.file_name() {
+                        Some(_) => Ok(path),
+                        None => {
+                            tcx.dcx().emit_err(PathMustEndInFilename { span: attr.span() });
+                            Err(())
+                        }
+                    }
+                })
+            } else if attr.has_name(sym::borrowck_graphviz_format) {
+                Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
+                    sym::gen_kill | sym::two_phase => Ok(s),
+                    _ => {
+                        tcx.dcx().emit_err(UnknownFormatter { span: attr.span() });
+                        Err(())
+                    }
+                })
+            } else {
+                Ok(())
+            };
+
+            result = result.and(attr_result);
+        }
+
+        result.map(|()| ret)
+    }
+
+    fn set_field<T>(
+        field: &mut Option<T>,
+        tcx: TyCtxt<'_>,
+        attr: &ast::MetaItemInner,
+        mapper: impl FnOnce(Symbol) -> Result<T, ()>,
+    ) -> Result<(), ()> {
+        if field.is_some() {
+            tcx.dcx()
+                .emit_err(DuplicateValuesFor { span: attr.span(), name: attr.name_or_empty() });
+
+            return Err(());
+        }
+
+        if let Some(s) = attr.value_str() {
+            *field = Some(mapper(s)?);
+            Ok(())
+        } else {
+            tcx.dcx()
+                .emit_err(RequiresAnArgument { span: attr.span(), name: attr.name_or_empty() });
+            Err(())
+        }
+    }
+
+    /// Returns the path where dataflow results should be written, or `None`
+    /// `borrowck_graphviz_postflow` was not specified.
+    ///
+    /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
+    ///
+    /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
+    fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
+        let mut ret = self.basename_and_suffix.as_ref().cloned()?;
+        let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
+
+        let mut file_name: OsString = analysis_name.into();
+        file_name.push("_");
+        file_name.push(suffix);
+        ret.set_file_name(file_name);
+
+        Some(ret)
+    }
+}
+
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub(crate) enum OutputStyle {
+enum OutputStyle {
    AfterOnly,
    BeforeAndAfter,
 }
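The moved `write_graphviz_results` is driven by the internal `rustc_mir` attribute keys parsed above (`borrowck_graphviz_postflow`, `borrowck_graphviz_format`) together with `-Z dump-mir-dataflow`. A hedged sketch of how a test function might request a dump follows; the attribute placement is inferred from those keys, and the exact compiler invocation is an assumption rather than something stated in this commit.

```rust
// Illustrative sketch only: internal, nightly-only attributes; the function body is
// invented. Compile with something like `-Z dump-mir-dataflow` on a nightly compiler.
#![feature(rustc_attrs)]

#[rustc_mir(borrowck_graphviz_postflow = "suffix.dot", borrowck_graphviz_format = "two_phase")]
fn example(flag: bool) -> i32 {
    // Per the `output_path` docs above, "suffix.dot" becomes "<analysis_name>_suffix.dot".
    if flag { 1 } else { 2 }
}

fn main() {
    example(true);
}
```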
@@ -28,7 +195,7 @@ impl OutputStyle {
    }
 }
 
-pub(crate) struct Formatter<'mir, 'tcx, A>
+struct Formatter<'mir, 'tcx, A>
 where
    A: Analysis<'tcx>,
 {
@@ -45,12 +212,12 @@ impl<'mir, 'tcx, A> Formatter<'mir, 'tcx, A>
 where
    A: Analysis<'tcx>,
 {
-   pub(crate) fn new(
+   fn new(
        body: &'mir Body<'tcx>,
        results: &'mir mut Results<'tcx, A>,
        style: OutputStyle,
    ) -> Self {
-       let reachable = mir::traversal::reachable_as_bitset(body);
+       let reachable = traversal::reachable_as_bitset(body);
        Formatter { cursor: results.as_results_cursor(body).into(), style, reachable }
    }
 
@@ -61,7 +228,7 @@ where
 
 /// A pair of a basic block and an index into that basic blocks `successors`.
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
-pub(crate) struct CfgEdge {
+struct CfgEdge {
    source: BasicBlock,
    index: usize,
 }
@@ -520,7 +687,7 @@ struct StateDiffCollector<D> {
 
 impl<D> StateDiffCollector<D> {
    fn run<'tcx, A>(
-       body: &mir::Body<'tcx>,
+       body: &Body<'tcx>,
        block: BasicBlock,
        results: &mut Results<'tcx, A>,
        style: OutputStyle,
@@ -42,7 +42,7 @@ use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, Terminator
 use rustc_middle::ty::TyCtxt;
 use tracing::error;
 
-use self::results::write_graphviz_results;
+use self::graphviz::write_graphviz_results;
 use super::fmt::DebugWithContext;
 
 mod cursor;
@@ -1,22 +1,9 @@
 //! Dataflow analysis results.
 
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-use rustc_hir::def_id::DefId;
 use rustc_index::IndexVec;
-use rustc_middle::mir::{self, BasicBlock, create_dump_file, dump_enabled, traversal};
-use rustc_middle::ty::TyCtxt;
-use rustc_middle::ty::print::with_no_trimmed_paths;
-use rustc_span::symbol::{Symbol, sym};
-use tracing::debug;
-use {rustc_ast as ast, rustc_graphviz as dot};
+use rustc_middle::mir::{BasicBlock, Body, traversal};
 
-use super::fmt::DebugWithContext;
-use super::{Analysis, ResultsCursor, ResultsVisitor, graphviz, visit_results};
-use crate::errors::{
-    DuplicateValuesFor, PathMustEndInFilename, RequiresAnArgument, UnknownFormatter,
-};
+use super::{Analysis, ResultsCursor, ResultsVisitor, visit_results};
 use crate::framework::cursor::ResultsHandle;
 
 pub type EntrySets<'tcx, A> = IndexVec<BasicBlock, <A as Analysis<'tcx>>::Domain>;
@@ -41,16 +28,13 @@ where
    /// `Results` is also used outside the cursor.
    pub fn as_results_cursor<'mir>(
        &'mir mut self,
-       body: &'mir mir::Body<'tcx>,
+       body: &'mir Body<'tcx>,
    ) -> ResultsCursor<'mir, 'tcx, A> {
        ResultsCursor::new(body, ResultsHandle::BorrowedMut(self))
    }
 
    /// Creates a `ResultsCursor` that takes ownership of the `Results`.
-   pub fn into_results_cursor<'mir>(
-       self,
-       body: &'mir mir::Body<'tcx>,
-   ) -> ResultsCursor<'mir, 'tcx, A> {
+   pub fn into_results_cursor<'mir>(self, body: &'mir Body<'tcx>) -> ResultsCursor<'mir, 'tcx, A> {
        ResultsCursor::new(body, ResultsHandle::Owned(self))
    }
 
@@ -61,7 +45,7 @@ where
 
    pub fn visit_with<'mir>(
        &mut self,
-       body: &'mir mir::Body<'tcx>,
+       body: &'mir Body<'tcx>,
        blocks: impl IntoIterator<Item = BasicBlock>,
        vis: &mut impl ResultsVisitor<'mir, 'tcx, A>,
    ) {
@@ -70,166 +54,10 @@ where
 
    pub fn visit_reachable_with<'mir>(
        &mut self,
-       body: &'mir mir::Body<'tcx>,
+       body: &'mir Body<'tcx>,
        vis: &mut impl ResultsVisitor<'mir, 'tcx, A>,
    ) {
        let blocks = traversal::reachable(body);
        visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
    }
 }
-
-// Graphviz
-
-/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
-/// `rustc_mir` attributes and `-Z dump-mir-dataflow`. The `Result` in and the `Results` out are
-/// the same.
-pub(super) fn write_graphviz_results<'tcx, A>(
-    tcx: TyCtxt<'tcx>,
-    body: &mir::Body<'tcx>,
-    results: &mut Results<'tcx, A>,
-    pass_name: Option<&'static str>,
-) -> std::io::Result<()>
-where
-    A: Analysis<'tcx>,
-    A::Domain: DebugWithContext<A>,
-{
-    use std::fs;
-    use std::io::Write;
-
-    let def_id = body.source.def_id();
-    let Ok(attrs) = RustcMirAttrs::parse(tcx, def_id) else {
-        // Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
-        return Ok(());
-    };
-
-    let file = try {
-        match attrs.output_path(A::NAME) {
-            Some(path) => {
-                debug!("printing dataflow results for {:?} to {}", def_id, path.display());
-                if let Some(parent) = path.parent() {
-                    fs::create_dir_all(parent)?;
-                }
-                fs::File::create_buffered(&path)?
-            }
-
-            None if dump_enabled(tcx, A::NAME, def_id) => {
-                create_dump_file(tcx, "dot", false, A::NAME, &pass_name.unwrap_or("-----"), body)?
-            }
-
-            _ => return Ok(()),
-        }
-    };
-    let mut file = match file {
-        Ok(f) => f,
-        Err(e) => return Err(e),
-    };
-
-    let style = match attrs.formatter {
-        Some(sym::two_phase) => graphviz::OutputStyle::BeforeAndAfter,
-        _ => graphviz::OutputStyle::AfterOnly,
-    };
-
-    let mut buf = Vec::new();
-
-    let graphviz = graphviz::Formatter::new(body, results, style);
-    let mut render_opts =
-        vec![dot::RenderOption::Fontname(tcx.sess.opts.unstable_opts.graphviz_font.clone())];
-    if tcx.sess.opts.unstable_opts.graphviz_dark_mode {
-        render_opts.push(dot::RenderOption::DarkTheme);
-    }
-    let r = with_no_trimmed_paths!(dot::render_opts(&graphviz, &mut buf, &render_opts));
-
-    let lhs = try {
-        r?;
-        file.write_all(&buf)?;
-    };
-
-    lhs
-}
-
-#[derive(Default)]
-struct RustcMirAttrs {
-    basename_and_suffix: Option<PathBuf>,
-    formatter: Option<Symbol>,
-}
-
-impl RustcMirAttrs {
-    fn parse(tcx: TyCtxt<'_>, def_id: DefId) -> Result<Self, ()> {
-        let mut result = Ok(());
-        let mut ret = RustcMirAttrs::default();
-
-        let rustc_mir_attrs = tcx
-            .get_attrs(def_id, sym::rustc_mir)
-            .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
-
-        for attr in rustc_mir_attrs {
-            let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) {
-                Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
-                    let path = PathBuf::from(s.to_string());
-                    match path.file_name() {
-                        Some(_) => Ok(path),
-                        None => {
-                            tcx.dcx().emit_err(PathMustEndInFilename { span: attr.span() });
-                            Err(())
-                        }
-                    }
-                })
-            } else if attr.has_name(sym::borrowck_graphviz_format) {
-                Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
-                    sym::gen_kill | sym::two_phase => Ok(s),
-                    _ => {
-                        tcx.dcx().emit_err(UnknownFormatter { span: attr.span() });
-                        Err(())
-                    }
-                })
-            } else {
-                Ok(())
-            };
-
-            result = result.and(attr_result);
-        }
-
-        result.map(|()| ret)
-    }
-
-    fn set_field<T>(
-        field: &mut Option<T>,
-        tcx: TyCtxt<'_>,
-        attr: &ast::MetaItemInner,
-        mapper: impl FnOnce(Symbol) -> Result<T, ()>,
-    ) -> Result<(), ()> {
-        if field.is_some() {
-            tcx.dcx()
-                .emit_err(DuplicateValuesFor { span: attr.span(), name: attr.name_or_empty() });
-
-            return Err(());
-        }
-
-        if let Some(s) = attr.value_str() {
-            *field = Some(mapper(s)?);
-            Ok(())
-        } else {
-            tcx.dcx()
-                .emit_err(RequiresAnArgument { span: attr.span(), name: attr.name_or_empty() });
-            Err(())
-        }
-    }
-
-    /// Returns the path where dataflow results should be written, or `None`
-    /// `borrowck_graphviz_postflow` was not specified.
-    ///
-    /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
-    ///
-    /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
-    fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
-        let mut ret = self.basename_and_suffix.as_ref().cloned()?;
-        let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
-
-        let mut file_name: OsString = analysis_name.into();
-        file_name.push("_");
-        file_name.push(suffix);
-        ret.set_file_name(file_name);
-
-        Some(ret)
-    }
-}
@@ -1,7 +1,3 @@
-//! Dataflow analyses are built upon some interpretation of the
-//! bitvectors attached to each basic block, represented via a
-//! zero-sized structure.
-
 mod borrowed_locals;
 mod initialized;
 mod liveness;
@@ -1,7 +1,6 @@
 //! Detecting usage of the `#[debugger_visualizer]` attribute.
 
 use rustc_ast::Attribute;
-use rustc_data_structures::sync::Lrc;
 use rustc_expand::base::resolve_path;
 use rustc_middle::middle::debugger_visualizer::{DebuggerVisualizerFile, DebuggerVisualizerType};
 use rustc_middle::query::{LocalCrate, Providers};
@@ -49,10 +48,10 @@ impl DebuggerVisualizerCollector<'_> {
            }
        };
 
-       match std::fs::read(&file) {
-           Ok(contents) => {
+       match self.sess.source_map().load_binary_file(&file) {
+           Ok((source, _)) => {
                self.visualizers.push(DebuggerVisualizerFile::new(
-                   Lrc::from(contents),
+                   source,
                    visualizer_type,
                    file,
                ));
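For reference, the files loaded here come from the stable `#[debugger_visualizer]` attribute; the hunk above only swaps `std::fs::read` for the session's `SourceMap`. A small usage sketch follows; the file names are placeholders, so the snippet only compiles once those files exist next to the source.

```rust
// Placeholder paths: the referenced visualizer files must exist relative to this file.
#![debugger_visualizer(natvis_file = "../Foo.natvis")]
#![debugger_visualizer(gdb_script_file = "../foo_pretty_printers.py")]

pub struct Foo {
    pub value: u32,
}

fn main() {}
```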
@@ -18,6 +18,7 @@ pub enum CopyImplementationError<'tcx> {
    InfringingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>),
    NotAnAdt,
    HasDestructor,
+   HasUnsafeFields,
 }
 
 pub enum ConstParamTyImplementationError<'tcx> {
@@ -39,11 +40,16 @@ pub enum InfringingFieldsReason<'tcx> {
 ///
 /// If it's not an ADT, int ty, `bool`, float ty, `char`, raw pointer, `!`,
 /// a reference or an array returns `Err(NotAnAdt)`.
+///
+/// If the impl is `Safe`, `self_type` must not have unsafe fields. When used to
+/// generate suggestions in lints, `Safe` should be supplied so as to not
+/// suggest implementing `Copy` for types with unsafe fields.
 pub fn type_allowed_to_implement_copy<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    self_type: Ty<'tcx>,
    parent_cause: ObligationCause<'tcx>,
+   impl_safety: hir::Safety,
 ) -> Result<(), CopyImplementationError<'tcx>> {
    let (adt, args) = match self_type.kind() {
        // These types used to have a builtin impl.
@@ -78,6 +84,10 @@ pub fn type_allowed_to_implement_copy<'tcx>(
        return Err(CopyImplementationError::HasDestructor);
    }
 
+   if impl_safety == hir::Safety::Safe && self_type.has_unsafe_fields() {
+       return Err(CopyImplementationError::HasUnsafeFields);
+   }
+
    Ok(())
 }
 
@@ -795,8 +795,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
            | ty::Never
            | ty::Tuple(_)
            | ty::CoroutineWitness(..) => {
-               use rustc_type_ir::inherent::*;
-
                // Only consider auto impls of unsafe traits when there are
                // no unsafe fields.
                if self.tcx().trait_is_unsafe(def_id) && self_ty.has_unsafe_fields() {
@@ -2261,7 +2261,7 @@ pub fn stream_cargo(
            Ok(msg) => {
                if builder.config.json_output {
                    // Forward JSON to stdout.
-                   eprintln!("{line}");
+                   println!("{line}");
                }
                cb(msg)
            }
@@ -21,9 +21,9 @@ const SHELL: &str = "sh";
 
 /// We have to run a few shell scripts, which choke quite a bit on both `\`
 /// characters and on `C:\` paths, so normalize both of them away.
-fn sanitize_sh(path: &Path) -> String {
+fn sanitize_sh(path: &Path, is_cygwin: bool) -> String {
    let path = path.to_str().unwrap().replace('\\', "/");
-   return change_drive(unc_to_lfs(&path)).unwrap_or(path);
+   return if is_cygwin { path } else { change_drive(unc_to_lfs(&path)).unwrap_or(path) };
 
    fn unc_to_lfs(s: &str) -> &str {
        s.strip_prefix("//?/").unwrap_or(s)
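To make the `sanitize_sh` change concrete: on Cygwin the drive-letter rewrite is skipped, while MSYS-style shells still get the `/C/...` form. The helper below is a simplified, hedged re-implementation for illustration only; the real bootstrap code additionally strips `//?/` UNC prefixes via `unc_to_lfs`.

```rust
// Simplified sketch of the intended behavior; not the actual bootstrap implementation.
fn sanitize_sh_sketch(path: &str, is_cygwin: bool) -> String {
    let path = path.replace('\\', "/");
    if is_cygwin {
        // Cygwin's shell copes with `C:/...` paths, so leave the drive prefix alone.
        return path;
    }
    // MSYS-style shells prefer `/C/...` over `C:/...`.
    match path.as_bytes() {
        [drive, b':', b'/', ..] if drive.is_ascii_alphabetic() => {
            format!("/{}/{}", *drive as char, &path[3..])
        }
        _ => path,
    }
}

fn main() {
    assert_eq!(sanitize_sh_sketch(r"C:\rust\install.sh", false), "/C/rust/install.sh");
    assert_eq!(sanitize_sh_sketch(r"C:\rust\install.sh", true), "C:/rust/install.sh");
}
```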
@@ -71,6 +71,7 @@ fn install_sh(
    let prefix = default_path(&builder.config.prefix, "/usr/local");
    let sysconfdir = prefix.join(default_path(&builder.config.sysconfdir, "/etc"));
    let destdir_env = env::var_os("DESTDIR").map(PathBuf::from);
+   let is_cygwin = builder.config.build.is_cygwin();
 
    // Sanity checks on the write access of user.
    //
@@ -103,14 +104,14 @@ fn install_sh(
 
    let mut cmd = command(SHELL);
    cmd.current_dir(&empty_dir)
-       .arg(sanitize_sh(&tarball.decompressed_output().join("install.sh")))
-       .arg(format!("--prefix={}", prepare_dir(&destdir_env, prefix)))
-       .arg(format!("--sysconfdir={}", prepare_dir(&destdir_env, sysconfdir)))
-       .arg(format!("--datadir={}", prepare_dir(&destdir_env, datadir)))
-       .arg(format!("--docdir={}", prepare_dir(&destdir_env, docdir)))
-       .arg(format!("--bindir={}", prepare_dir(&destdir_env, bindir)))
-       .arg(format!("--libdir={}", prepare_dir(&destdir_env, libdir)))
-       .arg(format!("--mandir={}", prepare_dir(&destdir_env, mandir)))
+       .arg(sanitize_sh(&tarball.decompressed_output().join("install.sh"), is_cygwin))
+       .arg(format!("--prefix={}", prepare_dir(&destdir_env, prefix, is_cygwin)))
+       .arg(format!("--sysconfdir={}", prepare_dir(&destdir_env, sysconfdir, is_cygwin)))
+       .arg(format!("--datadir={}", prepare_dir(&destdir_env, datadir, is_cygwin)))
+       .arg(format!("--docdir={}", prepare_dir(&destdir_env, docdir, is_cygwin)))
+       .arg(format!("--bindir={}", prepare_dir(&destdir_env, bindir, is_cygwin)))
+       .arg(format!("--libdir={}", prepare_dir(&destdir_env, libdir, is_cygwin)))
+       .arg(format!("--mandir={}", prepare_dir(&destdir_env, mandir, is_cygwin)))
        .arg("--disable-ldconfig");
    cmd.run(builder);
    t!(fs::remove_dir_all(&empty_dir));
@@ -120,7 +121,7 @@ fn default_path(config: &Option<PathBuf>, default: &str) -> PathBuf {
    config.as_ref().cloned().unwrap_or_else(|| PathBuf::from(default))
 }
 
-fn prepare_dir(destdir_env: &Option<PathBuf>, mut path: PathBuf) -> String {
+fn prepare_dir(destdir_env: &Option<PathBuf>, mut path: PathBuf, is_cygwin: bool) -> String {
    // The DESTDIR environment variable is a standard way to install software in a subdirectory
    // while keeping the original directory structure, even if the prefix or other directories
    // contain absolute paths.
|
||||||
assert!(path.is_absolute(), "could not make the path relative");
|
assert!(path.is_absolute(), "could not make the path relative");
|
||||||
}
|
}
|
||||||
|
|
||||||
sanitize_sh(&path)
|
sanitize_sh(&path, is_cygwin)
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! install {
|
macro_rules! install {
|
||||||
|
|
|
@@ -565,6 +565,12 @@ impl TargetSelection {
        self.ends_with("windows-gnu")
    }
 
+   pub fn is_cygwin(&self) -> bool {
+       self.is_windows() &&
+       // ref. https://cygwin.com/pipermail/cygwin/2022-February/250802.html
+       env::var("OSTYPE").is_ok_and(|v| v.to_lowercase().contains("cygwin"))
+   }
+
    /// Path to the file defining the custom target, if any.
    pub fn filepath(&self) -> Option<&Path> {
        self.file.as_ref().map(Path::new)
@@ -200,6 +200,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
                cx.param_env,
                ty,
                traits::ObligationCause::dummy_with_span(span),
+               rustc_hir::Safety::Safe,
            )
            .is_ok()
        {
@@ -28,7 +28,8 @@ impl Cache {
        }
    }
 
-   pub fn value(&self) -> &Value {
-       &self.value
+   // FIXME: Make this failible, so jsonpath syntax error has line number.
+   pub fn select(&self, path: &str) -> Vec<&Value> {
+       jsonpath_lib::select(&self.value, path).unwrap()
    }
 }
@@ -1,29 +1,7 @@
-use std::error::Error;
-use std::fmt;
-
 use crate::Command;
 
 #[derive(Debug)]
-pub enum CkError {
-    /// A check failed. File didn't exist or failed to match the command
-    FailedCheck(String, Command),
-    /// An error triggered by some other error
-    Induced(Box<dyn Error>),
-}
-
-impl fmt::Display for CkError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            CkError::FailedCheck(msg, cmd) => {
-                write!(f, "Failed check: {} on line {}", msg, cmd.lineno)
-            }
-            CkError::Induced(err) => write!(f, "Check failed: {}", err),
-        }
-    }
-}
-
-impl<T: Error + 'static> From<T> for CkError {
-    fn from(err: T) -> CkError {
-        CkError::Induced(Box::new(err))
-    }
+pub struct CkError {
+    pub message: String,
+    pub command: Command,
 }
@@ -1,8 +1,8 @@
 use std::borrow::Cow;
+use std::process::ExitCode;
 use std::sync::OnceLock;
-use std::{env, fmt, fs};
+use std::{env, fs};
 
-use jsonpath_lib::select;
 use regex::{Regex, RegexBuilder};
 use serde_json::Value;
 
@@ -14,90 +14,134 @@ use cache::Cache;
 use config::parse_config;
 use error::CkError;
 
-fn main() -> Result<(), String> {
+fn main() -> ExitCode {
    let config = parse_config(env::args().collect());
 
    let mut failed = Vec::new();
    let mut cache = Cache::new(&config);
-   let commands = get_commands(&config.template)
-       .map_err(|_| format!("Jsondocck failed for {}", &config.template))?;
+   let Ok(commands) = get_commands(&config.template) else {
+       eprintln!("Jsondocck failed for {}", &config.template);
+       return ExitCode::FAILURE;
+   };
 
    for command in commands {
-       if let Err(e) = check_command(command, &mut cache) {
-           failed.push(e);
+       if let Err(message) = check_command(&command, &mut cache) {
+           failed.push(CkError { command, message });
        }
    }
 
    if failed.is_empty() {
-       Ok(())
+       ExitCode::SUCCESS
    } else {
        for i in failed {
-           eprintln!("{}", i);
+           eprintln!("{}:{}, command failed", config.template, i.command.lineno);
+           eprintln!("{}", i.message)
        }
-       Err(format!("Jsondocck failed for {}", &config.template))
+       ExitCode::FAILURE
    }
 }
 
 #[derive(Debug)]
 pub struct Command {
-   negated: bool,
    kind: CommandKind,
-   args: Vec<String>,
+   path: String,
    lineno: usize,
 }
 
 #[derive(Debug)]
-pub enum CommandKind {
-   Has,
-   Count,
-   Is,
-   IsMany,
-   Set,
+enum CommandKind {
+   /// `//@ has <path>`
+   ///
+   /// Checks the path exists.
+   HasPath,
+
+   /// `//@ has <path> <value>`
+   ///
+   /// Check one thing at the path is equal to the value.
+   HasValue { value: String },
+
+   /// `//@ !has <path>`
+   ///
+   /// Checks the path doesn't exist.
+   HasNotPath,
+
+   /// `//@ is <path> <value>`
+   ///
+   /// Check the path is the given value.
+   Is { value: String },
+
+   /// `//@ is <path> <value> <value>...`
+   ///
+   /// Check that the path matches to exactly every given value.
+   IsMany { values: Vec<String> },
+
+   /// `//@ !is <path> <value>`
+   ///
+   /// Check the path isn't the given value.
+   IsNot { value: String },
+
+   /// `//@ count <path> <value>`
+   ///
+   /// Check the path has the expected number of matches.
+   CountIs { expected: usize },
+
+   /// `//@ set <name> = <path>`
+   Set { variable: String },
 }
 
 impl CommandKind {
-   fn validate(&self, args: &[String], lineno: usize) -> bool {
-       // FIXME(adotinthevoid): We should "parse, don't validate" here, so we avoid ad-hoc
-       // indexing in check_command.
-       let count = match self {
-           CommandKind::Has => (1..=2).contains(&args.len()),
-           CommandKind::IsMany => args.len() >= 2,
-           CommandKind::Count | CommandKind::Is => 2 == args.len(),
-           CommandKind::Set => 3 == args.len(),
-       };
-
-       if !count {
-           print_err(&format!("Incorrect number of arguments to `{}`", self), lineno);
-           return false;
-       }
-
-       if let CommandKind::Count = self {
-           if args[1].parse::<usize>().is_err() {
-               print_err(
-                   &format!(
-                       "Second argument to `count` must be a valid usize (got `{}`)",
-                       args[1]
-                   ),
-                   lineno,
-               );
-               return false;
-           }
-       }
-
-       true
-   }
-}
-
-impl fmt::Display for CommandKind {
-   fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-       let text = match self {
-           CommandKind::Has => "has",
-           CommandKind::IsMany => "ismany",
-           CommandKind::Count => "count",
-           CommandKind::Is => "is",
-           CommandKind::Set => "set",
-       };
-       write!(f, "{}", text)
+   /// Returns both the kind and the path.
+   ///
+   /// Returns `None` if the command isn't from jsondocck (e.g. from compiletest).
+   fn parse<'a>(command_name: &str, negated: bool, args: &'a [String]) -> Option<(Self, &'a str)> {
+       let kind = match (command_name, negated) {
+           ("count", false) => {
+               assert_eq!(args.len(), 2);
+               let expected = args[1].parse().expect("invalid number for `count`");
+               Self::CountIs { expected }
+           }
+
+           ("ismany", false) => {
+               // FIXME: Make this >= 3, and migrate len(values)==1 cases to @is
+               assert!(args.len() >= 2, "Not enough args to `ismany`");
+               let values = args[1..].to_owned();
+               Self::IsMany { values }
+           }
+
+           ("is", false) => {
+               assert_eq!(args.len(), 2);
+               Self::Is { value: args[1].clone() }
+           }
+           ("is", true) => {
+               assert_eq!(args.len(), 2);
+               Self::IsNot { value: args[1].clone() }
+           }
+
+           ("set", false) => {
+               assert_eq!(args.len(), 3);
+               assert_eq!(args[1], "=");
+               return Some((Self::Set { variable: args[0].clone() }, &args[2]));
+           }
+
+           ("has", false) => match args {
+               [_path] => Self::HasPath,
+               [_path, value] => Self::HasValue { value: value.clone() },
+               _ => panic!("`//@ has` must have 2 or 3 arguments, but got {args:?}"),
+           },
+           ("has", true) => {
+               assert_eq!(args.len(), 1, "args={args:?}");
+               Self::HasNotPath
+           }
+
+           (_, false) if KNOWN_DIRECTIVE_NAMES.contains(&command_name) => {
+               return None;
+           }
+           _ => {
+               panic!("Invalid command `//@ {}{command_name}`", if negated { "!" } else { "" })
+           }
+       };
+
+       Some((kind, &args[0]))
    }
 }
 
|
||||||
// See <https://github.com/rust-lang/rust/issues/125813#issuecomment-2141953780>.
|
// See <https://github.com/rust-lang/rust/issues/125813#issuecomment-2141953780>.
|
||||||
include!(concat!(env!("CARGO_MANIFEST_DIR"), "/../compiletest/src/directive-list.rs"));
|
include!(concat!(env!("CARGO_MANIFEST_DIR"), "/../compiletest/src/directive-list.rs"));
|
||||||
|
|
||||||
/// Get a list of commands from a file. Does the work of ensuring the commands
|
/// Get a list of commands from a file.
|
||||||
/// are syntactically valid.
|
|
||||||
fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
|
fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
|
||||||
let mut commands = Vec::new();
|
let mut commands = Vec::new();
|
||||||
let mut errors = false;
|
let mut errors = false;
|
||||||
|
@ -142,217 +185,102 @@ fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
|
||||||
|
|
||||||
let negated = cap.name("negated").unwrap().as_str() == "!";
|
let negated = cap.name("negated").unwrap().as_str() == "!";
|
||||||
|
|
||||||
let cmd = match cap.name("cmd").unwrap().as_str() {
|
let args_str = &cap["args"];
|
||||||
"has" => CommandKind::Has,
|
let args = match shlex::split(args_str) {
|
||||||
"count" => CommandKind::Count,
|
|
||||||
"is" => CommandKind::Is,
|
|
||||||
"ismany" => CommandKind::IsMany,
|
|
||||||
"set" => CommandKind::Set,
|
|
||||||
// FIXME: See the comment above the `include!(...)`.
|
|
||||||
cmd if KNOWN_DIRECTIVE_NAMES.contains(&cmd) => continue,
|
|
||||||
cmd => {
|
|
||||||
print_err(&format!("Unrecognized command name `{cmd}`"), lineno);
|
|
||||||
errors = true;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let args = cap.name("args").map_or(Some(vec![]), |m| shlex::split(m.as_str()));
|
|
||||||
|
|
||||||
let args = match args {
|
|
||||||
Some(args) => args,
|
Some(args) => args,
|
||||||
None => {
|
None => {
|
||||||
print_err(
|
print_err(&format!("Invalid arguments to shlex::split: `{args_str}`",), lineno);
|
||||||
&format!(
|
|
||||||
"Invalid arguments to shlex::split: `{}`",
|
|
||||||
cap.name("args").unwrap().as_str()
|
|
||||||
),
|
|
||||||
lineno,
|
|
||||||
);
|
|
||||||
errors = true;
|
errors = true;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
if !cmd.validate(&args, lineno) {
|
if let Some((kind, path)) = CommandKind::parse(&cap["cmd"], negated, &args) {
|
||||||
errors = true;
|
commands.push(Command { kind, lineno, path: path.to_owned() })
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
commands.push(Command { negated, kind: cmd, args, lineno })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !errors { Ok(commands) } else { Err(()) }
|
if !errors { Ok(commands) } else { Err(()) }
|
||||||
}
|
}
|
||||||
|
|
||||||
-/// Performs the actual work of ensuring a command passes. Generally assumes the command
-/// is syntactically valid.
-fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
-    // FIXME: Be more granular about why, (e.g. syntax error, count not equal)
-    let result = match command.kind {
-        CommandKind::Has => {
-            match command.args.len() {
-                // `has <jsonpath>`: Check that `jsonpath` exists.
-                1 => {
-                    let val = cache.value();
-                    let results = select(val, &command.args[0]).unwrap();
-                    !results.is_empty()
-                }
-                // `has <jsonpath> <value>`: Check *any* item matched by `jsonpath` equals `value`.
-                2 => {
-                    let val = cache.value().clone();
-                    let results = select(&val, &command.args[0]).unwrap();
-                    let pat = string_to_value(&command.args[1], cache);
-                    let has = results.contains(&pat.as_ref());
-                    // Give better error for when `has` check fails.
-                    if !command.negated && !has {
-                        return Err(CkError::FailedCheck(
-                            format!(
-                                "{} matched to {:?} but didn't have {:?}",
-                                &command.args[0],
-                                results,
-                                pat.as_ref()
-                            ),
-                            command,
-                        ));
-                    } else {
-                        has
-                    }
-                }
-                _ => unreachable!(),
-            }
-        }
-        // `ismany <path> <jsonpath> <value...>`
-        CommandKind::IsMany => {
-            assert!(!command.negated, "`ismany` may not be negated");
-            let (query, values) = if let [query, values @ ..] = &command.args[..] {
-                (query, values)
-            } else {
-                unreachable!("Checked in CommandKind::validate")
-            };
-            let val = cache.value();
-            let got_values = select(val, &query).unwrap();
-
-            // Serde json doesn't implement Ord or Hash for Value, so we must
-            // use a Vec here. While in theory that makes setwize equality
-            // O(n^2), in practice n will never be large enough to matter.
-            let expected_values =
-                values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
-            if expected_values.len() != got_values.len() {
-                return Err(CkError::FailedCheck(
-                    format!(
-                        "Expected {} values, but `{}` matched to {} values ({:?})",
-                        expected_values.len(),
-                        query,
-                        got_values.len(),
-                        got_values
-                    ),
-                    command,
-                ));
-            };
-            for got_value in got_values {
-                if !expected_values.iter().any(|exp| &**exp == got_value) {
-                    return Err(CkError::FailedCheck(
-                        format!("`{}` has match {:?}, which was not expected", query, got_value),
-                        command,
-                    ));
-                }
-            }
-            true
-        }
-        // `count <jsonpath> <count>`: Check that `jsonpath` matches exactly `count` times.
-        CommandKind::Count => {
-            assert_eq!(command.args.len(), 2);
-            let expected: usize = command.args[1].parse().unwrap();
-            let val = cache.value();
-            let results = select(val, &command.args[0]).unwrap();
-            let eq = results.len() == expected;
-            if !command.negated && !eq {
-                return Err(CkError::FailedCheck(
-                    format!(
-                        "`{}` matched to `{:?}` with length {}, but expected length {}",
-                        &command.args[0],
-                        results,
-                        results.len(),
-                        expected
-                    ),
-                    command,
-                ));
-            } else {
-                eq
-            }
-        }
-        // `has <jsonpath> <value>`: Check` *exactly one* item matched by `jsonpath`, and it equals `value`.
-        CommandKind::Is => {
-            assert_eq!(command.args.len(), 2);
-            let val = cache.value().clone();
-            let results = select(&val, &command.args[0]).unwrap();
-            let pat = string_to_value(&command.args[1], cache);
-            let is = results.len() == 1 && results[0] == pat.as_ref();
-            if !command.negated && !is {
-                return Err(CkError::FailedCheck(
-                    format!(
-                        "{} matched to {:?}, but expected {:?}",
-                        &command.args[0],
-                        results,
-                        pat.as_ref()
-                    ),
-                    command,
-                ));
-            } else {
-                is
-            }
-        }
-        // `set <name> = <jsonpath>`
-        CommandKind::Set => {
-            assert!(!command.negated, "`set` may not be negated");
-            assert_eq!(command.args.len(), 3);
-            assert_eq!(command.args[1], "=", "Expected an `=`");
-            let val = cache.value().clone();
-            let results = select(&val, &command.args[2]).unwrap();
-            assert_eq!(
-                results.len(),
-                1,
-                "Expected 1 match for `{}` (because of `set`): matched to {:?}",
-                command.args[2],
-                results
-            );
-            match results.len() {
-                0 => false,
-                1 => {
-                    let r = cache.variables.insert(command.args[0].clone(), results[0].clone());
-                    assert!(r.is_none(), "Name collision: {} is duplicated", command.args[0]);
-                    true
-                }
-                _ => {
-                    panic!(
-                        "Got multiple results in `set` for `{}`: {:?}",
-                        &command.args[2], results,
-                    );
-                }
-            }
-        }
-    };
-
-    if result == command.negated {
-        if command.negated {
-            Err(CkError::FailedCheck(
-                format!("`!{} {}` matched when it shouldn't", command.kind, command.args.join(" ")),
-                command,
-            ))
-        } else {
-            // FIXME: In the future, try 'peeling back' each step, and see at what level the match failed
-            Err(CkError::FailedCheck(
-                format!(
-                    "`{} {}` didn't match when it should",
-                    command.kind,
-                    command.args.join(" ")
-                ),
-                command,
-            ))
-        }
-    } else {
-        Ok(())
-    }
-}
+/// Performs the actual work of ensuring a command passes.
+fn check_command(command: &Command, cache: &mut Cache) -> Result<(), String> {
+    let matches = cache.select(&command.path);
+    match &command.kind {
+        CommandKind::HasPath => {
+            if matches.is_empty() {
+                return Err("matched to no values".to_owned());
+            }
+        }
+        CommandKind::HasNotPath => {
+            if !matches.is_empty() {
+                return Err(format!("matched to {matches:?}, but wanted no matches"));
+            }
+        }
+        CommandKind::HasValue { value } => {
+            let want_value = string_to_value(value, cache);
+            if !matches.contains(&want_value.as_ref()) {
+                return Err(format!("matched to {matches:?}, which didn't contain {want_value:?}"));
+            }
+        }
+        CommandKind::Is { value } => {
+            let want_value = string_to_value(value, cache);
+            let matched = get_one(&matches)?;
+            if matched != want_value.as_ref() {
+                return Err(format!("matched to {matched:?} but want {want_value:?}"));
+            }
+        }
+        CommandKind::IsNot { value } => {
+            let wantnt_value = string_to_value(value, cache);
+            let matched = get_one(&matches)?;
+            if matched == wantnt_value.as_ref() {
+                return Err(format!("got value {wantnt_value:?}, but want anything else"));
+            }
+        }
+        CommandKind::IsMany { values } => {
+            // Serde json doesn't implement Ord or Hash for Value, so we must
+            // use a Vec here. While in theory that makes setwize equality
+            // O(n^2), in practice n will never be large enough to matter.
+            let expected_values =
+                values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
+            if expected_values.len() != matches.len() {
+                return Err(format!(
+                    "Expected {} values, but matched to {} values ({:?})",
+                    expected_values.len(),
+                    matches.len(),
+                    matches
+                ));
+            };
+            for got_value in matches {
+                if !expected_values.iter().any(|exp| &**exp == got_value) {
+                    return Err(format!("has match {got_value:?}, which was not expected",));
+                }
+            }
+        }
+        CommandKind::CountIs { expected } => {
+            if *expected != matches.len() {
+                return Err(format!(
+                    "matched to `{matches:?}` with length {}, but expected length {expected}",
+                    matches.len(),
+                ));
+            }
+        }
+        CommandKind::Set { variable } => {
+            let value = get_one(&matches)?;
+            let r = cache.variables.insert(variable.to_owned(), value.clone());
+            assert!(r.is_none(), "name collision: {variable:?} is duplicated");
+        }
+    }
+
+    Ok(())
+}
+
+fn get_one<'a>(matches: &[&'a Value]) -> Result<&'a Value, String> {
+    match matches {
+        [] => Err("matched to no values".to_owned()),
+        [matched] => Ok(matched),
+        _ => Err(format!("matched to multiple values {matches:?}, but want exactly 1")),
+    }
+}
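The `IsMany` arm above keeps the old Vec-based, O(n^2) set comparison because `serde_json::Value` implements neither `Ord` nor `Hash`. A small standalone sketch of that comparison, assuming only the `serde_json` crate; the function name `same_values` and the sample data are made up for illustration and are not part of jsondocck.

// Sketch of the Vec-based "same set" check used by `ismany`: serde_json::Value
// cannot go into a BTreeSet or HashSet, so length plus membership is checked
// instead. As in the original, duplicates on one side are not fully accounted for.
use serde_json::{json, Value};

fn same_values(expected: &[Value], got: &[&Value]) -> bool {
    expected.len() == got.len() && got.iter().all(|g| expected.iter().any(|e| e == *g))
}

fn main() {
    let expected = vec![json!("core"), json!("alloc"), json!(1)];
    let (a, b, c) = (json!(1), json!("alloc"), json!("core"));
    let got: Vec<&Value> = vec![&a, &b, &c];
    assert!(same_values(&expected, &got));

    // A shorter match list fails the length check immediately.
    let missing = vec![&a, &b];
    assert!(!same_values(&expected, &missing));
    println!("set-wise comparison behaves as expected");
}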
@@ -3,10 +3,20 @@ include ../tools.mk
 
 # ignore-windows
 
+# The option -n for the AIX ln command has a different purpose than it does
+# on Linux. On Linux, the -n option is used to treat the destination path as
+# normal file if it is a symbolic link to a directory, which is the default
+# behavior of the AIX ln command.
+ifeq ($(UNAME),AIX)
+LN_FLAGS := -sf
+else
+LN_FLAGS := -nsf
+endif
+
 NAME := $(shell $(RUSTC) --print file-names foo.rs)
 
 all:
 	mkdir -p $(TMPDIR)/outdir
 	$(RUSTC) foo.rs -o $(TMPDIR)/outdir/$(NAME)
-	ln -nsf outdir/$(NAME) $(TMPDIR)
+	ln $(LN_FLAGS) outdir/$(NAME) $(TMPDIR)
 	RUSTC_LOG=rustc_metadata::loader $(RUSTC) bar.rs
tests/ui/async-await/async-closures/call-once-deduction.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
+//@ edition: 2021
+//@ check-pass
+
+#![feature(async_closure, async_fn_traits, unboxed_closures)]
+
+fn bar<F, O>(_: F)
+where
+    F: AsyncFnOnce<(), CallOnceFuture = O>,
+{
+}
+
+fn main() {
+    bar(async move || {});
+}
@@ -25,7 +25,14 @@ fn start(argc: isize, argv: *const *const u8) -> isize {
     let actual = unsafe {
         let mut actual: libc::sigaction = std::mem::zeroed();
         libc::sigaction(libc::SIGPIPE, std::ptr::null(), &mut actual);
-        actual.sa_sigaction
+        #[cfg(not(target_os = "aix"))]
+        {
+            actual.sa_sigaction
+        }
+        #[cfg(target_os = "aix")]
+        {
+            actual.sa_union.__su_sigaction as libc::sighandler_t
+        }
     };
 
     assert_eq!(actual, expected, "actual and expected SIGPIPE disposition in child differs");
@@ -20,7 +20,14 @@ pub fn assert_sigpipe_handler(expected_handler: SignalHandler) {
     let actual = unsafe {
         let mut actual: libc::sigaction = std::mem::zeroed();
        libc::sigaction(libc::SIGPIPE, std::ptr::null(), &mut actual);
-        actual.sa_sigaction
+        #[cfg(not(target_os = "aix"))]
+        {
+            actual.sa_sigaction
+        }
+        #[cfg(target_os = "aix")]
+        {
+            actual.sa_union.__su_sigaction as libc::sighandler_t
+        }
     };
 
     let expected = match expected_handler {
@@ -29,7 +29,14 @@ fn main() {
         // Install signal handler that runs on alternate signal stack.
         let mut action: sigaction = std::mem::zeroed();
         action.sa_flags = (SA_ONSTACK | SA_SIGINFO) as _;
-        action.sa_sigaction = signal_handler as sighandler_t;
+        #[cfg(not(target_os = "aix"))]
+        {
+            action.sa_sigaction = signal_handler as sighandler_t;
+        }
+        #[cfg(target_os = "aix")]
+        {
+            action.sa_union.__su_sigaction = signal_handler as sighandler_t;
+        }
         sigaction(SIGWINCH, &action, std::ptr::null_mut());
 
         // Send SIGWINCH on exit.
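The three hunks above make the same adjustment: on AIX, `struct sigaction` exposes the handler through the `sa_union.__su_sigaction` union member rather than a flat `sa_sigaction` field. A standalone sketch of the read-back side, on Unix-like targets and assuming only the `libc` crate; the AIX branch simply mirrors the hunks above and has not been exercised here.

// Reads back the currently installed SIGPIPE disposition via sigaction(2).
// The cfg split mirrors the test changes above.
fn current_sigpipe_handler() -> libc::sighandler_t {
    unsafe {
        let mut current: libc::sigaction = std::mem::zeroed();
        libc::sigaction(libc::SIGPIPE, std::ptr::null(), &mut current);
        #[cfg(not(target_os = "aix"))]
        {
            current.sa_sigaction
        }
        #[cfg(target_os = "aix")]
        {
            // On AIX the handler lives in a union rather than a plain field.
            current.sa_union.__su_sigaction as libc::sighandler_t
        }
    }
}

fn main() {
    let handler = current_sigpipe_handler();
    // SIG_DFL and SIG_IGN are the two dispositions the tests compare against.
    if handler == libc::SIG_IGN {
        println!("SIGPIPE is ignored");
    } else if handler == libc::SIG_DFL {
        println!("SIGPIPE has the default disposition");
    } else {
        println!("SIGPIPE has a custom handler at {handler:#x}");
    }
}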
tests/ui/unsafe-fields/copy-trait.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
+//@ compile-flags: --crate-type=lib
+
+#![feature(unsafe_fields)]
+#![allow(incomplete_features)]
+#![deny(missing_copy_implementations)]
+
+mod good_safe_impl {
+    enum SafeEnum {
+        Safe(u8),
+    }
+
+    impl Copy for SafeEnum {}
+}
+
+mod bad_safe_impl {
+    enum UnsafeEnum {
+        Safe(u8),
+        Unsafe { unsafe field: u8 },
+    }
+
+    impl Copy for UnsafeEnum {}
+    //~^ ERROR the trait `Copy` requires an `unsafe impl` declaration
+}
+
+mod good_unsafe_impl {
+    enum UnsafeEnum {
+        Safe(u8),
+        Unsafe { unsafe field: u8 },
+    }
+
+    unsafe impl Copy for UnsafeEnum {}
+}
+
+mod bad_unsafe_impl {
+    enum SafeEnum {
+        Safe(u8),
+    }
+
+    unsafe impl Copy for SafeEnum {}
+    //~^ ERROR implementing the trait `Copy` is not unsafe
+}
tests/ui/unsafe-fields/copy-trait.stderr (new file, 28 lines)
@@ -0,0 +1,28 @@
+error[E0200]: the trait `Copy` requires an `unsafe impl` declaration
+  --> $DIR/copy-trait.rs:21:5
+   |
+LL |     impl Copy for UnsafeEnum {}
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: the trait `Copy` cannot be safely implemented for `bad_safe_impl::UnsafeEnum` because it has unsafe fields. Review the invariants of those fields before adding an `unsafe impl`
+help: add `unsafe` to this trait implementation
+   |
+LL |     unsafe impl Copy for UnsafeEnum {}
+   |     ++++++
+
+error[E0199]: implementing the trait `Copy` is not unsafe
+  --> $DIR/copy-trait.rs:39:5
+   |
+LL |     unsafe impl Copy for SafeEnum {}
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+help: remove `unsafe` from this trait implementation
+   |
+LL -     unsafe impl Copy for SafeEnum {}
+LL +     impl Copy for SafeEnum {}
+   |
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0199, E0200.
+For more information about an error, try `rustc --explain E0199`.
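As the test and its stderr show, a type with `unsafe` fields can only implement `Copy` through an `unsafe impl` (E0200), while a type without them must keep the ordinary safe `impl` (E0199). Below is a nightly-only sketch modeled on the test's accepted modules; the `Measurement` and `Plain` types, their invariant, and the added `Clone` impls are illustrative assumptions rather than part of the test, and since `unsafe_fields` is an incomplete feature the exact rules may still shift. Compile as a library crate, e.g. `rustc --crate-type=lib` on nightly.

// Nightly-only sketch: `unsafe` fields gate `Copy` behind an `unsafe impl`.
#![feature(unsafe_fields)]
#![allow(incomplete_features)]

pub enum Measurement {
    Calibrated(u8),
    // Hypothetical invariant, purely for illustration: `raw` must already be validated.
    Raw { unsafe raw: u8 },
}

// Duplicating the bytes of `raw` cannot break the (illustrative) invariant,
// so the implementer promises that with an `unsafe impl`, as in the test's
// `good_unsafe_impl` module.
unsafe impl Copy for Measurement {}

impl Clone for Measurement {
    fn clone(&self) -> Self {
        *self
    }
}

pub enum Plain {
    Value(u8),
}

// No unsafe fields: the ordinary safe impl is required here; writing
// `unsafe impl` instead would be rejected with E0199.
impl Copy for Plain {}

impl Clone for Plain {
    fn clone(&self) -> Self {
        *self
    }
}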
@@ -1011,9 +1011,11 @@ compiler = [
     "@lcnr",
     "@Nadrieril",
     "@nnethercote",
+    "@Noratrieb",
     "@oli-obk",
     "@petrochenkov",
     "@pnkfelix",
+    "@SparrowLii",
     "@wesleywiser",
 ]
 libs = [