Auto merge of #70072 - Centril:rollup-722hooh, r=Centril
Rollup of 7 pull requests

Successful merges:

 - #68746 (Make macro metavars respect (non-)hygiene)
 - #69688 (Move tidy check to mingw-check)
 - #69735 (bootstrap: Use hash to determine if sanitizers needs to be rebuilt)
 - #69922 (implement zeroed and uninitialized with MaybeUninit)
 - #69956 (Ensure HAS_FREE_LOCAL_NAMES is set for ReFree)
 - #70061 (Cosmetic fixes in documentation)
 - #70064 (Update books)

Failed merges:

r? @ghost
commit 660326e979
26 changed files with 237 additions and 205 deletions
@@ -11,6 +11,7 @@
 use std::env;
 use std::ffi::OsString;
 use std::fs::{self, File};
+use std::io;
 use std::path::{Path, PathBuf};
 use std::process::Command;
@@ -54,7 +55,6 @@ impl Step for Llvm {
        }
    }

-        let llvm_info = &builder.in_tree_llvm_info;
        let root = "src/llvm-project/llvm";
        let out_dir = builder.llvm_out(target);
        let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build);
@@ -65,40 +65,35 @@ impl Step for Llvm {
        let build_llvm_config =
            llvm_config_ret_dir.join(exe("llvm-config", &*builder.config.build));
-        let done_stamp = out_dir.join("llvm-finished-building");

-        if done_stamp.exists() {
-            if builder.config.llvm_skip_rebuild {
-                builder.info(
-                    "Warning: \
-                    Using a potentially stale build of LLVM; \
-                    This may not behave well.",
-                );
-                return build_llvm_config;
-            }
+        let stamp = out_dir.join("llvm-finished-building");
+        let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());

-            if let Some(llvm_commit) = llvm_info.sha() {
-                let done_contents = t!(fs::read(&done_stamp));
+        if builder.config.llvm_skip_rebuild && stamp.path.exists() {
+            builder.info(
+                "Warning: \
+                Using a potentially stale build of LLVM; \
+                This may not behave well.",
+            );
+            return build_llvm_config;
+        }

-                // If LLVM was already built previously and the submodule's commit didn't change
-                // from the previous build, then no action is required.
-                if done_contents == llvm_commit.as_bytes() {
-                    return build_llvm_config;
-                }
-            } else {
+        if stamp.is_done() {
+            if stamp.hash.is_none() {
                builder.info(
                    "Could not determine the LLVM submodule commit hash. \
                     Assuming that an LLVM rebuild is not necessary.",
                );
                builder.info(&format!(
                    "To force LLVM to rebuild, remove the file `{}`",
-                    done_stamp.display()
+                    stamp.path.display()
                ));
-                return build_llvm_config;
            }
+            return build_llvm_config;
        }

        builder.info(&format!("Building LLVM for {}", target));
+        t!(stamp.remove());
        let _time = util::timeit(&builder);
        t!(fs::create_dir_all(&out_dir));
@@ -271,7 +266,7 @@ impl Step for Llvm {
        cfg.build();

-        t!(fs::write(&done_stamp, llvm_info.sha().unwrap_or("")));
+        t!(stamp.write());

        build_llvm_config
    }
@@ -584,17 +579,21 @@ impl Step for Sanitizers {
            return runtimes;
        }

-        let done_stamp = out_dir.join("sanitizers-finished-building");
-        if done_stamp.exists() {
-            builder.info(&format!(
-                "Assuming that sanitizers rebuild is not necessary. \
-                To force a rebuild, remove the file `{}`",
-                done_stamp.display()
-            ));
+        let stamp = out_dir.join("sanitizers-finished-building");
+        let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());
+
+        if stamp.is_done() {
+            if stamp.hash.is_none() {
+                builder.info(&format!(
+                    "Rebuild sanitizers by removing the file `{}`",
+                    stamp.path.display()
+                ));
+            }
            return runtimes;
        }

        builder.info(&format!("Building sanitizers for {}", self.target));
+        t!(stamp.remove());
        let _time = util::timeit(&builder);

        let mut cfg = cmake::Config::new(&compiler_rt_dir);
@@ -623,8 +622,7 @@ impl Step for Sanitizers {
            cfg.build_target(&runtime.cmake_target);
            cfg.build();
        }

-        t!(fs::write(&done_stamp, b""));
+        t!(stamp.write());

        runtimes
    }
@@ -689,3 +687,41 @@ fn supported_sanitizers(
    }
    result
}
+
+struct HashStamp {
+    path: PathBuf,
+    hash: Option<Vec<u8>>,
+}
+
+impl HashStamp {
+    fn new(path: PathBuf, hash: Option<&str>) -> Self {
+        HashStamp { path, hash: hash.map(|s| s.as_bytes().to_owned()) }
+    }
+
+    fn is_done(&self) -> bool {
+        match fs::read(&self.path) {
+            Ok(h) => self.hash.as_deref().unwrap_or(b"") == h.as_slice(),
+            Err(e) if e.kind() == io::ErrorKind::NotFound => false,
+            Err(e) => {
+                panic!("failed to read stamp file `{}`: {}", self.path.display(), e);
+            }
+        }
+    }
+
+    fn remove(&self) -> io::Result<()> {
+        match fs::remove_file(&self.path) {
+            Ok(()) => Ok(()),
+            Err(e) => {
+                if e.kind() == io::ErrorKind::NotFound {
+                    Ok(())
+                } else {
+                    Err(e)
+                }
+            }
+        }
+    }
+
+    fn write(&self) -> io::Result<()> {
+        fs::write(&self.path, self.hash.as_deref().unwrap_or(b""))
+    }
+}
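The HashStamp type above is the core of this change: the stamp file now records the expected submodule commit hash instead of merely existing. As context, here is a self-contained sketch of how such a guard is typically driven around an expensive build step. The build_llvm function, the stamp path, and the hash literal are made up for illustration; only the guard pattern mirrors the bootstrap code above.

// Sketch: driving a hash-stamp guard around an expensive step (assumptions noted above).
use std::fs;
use std::io;
use std::path::PathBuf;

struct HashStamp {
    path: PathBuf,
    hash: Option<Vec<u8>>,
}

impl HashStamp {
    fn new(path: PathBuf, hash: Option<&str>) -> Self {
        HashStamp { path, hash: hash.map(|s| s.as_bytes().to_owned()) }
    }

    // Done when the stamp file exists and records the same hash we expect now.
    fn is_done(&self) -> bool {
        match fs::read(&self.path) {
            Ok(h) => self.hash.as_deref().unwrap_or(b"") == h.as_slice(),
            Err(e) if e.kind() == io::ErrorKind::NotFound => false,
            Err(e) => panic!("failed to read stamp file `{}`: {}", self.path.display(), e),
        }
    }

    fn remove(&self) -> io::Result<()> {
        match fs::remove_file(&self.path) {
            Ok(()) => Ok(()),
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
            Err(e) => Err(e),
        }
    }

    fn write(&self) -> io::Result<()> {
        fs::write(&self.path, self.hash.as_deref().unwrap_or(b""))
    }
}

fn build_llvm() {
    // Placeholder for the expensive step being guarded.
    println!("building...");
}

fn main() -> io::Result<()> {
    // Pretend the current submodule commit is this hash (hypothetical value).
    let stamp = HashStamp::new(PathBuf::from("llvm-finished-building"), Some("abc123"));
    if stamp.is_done() {
        println!("up to date, skipping rebuild");
        return Ok(());
    }
    stamp.remove()?; // clear any stale stamp before rebuilding
    build_llvm();
    stamp.write()?; // record the hash we just built against
    Ok(())
}

Run twice: the first run builds and writes the stamp, the second sees a matching hash and skips the work, which is exactly the skip/rebuild decision the bootstrap code makes for LLVM and the sanitizers.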
@@ -25,4 +25,5 @@ ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
 ENV SCRIPT python2.7 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
            python2.7 ../x.py build --stage 0 src/tools/build-manifest && \
            python2.7 ../x.py test --stage 0 src/tools/compiletest && \
+           python2.7 ../x.py test src/tools/tidy && \
            /scripts/validate-toolstate.sh
@@ -29,7 +29,7 @@ ENV RUST_CONFIGURE_ARGS \
      --enable-llvm-link-shared \
      --set rust.thin-lto-import-instr-limit=10

-ENV SCRIPT python2.7 ../x.py test src/tools/tidy && python2.7 ../x.py test
+ENV SCRIPT python2.7 ../x.py test --exclude src/tools/tidy && python2.7 ../x.py test src/tools/tidy

 # The purpose of this container isn't to test with debug assertions and
 # this is run on all PRs, so let's get speedier builds by disabling these extra
@@ -1 +1 @@
-Subproject commit b81ffb7a6f4c5aaed92786e770e99db116aa4ebd
+Subproject commit d22a9c487c78095afc4584f1d9b4ec43529d713c
@@ -1 +1 @@
-Subproject commit 559e09caa9661043744cf7af7bd88432d966f743
+Subproject commit e2f11fe4d6a5ecb471c70323197da43c70cb96b6
@@ -1 +1 @@
-Subproject commit db57f899ea2a56a544c8d280cbf033438666273d
+Subproject commit cb369ae95ca36b841960182d26f6d5d9b2e3cc18
@@ -1027,46 +1027,8 @@ extern "rust-intrinsic" {
    #[rustc_const_unstable(feature = "const_caller_location", issue = "47809")]
    pub fn caller_location() -> &'static crate::panic::Location<'static>;

-    /// Creates a value initialized to zero.
-    ///
-    /// `init` is unsafe because it returns a zeroed-out datum,
-    /// which is unsafe unless `T` is `Copy`. Also, even if T is
-    /// `Copy`, an all-zero value may not correspond to any legitimate
-    /// state for the type in question.
-    ///
-    /// The stabilized version of this intrinsic is
-    /// [`std::mem::zeroed`](../../std/mem/fn.zeroed.html).
-    #[unstable(
-        feature = "core_intrinsics",
-        reason = "intrinsics are unlikely to ever be stabilized, instead \
-                  they should be used through stabilized interfaces \
-                  in the rest of the standard library",
-        issue = "none"
-    )]
-    #[rustc_deprecated(reason = "superseded by MaybeUninit, removal planned", since = "1.38.0")]
-    pub fn init<T>() -> T;
-
-    /// Creates an uninitialized value.
-    ///
-    /// `uninit` is unsafe because there is no guarantee of what its
-    /// contents are. In particular its drop-flag may be set to any
-    /// state, which means it may claim either dropped or
-    /// undropped. In the general case one must use `ptr::write` to
-    /// initialize memory previous set to the result of `uninit`.
-    ///
-    /// The stabilized version of this intrinsic is
-    /// [`std::mem::MaybeUninit`](../../std/mem/union.MaybeUninit.html).
-    #[unstable(
-        feature = "core_intrinsics",
-        reason = "intrinsics are unlikely to ever be stabilized, instead \
-                  they should be used through stabilized interfaces \
-                  in the rest of the standard library",
-        issue = "none"
-    )]
-    #[rustc_deprecated(reason = "superseded by MaybeUninit, removal planned", since = "1.38.0")]
-    pub fn uninit<T>() -> T;
-
    /// Moves a value out of scope without running drop glue.
    /// This exists solely for `mem::forget_unsized`; normal `forget` uses `ManuallyDrop` instead.
    pub fn forget<T: ?Sized>(_: T);

    /// Reinterprets the bits of a value of one type as another type.
@@ -490,7 +490,7 @@ pub const fn needs_drop<T>() -> bool {
/// ///
/// let _x: &i32 = unsafe { mem::zeroed() }; // Undefined behavior!
/// ```
-#[inline]
+#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
#[allow(deprecated)]
@@ -500,7 +500,7 @@ pub unsafe fn zeroed<T>() -> T {
    intrinsics::assert_zero_valid::<T>();
    #[cfg(bootstrap)]
    intrinsics::panic_if_uninhabited::<T>();
-    intrinsics::init()
+    MaybeUninit::zeroed().assume_init()
}

/// Bypasses Rust's normal memory-initialization checks by pretending to
@@ -525,7 +525,7 @@ pub unsafe fn zeroed<T>() -> T {
/// [uninit]: union.MaybeUninit.html#method.uninit
/// [assume_init]: union.MaybeUninit.html#method.assume_init
/// [inv]: union.MaybeUninit.html#initialization-invariant
-#[inline]
+#[inline(always)]
#[rustc_deprecated(since = "1.39.0", reason = "use `mem::MaybeUninit` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
@@ -536,7 +536,7 @@ pub unsafe fn uninitialized<T>() -> T {
    intrinsics::assert_uninit_valid::<T>();
    #[cfg(bootstrap)]
    intrinsics::panic_if_uninhabited::<T>();
-    intrinsics::uninit()
+    MaybeUninit::uninit().assume_init()
}

/// Swaps the values at two mutable locations, without deinitializing either one.
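With this change, mem::zeroed and mem::uninitialized are implemented on top of MaybeUninit instead of the removed init/uninit intrinsics. A minimal sketch of the same MaybeUninit pattern in user code follows; the Config type is made up for illustration.

use std::mem::MaybeUninit;

struct Config {
    retries: u32,
    verbose: bool,
}

fn main() {
    // Equivalent of the old `mem::zeroed::<Config>()`: every field is all-zero bits.
    // This is only sound because all-zero bits are a valid `Config`.
    let zeroed: Config = unsafe { MaybeUninit::<Config>::zeroed().assume_init() };
    assert_eq!(zeroed.retries, 0);
    assert!(!zeroed.verbose);

    // The preferred replacement for `mem::uninitialized`: keep the value wrapped
    // in `MaybeUninit` until it has actually been written, then assume_init.
    let mut slot = MaybeUninit::<Config>::uninit();
    slot.write(Config { retries: 3, verbose: true });
    let initialized = unsafe { slot.assume_init() };
    assert_eq!(initialized.retries, 3);
}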
@@ -554,24 +554,26 @@ bitflags! {
        /// Does this have [ConstKind::Placeholder]?
        const HAS_CT_PLACEHOLDER = 1 << 8;

+        /// `true` if there are "names" of regions and so forth
+        /// that are local to a particular fn/inferctxt
+        const HAS_FREE_LOCAL_REGIONS = 1 << 9;
+
        /// `true` if there are "names" of types and regions and so forth
        /// that are local to a particular fn
        const HAS_FREE_LOCAL_NAMES = TypeFlags::HAS_TY_PARAM.bits
            | TypeFlags::HAS_RE_PARAM.bits
            | TypeFlags::HAS_CT_PARAM.bits
            | TypeFlags::HAS_TY_INFER.bits
            | TypeFlags::HAS_RE_INFER.bits
            | TypeFlags::HAS_CT_INFER.bits
            | TypeFlags::HAS_TY_PLACEHOLDER.bits
            | TypeFlags::HAS_RE_PLACEHOLDER.bits
-            | TypeFlags::HAS_CT_PLACEHOLDER.bits;
+            | TypeFlags::HAS_CT_PLACEHOLDER.bits
+            | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits;

        /// Does this have [Projection] or [UnnormalizedProjection]?
-        const HAS_TY_PROJECTION = 1 << 9;
+        const HAS_TY_PROJECTION = 1 << 10;
        /// Does this have [Opaque]?
-        const HAS_TY_OPAQUE = 1 << 10;
+        const HAS_TY_OPAQUE = 1 << 11;
        /// Does this have [ConstKind::Unevaluated]?
-        const HAS_CT_PROJECTION = 1 << 11;
+        const HAS_CT_PROJECTION = 1 << 12;

        /// Could this type be normalized further?
        const HAS_PROJECTION = TypeFlags::HAS_TY_PROJECTION.bits
@@ -580,21 +582,21 @@ bitflags! {
        /// Present if the type belongs in a local type context.
        /// Set for placeholders and inference variables that are not "Fresh".
-        const KEEP_IN_LOCAL_TCX = 1 << 12;
+        const KEEP_IN_LOCAL_TCX = 1 << 13;

        /// Is an error type reachable?
-        const HAS_TY_ERR = 1 << 13;
+        const HAS_TY_ERR = 1 << 14;

        /// Does this have any region that "appears free" in the type?
        /// Basically anything but [ReLateBound] and [ReErased].
-        const HAS_FREE_REGIONS = 1 << 14;
+        const HAS_FREE_REGIONS = 1 << 15;

        /// Does this have any [ReLateBound] regions? Used to check
        /// if a global bound is safe to evaluate.
-        const HAS_RE_LATE_BOUND = 1 << 15;
+        const HAS_RE_LATE_BOUND = 1 << 16;

        /// Does this have any [ReErased] regions?
-        const HAS_RE_ERASED = 1 << 16;
+        const HAS_RE_ERASED = 1 << 17;

        /// Flags representing the nominal content of a type,
        /// computed by FlagsComputation. If you add a new nominal
@@ -608,6 +610,7 @@ bitflags! {
            | TypeFlags::HAS_TY_PLACEHOLDER.bits
            | TypeFlags::HAS_RE_PLACEHOLDER.bits
            | TypeFlags::HAS_CT_PLACEHOLDER.bits
+            | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits
            | TypeFlags::HAS_TY_PROJECTION.bits
            | TypeFlags::HAS_TY_OPAQUE.bits
            | TypeFlags::HAS_CT_PROJECTION.bits
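For readers unfamiliar with how these composite flags behave, here is a small self-contained sketch of a composite flag defined as the union of other bits, mirroring how HAS_FREE_LOCAL_NAMES now also covers HAS_FREE_LOCAL_REGIONS. It assumes the bitflags 1.x crate as a dependency, and the flag names and values below are made up for illustration.

use bitflags::bitflags;

bitflags! {
    struct TypeFlags: u32 {
        const HAS_TY_PARAM = 1 << 0;
        const HAS_RE_PARAM = 1 << 1;
        const HAS_FREE_LOCAL_REGIONS = 1 << 2;
        // A composite flag: the union of the three bits above.
        const HAS_FREE_LOCAL_NAMES = Self::HAS_TY_PARAM.bits
            | Self::HAS_RE_PARAM.bits
            | Self::HAS_FREE_LOCAL_REGIONS.bits;
    }
}

fn main() {
    // A value carrying only the "free local regions" bit now overlaps the
    // composite flag, so intersection checks against it succeed...
    let flags = TypeFlags::HAS_FREE_LOCAL_REGIONS;
    assert!(flags.intersects(TypeFlags::HAS_FREE_LOCAL_NAMES));
    // ...even though a single bit does not contain the whole composite.
    assert!(!flags.contains(TypeFlags::HAS_FREE_LOCAL_NAMES));
}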
@@ -1743,42 +1743,42 @@ impl RegionKind {
        }
    }

-    pub fn keep_in_local_tcx(&self) -> bool {
-        if let ty::ReVar(..) = self { true } else { false }
-    }
-
    pub fn type_flags(&self) -> TypeFlags {
        let mut flags = TypeFlags::empty();

-        if self.keep_in_local_tcx() {
-            flags = flags | TypeFlags::KEEP_IN_LOCAL_TCX;
-        }
-
        match *self {
            ty::ReVar(..) => {
                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+                flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS;
                flags = flags | TypeFlags::HAS_RE_INFER;
+                flags = flags | TypeFlags::KEEP_IN_LOCAL_TCX;
            }
            ty::RePlaceholder(..) => {
                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+                flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS;
                flags = flags | TypeFlags::HAS_RE_PLACEHOLDER;
            }
+            ty::ReEarlyBound(..) => {
+                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+                flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS;
+                flags = flags | TypeFlags::HAS_RE_PARAM;
+            }
+            ty::ReFree { .. } | ty::ReScope { .. } => {
+                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+                flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS;
+            }
+            ty::ReEmpty(_) | ty::ReStatic => {
+                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+            }
+            ty::ReClosureBound(..) => {
+                flags = flags | TypeFlags::HAS_FREE_REGIONS;
+            }
            ty::ReLateBound(..) => {
                flags = flags | TypeFlags::HAS_RE_LATE_BOUND;
            }
-            ty::ReEarlyBound(..) => {
-                flags = flags | TypeFlags::HAS_FREE_REGIONS;
-                flags = flags | TypeFlags::HAS_RE_PARAM;
-            }
-            ty::ReEmpty(_) | ty::ReStatic | ty::ReFree { .. } | ty::ReScope { .. } => {
-                flags = flags | TypeFlags::HAS_FREE_REGIONS;
-            }
            ty::ReErased => {
                flags = flags | TypeFlags::HAS_RE_ERASED;
            }
-            ty::ReClosureBound(..) => {
-                flags = flags | TypeFlags::HAS_FREE_REGIONS;
-            }
        }

        debug!("type_flags({:?}) = {:?}", self, flags);
@@ -195,26 +195,8 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                .unwrap();
            OperandRef::from_const(self, ty_name, ret_ty).immediate_or_packed_pair(self)
        }
-        "init" => {
-            let ty = substs.type_at(0);
-            if !self.layout_of(ty).is_zst() {
-                // Just zero out the stack slot.
-                // If we store a zero constant, LLVM will drown in vreg allocation for large
-                // data structures, and the generated code will be awful. (A telltale sign of
-                // this is large quantities of `mov [byte ptr foo],0` in the generated code.)
-                memset_intrinsic(
-                    self,
-                    false,
-                    ty,
-                    llresult,
-                    self.const_u8(0),
-                    self.const_usize(1),
-                );
-            }
-            return;
-        }
-        // Effectively no-ops
-        "uninit" | "forget" => {
+        // Effectively no-op
+        "forget" => {
            return;
        }
        "offset" => {
@@ -112,7 +112,7 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_session::lint::builtin::META_VARIABLE_MISUSE;
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::kw;
-use rustc_span::{symbol::Ident, MultiSpan, Span};
+use rustc_span::{symbol::MacroRulesNormalizedIdent, MultiSpan, Span};

 use smallvec::SmallVec;
@@ -179,7 +179,7 @@ struct BinderInfo {
}

/// An environment of meta-variables to their binder information.
-type Binders = FxHashMap<Ident, BinderInfo>;
+type Binders = FxHashMap<MacroRulesNormalizedIdent, BinderInfo>;

/// The state at which we entered a macro definition in the RHS of another macro definition.
struct MacroState<'a> {
@@ -245,6 +245,7 @@ fn check_binders(
            if macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVar in lhs");
            }
+            let name = MacroRulesNormalizedIdent::new(name);
            // There are 3 possibilities:
            if let Some(prev_info) = binders.get(&name) {
                // 1. The meta-variable is already bound in the current LHS: This is an error.
@@ -264,6 +265,7 @@ fn check_binders(
            if !macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in nested lhs");
            }
+            let name = MacroRulesNormalizedIdent::new(name);
            if let Some(prev_info) = get_binder_info(macros, binders, name) {
                // Duplicate binders at the top-level macro definition are errors. The lint is only
                // for nested macro definitions.
@@ -300,7 +302,7 @@ fn check_binders(
fn get_binder_info<'a>(
    mut macros: &'a Stack<'a, MacroState<'a>>,
    binders: &'a Binders,
-    name: Ident,
+    name: MacroRulesNormalizedIdent,
) -> Option<&'a BinderInfo> {
    binders.get(&name).or_else(|| macros.find_map(|state| state.binders.get(&name)))
}
@@ -331,6 +333,7 @@ fn check_occurrences(
            sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in rhs")
        }
        TokenTree::MetaVar(span, name) => {
+            let name = MacroRulesNormalizedIdent::new(name);
            check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name);
        }
        TokenTree::Delimited(_, ref del) => {
@@ -552,7 +555,7 @@ fn check_ops_is_prefix(
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    span: Span,
-    name: Ident,
+    name: MacroRulesNormalizedIdent,
) {
    let macros = macros.push(MacroState { binders, ops: ops.into() });
    // Accumulates the stacks the operators of each state until (and including when) the
@@ -598,7 +601,7 @@ fn ops_is_prefix(
    sess: &ParseSess,
    node_id: NodeId,
    span: Span,
-    name: Ident,
+    name: MacroRulesNormalizedIdent,
    binder_ops: &[KleeneToken],
    occurrence_ops: &[KleeneToken],
) {
@@ -76,13 +76,13 @@ use TokenTreeOrTokenTreeSlice::*;
 use crate::mbe::{self, TokenTree};

-use rustc_ast::ast::{Ident, Name};
+use rustc_ast::ast::Name;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DocComment, Nonterminal, Token};
 use rustc_ast_pretty::pprust;
 use rustc_parse::parser::{FollowedByType, Parser, PathStyle};
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::symbol::{kw, sym, Ident, MacroRulesNormalizedIdent, Symbol};

 use rustc_errors::{FatalError, PResult};
 use rustc_span::Span;
@@ -273,9 +273,10 @@ crate enum ParseResult<T> {
    Error(rustc_span::Span, String),
}

-/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
-/// This represents the mapping of metavars to the token trees they bind to.
-crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
+/// A `ParseResult` where the `Success` variant contains a mapping of
+/// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping
+/// of metavars to the token trees they bind to.
+crate type NamedParseResult = ParseResult<FxHashMap<MacroRulesNormalizedIdent, NamedMatch>>;

/// Count how many metavars are named in the given matcher `ms`.
pub(super) fn count_names(ms: &[TokenTree]) -> usize {
@@ -368,7 +369,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
    sess: &ParseSess,
    m: &TokenTree,
    res: &mut I,
-    ret_val: &mut FxHashMap<Ident, NamedMatch>,
+    ret_val: &mut FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> Result<(), (rustc_span::Span, String)> {
    match *m {
        TokenTree::Sequence(_, ref seq) => {
@@ -386,7 +387,9 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
                return Err((span, "missing fragment specifier".to_string()));
            }
        }
-        TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val.entry(bind_name) {
+        TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val
+            .entry(MacroRulesNormalizedIdent::new(bind_name))
+        {
            Vacant(spot) => {
                spot.insert(res.next().unwrap());
            }
@@ -22,7 +22,7 @@ use rustc_parse::Directory;
 use rustc_session::parse::ParseSess;
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::Transparency;
-use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::symbol::{kw, sym, MacroRulesNormalizedIdent, Symbol};
 use rustc_span::Span;

 use log::debug;
@@ -411,7 +411,7 @@ pub fn compile_declarative_macro(
    let mut valid = true;

    // Extract the arguments:
-    let lhses = match argument_map[&lhs_nm] {
+    let lhses = match argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
        MatchedSeq(ref s) => s
            .iter()
            .map(|m| {
@@ -428,7 +428,7 @@ pub fn compile_declarative_macro(
        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
    };

-    let rhses = match argument_map[&rhs_nm] {
+    let rhses = match argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
        MatchedSeq(ref s) => s
            .iter()
            .map(|m| {
@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;
 use crate::mbe;
 use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

-use rustc_ast::ast::{Ident, MacCall};
+use rustc_ast::ast::MacCall;
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, NtTT, Token};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
@@ -10,6 +10,7 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::pluralize;
 use rustc_span::hygiene::{ExpnId, Transparency};
+use rustc_span::symbol::MacroRulesNormalizedIdent;
 use rustc_span::Span;

 use smallvec::{smallvec, SmallVec};
@@ -81,7 +82,7 @@ impl Iterator for Frame {
/// Along the way, we do some additional error checking.
pub(super) fn transcribe(
    cx: &ExtCtxt<'_>,
-    interp: &FxHashMap<Ident, NamedMatch>,
+    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: Vec<mbe::TokenTree>,
    transparency: Transparency,
) -> TokenStream {
@@ -223,9 +224,10 @@ pub(super) fn transcribe(
            }

            // Replace the meta-var with the matched token tree from the invocation.
-            mbe::TokenTree::MetaVar(mut sp, mut ident) => {
+            mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
                // Find the matched nonterminal from the macro invocation, and use it to replace
                // the meta-var.
+                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
                    if let MatchedNonterminal(ref nt) = cur_matched {
                        // FIXME #2887: why do we apply a mark when matching a token tree meta-var
@@ -249,9 +251,9 @@ pub(super) fn transcribe(
                    // If we aren't able to match the meta-var, we push it back into the result but
                    // with modified syntax context. (I believe this supports nested macros).
                    marker.visit_span(&mut sp);
-                    marker.visit_ident(&mut ident);
+                    marker.visit_ident(&mut original_ident);
                    result.push(TokenTree::token(token::Dollar, sp).into());
-                    result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
+                    result.push(TokenTree::Token(Token::from_ast_ident(original_ident)).into());
                }
            }
@@ -287,8 +289,8 @@ pub(super) fn transcribe(
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
/// made a mistake, and we return `None`.
fn lookup_cur_matched<'a>(
-    ident: Ident,
-    interpolations: &'a FxHashMap<Ident, NamedMatch>,
+    ident: MacroRulesNormalizedIdent,
+    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|matched| {
@@ -316,7 +318,7 @@ enum LockstepIterSize {
    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
    /// meta-var are returned.
-    Constraint(usize, Ident),
+    Constraint(usize, MacroRulesNormalizedIdent),

    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
    Contradiction(String),
@@ -360,7 +362,7 @@ impl LockstepIterSize {
/// multiple nested matcher sequences.
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
-    interpolations: &FxHashMap<Ident, NamedMatch>,
+    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
@@ -376,6 +378,7 @@ fn lockstep_iter_size(
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
+            let name = MacroRulesNormalizedIdent::new(name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
@@ -98,10 +98,10 @@ pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
-    /// `ret` stores the block we jump to on a normal return, while 'unwind'
-    /// stores the block used for cleanup during unwinding
+    /// `ret` stores the block we jump to on a normal return, while `unwind`
+    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
-    /// Just do nohing: Used by Main and for the box_alloc hook in miri.
+    /// Just do nothing: Used by Main and for the `box_alloc` hook in miri.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
@@ -983,6 +983,31 @@ impl fmt::Display for IdentPrinter {
    }
}

+/// A newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on
+/// construction.
+// FIXME(matthewj, petrochenkov) Use this more often, add a similar
+// `ModernIdent` struct and use that as well.
+#[derive(Copy, Clone, Eq, PartialEq, Hash)]
+pub struct MacroRulesNormalizedIdent(Ident);
+
+impl MacroRulesNormalizedIdent {
+    pub fn new(ident: Ident) -> Self {
+        Self(ident.normalize_to_macro_rules())
+    }
+}
+
+impl fmt::Debug for MacroRulesNormalizedIdent {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&self.0, f)
+    }
+}
+
+impl fmt::Display for MacroRulesNormalizedIdent {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
/// An interned string.
///
/// Internally, a `Symbol` is implemented as an index, and all operations
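MacroRulesNormalizedIdent normalizes hygiene information once, at construction, so every map keyed by it compares metavariable names consistently throughout the macro machinery above. A rough, self-contained analogue of the normalize-on-construction pattern follows; the NormalizedKey type and its lowercasing rule are invented for illustration (rustc normalizes macro_rules hygiene marks, not character case).

use std::collections::HashMap;

/// A newtype that normalizes its contents on construction, so every map
/// lookup and insertion sees the same canonical form.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct NormalizedKey(String);

impl NormalizedKey {
    fn new(raw: &str) -> Self {
        // Stand-in normalization for the sketch.
        NormalizedKey(raw.to_lowercase())
    }
}

fn main() {
    let mut binders: HashMap<NormalizedKey, u32> = HashMap::new();
    binders.insert(NormalizedKey::new("Foo"), 1);

    // Differently-written spellings of the "same" name normalize to one key,
    // so the lookup still succeeds.
    assert_eq!(binders.get(&NormalizedKey::new("foo")), Some(&1));
}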
@@ -150,8 +150,6 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
        "assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" => {
            (1, Vec::new(), tcx.mk_unit())
        }
-        "init" => (1, Vec::new(), param(0)),
-        "uninit" => (1, Vec::new(), param(0)),
        "forget" => (1, vec![param(0)], tcx.mk_unit()),
        "transmute" => (2, vec![param(0)], param(1)),
        "move_val_init" => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),
src/test/ui/hygiene/macro-metavars-legacy.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
// Ensure macro metavariables are compared with legacy hygiene

#![feature(rustc_attrs)]

// run-pass

macro_rules! make_mac {
    ( $($dollar:tt $arg:ident),+ ) => {
        macro_rules! mac {
            ( $($dollar $arg : ident),+ ) => {
                $( $dollar $arg )-+
            }
        }
    }
}

macro_rules! show_hygiene {
    ( $dollar:tt $arg:ident ) => {
        make_mac!($dollar $arg, $dollar arg);
    }
}

show_hygiene!( $arg );

fn main() {
    let x = 5;
    let y = 3;
    assert_eq!(2, mac!(x, y));
}
src/test/ui/hygiene/macro-metavars-transparent.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
// Ensure macro metavariables are not compared without removing transparent
// marks.

#![feature(rustc_attrs)]

// run-pass

#[rustc_macro_transparency = "transparent"]
macro_rules! k {
    ($($s:tt)*) => {
        macro_rules! m {
            ($y:tt) => {
                $($s)*
            }
        }
    }
}

k!(1 + $y);

fn main() {
    let x = 2;
    assert_eq!(3, m!(x));
}
@@ -1,3 +1,4 @@
+// compile-flags: -O
 // run-pass

 #![allow(unused_must_use)]
@@ -10,17 +11,13 @@
-#![feature(intrinsics)]
-
-use std::thread;
-
-extern "rust-intrinsic" {
-    pub fn init<T>() -> T;
-}
+use std::{mem, thread};

 const SIZE: usize = 1024 * 1024;

 fn main() {
     // do the test in a new thread to avoid (spurious?) stack overflows
     thread::spawn(|| {
-        let _memory: [u8; SIZE] = unsafe { init() };
+        let _memory: [u8; SIZE] = unsafe { mem::zeroed() };
     }).join();
 }
@@ -1,9 +0,0 @@
-#![allow(deprecated)]
-#![feature(core_intrinsics)]
-
-use std::intrinsics::{init};
-
-// Test that the `init` intrinsic is really unsafe
-pub fn main() {
-    let stuff = init::<isize>(); //~ ERROR call to unsafe function is unsafe
-}
@@ -1,11 +0,0 @@
-error[E0133]: call to unsafe function is unsafe and requires unsafe function or block
-  --> $DIR/init-unsafe.rs:8:17
-   |
-LL | let stuff = init::<isize>();
-   |             ^^^^^^^^^^^^^^^ call to unsafe function
-   |
-   = note: consult the function's documentation for information on how to avoid undefined behavior
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0133`.
@@ -5,7 +5,6 @@
 mod rusti {
     extern "rust-intrinsic" {
-        pub fn init<T>() -> T;
         pub fn move_val_init<T>(dst: *mut T, src: T);
     }
 }
@@ -15,17 +14,17 @@ pub fn main() {
    // sanity check
    check_drops_state(0, None);

-    let mut x: Box<D> = box D(1);
-    assert_eq!(x.0, 1);
+    let mut x: Option<Box<D>> = Some(box D(1));
+    assert_eq!(x.as_ref().unwrap().0, 1);

    // A normal overwrite, to demonstrate `check_drops_state`.
-    x = box D(2);
+    x = Some(box D(2));

    // At this point, one destructor has run, because the
    // overwrite of `x` drops its initial value.
    check_drops_state(1, Some(1));

-    let mut y: Box<D> = rusti::init();
+    let mut y: Option<Box<D>> = std::mem::zeroed();

    // An initial binding does not overwrite anything.
    check_drops_state(1, Some(1));
@@ -51,9 +50,9 @@ pub fn main() {
    // during such a destructor call. We do so after the end of
    // this scope.

-    assert_eq!(y.0, 2);
-    y.0 = 3;
-    assert_eq!(y.0, 3);
+    assert_eq!(y.as_ref().unwrap().0, 2);
+    y.as_mut().unwrap().0 = 3;
+    assert_eq!(y.as_ref().unwrap().0, 3);

    check_drops_state(1, Some(1));
}
@@ -1,13 +0,0 @@
-// run-pass
-// pretty-expanded FIXME #23616
-
-#![feature(intrinsics)]
-
-mod rusti {
-    extern "rust-intrinsic" {
-        pub fn uninit<T>() -> T;
-    }
-}
-pub fn main() {
-    let _a : isize = unsafe {rusti::uninit()};
-}
@@ -10,5 +10,5 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(Unstable)]
 pub fn derive(_input: TokenStream) -> TokenStream {

-    "unsafe fn foo() -> u32 { ::std::intrinsics::init() }".parse().unwrap()
+    "unsafe fn foo() -> u32 { ::std::intrinsics::abort() }".parse().unwrap()
 }