Auto merge of #127454 - matthiaskrgr:rollup-k3vfen2, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #127179 (Print `TypeId` as hex for debugging)
 - #127189 (LinkedList's Cursor: method to get a ref to the cursor's list)
 - #127236 (doc: update config file path in platform-support/wasm32-wasip1-threads.md)
 - #127297 (Improve std::Path's Hash quality by avoiding prefix collisions)
 - #127308 (Attribute cleanups)
 - #127354 (Describe Sized requirements for mem::offset_of)
 - #127409 (Emit a wrap expr span_bug only if context is not tainted)
 - #127447 (once_lock: make test not take as long in Miri)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 0ca92de473

22 changed files with 191 additions and 135 deletions
@@ -10,8 +10,6 @@ use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
 use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
 use crate::{AttrVec, Attribute, Stmt, StmtKind};
 
-use rustc_span::Span;
-
 use std::fmt;
 use std::marker::PhantomData;
 
@@ -91,37 +89,6 @@ impl<T: AstDeref<Target: HasNodeId>> HasNodeId for T {
     }
 }
 
-/// A trait for AST nodes having a span.
-pub trait HasSpan {
-    fn span(&self) -> Span;
-}
-
-macro_rules! impl_has_span {
-    ($($T:ty),+ $(,)?) => {
-        $(
-            impl HasSpan for $T {
-                fn span(&self) -> Span {
-                    self.span
-                }
-            }
-        )+
-    };
-}
-
-impl_has_span!(AssocItem, Block, Expr, ForeignItem, Item, Pat, Path, Stmt, Ty, Visibility);
-
-impl<T: AstDeref<Target: HasSpan>> HasSpan for T {
-    fn span(&self) -> Span {
-        self.ast_deref().span()
-    }
-}
-
-impl HasSpan for AttrItem {
-    fn span(&self) -> Span {
-        self.span()
-    }
-}
-
 /// A trait for AST nodes having (or not having) collected tokens.
 pub trait HasTokens {
     fn tokens(&self) -> Option<&LazyAttrTokenStream>;
@@ -202,7 +202,8 @@ impl Attribute {
         }
     }
 
-    pub fn tokens(&self) -> TokenStream {
+    // Named `get_tokens` to distinguish it from the `<Attribute as HasTokens>::tokens` method.
+    pub fn get_tokens(&self) -> TokenStream {
         match &self.kind {
             AttrKind::Normal(normal) => TokenStream::new(
                 normal
@@ -44,7 +44,7 @@ pub mod tokenstream;
 pub mod visit;
 
 pub use self::ast::*;
-pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasSpan, HasTokens};
+pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasTokens};
 
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 
@@ -704,7 +704,7 @@ fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
             visit_attr_tts(tts, vis);
             visit_delim_span(dspan, vis);
         }
-        AttrTokenTree::Attributes(AttributesData { attrs, tokens }) => {
+        AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => {
            visit_attrs(attrs, vis);
            visit_lazy_tts_opt_mut(Some(tokens), vis);
         }
@@ -14,7 +14,7 @@
 //! ownership of the original.
 
 use crate::ast::{AttrStyle, StmtKind};
-use crate::ast_traits::{HasAttrs, HasSpan, HasTokens};
+use crate::ast_traits::{HasAttrs, HasTokens};
 use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
 use crate::AttrVec;
 
@@ -170,8 +170,8 @@ pub enum AttrTokenTree {
     Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
     /// Stores the attributes for an attribute target,
     /// along with the tokens for that attribute target.
-    /// See `AttributesData` for more information
-    Attributes(AttributesData),
+    /// See `AttrsTarget` for more information
+    AttrsTarget(AttrsTarget),
 }
 
 impl AttrTokenStream {
@@ -180,7 +180,7 @@ impl AttrTokenStream {
     }
 
     /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
-    /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
+    /// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
@@ -199,13 +199,13 @@ impl AttrTokenStream {
                         TokenStream::new(stream.to_token_trees()),
                     ))
                 }
-                AttrTokenTree::Attributes(data) => {
-                    let idx = data
+                AttrTokenTree::AttrsTarget(target) => {
+                    let idx = target
                         .attrs
                         .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
-                    let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
+                    let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);
 
-                    let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
+                    let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
@@ -227,7 +227,7 @@ impl AttrTokenStream {
 
                                 let mut stream = TokenStream::default();
                                 for inner_attr in inner_attrs {
-                                    stream.push_stream(inner_attr.tokens());
+                                    stream.push_stream(inner_attr.get_tokens());
                                 }
                                 stream.push_stream(delim_tokens.clone());
                                 *tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
@@ -242,7 +242,7 @@ impl AttrTokenStream {
                         );
                     }
                     for attr in outer_attrs {
-                        res.extend(attr.tokens().0.iter().cloned());
+                        res.extend(attr.get_tokens().0.iter().cloned());
                     }
                     res.extend(target_tokens);
                 }
@@ -262,7 +262,7 @@ impl AttrTokenStream {
 /// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
 /// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
 #[derive(Clone, Debug, Encodable, Decodable)]
-pub struct AttributesData {
+pub struct AttrsTarget {
     /// Attributes, both outer and inner.
     /// These are stored in the original order that they were parsed in.
     pub attrs: AttrVec,
@@ -436,17 +436,17 @@ impl TokenStream {
         TokenStream::new(vec![TokenTree::token_alone(kind, span)])
     }
 
-    pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
+    pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
         let Some(tokens) = node.tokens() else {
-            panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
+            panic!("missing tokens for node: {:?}", node);
         };
         let attrs = node.attrs();
         let attr_stream = if attrs.is_empty() {
             tokens.to_attr_token_stream()
         } else {
-            let attr_data =
-                AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
-            AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
+            let target =
+                AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
+            AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
         };
         TokenStream::new(attr_stream.to_token_trees())
     }
@@ -765,6 +765,7 @@ mod size_asserts {
     static_assert_size!(AttrTokenStream, 8);
     static_assert_size!(AttrTokenTree, 32);
     static_assert_size!(LazyAttrTokenStream, 8);
+    static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
     static_assert_size!(TokenStream, 8);
     static_assert_size!(TokenTree, 32);
     // tidy-alphabetical-end
@@ -193,7 +193,7 @@ impl CfgEval<'_> {
 
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
-        // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
+        // `AttrTokenTree::AttrsTarget` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
         let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
         parser.capture_cfg = true;
         match parse_annotatable_with(&mut parser) {
@@ -172,7 +172,7 @@ impl<'a> StripUnconfigured<'a> {
     fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
         fn can_skip(stream: &AttrTokenStream) -> bool {
             stream.0.iter().all(|tree| match tree {
-                AttrTokenTree::Attributes(_) => false,
+                AttrTokenTree::AttrsTarget(_) => false,
                 AttrTokenTree::Token(..) => true,
                 AttrTokenTree::Delimited(.., inner) => can_skip(inner),
             })
@@ -185,22 +185,22 @@ impl<'a> StripUnconfigured<'a> {
         let trees: Vec<_> = stream
             .0
             .iter()
-            .flat_map(|tree| match tree.clone() {
-                AttrTokenTree::Attributes(mut data) => {
-                    data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
+            .filter_map(|tree| match tree.clone() {
+                AttrTokenTree::AttrsTarget(mut target) => {
+                    target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
 
-                    if self.in_cfg(&data.attrs) {
-                        data.tokens = LazyAttrTokenStream::new(
-                            self.configure_tokens(&data.tokens.to_attr_token_stream()),
+                    if self.in_cfg(&target.attrs) {
+                        target.tokens = LazyAttrTokenStream::new(
+                            self.configure_tokens(&target.tokens.to_attr_token_stream()),
                         );
-                        Some(AttrTokenTree::Attributes(data)).into_iter()
+                        Some(AttrTokenTree::AttrsTarget(target))
                     } else {
-                        None.into_iter()
+                        None
                     }
                 }
                 AttrTokenTree::Delimited(sp, spacing, delim, mut inner) => {
                     inner = self.configure_tokens(&inner);
-                    Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)).into_iter()
+                    Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
                 }
                 AttrTokenTree::Token(
                     Token {
@@ -220,9 +220,7 @@ impl<'a> StripUnconfigured<'a> {
                 ) => {
                     panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tree);
                 }
-                AttrTokenTree::Token(token, spacing) => {
-                    Some(AttrTokenTree::Token(token, spacing)).into_iter()
-                }
+                AttrTokenTree::Token(token, spacing) => Some(AttrTokenTree::Token(token, spacing)),
             })
             .collect();
         AttrTokenStream::new(trees)
@@ -294,7 +292,7 @@ impl<'a> StripUnconfigured<'a> {
         attr: &Attribute,
         (item, item_span): (ast::AttrItem, Span),
     ) -> Attribute {
-        let orig_tokens = attr.tokens();
+        let orig_tokens = attr.get_tokens();
 
         // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
         // and producing an attribute of the form `#[attr]`. We
@@ -310,12 +308,11 @@ impl<'a> StripUnconfigured<'a> {
         else {
             panic!("Bad tokens for attribute {attr:?}");
         };
-        let pound_span = pound_token.span;
 
         // We don't really have a good span to use for the synthesized `[]`
         // in `#[attr]`, so just use the span of the `#` token.
         let bracket_group = AttrTokenTree::Delimited(
-            DelimSpan::from_single(pound_span),
+            DelimSpan::from_single(pound_token.span),
             DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
             Delimiter::Bracket,
             item.tokens
@@ -734,7 +734,9 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
                     // struct; however, when EUV is run during typeck, it
                     // may not. This will generate an error earlier in typeck,
                     // so we can just ignore it.
-                    span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
+                    if self.cx.tainted_by_errors().is_ok() {
+                        span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
+                    }
                 }
             }
 
@@ -282,7 +282,7 @@ impl<'a> Parser<'a> {
     pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
         let mut attrs = ast::AttrVec::new();
         loop {
-            let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
+            let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
             let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
                 None
             };
             if let Some(attr) = attr {
-                let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
+                let end_pos = self.num_bump_calls;
                 // If we are currently capturing tokens, mark the location of this inner attribute.
                 // If capturing ends up creating a `LazyAttrTokenStream`, we will include
                 // this replace range with it, removing the inner attribute from the final
@@ -313,7 +313,7 @@ impl<'a> Parser<'a> {
                 // corresponding macro).
                 let range = start_pos..end_pos;
                 if let Capturing::Yes = self.capture_state.capturing {
-                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
+                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, None));
                 }
                 attrs.push(attr);
             } else {
@@ -1,6 +1,6 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
 use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
@@ -8,7 +8,6 @@ use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
 
-use std::ops::Range;
 use std::{iter, mem};
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -88,7 +87,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 //
 // This also makes `Parser` very cheap to clone, since
 // there is no intermediate collection buffer to clone.
-#[derive(Clone)]
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
@@ -146,24 +144,23 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // start position, we ensure that any replace range which encloses
         // another replace range will capture the *replaced* tokens for the inner
         // range, not the original tokens.
-        for (range, new_tokens) in replace_ranges.into_iter().rev() {
+        for (range, target) in replace_ranges.into_iter().rev() {
             assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-            // Replace ranges are only allowed to decrease the number of tokens.
-            assert!(
-                range.len() >= new_tokens.len(),
-                "Range {range:?} has greater len than {new_tokens:?}"
-            );
-
-            // Replace any removed tokens with `FlatToken::Empty`.
-            // This keeps the total length of `tokens` constant throughout the
-            // replacement process, allowing us to use all of the `ReplaceRanges` entries
-            // without adjusting indices.
-            let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
-                .take(range.len() - new_tokens.len());
-
+
+            // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
+            // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
+            // total length of `tokens` constant throughout the replacement process, allowing
+            // us to use all of the `ReplaceRanges` entries without adjusting indices.
+            let target_len = target.is_some() as usize;
             tokens.splice(
                 (range.start as usize)..(range.end as usize),
-                new_tokens.into_iter().chain(filler),
+                target
+                    .into_iter()
+                    .map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
+                    .chain(
+                        iter::repeat((FlatToken::Empty, Spacing::Alone))
+                            .take(range.len() - target_len),
+                    ),
             );
         }
         make_attr_token_stream(tokens.into_iter(), self.break_last_token)
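The splice above is the heart of the simplified `ReplaceRange` handling: a range is replaced by at most one `FlatToken::AttrsTarget` plus enough `FlatToken::Empty` padding to keep the vector length, and hence every later range's indices, unchanged. A minimal standalone sketch of that pattern, with illustrative names (not rustc's):

```rust
use std::iter;

// Replace `range` in `v` with zero or one values, padding with clones of
// `filler` so the vector's length (and all later indices) stays stable.
fn replace_keep_len<T: Clone>(
    v: &mut Vec<T>,
    range: std::ops::Range<usize>,
    replacement: Option<T>,
    filler: T,
) {
    let new_len = replacement.is_some() as usize;
    v.splice(
        range.clone(),
        replacement.into_iter().chain(iter::repeat(filler).take(range.len() - new_len)),
    );
}

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    replace_keep_len(&mut v, 1..4, Some(9), 0);
    assert_eq!(v, [1, 9, 0, 0, 5]); // same length as before
}
```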
@@ -316,7 +313,7 @@ impl<'a> Parser<'a> {
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
+                .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data))
                 .collect()
         };
 
@@ -346,18 +343,14 @@ impl<'a> Parser<'a> {
             && matches!(self.capture_state.capturing, Capturing::Yes)
             && has_cfg_or_cfg_attr(final_attrs)
         {
-            let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
-
-            // Replace the entire AST node that we just parsed, including attributes,
-            // with a `FlatToken::AttrTarget`. If this AST node is inside an item
-            // that has `#[derive]`, then this will allow us to cfg-expand this
-            // AST node.
-            let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
-            let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
-
             assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
-            let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
-            self.capture_state.replace_ranges.push((range, new_tokens));
+
+            // Replace the entire AST node that we just parsed, including attributes, with
+            // `target`. If this AST node is inside an item that has `#[derive]`, then this will
+            // allow us to cfg-expand this AST node.
+            let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
+            let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
+            self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
             self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
         }
 
@@ -419,11 +412,11 @@ fn make_attr_token_stream(
             .expect("Bottom token frame is missing!")
             .inner
             .push(AttrTokenTree::Token(token, spacing)),
-        FlatToken::AttrTarget(data) => stack
+        FlatToken::AttrsTarget(target) => stack
             .last_mut()
             .expect("Bottom token frame is missing!")
             .inner
-            .push(AttrTokenTree::Attributes(data)),
+            .push(AttrTokenTree::AttrsTarget(target)),
         FlatToken::Empty => {}
     }
     token_and_spacing = iter.next();
@@ -20,7 +20,7 @@ use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
@@ -203,13 +203,13 @@ struct ClosureSpans {
 }
 
 /// Indicates a range of tokens that should be replaced by
-/// the tokens in the provided vector. This is used in two
+/// the tokens in the provided `AttrsTarget`. This is used in two
 /// places during token collection:
 ///
 /// 1. During the parsing of an AST node that may have a `#[derive]`
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
-/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
+/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
 /// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
@@ -219,7 +219,7 @@ struct ClosureSpans {
 /// the first macro inner attribute to invoke a proc-macro).
 /// When create a `TokenStream`, the inner attributes get inserted
 /// into the proper place in the token stream.
-type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
+type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
@@ -1608,11 +1608,10 @@ enum FlatToken {
     /// A token - this holds both delimiter (e.g. '{' and '}')
     /// and non-delimiter tokens
     Token(Token),
-    /// Holds the `AttributesData` for an AST node. The
-    /// `AttributesData` is inserted directly into the
-    /// constructed `AttrTokenStream` as
-    /// an `AttrTokenTree::Attributes`.
-    AttrTarget(AttributesData),
+    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
+    /// directly into the constructed `AttrTokenStream` as an
+    /// `AttrTokenTree::AttrsTarget`.
+    AttrsTarget(AttrsTarget),
     /// A special 'empty' token that is ignored during the conversion
     /// to an `AttrTokenStream`. This is used to simplify the
     /// handling of replace ranges.
@@ -1744,7 +1744,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
         ) {
             self.r.dcx().emit_err(errors::LendingIteratorReportError {
                 lifetime: lifetime.ident.span,
-                ty: ty.span(),
+                ty: ty.span,
             });
         } else {
             self.r.dcx().emit_err(errors::AnonymousLivetimeNonGatReportError {
@@ -1495,6 +1495,14 @@ impl<'a, T, A: Allocator> Cursor<'a, T, A> {
     pub fn back(&self) -> Option<&'a T> {
         self.list.back()
     }
+
+    /// Provides a reference to the cursor's parent list.
+    #[must_use]
+    #[inline(always)]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn as_list(&self) -> &'a LinkedList<T, A> {
+        self.list
+    }
 }
 
 impl<'a, T, A: Allocator> CursorMut<'a, T, A> {
@@ -1605,6 +1613,18 @@ impl<'a, T, A: Allocator> CursorMut<'a, T, A> {
     pub fn as_cursor(&self) -> Cursor<'_, T, A> {
         Cursor { list: self.list, current: self.current, index: self.index }
     }
+
+    /// Provides a read-only reference to the cursor's parent list.
+    ///
+    /// The lifetime of the returned reference is bound to that of the
+    /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
+    /// `CursorMut` is frozen for the lifetime of the reference.
+    #[must_use]
+    #[inline(always)]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn as_list(&self) -> &LinkedList<T, A> {
+        self.list
+    }
 }
 
 // Now the list editing operations
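For context on the two hunks above (#127189): both cursor types gain an `as_list` accessor, so code can inspect the whole list without giving up its position. A hedged usage sketch; this needs a nightly toolchain because `linked_list_cursors` is unstable:

```rust
#![feature(linked_list_cursors)]
use std::collections::LinkedList;

fn main() {
    let mut list: LinkedList<i32> = (1..=3).collect();
    let mut cursor = list.cursor_front_mut();
    cursor.move_next(); // now pointing at the element 2

    // Read-only view of the parent list; the cursor is frozen
    // while this borrow is alive.
    assert_eq!(cursor.as_list().len(), 3);

    cursor.insert_after(10);
    assert_eq!(cursor.as_list().len(), 4);
}
```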
@@ -673,7 +673,7 @@ impl hash::Hash for TypeId {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl fmt::Debug for TypeId {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
-        f.debug_tuple("TypeId").field(&self.as_u128()).finish()
+        write!(f, "TypeId({:#034x})", self.as_u128())
     }
 }
 
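The effect of the change from #127179: `{:#034x}` prints the 128-bit id as `0x` followed by 32 zero-padded hex digits, which is easier to compare by eye than the old decimal tuple. A small demonstration; the printed value is unspecified and varies between compiler versions:

```rust
use std::any::TypeId;

fn main() {
    // Prints something like `TypeId(0x1c9c07037252b9b47cb6c0e501e44ad9)`
    // (the concrete digits here are illustrative, not stable).
    println!("{:?}", TypeId::of::<u32>());
}
```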
@@ -1266,6 +1266,20 @@ impl<T> SizedTypeProperties for T {}
 /// // ^^^ error[E0616]: field `private` of struct `Struct` is private
 /// ```
+///
+/// Only [`Sized`] fields are supported, but the container may be unsized:
+/// ```
+/// # use core::mem;
+/// #[repr(C)]
+/// pub struct Struct {
+///     a: u8,
+///     b: [u8],
+/// }
+///
+/// assert_eq!(mem::offset_of!(Struct, a), 0); // OK
+/// // assert_eq!(mem::offset_of!(Struct, b), 1);
+/// // ^^^ error[E0277]: doesn't have a size known at compile-time
+/// ```
 ///
 /// Note that type layout is, in general, [subject to change and
 /// platform-specific](https://doc.rust-lang.org/reference/type-layout.html). If
 /// layout stability is required, consider using an [explicit `repr` attribute].
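To illustrate the layout note at the end of that hunk (#127354): with the default `repr(Rust)` the compiler may reorder fields, so offsets are not guaranteed, but `#[repr(C)]` pins them down. A minimal sketch:

```rust
use std::mem;

#[repr(C)]
struct Pair {
    first: u16,  // offset 0
    second: u32, // offset 4: two padding bytes precede it for u32 alignment
}

fn main() {
    assert_eq!(mem::offset_of!(Pair, first), 0);
    assert_eq!(mem::offset_of!(Pair, second), 4);
}
```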
@@ -3192,15 +3192,19 @@ impl Hash for Path {
         let bytes = &bytes[prefix_len..];
 
         let mut component_start = 0;
-        let mut bytes_hashed = 0;
+        // track some extra state to avoid prefix collisions.
+        // ["foo", "bar"] and ["foobar"], will have the same payload bytes
+        // but result in different chunk_bits
+        let mut chunk_bits: usize = 0;
 
         for i in 0..bytes.len() {
             let is_sep = if verbatim { is_verbatim_sep(bytes[i]) } else { is_sep_byte(bytes[i]) };
             if is_sep {
                 if i > component_start {
                     let to_hash = &bytes[component_start..i];
+                    chunk_bits = chunk_bits.wrapping_add(to_hash.len());
+                    chunk_bits = chunk_bits.rotate_right(2);
                     h.write(to_hash);
-                    bytes_hashed += to_hash.len();
                 }
 
                 // skip over separator and optionally a following CurDir item
@@ -3221,11 +3225,12 @@ impl Hash for Path {
 
         if component_start < bytes.len() {
             let to_hash = &bytes[component_start..];
+            chunk_bits = chunk_bits.wrapping_add(to_hash.len());
+            chunk_bits = chunk_bits.rotate_right(2);
             h.write(to_hash);
-            bytes_hashed += to_hash.len();
         }
 
-        h.write_usize(bytes_hashed);
+        h.write_usize(chunk_bits);
     }
 }
 
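The observable consequence of #127297: two paths whose component bytes concatenate identically but whose component boundaries differ should no longer hash the same, because `chunk_bits` mixes in the per-component lengths. A hedged check (hash values are hasher- and version-dependent, but these two are expected to differ under the new scheme):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::Path;

fn path_hash(p: &Path) -> u64 {
    let mut h = DefaultHasher::new();
    p.hash(&mut h);
    h.finish()
}

fn main() {
    // Both hash the payload bytes "foobar", but with different
    // component boundaries, so chunk_bits differs.
    assert_ne!(path_hash(Path::new("foo/bar")), path_hash(Path::new("foobar")));
}
```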
@@ -1619,6 +1619,20 @@ pub fn test_compare() {
     relative_from: Some("")
     );
 
+    tc!("foo//", "foo",
+    eq: true,
+    starts_with: true,
+    ends_with: true,
+    relative_from: Some("")
+    );
+
+    tc!("foo///", "foo",
+    eq: true,
+    starts_with: true,
+    ends_with: true,
+    relative_from: Some("")
+    );
+
     tc!("foo/.", "foo",
     eq: true,
     starts_with: true,
@@ -1633,6 +1647,20 @@ pub fn test_compare() {
     relative_from: Some("")
     );
 
+    tc!("foo/.//bar", "foo/bar",
+    eq: true,
+    starts_with: true,
+    ends_with: true,
+    relative_from: Some("")
+    );
+
+    tc!("foo//./bar", "foo/bar",
+    eq: true,
+    starts_with: true,
+    ends_with: true,
+    relative_from: Some("")
+    );
+
     tc!("foo/bar", "foo",
     eq: false,
     starts_with: true,
@@ -1640,6 +1668,13 @@ pub fn test_compare() {
     relative_from: Some("bar")
     );
 
+    tc!("foo/bar", "foobar",
+    eq: false,
+    starts_with: false,
+    ends_with: false,
+    relative_from: None
+    );
+
     tc!("foo/bar/baz", "foo/bar",
     eq: false,
     starts_with: true,
@@ -80,14 +80,21 @@ use crate::sync::Once;
 /// static LIST: OnceList<u32> = OnceList::new();
 /// static COUNTER: AtomicU32 = AtomicU32::new(0);
 ///
-/// let vec = (0..thread::available_parallelism().unwrap().get()).map(|_| thread::spawn(|| {
-///     while let i @ 0..=1000 = COUNTER.fetch_add(1, Ordering::Relaxed) {
-///         LIST.push(i);
-///     }
-/// })).collect::<Vec<thread::JoinHandle<_>>>();
-/// vec.into_iter().for_each(|handle| handle.join().unwrap());
+/// # const LEN: u32 = if cfg!(miri) { 50 } else { 1000 };
+/// # /*
+/// const LEN: u32 = 1000;
+/// # */
+/// thread::scope(|s| {
+///     for _ in 0..thread::available_parallelism().unwrap().get() {
+///         s.spawn(|| {
+///             while let i @ 0..LEN = COUNTER.fetch_add(1, Ordering::Relaxed) {
+///                 LIST.push(i);
+///             }
+///         });
+///     }
+/// });
 ///
-/// for i in 0..=1000 {
+/// for i in 0..LEN {
 ///     assert!(LIST.contains(&i));
 /// }
 ///
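The `#`-prefixed lines in that doc example are the standard rustdoc hidden-line trick #127447 relies on: hidden lines still run in the doctest, so Miri gets the smaller bound, while rendered docs show only the plain `const LEN: u32 = 1000;`. A minimal sketch of the same pattern:

```rust
/// ```
/// # const LEN: u32 = if cfg!(miri) { 5 } else { 500 }; // what actually runs
/// # /*
/// const LEN: u32 = 500;                                // what readers see
/// # */
/// assert_eq!((0..LEN).count() as u32, LEN);
/// ```
pub fn demo() {}
```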
@@ -107,7 +107,7 @@ flag, for example:
 
 Users need to install or built wasi-sdk since release 20.0
 https://github.com/WebAssembly/wasi-sdk/releases/tag/wasi-sdk-20
-and specify path to *wasi-root* `.cargo/config.toml`
+and specify path to *wasi-root* `config.toml`
 
 ```toml
 [target.wasm32-wasip1-threads]
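For readers following that doc fix (#127236): the `wasi-root` key belongs in the Rust repository's top-level `config.toml`, not in cargo's `.cargo/config.toml`. A hedged sketch of the entry; the sysroot path below is illustrative and depends on where wasi-sdk was installed:

```toml
# In the rust-lang/rust checkout's config.toml (path is illustrative).
[target.wasm32-wasip1-threads]
wasi-root = "/opt/wasi-sdk/share/wasi-sysroot"
```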
@@ -1,9 +0,0 @@
-//@ known-bug: rust-lang/rust #127332
-
-async fn fun() {
-    enum Foo {
-        A { x: u32 },
-    }
-    let orig = Foo::A { x: 5 };
-    Foo::A { x: 6, ..orig };
-}

tests/ui/typeck/ice-with-expr-not-struct-127332.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
+// Regression test for ICE #127332
+
+// Tests that we do not ICE when a with expr is
+// not a struct but something else like an enum
+
+fn main() {
+    let x = || {
+        enum Foo {
+            A { x: u32 },
+        }
+        let orig = Foo::A { x: 5 };
+        Foo::A { x: 6, ..orig };
+        //~^ ERROR functional record update syntax requires a struct
+    };
+}

tests/ui/typeck/ice-with-expr-not-struct-127332.stderr (new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
error[E0436]: functional record update syntax requires a struct
|
||||
--> $DIR/ice-with-expr-not-struct-127332.rs:12:26
|
||||
|
|
||||
LL | Foo::A { x: 6, ..orig };
|
||||
| ^^^^
|
||||
|
||||
error: aborting due to 1 previous error
|
||||
|
||||
For more information about this error, try `rustc --explain E0436`.
|