Auto merge of #129130 - matthiaskrgr:rollup-603jta0, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #128348 (Unconditionally allow shadow call-stack sanitizer for AArch64)
 - #129065 (Use `impl PartialEq<TokenKind> for Token` more; see the sketch below.)
 - #129072 (Infer async closure args from `Fn` bound even if there is no corresponding `Future` bound on return)
 - #129096 (Print more verbose error for commands that capture output)
 - #129101 (Fix projections when parent capture is by-ref but child capture is by-value in the `ByMoveBody` pass)
 - #129106 (Remove redundant type ops: `Eq`/`Subtype`)
 - #129122 (Remove duplicated `Rustdoc::output` method from `run-make-support` lib)
 - #129124 (rustdoc-json: Use FxHashMap from rustdoc_json_types)

r? `@ghost`
`@rustbot` modify labels: rollup
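
Most of the mechanical churn in this rollup comes from #129065. As a minimal, self-contained sketch (simplified shapes; the real `Token` also carries a span and more variants), the comparison impl it leans on looks like this:

```rust
// Sketch of the `PartialEq<TokenKind> for Token` pattern from #129065.
#[derive(Clone, Copy, PartialEq)]
enum TokenKind {
    Comma,
    Eof,
}

struct Token {
    kind: TokenKind,
    // span elided
}

impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}

fn main() {
    let tok = Token { kind: TokenKind::Comma };
    // Call sites can now compare a token against a `TokenKind` directly,
    // which is the `token.kind == ...` to `token == ...` rewrite visible
    // throughout the parser diffs below.
    assert!(tok == TokenKind::Comma);
    assert!(tok != TokenKind::Eof);
}
```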
bors 2024-08-15 20:04:49 +00:00
commit 2c93fabd98
50 changed files with 396 additions and 306 deletions


@ -328,7 +328,7 @@ pub fn parse_asm_args<'a>(
/// Otherwise, the suggestion will be incorrect.
fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
// Tool-only output
let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
p.dcx().emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span });
}
@ -338,7 +338,7 @@ fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
/// Otherwise, the suggestion will be incorrect.
fn err_unsupported_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
// Tool-only output
let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
p.dcx().emit_err(errors::GlobalAsmUnsupportedOption { span, symbol, full_span });
}


@ -1154,7 +1154,7 @@ fn check_matcher_core<'tt>(
&& matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
&& matches!(
next_token,
TokenTree::Token(token) if token.kind == BinOp(token::BinOpToken::Or)
TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
)
{
// It is a suggestion to use pat_param, for example: $x:pat -> $x:pat_param.


@ -14,7 +14,7 @@ use rustc_middle::span_bug;
use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt};
use rustc_middle::ty::{self, GenericArgs, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor};
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::error_reporting::traits::ArgKind;
use rustc_trait_selection::traits;
@ -539,6 +539,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// we identify the `FnOnce<Args, Output = ?Fut>` bound, and if the output type is
/// an inference variable `?Fut`, we check if that is bounded by a `Future<Output = Ty>`
/// projection.
///
/// This function is actually best-effort with the return type; if we don't find a
/// `Future` projection, we will still return the arguments that we extracted from the `FnOnce`
/// projection, and the output will be an unconstrained type variable instead.
fn extract_sig_from_projection_and_future_bound(
&self,
cause_span: Option<Span>,
@ -564,14 +568,37 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
};
// FIXME: We may want to elaborate here, though I assume this will be exceedingly rare.
let mut return_ty = None;
for bound in self.obligations_for_self_ty(return_vid) {
if let Some(ret_projection) = bound.predicate.as_projection_clause()
&& let Some(ret_projection) = ret_projection.no_bound_vars()
&& self.tcx.is_lang_item(ret_projection.def_id(), LangItem::FutureOutput)
{
return_ty = Some(ret_projection.term.expect_type());
break;
}
}
// SUBTLE: If we didn't find a `Future<Output = ...>` bound for the return
// vid, we still want to attempt to provide inference guidance for the async
// closure's arguments. Instantiate a new vid to plug into the output type.
//
// You may be wondering, what if it's higher-ranked? Well, given that we
// found a type variable for the `FnOnce::Output` projection above, we know
// that the output can't mention any of the vars.
//
// Also note that we use a fresh var here for the signature since the signature
// records the output of the *future*, and `return_vid` above is the type
// variable of the future, not its output.
//
// FIXME: We probably should store this signature inference output in a way
// that does not misuse a `FnSig` type, but that can be done separately.
let return_ty =
return_ty.unwrap_or_else(|| self.next_ty_var(cause_span.unwrap_or(DUMMY_SP)));
let sig = projection.rebind(self.tcx.mk_fn_sig(
input_tys,
ret_projection.term.expect_type(),
return_ty,
false,
hir::Safety::Safe,
Abi::Rust,
@ -579,10 +606,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return Some(ExpectedSig { cause_span, sig });
}
}
None
}
fn sig_of_closure(
&self,
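
For context on the hunk above, here is a hedged sketch of the scenario #129072 fixes (illustrative names; `async` closures were still feature-gated on the nightlies this merge targeted). The closure argument is inferred from the `FnMut` bound alone, with a fresh variable standing in for the future's output; the `try_for_each` test added at the end of this commit exercises the same shape:

```rust
#![feature(async_closure)] // still gated at the time of this merge

fn apply<F, Fut>(mut f: F)
where
    F: FnMut(&'static str) -> Fut,
{
    // Deliberately no `Fut: Future` bound: the closure signature must be
    // inferred from the `FnMut` projection alone.
    let _fut = f("hello");
}

fn main() {
    apply(async |s| {
        s.trim(); // `s` is already known to be `&str` here
    });
}
```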


@ -1853,7 +1853,7 @@ impl KeywordIdents {
if !prev_dollar {
self.check_ident_token(cx, UnderMacro(true), ident);
}
} else if token.kind == TokenKind::Dollar {
} else if *token == TokenKind::Dollar {
prev_dollar = true;
continue;
}


@ -65,10 +65,9 @@ use crate::query::plumbing::{
};
use crate::traits::query::{
CanonicalAliasGoal, CanonicalPredicateGoal, CanonicalTyGoal,
CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal,
CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, DropckConstraint,
DropckOutlivesResult, MethodAutoderefStepsResult, NoSolution, NormalizationResult,
OutlivesBound,
CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpNormalizeGoal,
CanonicalTypeOpProvePredicateGoal, DropckConstraint, DropckOutlivesResult,
MethodAutoderefStepsResult, NoSolution, NormalizationResult, OutlivesBound,
};
use crate::traits::{
specialization_graph, CodegenObligationError, EvaluationResult, ImplSource,
@ -2090,26 +2089,6 @@ rustc_queries! {
desc { "evaluating `type_op_ascribe_user_type` `{:?}`", goal.value.value }
}
/// Do not call this query directly: part of the `Eq` type-op
query type_op_eq(
goal: CanonicalTypeOpEqGoal<'tcx>
) -> Result<
&'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>,
NoSolution,
> {
desc { "evaluating `type_op_eq` `{:?}`", goal.value.value }
}
/// Do not call this query directly: part of the `Subtype` type-op
query type_op_subtype(
goal: CanonicalTypeOpSubtypeGoal<'tcx>
) -> Result<
&'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>,
NoSolution,
> {
desc { "evaluating `type_op_subtype` `{:?}`", goal.value.value }
}
/// Do not call this query directly: part of the `ProvePredicate` type-op
query type_op_prove_predicate(
goal: CanonicalTypeOpProvePredicateGoal<'tcx>


@ -78,6 +78,8 @@ use rustc_middle::mir::{self, dump_mir, MirPass};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt};
use rustc_target::abi::{FieldIdx, VariantIdx};
use crate::pass_manager::validate_body;
pub struct ByMoveBody;
impl<'tcx> MirPass<'tcx> for ByMoveBody {
@ -131,20 +133,40 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
|(parent_field_idx, parent_capture), (child_field_idx, child_capture)| {
// Store this set of additional projections (fields and derefs).
// We need to re-apply them later.
let child_precise_captures =
&child_capture.place.projections[parent_capture.place.projections.len()..];
let mut child_precise_captures = child_capture.place.projections
[parent_capture.place.projections.len()..]
.to_vec();
// If the parent captures by-move, and the child captures by-ref, then we
// need to peel an additional `deref` off of the body of the child.
let needs_deref = child_capture.is_by_ref() && !parent_capture.is_by_ref();
if needs_deref {
assert_ne!(
coroutine_kind,
ty::ClosureKind::FnOnce,
// If the parent capture is by-ref, then we need to apply an additional
// deref before applying any further projections to this place.
if parent_capture.is_by_ref() {
child_precise_captures.insert(
0,
Projection { ty: parent_capture.place.ty(), kind: ProjectionKind::Deref },
);
}
// If the child capture is by-ref, then we need to apply a "ref"
// projection (i.e. `&`) at the end. But wait! We don't have that
// as a projection kind. So instead, we can apply its dual and
// *peel* a deref off of the place when it shows up in the MIR body.
// Luckily, by construction this is always possible.
let peel_deref = if child_capture.is_by_ref() {
assert!(
parent_capture.is_by_ref() || coroutine_kind != ty::ClosureKind::FnOnce,
"`FnOnce` coroutine-closures return coroutines that capture from \
their body; it will always result in a borrowck error!"
);
}
true
} else {
false
};
// Regarding the behavior above, you may think that it's redundant to both
// insert a deref and then peel a deref if the parent and child are both
// captured by-ref. This would be correct, except for the case where we have
// precise capturing projections, since the inserted deref is to the *beginning*
// and the peeled deref is at the *end*. I cannot seem to actually find a
// case where this happens, but let's keep this code flexible.
// Finally, store the type of the parent's captured place. We need
// this when building the field projection in the MIR body later on.
@ -164,7 +186,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
(
FieldIdx::from_usize(parent_field_idx + num_args),
parent_capture_ty,
needs_deref,
peel_deref,
child_precise_captures,
),
)
@ -192,6 +214,10 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
let mut by_move_body = body.clone();
MakeByMoveBody { tcx, field_remapping, by_move_coroutine_ty }.visit_body(&mut by_move_body);
dump_mir(tcx, false, "coroutine_by_move", &0, &by_move_body, |_, _| Ok(()));
// Let's just always validate this body.
validate_body(tcx, &mut by_move_body, "Initial coroutine_by_move body".to_string());
// FIXME: use query feeding to generate the body right here and then only store the `DefId` of the new body.
by_move_body.source = mir::MirSource::from_instance(InstanceKind::CoroutineKindShim {
coroutine_def_id: coroutine_def_id.to_def_id(),
@ -202,7 +228,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
struct MakeByMoveBody<'tcx> {
tcx: TyCtxt<'tcx>,
field_remapping: UnordMap<FieldIdx, (FieldIdx, Ty<'tcx>, bool, &'tcx [Projection<'tcx>])>,
field_remapping: UnordMap<FieldIdx, (FieldIdx, Ty<'tcx>, bool, Vec<Projection<'tcx>>)>,
by_move_coroutine_ty: Ty<'tcx>,
}
@ -223,14 +249,14 @@ impl<'tcx> MutVisitor<'tcx> for MakeByMoveBody<'tcx> {
if place.local == ty::CAPTURE_STRUCT_LOCAL
&& let Some((&mir::ProjectionElem::Field(idx, _), projection)) =
place.projection.split_first()
&& let Some(&(remapped_idx, remapped_ty, needs_deref, bridging_projections)) =
&& let Some(&(remapped_idx, remapped_ty, peel_deref, ref bridging_projections)) =
self.field_remapping.get(&idx)
{
// As noted before, if the parent closure captures a field by value, and
// the child captures a field by ref, then for the by-move body we're
// generating, we also are taking that field by value. Peel off a deref,
// since a layer of ref'ing has now become redundant.
let final_projections = if needs_deref {
let final_projections = if peel_deref {
let Some((mir::ProjectionElem::Deref, projection)) = projection.split_first()
else {
bug!(
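
The capture shape this hunk fixes, mirroring the regression test added later in this commit (a compile-fail sketch by design, not runnable code): the parent async closure captures `x` by ref while the child coroutine moves `*x` by value, so the by-move body must re-project through an inserted deref before borrowck reports the move.

```rust
#![feature(async_closure)] // still gated at the time of this merge

struct Ty; // deliberately not Copy

fn hello(x: &Ty) {
    let _c = async || {
        *x; //~ ERROR cannot move out of `*x` which is behind a shared reference
    };
}

fn main() {}
```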


@ -229,7 +229,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
} else {
let this_spacing = if next_tok.is_punct() {
Spacing::Joint
} else if next_tok.kind == token::Eof {
} else if next_tok == token::Eof {
Spacing::Alone
} else {
Spacing::JointHidden


@ -162,7 +162,7 @@ impl<'a> Parser<'a> {
}
loop {
// skip any other attributes, we want the item
if snapshot.token.kind == token::Pound {
if snapshot.token == token::Pound {
if let Err(err) = snapshot.parse_attribute(InnerAttrPolicy::Permitted) {
err.cancel();
return Some(replacement_span);
@ -343,7 +343,7 @@ impl<'a> Parser<'a> {
// Presumably, the majority of the time there will only be one attr.
let mut expanded_attrs = Vec::with_capacity(1);
while self.token.kind != token::Eof {
while self.token != token::Eof {
let lo = self.token.span;
let item = self.parse_attr_item(ForceCollect::Yes)?;
expanded_attrs.push((item, lo.to(self.prev_token.span)));
@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
// Presumably, the majority of the time there will only be one attr.
let mut nmis = ThinVec::with_capacity(1);
while self.token.kind != token::Eof {
while self.token != token::Eof {
nmis.push(self.parse_meta_item_inner()?);
if !self.eat(&token::Comma) {
break;


@ -474,8 +474,8 @@ impl<'a> Parser<'a> {
// If this isn't the case however, and the suggestion is a token the
// content of which is the same as the found token's, we remove it as well.
if !eq {
if let TokenType::Token(kind) = &token {
if kind == &self.token.kind {
if let TokenType::Token(kind) = token {
if self.token == *kind {
return false;
}
}
@ -506,7 +506,7 @@ impl<'a> Parser<'a> {
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
// The current token is in the same line as the prior token, not recoverable.
} else if [token::Comma, token::Colon].contains(&self.token.kind)
&& self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
&& self.prev_token == token::CloseDelim(Delimiter::Parenthesis)
{
// Likely typo: The current token is on a new line and is expected to be
// `.`, `;`, `?`, or an operator after a close delimiter token.
@ -518,7 +518,7 @@ impl<'a> Parser<'a> {
// https://github.com/rust-lang/rust/issues/72253
} else if self.look_ahead(1, |t| {
t == &token::CloseDelim(Delimiter::Brace)
|| t.can_begin_expr() && t.kind != token::Colon
|| t.can_begin_expr() && *t != token::Colon
}) && [token::Comma, token::Colon].contains(&self.token.kind)
{
// Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
@ -562,7 +562,7 @@ impl<'a> Parser<'a> {
}
}
if self.token.kind == TokenKind::EqEq
if self.token == TokenKind::EqEq
&& self.prev_token.is_ident()
&& expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
{
@ -655,9 +655,9 @@ impl<'a> Parser<'a> {
// positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
// that in the parser requires unbounded lookahead, so we only add a hint to the existing
// error rather than replacing it entirely.
if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
if ((self.prev_token == TokenKind::Ident(sym::c, IdentIsRaw::No)
&& matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
|| (self.prev_token == TokenKind::Ident(sym::cr, IdentIsRaw::No)
&& matches!(
&self.token.kind,
TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@ -673,7 +673,7 @@ impl<'a> Parser<'a> {
// `pub` may be used for an item or `pub(crate)`
if self.prev_token.is_ident_named(sym::public)
&& (self.token.can_begin_item()
|| self.token.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
|| self.token == TokenKind::OpenDelim(Delimiter::Parenthesis))
{
err.span_suggestion_short(
self.prev_token.span,
@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
),
);
if self.token == token::Pound
&& self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket))
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
{
// We have
// #[attr]
@ -867,7 +867,7 @@ impl<'a> Parser<'a> {
let str_span = self.prev_token.span;
let mut span = self.token.span;
let mut count = 0;
while self.token.kind == TokenKind::Pound
while self.token == TokenKind::Pound
&& !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo()))
{
span = span.with_hi(self.token.span.hi());
@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
return;
}
if token::PathSep == self.token.kind && segment.args.is_none() {
if self.token == token::PathSep && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
let lo = self.token.span;
@ -1176,13 +1176,11 @@ impl<'a> Parser<'a> {
let span = lo.to(self.prev_token.span);
// Detect trailing `>` like in `x.collect::Vec<_>>()`.
let mut trailing_span = self.prev_token.span.shrink_to_hi();
while self.token.kind == token::BinOp(token::Shr)
|| self.token.kind == token::Gt
{
while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
trailing_span = trailing_span.to(self.token.span);
self.bump();
}
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Recover from bad turbofish: `foo.collect::Vec<_>()`.
segment.args = Some(AngleBracketedArgs { args, span }.into());
@ -1430,7 +1428,7 @@ impl<'a> Parser<'a> {
self.restore_snapshot(snapshot);
}
}
return if token::PathSep == self.token.kind {
return if self.token == token::PathSep {
// We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::`
if let ExprKind::Binary(o, ..) = inner_op.kind
@ -1462,7 +1460,7 @@ impl<'a> Parser<'a> {
Err(self.dcx().create_err(err))
}
}
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(`
if let ExprKind::Binary(o, ..) = inner_op.kind
@ -1528,7 +1526,7 @@ impl<'a> Parser<'a> {
];
self.consume_tts(1, &modifiers);
if self.token.kind == token::Eof {
if self.token == token::Eof {
// Not entirely sure that what we consumed were fn arguments, rollback.
self.restore_snapshot(snapshot);
Err(())
@ -1811,7 +1809,7 @@ impl<'a> Parser<'a> {
/// This function gets called in places where a semicolon is NOT expected and if there's a
/// semicolon it emits the appropriate error and returns true.
pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool {
if self.token.kind != TokenKind::Semi {
if self.token != TokenKind::Semi {
return false;
}
@ -2405,10 +2403,10 @@ impl<'a> Parser<'a> {
modifier: &[(token::TokenKind, i64)],
) {
while acc > 0 {
if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
if let Some((_, val)) = modifier.iter().find(|(t, _)| self.token == *t) {
acc += *val;
}
if self.token.kind == token::Eof {
if self.token == token::Eof {
break;
}
self.bump();
@ -2598,7 +2596,7 @@ impl<'a> Parser<'a> {
}
})
.is_some()
|| self.token.kind == TokenKind::Dot;
|| self.token == TokenKind::Dot;
// This will be true when a trait object type `Foo +` or a path which was a `const fn` with
// type params has been parsed.
let was_op =
@ -2617,7 +2615,7 @@ impl<'a> Parser<'a> {
})() {
Ok(expr) => {
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
if token::EqEq == snapshot.token.kind {
if snapshot.token == token::EqEq {
err.span_suggestion(
snapshot.token.span,
"if you meant to use an associated type binding, replace `==` with `=`",
@ -2627,7 +2625,7 @@ impl<'a> Parser<'a> {
let guar = err.emit();
let value = self.mk_expr_err(start.to(expr.span), guar);
return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
} else if token::Colon == snapshot.token.kind
} else if snapshot.token == token::Colon
&& expr.span.lo() == snapshot.token.span.hi()
&& matches!(expr.kind, ExprKind::Path(..))
{
@ -2642,8 +2640,7 @@ impl<'a> Parser<'a> {
return Ok(GenericArg::Type(
self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
));
} else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg()
{
} else if self.token == token::Comma || self.token.kind.should_end_const_arg() {
// Avoid the following output by checking that we consumed a full const arg:
// help: expressions must be enclosed in braces to be used as const generic
// arguments
@ -2846,8 +2843,8 @@ impl<'a> Parser<'a> {
pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
// Check for `'a : {`
if !(self.check_lifetime()
&& self.look_ahead(1, |tok| tok.kind == token::Colon)
&& self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace)))
&& self.look_ahead(1, |t| *t == token::Colon)
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace)))
{
return false;
}
@ -3001,7 +2998,7 @@ impl<'a> Parser<'a> {
// >>>>>>>
let mut end = None;
loop {
if self.token.kind == TokenKind::Eof {
if self.token == TokenKind::Eof {
break;
}
if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))


@ -165,7 +165,7 @@ impl<'a> Parser<'a> {
// Look for JS' `===` and `!==` and recover
if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
&& self.token.kind == token::Eq
&& self.token == token::Eq
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@ -190,7 +190,7 @@ impl<'a> Parser<'a> {
// Look for PHP's `<>` and recover
if op.node == AssocOp::Less
&& self.token.kind == token::Gt
&& self.token == token::Gt
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@ -208,7 +208,7 @@ impl<'a> Parser<'a> {
// Look for C++'s `<=>` and recover
if op.node == AssocOp::LessEqual
&& self.token.kind == token::Gt
&& self.token == token::Gt
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@ -882,7 +882,7 @@ impl<'a> Parser<'a> {
let mut res = ensure_sufficient_stack(|| {
loop {
let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// We are using noexpect here because we don't expect a `?` directly after
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Question)
@ -894,12 +894,11 @@ impl<'a> Parser<'a> {
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
continue;
}
let has_dot =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
let has_dot = if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// We are using noexpect here because we don't expect a `.` directly after
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Dot)
} else if self.token.kind == TokenKind::RArrow && self.may_recover() {
} else if self.token == TokenKind::RArrow && self.may_recover() {
// Recovery for `expr->suffix`.
self.bump();
let span = self.prev_token.span;
@ -1206,7 +1205,7 @@ impl<'a> Parser<'a> {
}
fn mk_expr_tuple_field_access(
&mut self,
&self,
lo: Span,
ident_span: Span,
base: P<Expr>,
@ -1221,7 +1220,7 @@ impl<'a> Parser<'a> {
/// Parse a function call expression, `expr(...)`.
fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
} else {
None
@ -1585,7 +1584,7 @@ impl<'a> Parser<'a> {
// Suggests using '<=' if there is an error parsing qpath when the previous token
// is an '=' token. Only emits suggestion if the '<' token and '=' token are
// directly adjacent (i.e. '=<')
if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
if maybe_eq_tok == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
let eq_lt = maybe_eq_tok.span.to(lt_span);
err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified);
}
@ -2230,7 +2229,7 @@ impl<'a> Parser<'a> {
return Ok(());
}
if self.token.kind == token::Comma {
if self.token == token::Comma {
if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) {
return Ok(());
}
@ -2360,7 +2359,7 @@ impl<'a> Parser<'a> {
None => {}
}
if self.token.kind == TokenKind::Semi
if self.token == TokenKind::Semi
&& matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
&& self.may_recover()
{
@ -2557,7 +2556,7 @@ impl<'a> Parser<'a> {
);
} else {
// Look for usages of '=>' where '>=' might be intended
if maybe_fatarrow.kind == token::FatArrow {
if maybe_fatarrow == token::FatArrow {
err.span_suggestion(
maybe_fatarrow.span,
"you might have meant to write a \"greater than or equal to\" comparison",
@ -2606,7 +2605,7 @@ impl<'a> Parser<'a> {
missing_let: None,
comparison: None,
};
if self.prev_token.kind == token::BinOp(token::Or) {
if self.prev_token == token::BinOp(token::Or) {
// This was part of a closure; let that part of the parser recover.
return Err(self.dcx().create_err(err));
} else {
@ -2742,7 +2741,7 @@ impl<'a> Parser<'a> {
}
fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
@ -2776,7 +2775,7 @@ impl<'a> Parser<'a> {
return Err(err);
}
};
return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
return if self.token == token::CloseDelim(Delimiter::Parenthesis) {
// We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
// parser state and emit a targeted suggestion.
let span = vec![start_span, self.token.span];
@ -2995,7 +2994,7 @@ impl<'a> Parser<'a> {
first_expr: &P<Expr>,
arrow_span: Span,
) -> Option<(Span, ErrorGuaranteed)> {
if self.token.kind != token::Semi {
if self.token != token::Semi {
return None;
}
let start_snapshot = self.create_snapshot_for_diagnostic();
@ -3024,18 +3023,18 @@ impl<'a> Parser<'a> {
// We might have either a `,` -> `;` typo, or a block without braces. We need
// a more subtle parsing strategy.
loop {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
// We have reached the closing brace of the `match` expression.
return Some(err(self, stmts));
}
if self.token.kind == token::Comma {
if self.token == token::Comma {
self.restore_snapshot(start_snapshot);
return None;
}
let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
match self.parse_pat_no_top_alt(None, None) {
Ok(_pat) => {
if self.token.kind == token::FatArrow {
if self.token == token::FatArrow {
// Reached arm end.
self.restore_snapshot(pre_pat_snapshot);
return Some(err(self, stmts));
@ -3286,7 +3285,7 @@ impl<'a> Parser<'a> {
}
fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Detect and recover from `($pat if $cond) => $arm`.
let left = self.token.span;
match self.parse_pat_allow_top_alt(
@ -3344,7 +3343,7 @@ impl<'a> Parser<'a> {
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
let msg = "you might have meant to start a match arm after the match guard";
if self.eat(&token::CloseDelim(Delimiter::Brace)) {
let applicability = if self.token.kind != token::FatArrow {
let applicability = if self.token != token::FatArrow {
// We have high confidence that we indeed didn't have a struct
// literal in the match guard, but rather we had some operation
// that ended in a path, immediately followed by a block that was
@ -3565,7 +3564,7 @@ impl<'a> Parser<'a> {
&& self.look_ahead(1, |t| {
AssocOp::from_token(t).is_some()
|| matches!(t.kind, token::OpenDelim(_))
|| t.kind == token::Dot
|| *t == token::Dot
})
{
// Looks like they tried to write a shorthand, complex expression.
@ -3850,11 +3849,11 @@ impl<'a> Parser<'a> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = (this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token.kind == token::Semi)
&& this.token == token::Semi)
// FIXME: pass an additional condition through from the place
// where we know we need a comma, rather than assuming that
// `#[attr] expr,` always captures a trailing comma.
|| this.token.kind == token::Comma;
|| this.token == token::Comma;
Ok((res, trailing))
})
}


@ -393,7 +393,7 @@ impl<'a> Parser<'a> {
if let Some(struct_) = struct_
&& self.may_recover()
&& self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.token == token::OpenDelim(Delimiter::Parenthesis)
{
snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
};


@ -354,7 +354,7 @@ impl<'a> Parser<'a> {
fn is_reuse_path_item(&mut self) -> bool {
// no: `reuse ::path` for compatibility reasons with macro invocations
self.token.is_keyword(kw::Reuse)
&& self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
&& self.look_ahead(1, |t| t.is_path_start() && *t != token::PathSep)
}
/// Are we sure this could not possibly be a macro invocation?
@ -499,7 +499,7 @@ impl<'a> Parser<'a> {
let mut err = self.dcx().struct_span_err(end.span, msg);
if end.is_doc_comment() {
err.span_label(end.span, "this doc comment doesn't document anything");
} else if self.token.kind == TokenKind::Semi {
} else if self.token == TokenKind::Semi {
err.span_suggestion_verbose(
self.token.span,
"consider removing this semicolon",
@ -777,12 +777,12 @@ impl<'a> Parser<'a> {
&& self
.span_to_snippet(self.prev_token.span)
.is_ok_and(|snippet| snippet == "}")
&& self.token.kind == token::Semi;
&& self.token == token::Semi;
let mut semicolon_span = self.token.span;
if !is_unnecessary_semicolon {
// #105369, Detect spurious `;` before assoc fn body
is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
&& self.prev_token.kind == token::Semi;
&& self.prev_token == token::Semi;
semicolon_span = self.prev_token.span;
}
// We have to bail or we'll potentially never make progress.
@ -1194,7 +1194,7 @@ impl<'a> Parser<'a> {
// FIXME: This recovery should be tested better.
if safety == Safety::Default
&& self.token.is_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
{
self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
safety = Safety::Unsafe(self.token.span);
@ -1258,7 +1258,7 @@ impl<'a> Parser<'a> {
&& self.is_keyword_ahead(1, &[kw::Extern])
&& self.look_ahead(
2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
|t| t.kind == token::OpenDelim(Delimiter::Brace),
|t| *t == token::OpenDelim(Delimiter::Brace),
)
}
@ -1343,7 +1343,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (Ident, StaticItem)> {
let ident = self.parse_ident()?;
if self.token.kind == TokenKind::Lt && self.may_recover() {
if self.token == TokenKind::Lt && self.may_recover() {
let generics = self.parse_generics()?;
self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
}
@ -1914,7 +1914,7 @@ impl<'a> Parser<'a> {
let mut err = self.dcx().struct_span_err(sp, msg);
if self.token.is_ident()
|| (self.token.kind == TokenKind::Pound
|| (self.token == TokenKind::Pound
&& (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
{
// This is likely another field, TokenKind::Pound is used for `#[..]`
@ -1937,8 +1937,8 @@ impl<'a> Parser<'a> {
fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
if let Err(err) = self.expect(&token::Colon) {
let sm = self.psess.source_map();
let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token.kind == token::Semi
let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token == token::Semi
&& self.look_ahead(1, |t| {
t.is_path_start()
// We check that we are in a situation like `foo; bar` to avoid bad suggestions
@ -1974,7 +1974,7 @@ impl<'a> Parser<'a> {
attrs: AttrVec,
) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?;
if self.token.kind == token::Not {
if self.token == token::Not {
if let Err(mut err) = self.unexpected() {
// Encounter the macro invocation
err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
@ -1983,10 +1983,10 @@ impl<'a> Parser<'a> {
}
self.expect_field_ty_separator()?;
let ty = self.parse_ty_for_field_def()?;
if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
if self.token == token::Colon && self.look_ahead(1, |t| *t != token::Colon) {
self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
}
if self.token.kind == token::Eq {
if self.token == token::Eq {
self.bump();
let const_expr = self.parse_expr_anon_const()?;
let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
@ -2064,7 +2064,7 @@ impl<'a> Parser<'a> {
.parse_ident_common(false)
// Cancel this error, we don't need it.
.map_err(|err| err.cancel())
&& self.token.kind == TokenKind::Colon
&& self.token == TokenKind::Colon
{
err.span_suggestion(
removal_span,
@ -2367,12 +2367,12 @@ impl<'a> Parser<'a> {
match self.expected_one_of_not_found(&[], expected) {
Ok(error_guaranteed) => Ok(error_guaranteed),
Err(mut err) => {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking.
err.span_label(ident_span, "while parsing this `fn`");
Ok(err.emit())
} else if self.token.kind == token::RArrow
} else if self.token == token::RArrow
&& let Some(fn_params_end) = fn_params_end
{
// Instead of a function body, the parser has encountered a right arrow
@ -2445,7 +2445,7 @@ impl<'a> Parser<'a> {
fn_params_end: Option<Span>,
) -> PResult<'a, Option<P<Block>>> {
let has_semi = if req_body {
self.token.kind == TokenKind::Semi
self.token == TokenKind::Semi
} else {
// Only include `;` in list of expected tokens if body is not required
self.check(&TokenKind::Semi)
@ -2458,7 +2458,7 @@ impl<'a> Parser<'a> {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
.map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token.kind == token::Eq {
} else if self.token == token::Eq {
// Recover `fn foo() = $expr;`.
self.bump(); // `=`
let eq_sp = self.prev_token.span;
@ -2761,7 +2761,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
let mut first_param = true;
// Parse the arguments, starting out with `self` being allowed...
if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis)
// might be typo'd trait impl, handled elsewhere
&& !self.token.is_keyword(kw::For)
{


@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(Recovered::No)
} else if self.token.kind != token::Eof
} else if self.token != token::Eof
&& self.last_unexpected_token_span == Some(self.token.span)
{
FatalError.raise();
@ -756,7 +756,7 @@ impl<'a> Parser<'a> {
/// compound tokens like multi-character operators in process.
/// Returns `true` if the token was eaten.
fn break_and_eat(&mut self, expected: TokenKind) -> bool {
if self.token.kind == expected {
if self.token == expected {
self.bump();
return true;
}
@ -882,7 +882,7 @@ impl<'a> Parser<'a> {
let token_str = pprust::token_kind_to_string(t);
match self.current_closure.take() {
Some(closure_spans) if self.token.kind == TokenKind::Semi => {
Some(closure_spans) if self.token == TokenKind::Semi => {
// Finding a semicolon instead of a comma
// after a closure body indicates that the
// closure body may be a block but the user
@ -910,7 +910,7 @@ impl<'a> Parser<'a> {
// If this was a missing `@` in a binding pattern
// bail with a suggestion
// https://github.com/rust-lang/rust/issues/72373
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
if self.prev_token.is_ident() && self.token == token::DotDot {
let msg = format!(
"if you meant to bind the contents of the rest of the array \
pattern into `{}`, use `@`",


@ -369,7 +369,7 @@ impl<'a> Parser<'a> {
.and_then(|(ident, _)| ident.name.as_str().chars().next())
.is_some_and(char::is_lowercase)
})
&& self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Parenthesis));
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Parenthesis));
// Check for operators.
// `|` is excluded as it is used in pattern alternatives and lambdas,
@ -377,9 +377,9 @@ impl<'a> Parser<'a> {
// `[` is included for indexing operations,
// `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`)
let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
|| self.token.kind == token::Question
|| (self.token.kind == token::OpenDelim(Delimiter::Bracket)
&& self.look_ahead(1, |tok| tok.kind != token::CloseDelim(Delimiter::Bracket)));
|| self.token == token::Question
|| (self.token == token::OpenDelim(Delimiter::Bracket)
&& self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket)));
if !has_trailing_method && !has_trailing_operator {
// Nothing to recover here.
@ -413,7 +413,7 @@ impl<'a> Parser<'a> {
let is_bound = is_end_bound
// is_start_bound: either `..` or `)..`
|| self.token.is_range_separator()
|| self.token.kind == token::CloseDelim(Delimiter::Parenthesis)
|| self.token == token::CloseDelim(Delimiter::Parenthesis)
&& self.look_ahead(1, Token::is_range_separator);
// Check that `parse_expr_assoc_with` didn't eat a rhs.
@ -450,7 +450,7 @@ impl<'a> Parser<'a> {
lo = self.token.span;
}
let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_or_parens()?
@ -625,7 +625,7 @@ impl<'a> Parser<'a> {
///
/// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
if self.token.kind != token::At {
if self.token != token::At {
// Next token is not `@` so it's not going to be an intersection pattern.
return Ok(lhs);
}
@ -958,14 +958,14 @@ impl<'a> Parser<'a> {
self.check_inline_const(dist)
|| self.look_ahead(dist, |t| {
t.is_path_start() // e.g. `MY_CONST`;
|| t.kind == token::Dot // e.g. `.5` for recovery;
|| *t == token::Dot // e.g. `.5` for recovery;
|| matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
|| t.is_bool_lit()
|| t.is_whole_expr()
|| t.is_lifetime() // recover `'a` instead of `'a'`
|| (self.may_recover() // recover leading `(`
&& t.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
&& *t == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis))
&& self.is_pat_range_end_start(dist + 1))
})
}


@ -358,9 +358,9 @@ impl<'a> Parser<'a> {
})?;
let span = lo.to(self.prev_token.span);
AngleBracketedArgs { args, span }.into()
} else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
} else if self.token == token::OpenDelim(Delimiter::Parenthesis)
// FIXME(return_type_notation): Could also recover `...` here.
&& self.look_ahead(1, |tok| tok.kind == token::DotDot)
&& self.look_ahead(1, |t| *t == token::DotDot)
{
self.bump(); // (
self.bump(); // ..
@ -384,7 +384,7 @@ impl<'a> Parser<'a> {
let token_before_parsing = self.token.clone();
let mut snapshot = None;
if self.may_recover()
&& prev_token_before_parsing.kind == token::PathSep
&& prev_token_before_parsing == token::PathSep
&& (style == PathStyle::Expr && self.token.can_begin_expr()
|| style == PathStyle::Pat && self.token.can_begin_pattern())
{
@ -393,7 +393,7 @@ impl<'a> Parser<'a> {
let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) {
Ok(output) => output,
Err(mut error) if prev_token_before_parsing.kind == token::PathSep => {
Err(mut error) if prev_token_before_parsing == token::PathSep => {
error.span_label(
prev_token_before_parsing.span.to(token_before_parsing.span),
"while parsing this parenthesized list of type arguments starting here",


@ -68,7 +68,7 @@ impl<'a> Parser<'a> {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = capture_semi && this.token.kind == token::Semi;
let trailing = capture_semi && this.token == token::Semi;
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
})?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
@ -760,7 +760,7 @@ impl<'a> Parser<'a> {
)
),
);
let suggest_eq = if self.token.kind == token::Dot
let suggest_eq = if self.token == token::Dot
&& let _ = self.bump()
&& let mut snapshot = self.create_snapshot_for_diagnostic()
&& let Ok(_) = snapshot


@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
let mut trailing_plus = false;
let (ts, trailing) = self.parse_paren_comma_seq(|p| {
let ty = p.parse_ty()?;
trailing_plus = p.prev_token.kind == TokenKind::BinOp(token::Plus);
trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
Ok(ty)
})?;
@ -499,8 +499,8 @@ impl<'a> Parser<'a> {
let elt_ty = match self.parse_ty() {
Ok(ty) => ty,
Err(err)
if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| t.kind == token::Semi) =>
if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| *t == token::Semi) =>
{
// Recover from `[LIT; EXPR]` and `[LIT]`
self.bump();
@ -601,7 +601,7 @@ impl<'a> Parser<'a> {
let span_start = self.token.span;
let ast::FnHeader { ext, safety, constness, coroutine_kind } =
self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
if self.may_recover() && self.token.kind == TokenKind::Lt {
if self.may_recover() && self.token == TokenKind::Lt {
self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
}
let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
@ -681,7 +681,7 @@ impl<'a> Parser<'a> {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
}
@ -727,8 +727,7 @@ impl<'a> Parser<'a> {
self.check_keyword(kw::Dyn)
&& (self.token.uninterpolated_span().at_least_rust_2018()
|| self.look_ahead(1, |t| {
(can_begin_dyn_bound_in_edition_2015(t)
|| t.kind == TokenKind::BinOp(token::Star))
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
&& !can_continue_type_after_non_fn_ident(t)
}))
}
@ -750,7 +749,7 @@ impl<'a> Parser<'a> {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
Ok(TyKind::TraitObject(bounds, syntax))
}
@ -1060,7 +1059,7 @@ impl<'a> Parser<'a> {
}
let mut path = if self.token.is_keyword(kw::Fn)
&& self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
&& self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis))
&& let Some(path) = self.recover_path_from_fn()
{
path


@ -1188,7 +1188,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) {
// Sanitizers can only be used on platforms that we know have working sanitizer codegen.
let supported_sanitizers = sess.target.options.supported_sanitizers;
let unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers;
let mut unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers;
// Niche case: if `fixed-x18` is enabled (effectively switching on the
// `reserved-x18` flag), we should allow the Shadow Call Stack sanitizer.
if sess.opts.unstable_opts.fixed_x18 && sess.target.arch == "aarch64" {
unsupported_sanitizers -= SanitizerSet::SHADOWCALLSTACK;
}
match unsupported_sanitizers.into_iter().count() {
0 => {}
1 => {


@ -1,33 +0,0 @@
pub use rustc_middle::traits::query::type_op::Eq;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
use crate::traits::ObligationCtxt;
impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> {
type QueryResponse = ();
fn try_fast_path(
_tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
) -> Option<Self::QueryResponse> {
if key.value.a == key.value.b { Some(()) } else { None }
}
fn perform_query(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_eq(canonicalized)
}
fn perform_locally_with_next_solver(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Self>,
) -> Result<Self::QueryResponse, NoSolution> {
ocx.eq(&ObligationCause::dummy(), key.param_env, key.value.a, key.value.b)?;
Ok(())
}
}


@ -16,12 +16,10 @@ use crate::traits::{ObligationCause, ObligationCtxt};
pub mod ascribe_user_type;
pub mod custom;
pub mod eq;
pub mod implied_outlives_bounds;
pub mod normalize;
pub mod outlives;
pub mod prove_predicate;
pub mod subtype;
pub use rustc_middle::traits::query::type_op::*;


@ -1,30 +0,0 @@
pub use rustc_middle::traits::query::type_op::Subtype;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
use crate::traits::ObligationCtxt;
impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> {
type QueryResponse = ();
fn try_fast_path(_tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
if key.value.sub == key.value.sup { Some(()) } else { None }
}
fn perform_query(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_subtype(canonicalized)
}
fn perform_locally_with_next_solver(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Self>,
) -> Result<Self::QueryResponse, NoSolution> {
ocx.sub(&ObligationCause::dummy(), key.param_env, key.value.sub, key.value.sup)?;
Ok(())
}
}


@ -10,18 +10,14 @@ use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::query::type_op::ascribe_user_type::{
type_op_ascribe_user_type_with_span, AscribeUserType,
};
use rustc_trait_selection::traits::query::type_op::eq::Eq;
use rustc_trait_selection::traits::query::type_op::normalize::Normalize;
use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate;
use rustc_trait_selection::traits::query::type_op::subtype::Subtype;
use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, ObligationCtxt};
pub(crate) fn provide(p: &mut Providers) {
*p = Providers {
type_op_ascribe_user_type,
type_op_eq,
type_op_prove_predicate,
type_op_subtype,
type_op_normalize_ty,
type_op_normalize_clause,
type_op_normalize_fn_sig,
@ -39,16 +35,6 @@ fn type_op_ascribe_user_type<'tcx>(
})
}
fn type_op_eq<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Eq { a, b }) = key.into_parts();
Ok(ocx.eq(&ObligationCause::dummy(), param_env, a, b)?)
})
}
fn type_op_normalize<'tcx, T>(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Normalize<T>>,
@ -91,16 +77,6 @@ fn type_op_normalize_poly_fn_sig<'tcx>(
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, type_op_normalize)
}
fn type_op_subtype<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Subtype { sub, sup }) = key.into_parts();
Ok(ocx.sup(&ObligationCause::dummy(), param_env, sup, sub)?)
})
}
fn type_op_prove_predicate<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,


@ -1056,12 +1056,30 @@ Executed at: {executed_at}"#,
}
};
let fail = |message: &str| {
let fail = |message: &str, output: CommandOutput| -> ! {
if self.is_verbose() {
println!("{message}");
} else {
let (stdout, stderr) = (output.stdout_if_present(), output.stderr_if_present());
// If the command captures output, the user would not see any indication that
// it has failed. In this case, print a more verbose error to provide more
// context.
if stdout.is_some() || stderr.is_some() {
if let Some(stdout) =
output.stdout_if_present().take_if(|s| !s.trim().is_empty())
{
println!("STDOUT:\n{stdout}\n");
}
if let Some(stderr) =
output.stderr_if_present().take_if(|s| !s.trim().is_empty())
{
println!("STDERR:\n{stderr}\n");
}
println!("Command {command:?} has failed. Rerun with -v to see more details.");
} else {
println!("Command has failed. Rerun with -v to see more details.");
}
}
exit!(1);
};
@ -1069,14 +1087,14 @@ Executed at: {executed_at}"#,
match command.failure_behavior {
BehaviorOnFailure::DelayFail => {
if self.fail_fast {
fail(&message);
fail(&message, output);
}
let mut failures = self.delayed_failures.borrow_mut();
failures.push(message);
}
BehaviorOnFailure::Exit => {
fail(&message);
fail(&message, output);
}
BehaviorOnFailure::Ignore => {
// If failures are allowed, either the error has been printed already


@ -291,6 +291,11 @@ impl CommandOutput {
.expect("Cannot parse process stdout as UTF-8")
}
#[must_use]
pub fn stdout_if_present(&self) -> Option<String> {
self.stdout.as_ref().and_then(|s| String::from_utf8(s.clone()).ok())
}
#[must_use]
pub fn stdout_if_ok(&self) -> Option<String> {
if self.is_success() { Some(self.stdout()) } else { None }
@ -303,6 +308,11 @@ impl CommandOutput {
)
.expect("Cannot parse process stderr as UTF-8")
}
#[must_use]
pub fn stderr_if_present(&self) -> Option<String> {
self.stderr.as_ref().and_then(|s| String::from_utf8(s.clone()).ok())
}
}
impl Default for CommandOutput {


@ -61,3 +61,8 @@ Currently the `riscv64-linux-android` target requires the following architecture
* `Zba` (address calculation instructions)
* `Zbb` (base instructions)
* `Zbs` (single-bit instructions)
### aarch64-linux-android on Nightly compilers
As soon as the `-Zfixed-x18` compiler flag is supplied, the [`ShadowCallStack` sanitizer](https://releases.llvm.org/7.0.1/tools/clang/docs/ShadowCallStack.html)
instrumentation is also made available by supplying the second compiler flag `-Zsanitizer=shadow-call-stack`.


@ -1,7 +1,7 @@
# `fixed-x18`
This option prevents the compiler from using the x18 register. It is only
supported on aarch64.
supported on `aarch64`.
From the [ABI spec][arm-abi]:
@ -23,6 +23,11 @@ Currently, the `-Zsanitizer=shadow-call-stack` flag is only supported on
platforms that always treat x18 as a reserved register, and the `-Zfixed-x18`
flag is not required to use the sanitizer on such platforms. However, the
sanitizer may be supported on targets where this is not the case in the future.
One way to do so now, on Nightly compilers, is to explicitly supply the `-Zfixed-x18`
flag for `aarch64` targets, so that the sanitizer is available for instrumentation
on targets such as `aarch64-unknown-none`. However, discretion is still required
to make sure that runtime support is in place for this sanitizer
to be effective.
It is undefined behavior for `-Zsanitizer=shadow-call-stack` code to call into
code where x18 is a temporary register. On the other hand, when you are *not*


@ -787,6 +787,10 @@ A runtime must be provided by the application or operating system.
See the [Clang ShadowCallStack documentation][clang-scs] for more details.
* `aarch64-unknown-none`
In addition to support from a runtime by the application or operating system, the `-Zfixed-x18` flag is also mandatory.
# ThreadSanitizer
ThreadSanitizer is a data race detection tool. It is supported on the following


@ -13,12 +13,15 @@ use std::io::{stdout, BufWriter, Write};
use std::path::PathBuf;
use std::rc::Rc;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_span::def_id::LOCAL_CRATE;
use rustdoc_json_types as types;
// It's important to use the FxHashMap from rustdoc_json_types here, instead of
// the one from rustc_data_structures, as they're different types due to sysroots.
// See #110051 and #127456 for details
use rustdoc_json_types::FxHashMap;
use crate::clean::types::{ExternalCrate, ExternalLocation};
use crate::clean::ItemKind;
@ -234,14 +237,11 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
let index = (*self.index).clone().into_inner();
debug!("Constructing Output");
// This needs to be the default HashMap for compatibility with the public interface for
// rustdoc-json-types
#[allow(rustc::default_hash_types)]
let output = types::Crate {
root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())),
crate_version: self.cache.crate_version.clone(),
includes_private: self.cache.document_private,
index: index.into_iter().collect(),
index,
paths: self
.cache
.paths


@ -5,7 +5,7 @@
use std::path::PathBuf;
use rustc_hash::FxHashMap;
pub use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// The version of JSON output that this crate represents.


@ -470,7 +470,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
});
// Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl
while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token_kind == TokenKind::Ident) {
while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token == TokenKind::Ident) {
let mut iter = iter
.by_ref()
.filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. }));
@ -480,7 +480,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
// matches `!{`
match_tokens!(iter, Bang OpenBrace);
if let Some(LintDeclSearchResult { range, .. }) =
iter.find(|result| result.token_kind == TokenKind::CloseBrace)
iter.find(|result| result.token == TokenKind::CloseBrace)
{
last_decl_curly_offset = Some(range.end);
}


@ -71,14 +71,8 @@ impl Rustdoc {
self
}
/// Specify path to the output folder.
pub fn output<P: AsRef<Path>>(&mut self, path: P) -> &mut Self {
self.cmd.arg("-o");
self.cmd.arg(path.as_ref());
self
}
/// Specify output directory.
#[doc(alias = "output")]
pub fn out_dir<P: AsRef<Path>>(&mut self, path: P) -> &mut Self {
self.cmd.arg("--out-dir").arg(path.as_ref());
self


@ -84,9 +84,7 @@ pub(crate) struct ParsedMacroArgs {
fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
for &keyword in RUST_KW.iter() {
if parser.token.is_keyword(keyword)
&& parser.look_ahead(1, |t| {
t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
})
&& parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
{
parser.bump();
return Some(MacroArg::Keyword(
@ -131,7 +129,7 @@ pub(crate) fn parse_macro_args(
Some(arg) => {
args.push(arg);
parser.bump();
if parser.token.kind == TokenKind::Eof && args.len() == 2 {
if parser.token == TokenKind::Eof && args.len() == 2 {
vec_with_semi = true;
break;
}
@ -150,7 +148,7 @@ pub(crate) fn parse_macro_args(
parser.bump();
if parser.token.kind == TokenKind::Eof {
if parser.token == TokenKind::Eof {
trailing_comma = true;
break;
}


@ -0,0 +1,19 @@
//@ revisions: aarch64 android
//@[aarch64] compile-flags: --target aarch64-unknown-none -Zfixed-x18 -Zsanitizer=shadow-call-stack
//@[aarch64] needs-llvm-components: aarch64
//@[android] compile-flags: --target aarch64-linux-android -Zsanitizer=shadow-call-stack
//@[android] needs-llvm-components: aarch64
#![allow(internal_features)]
#![crate_type = "rlib"]
#![feature(no_core, lang_items)]
#![no_core]
#[lang = "sized"]
trait Sized {}
// CHECK: ; Function Attrs:{{.*}}shadowcallstack
#[no_mangle]
pub fn foo() {}
// CHECK: attributes #0 = {{.*}}shadowcallstack{{.*}}


@ -12,5 +12,5 @@ use run_make_support::{cwd, rustc, rustdoc};
fn main() {
rustc().input("foo.rs").run();
rustc().input("bar.rs").run();
rustdoc().input("baz.rs").library_search_path(cwd()).output(cwd()).run();
rustdoc().input("baz.rs").library_search_path(cwd()).out_dir(cwd()).run();
}


@ -13,7 +13,7 @@ fn main() {
rustdoc()
.arg("-Zunstable-options")
.arg("--emit=invocation-specific")
.output("invocation-only")
.out_dir("invocation-only")
.arg("--resource-suffix=-xxx")
.args(&["--theme", "y.css"])
.args(&["--extend-css", "z.css"])
@ -34,7 +34,7 @@ fn main() {
rustdoc()
.arg("-Zunstable-options")
.arg("--emit=toolchain-shared-resources")
.output("toolchain-only")
.out_dir("toolchain-only")
.arg("--resource-suffix=-xxx")
.args(&["--extend-css", "z.css"])
.input("x.rs")
@ -68,7 +68,7 @@ fn main() {
rustdoc()
.arg("-Zunstable-options")
.arg("--emit=toolchain-shared-resources,unversioned-shared-resources")
.output("all-shared")
.out_dir("all-shared")
.arg("--resource-suffix=-xxx")
.args(&["--extend-css", "z.css"])
.input("x.rs")


@ -16,7 +16,7 @@ fn main() {
.run_fail()
.assert_exit_code(101);
rustdoc().arg("success.rs").output("exit-code").run();
rustdoc().arg("success.rs").out_dir("exit-code").run();
rustdoc().arg("--invalid-arg-foo").run_fail().assert_exit_code(1);


@ -7,12 +7,12 @@ use run_make_support::{diff, rustdoc};
fn main() {
let foo_first = Path::new("foo_first");
rustdoc().input("foo.rs").output(&foo_first).run();
rustdoc().input("bar.rs").output(&foo_first).run();
rustdoc().input("foo.rs").out_dir(&foo_first).run();
rustdoc().input("bar.rs").out_dir(&foo_first).run();
let bar_first = Path::new("bar_first");
rustdoc().input("bar.rs").output(&bar_first).run();
rustdoc().input("foo.rs").output(&bar_first).run();
rustdoc().input("bar.rs").out_dir(&bar_first).run();
rustdoc().input("foo.rs").out_dir(&bar_first).run();
diff()
.expected_file(foo_first.join("search-index.js"))


@ -25,7 +25,7 @@ fn main() {
permissions.set_readonly(true);
rfs::set_permissions(&out_dir, permissions);
let output = rustdoc().input("foo.rs").output(&out_dir).env("RUST_BACKTRACE", "1").run_fail();
let output = rustdoc().input("foo.rs").out_dir(&out_dir).env("RUST_BACKTRACE", "1").run_fail();
rfs::set_permissions(&out_dir, original_permissions);


@ -6,7 +6,7 @@ fn main() {
.input("foo.rs")
.arg("-Zunstable-options")
.arg("--generate-redirect-map")
.output(&out_dir)
.out_dir(&out_dir)
.run();
// FIXME (GuillaumeGomez): Port the python script to Rust as well.
python_command().arg("validate_json.py").arg(&out_dir).run();


@ -6,6 +6,6 @@ use run_make_support::rustdoc;
fn main() {
let out_dir = Path::new("foo/bar/doc");
rustdoc().input("foo.rs").output(&out_dir).run();
rustdoc().input("foo.rs").out_dir(&out_dir).run();
assert!(out_dir.exists());
}


@ -10,7 +10,7 @@ fn main() {
// First we check that we generate the JSON in the stdout.
rustdoc()
.input("foo.rs")
.output("-")
.out_dir("-")
.arg("-Zunstable-options")
.output_format("json")
.run()


@ -35,7 +35,7 @@ fn main() {
.input("examples/ex.rs")
.crate_name("ex")
.crate_type("bin")
.output(&out_dir)
.out_dir(&out_dir)
.extern_(crate_name, rust_lib_name(crate_name))
.extern_(proc_crate_name, dylib_name.trim())
.arg("-Zunstable-options")
@ -49,7 +49,7 @@ fn main() {
.input("src/lib.rs")
.crate_name(crate_name)
.crate_type("lib")
.output(&out_dir)
.out_dir(&out_dir)
.arg("-Zunstable-options")
.arg("--with-examples")
.arg(&ex_dir)


@ -20,7 +20,7 @@ pub fn scrape(extra_args: &[&str]) {
.input(&dep)
.crate_name(&dep_stem)
.crate_type("bin")
.output(&out_dir)
.out_dir(&out_dir)
.extern_(crate_name, format!("lib{crate_name}.rmeta"))
.arg("-Zunstable-options")
.arg("--scrape-examples-output-path")
@ -35,7 +35,7 @@ pub fn scrape(extra_args: &[&str]) {
let mut rustdoc = rustdoc();
rustdoc
.input("src/lib.rs")
.output(&out_dir)
.out_dir(&out_dir)
.crate_name(crate_name)
.crate_type("lib")
.arg("-Zunstable-options");


@ -7,7 +7,7 @@ fn main() {
rustc().crate_type("lib").input("dummy_core.rs").target("target.json").run();
rustdoc()
.input("my_crate.rs")
.output(out_dir)
.out_dir(out_dir)
.library_search_path(cwd())
.target("target.json")
.run();


@ -27,6 +27,6 @@ fn main() {
rfs::create_dir_all(&out_dir);
rfs::write(&test_css, test_content);
rustdoc().output(&out_dir).input("foo.rs").arg("--theme").arg(&test_css).run();
rustdoc().out_dir(&out_dir).input("foo.rs").arg("--theme").arg(&test_css).run();
htmldocck().arg(out_dir).arg("foo.rs").run();
}


@ -2,6 +2,6 @@ use run_make_support::{htmldocck, rustdoc};
fn main() {
let out_dir = "rustdoc";
rustdoc().input("src/lib.rs").crate_name("foobar").crate_type("lib").output(&out_dir).run();
rustdoc().input("src/lib.rs").crate_name("foobar").crate_type("lib").out_dir(&out_dir).run();
htmldocck().arg(out_dir).arg("src/lib.rs").run();
}


@ -0,0 +1,15 @@
//@ compile-flags: --target aarch64-unknown-none -Zsanitizer=shadow-call-stack
//@ error-pattern: shadow-call-stack sanitizer is not supported for this target
//@ dont-check-compiler-stderr
//@ needs-llvm-components: aarch64
#![allow(internal_features)]
#![crate_type = "rlib"]
#![feature(no_core, lang_items)]
#![no_core]
#[lang = "sized"]
trait Sized {}
#[no_mangle]
pub fn foo() {}


@ -0,0 +1,16 @@
//@ compile-flags: -Zvalidate-mir
//@ edition: 2021
#![feature(async_closure)]
// NOT copy.
struct Ty;
fn hello(x: &Ty) {
let c = async || {
*x;
//~^ ERROR cannot move out of `*x` which is behind a shared reference
};
}
fn main() {}


@ -0,0 +1,18 @@
error[E0507]: cannot move out of `*x` which is behind a shared reference
--> $DIR/move-out-of-ref.rs:11:9
|
LL | *x;
| ^^ move occurs because `*x` has type `Ty`, which does not implement the `Copy` trait
|
note: if `Ty` implemented `Clone`, you could clone the value
--> $DIR/move-out-of-ref.rs:7:1
|
LL | struct Ty;
| ^^^^^^^^^ consider implementing `Clone` for this type
...
LL | *x;
| -- you could clone this value
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0507`.


@ -0,0 +1,49 @@
//@ check-pass
//@ edition: 2021
// Make sure that we infer the args of an async closure even if it's passed to
// a function that requires the async closure implement `Fn*` but does *not* have
// a `Future` bound on the return type.
#![feature(async_closure)]
use std::future::Future;
trait TryStream {
type Ok;
type Err;
}
trait TryFuture {
type Ok;
type Err;
}
impl<F, T, E> TryFuture for F where F: Future<Output = Result<T, E>> {
type Ok = T;
type Err = E;
}
trait TryStreamExt: TryStream {
fn try_for_each<F, Fut>(&self, f: F)
where
F: FnMut(Self::Ok) -> Fut,
Fut: TryFuture<Ok = (), Err = Self::Err>;
}
impl<S> TryStreamExt for S where S: TryStream {
fn try_for_each<F, Fut>(&self, f: F)
where
F: FnMut(Self::Ok) -> Fut,
Fut: TryFuture<Ok = (), Err = Self::Err>,
{ }
}
fn test(stream: impl TryStream<Ok = &'static str, Err = ()>) {
stream.try_for_each(async |s| {
s.trim(); // Make sure we know the type of `s` at this point.
Ok(())
});
}
fn main() {}