Remove `ParseSess` methods that duplicate `DiagCtxt` methods.
Also add missing `#[track_caller]` attributes to `DiagCtxt` methods as necessary to keep tests working.
parent ec9af0d6cb
commit d51db05d7e
22 changed files with 256 additions and 308 deletions
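For orientation, the shape of the change is sketched below. This is a simplified, hypothetical stand-in written for illustration only — it is not code from this commit, and the real `Session`/`ParseSess`/`DiagCtxt` types in `rustc_session`/`rustc_errors` have many more methods. Call sites stop going through the `ParseSess` forwarding wrappers and reach the diagnostic context directly (`sess.dcx`, `sess.dcx()`, `parser.dcx()`), while the `DiagCtxt` emitters gain `#[track_caller]` so the reported caller location stays on the original call site, as the commit message above describes.

// Sketch only: simplified stand-ins for rustc's ParseSess/DiagCtxt.
use std::panic::Location;

struct DiagCtxt;

impl DiagCtxt {
    // `#[track_caller]` keeps the reported location on the original call site
    // rather than on this method; that is what the added attributes preserve
    // now that callers invoke `DiagCtxt` methods directly.
    #[track_caller]
    fn emit_err(&self, msg: &str) {
        println!("error: {msg} (reported at {})", Location::caller());
    }
}

struct ParseSess {
    dcx: DiagCtxt,
}

impl ParseSess {
    // The kind of duplicate forwarding method this commit removes; callers now
    // write `sess.dcx.emit_err(..)` or `parser.dcx().emit_err(..)` instead.
    #[allow(dead_code)]
    fn emit_err(&self, msg: &str) {
        self.dcx.emit_err(msg)
    }
}

fn main() {
    let sess = ParseSess { dcx: DiagCtxt };
    // Post-commit style: go to the diagnostic context directly.
    sess.dcx.emit_err("expected `;`, found `}`");
}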
@@ -588,6 +588,7 @@ pub fn eval_condition(
 features: Option<&Features>,
 eval: &mut impl FnMut(Condition) -> bool,
 ) -> bool {
+let dcx = &sess.dcx;
 match &cfg.kind {
 ast::MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => {
 try_gate_cfg(sym::version, cfg.span, sess, features);
@@ -599,18 +600,18 @@ pub fn eval_condition(
 NestedMetaItem::Lit(MetaItemLit { span, .. })
 | NestedMetaItem::MetaItem(MetaItem { span, .. }),
 ] => {
-sess.emit_err(session_diagnostics::ExpectedVersionLiteral { span: *span });
+dcx.emit_err(session_diagnostics::ExpectedVersionLiteral { span: *span });
 return false;
 }
 [..] => {
-sess.emit_err(session_diagnostics::ExpectedSingleVersionLiteral {
+dcx.emit_err(session_diagnostics::ExpectedSingleVersionLiteral {
 span: cfg.span,
 });
 return false;
 }
 };
 let Some(min_version) = parse_version(*min_version) else {
-sess.emit_warning(session_diagnostics::UnknownVersionLiteral { span: *span });
+dcx.emit_warning(session_diagnostics::UnknownVersionLiteral { span: *span });
 return false;
 };

@@ -624,7 +625,7 @@ pub fn eval_condition(
 ast::MetaItemKind::List(mis) => {
 for mi in mis.iter() {
 if !mi.is_meta_item() {
-sess.emit_err(session_diagnostics::UnsupportedLiteral {
+dcx.emit_err(session_diagnostics::UnsupportedLiteral {
 span: mi.span(),
 reason: UnsupportedLiteralReason::Generic,
 is_bytestr: false,
@@ -653,9 +654,7 @@ pub fn eval_condition(
 }),
 sym::not => {
 if mis.len() != 1 {
-sess.emit_err(session_diagnostics::ExpectedOneCfgPattern {
-span: cfg.span,
-});
+dcx.emit_err(session_diagnostics::ExpectedOneCfgPattern { span: cfg.span });
 return false;
 }

@@ -684,7 +683,7 @@ pub fn eval_condition(
 })
 }
 _ => {
-sess.emit_err(session_diagnostics::InvalidPredicate {
+dcx.emit_err(session_diagnostics::InvalidPredicate {
 span: cfg.span,
 predicate: pprust::path_to_string(&cfg.path),
 });
@@ -693,11 +692,11 @@ pub fn eval_condition(
 }
 }
 ast::MetaItemKind::Word | MetaItemKind::NameValue(..) if cfg.path.segments.len() != 1 => {
-sess.emit_err(session_diagnostics::CfgPredicateIdentifier { span: cfg.path.span });
+dcx.emit_err(session_diagnostics::CfgPredicateIdentifier { span: cfg.path.span });
 true
 }
 MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
-sess.emit_err(session_diagnostics::UnsupportedLiteral {
+dcx.emit_err(session_diagnostics::UnsupportedLiteral {
 span: lit.span,
 reason: UnsupportedLiteralReason::CfgString,
 is_bytestr: lit.kind.is_bytestr(),
@@ -945,7 +944,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
 assert!(attr.has_name(sym::repr), "expected `#[repr(..)]`, found: {attr:?}");
 use ReprAttr::*;
 let mut acc = Vec::new();
-let diagnostic = sess.dcx();
+let dcx = sess.dcx();

 if let Some(items) = attr.meta_item_list() {
 for item in items {
@@ -1062,7 +1061,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
 // (e.g. if we only pretty-print the source), so we have to gate
 // the `span_delayed_bug` call as follows:
 if sess.opts.pretty.map_or(true, |pp| pp.needs_analysis()) {
-diagnostic.span_delayed_bug(item.span(), "unrecognized representation hint");
+dcx.span_delayed_bug(item.span(), "unrecognized representation hint");
 }
 }
 }
@@ -422,7 +422,7 @@ fn parse_reg<'a>(
 ast::InlineAsmRegOrRegClass::Reg(symbol)
 }
 _ => {
-return Err(p.sess.create_err(errors::ExpectedRegisterClassOrExplicitRegister {
+return Err(p.dcx().create_err(errors::ExpectedRegisterClassOrExplicitRegister {
 span: p.token.span,
 }));
 }
@@ -1203,6 +1203,7 @@ impl DiagCtxt {
 self.inner.borrow_mut().emit_diagnostic_without_consuming(diagnostic)
 }

+#[track_caller]
 pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
 self.create_err(err).emit()
 }
@@ -1212,6 +1213,7 @@ impl DiagCtxt {
 err.into_diagnostic(self, Error { lint: false })
 }

+#[track_caller]
 pub fn create_warning<'a>(
 &'a self,
 warning: impl IntoDiagnostic<'a, ()>,
@@ -1219,10 +1221,12 @@ impl DiagCtxt {
 warning.into_diagnostic(self, Warning(None))
 }

+#[track_caller]
 pub fn emit_warning<'a>(&'a self, warning: impl IntoDiagnostic<'a, ()>) {
 self.create_warning(warning).emit()
 }

+#[track_caller]
 pub fn create_almost_fatal<'a>(
 &'a self,
 fatal: impl IntoDiagnostic<'a, FatalError>,
@@ -1230,6 +1234,7 @@ impl DiagCtxt {
 fatal.into_diagnostic(self, Fatal)
 }

+#[track_caller]
 pub fn emit_almost_fatal<'a>(
 &'a self,
 fatal: impl IntoDiagnostic<'a, FatalError>,
@@ -1237,6 +1242,7 @@ impl DiagCtxt {
 self.create_almost_fatal(fatal).emit()
 }

+#[track_caller]
 pub fn create_fatal<'a>(
 &'a self,
 fatal: impl IntoDiagnostic<'a, FatalAbort>,
@@ -1244,10 +1250,12 @@ impl DiagCtxt {
 fatal.into_diagnostic(self, Fatal)
 }

+#[track_caller]
 pub fn emit_fatal<'a>(&'a self, fatal: impl IntoDiagnostic<'a, FatalAbort>) -> ! {
 self.create_fatal(fatal).emit()
 }

+#[track_caller]
 pub fn create_bug<'a>(
 &'a self,
 bug: impl IntoDiagnostic<'a, BugAbort>,
@@ -1255,14 +1263,17 @@ impl DiagCtxt {
 bug.into_diagnostic(self, Bug)
 }

-pub fn emit_bug<'a>(&'a self, bug: impl IntoDiagnostic<'a, BugAbort>) -> ! {
+#[track_caller]
+pub fn emit_bug<'a>(&'a self, bug: impl IntoDiagnostic<'a, diagnostic_builder::BugAbort>) -> ! {
 self.create_bug(bug).emit()
 }

+#[track_caller]
 pub fn emit_note<'a>(&'a self, note: impl IntoDiagnostic<'a, ()>) {
 self.create_note(note).emit()
 }

+#[track_caller]
 pub fn create_note<'a>(
 &'a self,
 note: impl IntoDiagnostic<'a, ()>,
@@ -1115,7 +1115,7 @@ impl<'a> ExtCtxt<'a> {

 pub fn trace_macros_diag(&mut self) {
 for (span, notes) in self.expansions.iter() {
-let mut db = self.sess.parse_sess.create_note(errors::TraceMacro { span: *span });
+let mut db = self.dcx().create_note(errors::TraceMacro { span: *span });
 for note in notes {
 db.note(note.clone());
 }
@@ -957,7 +957,7 @@ pub fn ensure_complete_parse<'a>(

 let expands_to_match_arm = kind_name == "pattern" && parser.token == token::FatArrow;

-parser.sess.emit_err(IncompleteParse {
+parser.dcx().emit_err(IncompleteParse {
 span: def_site_span,
 token,
 label_span: span,
@@ -88,6 +88,6 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
 },
 None => ExplicitLifetimeRequired::WithParamType { span, named, new_ty_span, new_ty },
 };
-Some(self.tcx().sess.parse_sess.create_err(err))
+Some(self.tcx().sess.dcx().create_err(err))
 }
 }
@@ -8,7 +8,7 @@ use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::{
-error_code, Applicability, Diagnostic, DiagnosticBuilder, FatalAbort, StashKey,
+error_code, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder, FatalAbort, StashKey,
 };
 use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
@@ -110,6 +110,10 @@ struct StringReader<'a> {
 }

 impl<'a> StringReader<'a> {
+pub fn dcx(&self) -> &'a DiagCtxt {
+&self.sess.dcx
+}
+
 fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
 self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
 }
@@ -176,7 +180,7 @@ impl<'a> StringReader<'a> {
 let span = self.mk_sp(start, self.pos);
 self.sess.symbol_gallery.insert(sym, span);
 if !sym.can_be_raw() {
-self.sess.emit_err(errors::CannotBeRawIdent { span, ident: sym });
+self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
 }
 self.sess.raw_identifier_spans.push(span);
 token::Ident(sym, true)
@@ -247,7 +251,7 @@ impl<'a> StringReader<'a> {
 let lifetime_name = self.str_from(start);
 if starts_with_number {
 let span = self.mk_sp(start, self.pos);
-let mut diag = self.sess.struct_err("lifetimes cannot start with a number");
+let mut diag = self.dcx().struct_err("lifetimes cannot start with a number");
 diag.set_span(span);
 diag.stash(span, StashKey::LifetimeIsChar);
 }
@@ -308,7 +312,7 @@ impl<'a> StringReader<'a> {
 // fancier error recovery to it, as there will be less overall work to do this
 // way.
 let (token, sugg) = unicode_chars::check_for_substitution(self, start, c, repeats+1);
-self.sess.emit_err(errors::UnknownTokenStart {
+self.dcx().emit_err(errors::UnknownTokenStart {
 span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
 escaped: escaped_char(c),
 sugg,
@@ -384,7 +388,7 @@ impl<'a> StringReader<'a> {
 content_start + BytePos(idx as u32 + 1),
 );
 let block = matches!(comment_kind, CommentKind::Block);
-self.sess.emit_err(errors::CrDocComment { span, block });
+self.dcx().emit_err(errors::CrDocComment { span, block });
 }
 }

@@ -483,7 +487,7 @@ impl<'a> StringReader<'a> {
 rustc_lexer::LiteralKind::Int { base, empty_int } => {
 if empty_int {
 let span = self.mk_sp(start, end);
-self.sess.emit_err(errors::NoDigitsLiteral { span });
+self.dcx().emit_err(errors::NoDigitsLiteral { span });
 (token::Integer, sym::integer(0))
 } else {
 if matches!(base, Base::Binary | Base::Octal) {
@@ -495,7 +499,7 @@ impl<'a> StringReader<'a> {
 start + BytePos::from_usize(2 + idx + c.len_utf8()),
 );
 if c != '_' && c.to_digit(base).is_none() {
-self.sess.emit_err(errors::InvalidDigitLiteral { span, base });
+self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
 }
 }
 }
@@ -505,7 +509,7 @@ impl<'a> StringReader<'a> {
 rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
 if empty_exponent {
 let span = self.mk_sp(start, self.pos);
-self.sess.emit_err(errors::EmptyExponentFloat { span });
+self.dcx().emit_err(errors::EmptyExponentFloat { span });
 }
 let base = match base {
 Base::Hexadecimal => Some("hexadecimal"),
@@ -515,7 +519,7 @@ impl<'a> StringReader<'a> {
 };
 if let Some(base) = base {
 let span = self.mk_sp(start, end);
-self.sess.emit_err(errors::FloatLiteralUnsupportedBase { span, base });
+self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
 }
 (token::Float, self.symbol_from_to(start, end))
 }
@@ -678,7 +682,7 @@ impl<'a> StringReader<'a> {
 } else {
 None
 };
-self.sess.emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
+self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
 } else {
 // Before Rust 2021, only emit a lint for migration.
 self.sess.buffer_lint_with_diagnostic(
@@ -692,7 +696,7 @@ impl<'a> StringReader<'a> {
 }

 fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
-self.sess.emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
+self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
 }

 fn cook_common(
@@ -263,5 +263,5 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
 #the-cfg_attr-attribute>";

 fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) {
-parse_sess.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP });
+parse_sess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP });
 }
@@ -323,7 +323,7 @@ impl<'a> Parser<'a> {
 debug!("checking if {:?} is unsuffixed", lit);

 if !lit.kind.is_unsuffixed() {
-self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
+self.dcx().emit_err(SuffixedLiteralInAttribute { span: lit.span });
 }

 Ok(lit)
@@ -524,7 +524,7 @@ impl<'a> Parser<'a> {
 //
 // let x = 32:
 // let y = 42;
-self.sess.emit_err(ExpectedSemi {
+self.dcx().emit_err(ExpectedSemi {
 span: self.token.span,
 token: self.token.clone(),
 unexpected_token_label: None,
@@ -549,7 +549,7 @@ impl<'a> Parser<'a> {
 // let x = 32
 // let y = 42;
 let span = self.prev_token.span.shrink_to_hi();
-self.sess.emit_err(ExpectedSemi {
+self.dcx().emit_err(ExpectedSemi {
 span,
 token: self.token.clone(),
 unexpected_token_label: Some(self.token.span),
@@ -564,13 +564,13 @@ impl<'a> Parser<'a> {
 && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
 {
 // Likely typo: `=` → `==` in let expr or enum item
-return Err(self.sess.create_err(UseEqInstead { span: self.token.span }));
+return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
 }

 if self.token.is_keyword(kw::Move) && self.prev_token.is_keyword(kw::Async) {
 // The 2015 edition is in use because parsing of `async move` has failed.
 let span = self.prev_token.span.to(self.token.span);
-return Err(self.sess.create_err(AsyncMoveBlockIn2015 { span }));
+return Err(self.dcx().create_err(AsyncMoveBlockIn2015 { span }));
 }

 let expect = tokens_to_string(&expected);
@@ -724,7 +724,7 @@ impl<'a> Parser<'a> {
 pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) {
 // Missing semicolon typo error.
 let span = self.prev_token.span.shrink_to_hi();
-let mut err = self.sess.create_err(ExpectedSemi {
+let mut err = self.dcx().create_err(ExpectedSemi {
 span,
 token: self.token.clone(),
 unexpected_token_label: Some(self.token.span),
@@ -901,7 +901,7 @@ impl<'a> Parser<'a> {
 if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
 t.is_alphanumeric() || t == ':' || t == '_'
 }) {
-Err(self.sess.create_err(StructLiteralNeedingParens {
+Err(self.dcx().create_err(StructLiteralNeedingParens {
 span: maybe_struct_name.span.to(expr.span),
 sugg: StructLiteralNeedingParensSugg {
 before: extend_before.shrink_to_lo(),
@@ -912,7 +912,7 @@ impl<'a> Parser<'a> {
 return None;
 }
 } else {
-self.sess.emit_err(StructLiteralBodyWithoutPath {
+self.dcx().emit_err(StructLiteralBodyWithoutPath {
 span: expr.span,
 sugg: StructLiteralBodyWithoutPathSugg {
 before: expr.span.shrink_to_lo(),
@@ -1117,7 +1117,7 @@ impl<'a> Parser<'a> {
 let span = lo.until(self.token.span);

 let num_extra_brackets = number_of_gt + number_of_shr * 2;
-self.sess.emit_err(UnmatchedAngleBrackets { span, num_extra_brackets });
+self.dcx().emit_err(UnmatchedAngleBrackets { span, num_extra_brackets });
 return true;
 }
 false
@@ -1149,7 +1149,7 @@ impl<'a> Parser<'a> {
 // Recover from bad turbofish: `foo.collect::Vec<_>()`.
 segment.args = Some(AngleBracketedArgs { args, span }.into());

-self.sess.emit_err(GenericParamsWithoutAngleBrackets {
+self.dcx().emit_err(GenericParamsWithoutAngleBrackets {
 span,
 sugg: GenericParamsWithoutAngleBracketsSugg {
 left: span.shrink_to_lo(),
@@ -1403,7 +1403,7 @@ impl<'a> Parser<'a> {
 match self.parse_expr() {
 Ok(_) => {
 // 99% certain that the suggestion is correct, continue parsing.
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 // FIXME: actually check that the two expressions in the binop are
 // paths and resynthesize new fn call expression instead of using
 // `ExprKind::Err` placeholder.
@@ -1431,7 +1431,7 @@ impl<'a> Parser<'a> {
 match self.consume_fn_args() {
 Err(()) => Err(self.dcx().create_err(err)),
 Ok(()) => {
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 // FIXME: actually check that the two expressions in the binop are
 // paths and resynthesize new fn call expression instead of using
 // `ExprKind::Err` placeholder.
@@ -1451,7 +1451,7 @@ impl<'a> Parser<'a> {
 // misformatted turbofish, for instance), suggest a correct form.
 if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
 {
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
 } else {
 // These cases cause too many knock-down errors, bail out (#61329).
@@ -1461,7 +1461,7 @@ impl<'a> Parser<'a> {
 }
 let recover =
 self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 if recover {
 return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
 }
@@ -1494,7 +1494,7 @@ impl<'a> Parser<'a> {

 pub(super) fn maybe_report_ambiguous_plus(&mut self, impl_dyn_multi: bool, ty: &Ty) {
 if impl_dyn_multi {
-self.sess.emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(ty), span: ty.span });
+self.dcx().emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(ty), span: ty.span });
 }
 }

@@ -1502,7 +1502,7 @@ impl<'a> Parser<'a> {
 pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
 if self.token == token::Question {
 self.bump();
-self.sess.emit_err(QuestionMarkInType {
+self.dcx().emit_err(QuestionMarkInType {
 span: self.prev_token.span,
 sugg: QuestionMarkInTypeSugg {
 left: ty.span.shrink_to_lo(),
@@ -1539,7 +1539,7 @@ impl<'a> Parser<'a> {
 match self.parse_expr() {
 Ok(_) => {
 return Err(self
-.sess
+.dcx()
 .create_err(TernaryOperator { span: self.token.span.with_lo(lo) }));
 }
 Err(err) => {
@@ -1583,7 +1583,7 @@ impl<'a> Parser<'a> {
 _ => BadTypePlusSub::ExpectPath { span: sum_span },
 };

-self.sess.emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });
+self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });

 Ok(())
 }
@@ -1761,7 +1761,7 @@ impl<'a> Parser<'a> {
 self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
 path.span = ty_span.to(self.prev_token.span);

-self.sess.emit_err(BadQPathStage2 {
+self.dcx().emit_err(BadQPathStage2 {
 span: ty_span,
 wrap: WrapType { lo: ty_span.shrink_to_lo(), hi: ty_span.shrink_to_hi() },
 });
@@ -1793,7 +1793,7 @@ impl<'a> Parser<'a> {
 err.name = name;
 }
 }
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 true
 } else {
 false
@@ -1863,7 +1863,7 @@ impl<'a> Parser<'a> {
 && self.token == token::Colon
 && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
 {
-self.sess.emit_err(ColonAsSemi {
+self.dcx().emit_err(ColonAsSemi {
 span: self.token.span,
 type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
 });
@@ -1924,7 +1924,7 @@ impl<'a> Parser<'a> {
 _ => Applicability::MachineApplicable,
 };

-self.sess.emit_err(IncorrectAwait {
+self.dcx().emit_err(IncorrectAwait {
 span,
 sugg_span: (span, applicability),
 expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(expr)),
@@ -1945,7 +1945,7 @@ impl<'a> Parser<'a> {
 let span = lo.to(self.token.span);
 self.bump(); // )

-self.sess.emit_err(IncorrectUseOfAwait { span });
+self.dcx().emit_err(IncorrectUseOfAwait { span });
 }
 }

@@ -2119,7 +2119,7 @@ impl<'a> Parser<'a> {
 pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
 if self.eat_keyword(kw::In) {
 // a common typo: `for _ in in bar {}`
-self.sess.emit_err(InInTypo {
+self.dcx().emit_err(InInTypo {
 span: self.prev_token.span,
 sugg_span: in_span.until(self.prev_token.span),
 });
@@ -2128,7 +2128,7 @@ impl<'a> Parser<'a> {

 pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
 if let token::DocComment(..) = self.token.kind {
-self.sess.emit_err(DocCommentOnParamType { span: self.token.span });
+self.dcx().emit_err(DocCommentOnParamType { span: self.token.span });
 self.bump();
 } else if self.token == token::Pound
 && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
@@ -2140,7 +2140,7 @@ impl<'a> Parser<'a> {
 }
 let sp = lo.to(self.token.span);
 self.bump();
-self.sess.emit_err(AttributeOnParamType { span: sp });
+self.dcx().emit_err(AttributeOnParamType { span: sp });
 }
 }

@@ -2261,7 +2261,7 @@ impl<'a> Parser<'a> {
 self.expect(&token::Colon)?;
 let ty = self.parse_ty()?;

-self.sess.emit_err(PatternMethodParamWithoutBody { span: pat.span });
+self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span });

 // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
 let pat =
@@ -2272,7 +2272,7 @@ impl<'a> Parser<'a> {
 pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
 let span = param.pat.span;
 param.ty.kind = TyKind::Err;
-self.sess.emit_err(SelfParamNotFirst { span });
+self.dcx().emit_err(SelfParamNotFirst { span });
 Ok(param)
 }

@@ -2474,7 +2474,7 @@ impl<'a> Parser<'a> {
 err
 })?;
 if !self.expr_is_valid_const_arg(&expr) {
-self.sess.emit_err(ConstGenericWithoutBraces {
+self.dcx().emit_err(ConstGenericWithoutBraces {
 span: expr.span,
 sugg: ConstGenericWithoutBracesSugg {
 left: expr.span.shrink_to_lo(),
@@ -2516,7 +2516,7 @@ impl<'a> Parser<'a> {
 }
 _ => None,
 };
-self.sess.emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });

 let value = self.mk_expr_err(param.span());
 Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
@@ -2539,7 +2539,7 @@ impl<'a> Parser<'a> {
 let mut err = UnexpectedConstInGenericParam { span: start, to_remove: None };
 if self.check_const_arg() {
 err.to_remove = Some(start.until(self.token.span));
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
 } else {
 let after_kw_const = self.token.span;
@@ -237,7 +237,7 @@ impl<'a> Parser<'a> {
 }
 .into();
 let invalid = format!("{sugg}=");
-self.sess.emit_err(errors::InvalidComparisonOperator {
+self.dcx().emit_err(errors::InvalidComparisonOperator {
 span: sp,
 invalid: invalid.clone(),
 sub: errors::InvalidComparisonOperatorSub::Correctable {
@@ -255,7 +255,7 @@ impl<'a> Parser<'a> {
 && self.prev_token.span.hi() == self.token.span.lo()
 {
 let sp = op.span.to(self.token.span);
-self.sess.emit_err(errors::InvalidComparisonOperator {
+self.dcx().emit_err(errors::InvalidComparisonOperator {
 span: sp,
 invalid: "<>".into(),
 sub: errors::InvalidComparisonOperatorSub::Correctable {
@@ -273,7 +273,7 @@ impl<'a> Parser<'a> {
 && self.prev_token.span.hi() == self.token.span.lo()
 {
 let sp = op.span.to(self.token.span);
-self.sess.emit_err(errors::InvalidComparisonOperator {
+self.dcx().emit_err(errors::InvalidComparisonOperator {
 span: sp,
 invalid: "<=>".into(),
 sub: errors::InvalidComparisonOperatorSub::Spaceship(sp),
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
 /// but the next token implies this should be parsed as an expression.
 /// For example: `if let Some(x) = x { x } else { 0 } / 2`.
 fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
-self.sess.emit_err(errors::FoundExprWouldBeStmt {
+self.dcx().emit_err(errors::FoundExprWouldBeStmt {
 span: self.token.span,
 token: self.token.clone(),
 suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
@@ -447,7 +447,7 @@ impl<'a> Parser<'a> {
 }
 (Some(op), _) => (op, self.token.span),
 (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
-self.sess.emit_err(errors::InvalidLogicalOperator {
+self.dcx().emit_err(errors::InvalidLogicalOperator {
 span: self.token.span,
 incorrect: "and".into(),
 sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
@@ -455,7 +455,7 @@ impl<'a> Parser<'a> {
 (AssocOp::LAnd, span)
 }
 (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
-self.sess.emit_err(errors::InvalidLogicalOperator {
+self.dcx().emit_err(errors::InvalidLogicalOperator {
 span: self.token.span,
 incorrect: "or".into(),
 sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
@@ -593,7 +593,7 @@ impl<'a> Parser<'a> {
 } else {
 err.remove_plus = Some(lo);
 }
-this.sess.emit_err(err);
+this.dcx().emit_err(err);

 this.bump();
 this.parse_expr_prefix(None)
@@ -636,7 +636,7 @@ impl<'a> Parser<'a> {

 /// Recover on `~expr` in favor of `!expr`.
 fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-self.sess.emit_err(errors::TildeAsUnaryOperator(lo));
+self.dcx().emit_err(errors::TildeAsUnaryOperator(lo));

 self.parse_expr_unary(lo, UnOp::Not)
 }
@@ -646,7 +646,7 @@ impl<'a> Parser<'a> {
 fn parse_expr_box(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
 let (span, expr) = self.parse_expr_prefix_common(lo)?;
 let code = self.sess.source_map().span_to_snippet(span.with_lo(lo.hi())).unwrap();
-self.sess.emit_err(errors::BoxSyntaxRemoved { span, code: code.trim() });
+self.dcx().emit_err(errors::BoxSyntaxRemoved { span, code: code.trim() });
 // So typechecking works, parse `box <expr>` as `::std::boxed::Box::new(expr)`
 let path = Path {
 span,
@@ -687,7 +687,7 @@ impl<'a> Parser<'a> {
 errors::NotAsNegationOperatorSub::SuggestNotDefault
 };

-self.sess.emit_err(errors::NotAsNegationOperator {
+self.dcx().emit_err(errors::NotAsNegationOperator {
 negated: negated_token.span,
 negated_desc: super::token_descr(&negated_token),
 // Span the `not` plus trailing whitespace to avoid
@@ -750,7 +750,7 @@ impl<'a> Parser<'a> {
 match self.parse_expr_labeled(label, false) {
 Ok(expr) => {
 type_err.cancel();
-self.sess.emit_err(errors::MalformedLoopLabel {
+self.dcx().emit_err(errors::MalformedLoopLabel {
 span: label.ident.span,
 correct_label: label.ident,
 });
@@ -782,7 +782,7 @@ impl<'a> Parser<'a> {

 match self.token.kind {
 token::Lt => {
-self.sess.emit_err(errors::ComparisonInterpretedAsGeneric {
+self.dcx().emit_err(errors::ComparisonInterpretedAsGeneric {
 comparison: self.token.span,
 r#type: path,
 args: args_span,
@@ -790,7 +790,7 @@ impl<'a> Parser<'a> {
 })
 }
 token::BinOp(token::Shl) => {
-self.sess.emit_err(errors::ShiftInterpretedAsGeneric {
+self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
 shift: self.token.span,
 r#type: path,
 args: args_span,
@@ -898,7 +898,7 @@ impl<'a> Parser<'a> {
 }

 fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
-self.sess.emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
+self.dcx().emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
 }

 /// Parse `mut?` or `raw [ const | mut ]`.
@@ -1012,7 +1012,7 @@ impl<'a> Parser<'a> {
 }
 _ => (span, actual),
 };
-self.sess.emit_err(errors::UnexpectedTokenAfterDot { span, actual });
+self.dcx().emit_err(errors::UnexpectedTokenAfterDot { span, actual });
 }

 // We need an identifier or integer, but the next token is a float.
@@ -1329,7 +1329,7 @@ impl<'a> Parser<'a> {
 } else {
 // Field access `expr.f`
 if let Some(args) = seg.args {
-self.sess.emit_err(errors::FieldExpressionWithGeneric(args.span()));
+self.dcx().emit_err(errors::FieldExpressionWithGeneric(args.span()));
 }

 let span = lo.to(self.prev_token.span);
@@ -1557,7 +1557,7 @@ impl<'a> Parser<'a> {
 let (span, kind) = if self.eat(&token::Not) {
 // MACRO INVOCATION expression
 if qself.is_some() {
-self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
+self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
 }
 let lo = path.span;
 let mac = P(MacCall { path, args: self.parse_delim_args()? });
@@ -1603,7 +1603,7 @@ impl<'a> Parser<'a> {
 {
 let (lit, _) =
 self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
-self_.sess.create_err(errors::UnexpectedTokenAfterLabel {
+self_.dcx().create_err(errors::UnexpectedTokenAfterLabel {
 span: self_.token.span,
 remove_label: None,
 enclose_in_block: None,
@@ -1615,7 +1615,7 @@ impl<'a> Parser<'a> {
 && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
 {
 // We're probably inside of a `Path<'a>` that needs a turbofish
-self.sess.emit_err(errors::UnexpectedTokenAfterLabel {
+self.dcx().emit_err(errors::UnexpectedTokenAfterLabel {
 span: self.token.span,
 remove_label: None,
 enclose_in_block: None,
@@ -1670,12 +1670,12 @@ impl<'a> Parser<'a> {
 self.mk_expr(span, ExprKind::Block(blk, label))
 });

-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 expr
 }?;

 if !ate_colon && consume_colon {
-self.sess.emit_err(errors::RequireColonAfterLabeledExpression {
+self.dcx().emit_err(errors::RequireColonAfterLabeledExpression {
 span: expr.span,
 label: lo,
 label_end: lo.shrink_to_hi(),
@@ -1723,7 +1723,7 @@ impl<'a> Parser<'a> {
 self.bump(); // `catch`

 let span = lo.to(self.prev_token.span);
-self.sess.emit_err(errors::DoCatchSyntaxRemoved { span });
+self.dcx().emit_err(errors::DoCatchSyntaxRemoved { span });

 self.parse_try_block(lo)
 }
@@ -1783,7 +1783,7 @@ impl<'a> Parser<'a> {
 // The value expression can be a labeled loop, see issue #86948, e.g.:
 // `loop { break 'label: loop { break 'label 42; }; }`
 let lexpr = self.parse_expr_labeled(label, true)?;
-self.sess.emit_err(errors::LabeledLoopInBreak {
+self.dcx().emit_err(errors::LabeledLoopInBreak {
 span: lexpr.span,
 sub: errors::WrapExpressionInParentheses {
 left: lexpr.span.shrink_to_lo(),
@@ -2018,7 +2018,7 @@ impl<'a> Parser<'a> {
 });
 if let Some(token) = &recovered {
 self.bump();
-self.sess.emit_err(errors::FloatLiteralRequiresIntegerPart {
+self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart {
 span: token.span,
 correct: pprust::token_to_string(token).into_owned(),
 });
@@ -2077,13 +2077,13 @@ impl<'a> Parser<'a> {
 if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
 // #59553: warn instead of reject out of hand to allow the fix to percolate
 // through the ecosystem when people fix their macros
-self.sess.emit_warning(errors::InvalidLiteralSuffixOnTupleIndex {
+self.dcx().emit_warning(errors::InvalidLiteralSuffixOnTupleIndex {
 span,
 suffix,
 exception: Some(()),
 });
 } else {
-self.sess.emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
+self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
 span,
 suffix,
 exception: None,
@@ -2121,7 +2121,7 @@ impl<'a> Parser<'a> {
 let mut snapshot = self.create_snapshot_for_diagnostic();
 match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
 Ok(arr) => {
-self.sess.emit_err(errors::ArrayBracketsInsteadOfSpaces {
+self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
 span: arr.span,
 sub: errors::ArrayBracketsInsteadOfSpacesSugg {
 left: lo,
@@ -2195,7 +2195,7 @@ impl<'a> Parser<'a> {
 }

 if self.token.is_whole_block() {
-self.sess.emit_err(errors::InvalidBlockMacroSegment {
+self.dcx().emit_err(errors::InvalidBlockMacroSegment {
 span: self.token.span,
 context: lo.to(self.token.span),
 wrap: errors::WrapInExplicitBlock {
@@ -2399,7 +2399,7 @@ impl<'a> Parser<'a> {
 ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
 if let ExprKind::Block(_, None) = right.kind =>
 {
-self.sess.emit_err(errors::IfExpressionMissingThenBlock {
+this.dcx().emit_err(errors::IfExpressionMissingThenBlock {
 if_span: lo,
 missing_then_block_sub:
 errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
@@ -2410,7 +2410,7 @@ impl<'a> Parser<'a> {
 std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
 }
 ExprKind::Block(_, None) => {
-self.sess.emit_err(errors::IfExpressionMissingCondition {
+this.dcx().emit_err(errors::IfExpressionMissingCondition {
 if_span: lo.shrink_to_hi(),
 block_span: self.sess.source_map().start_point(cond_span),
 });
@@ -2434,7 +2434,7 @@ impl<'a> Parser<'a> {
 let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
 .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });

-self.sess.emit_err(errors::IfExpressionMissingThenBlock {
+self.dcx().emit_err(errors::IfExpressionMissingThenBlock {
 if_span: lo,
 missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
 cond_span.shrink_to_hi(),
@@ -2513,7 +2513,7 @@ impl<'a> Parser<'a> {
 // This was part of a closure, the that part of the parser recover.
 return Err(self.dcx().create_err(err));
 } else {
-Some(self.sess.emit_err(err))
+Some(self.dcx().emit_err(err))
 }
 } else {
 None
@@ -2527,7 +2527,7 @@ impl<'a> Parser<'a> {
 CommaRecoveryMode::LikelyTuple,
 )?;
 if self.token == token::EqEq {
-self.sess.emit_err(errors::ExpectedEqForLetExpr {
+self.dcx().emit_err(errors::ExpectedEqForLetExpr {
 span: self.token.span,
 sugg_span: self.token.span,
 });
@@ -2559,7 +2559,7 @@ impl<'a> Parser<'a> {
 if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
 && classify::expr_requires_semi_to_be_stmt(&cond) =>
 {
-self.sess.emit_err(errors::ExpectedElseBlock {
+self.dcx().emit_err(errors::ExpectedElseBlock {
 first_tok_span,
 first_tok,
 else_span,
@@ -2599,7 +2599,7 @@ impl<'a> Parser<'a> {
 [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
 };
 let ctx = if is_ctx_else { "else" } else { "if" };
-self.sess.emit_err(errors::OuterAttributeNotAllowedOnIfElse {
+self.dcx().emit_err(errors::OuterAttributeNotAllowedOnIfElse {
 last,
 branch_span,
 ctx_span,
@@ -2613,7 +2613,7 @@ impl<'a> Parser<'a> {
 && let BinOpKind::And = binop
 && let ExprKind::If(cond, ..) = &right.kind
 {
-Err(self.sess.create_err(errors::UnexpectedIfWithIf(
+Err(self.dcx().create_err(errors::UnexpectedIfWithIf(
 binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()),
 )))
 } else {
@@ -2662,7 +2662,7 @@ impl<'a> Parser<'a> {
 let right = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
 self.bump(); // )
 err.cancel();
-self.sess.emit_err(errors::ParenthesesInForHead {
+self.dcx().emit_err(errors::ParenthesesInForHead {
 span,
 // With e.g. `for (x) in y)` this would replace `(x) in y)`
 // with `x) in y)` which is syntactically invalid.
@@ -2701,7 +2701,7 @@ impl<'a> Parser<'a> {
 && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
 && self.may_recover()
 {
-self.sess
+self.dcx()
 .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
 let err_expr = self.mk_expr(expr.span, ExprKind::Err);
 let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
@@ -2726,7 +2726,7 @@ impl<'a> Parser<'a> {
 let else_span = self.token.span;
 self.bump();
 let else_clause = self.parse_expr_else()?;
-self.sess.emit_err(errors::LoopElseNotSupported {
+self.dcx().emit_err(errors::LoopElseNotSupported {
 span: else_span.to(else_clause.span),
 loop_kind,
 loop_kw,
@@ -2745,7 +2745,7 @@ impl<'a> Parser<'a> {
 (self.prev_token.span.between(self.token.span), errors::MissingInInForLoopSub::AddIn)
 };

-self.sess.emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
+self.dcx().emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
 }

 /// Parses a `while` or `while let` expression (`while` token already eaten).
@@ -2853,7 +2853,7 @@ impl<'a> Parser<'a> {
 let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
 let span = stmts[0].span.to(stmts[stmts.len() - 1].span);

-this.sess.emit_err(errors::MatchArmBodyWithoutBraces {
+this.dcx().emit_err(errors::MatchArmBodyWithoutBraces {
 statements: span,
 arrow: arrow_span,
 num_statements: stmts.len(),
@@ -3060,7 +3060,7 @@ impl<'a> Parser<'a> {
 .is_ok();
 if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
 err.cancel();
-this.sess.emit_err(errors::MissingCommaAfterMatchArm {
+this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
 span: arm_span.shrink_to_hi(),
 });
 return Ok(true);
@@ -3149,7 +3149,7 @@ impl<'a> Parser<'a> {
 self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
 let right = self.prev_token.span;
-self.sess.emit_err(errors::ParenthesesInMatchPat {
+self.dcx().emit_err(errors::ParenthesesInMatchPat {
 span: vec![left, right],
 sugg: errors::ParenthesesInMatchPatSugg { left, right },
 });
@@ -3303,7 +3303,7 @@ impl<'a> Parser<'a> {
 let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
 if let (Ok(expr), false) = (&expr, struct_allowed) {
 // This is a struct literal, but we don't can't accept them here.
-self.sess.emit_err(errors::StructLiteralNotAllowedHere {
+self.dcx().emit_err(errors::StructLiteralNotAllowedHere {
 span: expr.span,
 sub: errors::StructLiteralNotAllowedHereSugg {
 left: path.span.shrink_to_lo(),
@@ -3487,7 +3487,7 @@ impl<'a> Parser<'a> {
 if self.token != token::Comma {
 return;
 }
-self.sess.emit_err(errors::CommaAfterBaseStruct {
+self.dcx().emit_err(errors::CommaAfterBaseStruct {
 span: span.to(self.prev_token.span),
 comma: self.token.span,
 });
@@ -3500,7 +3500,7 @@ impl<'a> Parser<'a> {
 {
 // recover from typo of `...`, suggest `..`
 let span = self.prev_token.span;
-self.sess.emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
+self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
 return true;
 }
 false
@@ -3513,7 +3513,7 @@ impl<'a> Parser<'a> {
 let label = format!("'{}", ident.name);
 let ident = Ident { name: Symbol::intern(&label), span: ident.span };

-self.sess.emit_err(errors::ExpectedLabelFoundIdent {
+self.dcx().emit_err(errors::ExpectedLabelFoundIdent {
 span: ident.span,
 start: ident.span.shrink_to_lo(),
 });
@@ -3581,18 +3581,18 @@ impl<'a> Parser<'a> {
 return;
 }

-self.sess.emit_err(errors::EqFieldInit {
+self.dcx().emit_err(errors::EqFieldInit {
 span: self.token.span,
 eq: field_name.span.shrink_to_hi().to(self.token.span),
 });
 }

 fn err_dotdotdot_syntax(&self, span: Span) {
-self.sess.emit_err(errors::DotDotDot { span });
+self.dcx().emit_err(errors::DotDotDot { span });
 }

 fn err_larrow_operator(&self, span: Span) {
-self.sess.emit_err(errors::LeftArrowOperator { span });
+self.dcx().emit_err(errors::LeftArrowOperator { span });
 }

 fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
@@ -3729,7 +3729,7 @@ impl MutVisitor for CondChecker<'_> {
 ExprKind::Let(_, _, _, ref mut is_recovered @ None) => {
 if let Some(reason) = self.forbid_let_reason {
 *is_recovered =
-Some(self.parser.sess.emit_err(errors::ExpectedExpressionFoundLet {
+Some(self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
 span,
 reason,
 missing_let: self.missing_let,
@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
 if this.eat_keyword_noexpect(kw::SelfUpper) {
 // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
 // as if `Self` never existed.
-this.sess.emit_err(UnexpectedSelfInGenericParameters {
+this.dcx().emit_err(UnexpectedSelfInGenericParameters {
 span: this.prev_token.span,
 });

@@ -200,7 +200,7 @@ impl<'a> Parser<'a> {
 this.bump(); // `=`
 this.bump(); // `'lifetime`
 let span = lo.to(this.prev_token.span);
-this.sess.emit_err(
+this.dcx().emit_err(
 UnexpectedDefaultValueForLifetimeInGenericParameters { span },
 );
 }
@@ -225,7 +225,7 @@ impl<'a> Parser<'a> {
 let snapshot = this.create_snapshot_for_diagnostic();
 match this.parse_ty_where_predicate() {
 Ok(where_predicate) => {
-this.sess.emit_err(errors::BadAssocTypeBounds {
+this.dcx().emit_err(errors::BadAssocTypeBounds {
 span: where_predicate.span(),
 });
 // FIXME - try to continue parsing other generics?
@@ -242,10 +242,10 @@ impl<'a> Parser<'a> {
 // Check for trailing attributes and stop parsing.
 if !attrs.is_empty() {
 if !params.is_empty() {
-this.sess
+this.dcx()
 .emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
 } else {
-this.sess
+this.dcx()
 .emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
 }
 }
@@ -334,7 +334,7 @@ impl<'a> Parser<'a> {
 // change we parse those generics now, but report an error.
 if self.choose_generics_over_qpath(0) {
 let generics = self.parse_generics()?;
-self.sess.emit_err(errors::WhereOnGenerics { span: generics.span });
+self.dcx().emit_err(errors::WhereOnGenerics { span: generics.span });
 }

 loop {
@@ -370,7 +370,7 @@ impl<'a> Parser<'a> {
 let ate_comma = self.eat(&token::Comma);

 if self.eat_keyword_noexpect(kw::Where) {
-self.sess.emit_err(MultipleWhereClauses {
+self.dcx().emit_err(MultipleWhereClauses {
 span: self.token.span,
 previous: pred_lo,
 between: prev_token.shrink_to_hi().to(self.prev_token.span),
@@ -422,7 +422,7 @@ impl<'a> Parser<'a> {
 let body_sp = pred_lo.to(snapshot.prev_token.span);
 let map = self.sess.source_map();

-self.sess.emit_err(WhereClauseBeforeTupleStructBody {
+self.dcx().emit_err(WhereClauseBeforeTupleStructBody {
 span: where_sp,
 name: struct_name.span,
 body: body_sp,
@@ -162,11 +162,11 @@ impl<'a> Parser<'a> {

 // At this point, we have failed to parse an item.
 if !matches!(vis.kind, VisibilityKind::Inherited) {
-self.sess.emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
+self.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
 }

 if let Defaultness::Default(span) = def {
-self.sess.emit_err(errors::DefaultNotFollowedByItem { span });
+self.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
 }

 if !attrs_allowed {
@@ -178,7 +178,7 @@ impl<'a> Parser<'a> {
 /// Error in-case `default` was parsed in an in-appropriate context.
 fn error_on_unconsumed_default(&self, def: Defaultness, kind: &ItemKind) {
 if let Defaultness::Default(span) = def {
-self.sess.emit_err(errors::InappropriateDefault {
+self.dcx().emit_err(errors::InappropriateDefault {
 span,
 article: kind.article(),
 descr: kind.descr(),
@@ -318,7 +318,7 @@ impl<'a> Parser<'a> {
 self.bump();
 match self.parse_use_item() {
 Ok(u) => {
-self.sess.emit_err(errors::RecoverImportAsUse { span, token_name });
+self.dcx().emit_err(errors::RecoverImportAsUse { span, token_name });
 Ok(Some(u))
 }
 Err(e) => {
@@ -560,7 +560,7 @@ impl<'a> Parser<'a> {
 let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
 {
 let span = self.prev_token.span.between(self.token.span);
-self.sess.emit_err(errors::MissingTraitInTraitImpl {
+self.dcx().emit_err(errors::MissingTraitInTraitImpl {
 span,
 for_span: span.to(self.token.span),
 });
@@ -597,7 +597,7 @@ impl<'a> Parser<'a> {
 Some(ty_second) => {
 // impl Trait for Type
 if !has_for {
-self.sess.emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
+self.dcx().emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
 }

 let ty_first = ty_first.into_inner();
@@ -612,12 +612,12 @@ impl<'a> Parser<'a> {
 // `impl<T: Default> impl Default for Wrapper<T>`
 // ^^^^^
 let extra_impl_kw = ty_first.span.until(bound.span());
-self.sess.emit_err(errors::ExtraImplKeywordInTraitImpl {
+self.dcx().emit_err(errors::ExtraImplKeywordInTraitImpl {
 extra_impl_kw,
 impl_trait_span: ty_first.span,
 });
 } else {
-self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType {
+self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType {
 span: ty_first.span,
 });
 }
@@ -664,7 +664,7 @@ impl<'a> Parser<'a> {

 // Recover `impl Ty;` instead of `impl Ty {}`
 if self.token == TokenKind::Semi {
-self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
 self.bump();
 return Ok(ThinVec::new());
 }
@@ -823,7 +823,7 @@ impl<'a> Parser<'a> {
 // It's a trait alias.
 if had_colon {
 let span = span_at_colon.to(span_before_eq);
-self.sess.emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
+self.dcx().emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
 }

 let bounds = self.parse_generic_bounds()?;
@@ -832,10 +832,10 @@ impl<'a> Parser<'a> {

 let whole_span = lo.to(self.prev_token.span);
 if is_auto == IsAuto::Yes {
-self.sess.emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
+self.dcx().emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
 }
 if let Unsafe::Yes(_) = unsafety {
-self.sess.emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
+self.dcx().emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
 }

 self.sess.gated_spans.gate(sym::trait_alias, whole_span);
@@ -881,7 +881,7 @@ impl<'a> Parser<'a> {
 Ok(kind) => kind,
 Err(kind) => match kind {
 ItemKind::Static(box StaticItem { ty, mutability: _, expr }) => {
-self.sess.emit_err(errors::AssociatedStaticItemNotAllowed { span });
+self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span });
 AssocItemKind::Const(Box::new(ConstItem {
 defaultness: Defaultness::Final,
 generics: Generics::default(),
@@ -981,7 +981,7 @@ impl<'a> Parser<'a> {
 } else {
 // Recover from using a colon as path separator.
 while self.eat_noexpect(&token::Colon) {
-self.sess
+self.dcx()
 .emit_err(errors::SingleColonImportPath { span: self.prev_token.span });

 // We parse the rest of the path and append it to the original prefix.
@@ -1078,7 +1078,7 @@ impl<'a> Parser<'a> {
 write!(fixed_name, "_{}", part.name).unwrap();
 }

-self.sess.emit_err(errors::ExternCrateNameWithDashes {
+self.dcx().emit_err(errors::ExternCrateNameWithDashes {
 span: fixed_name_sp,
 sugg: errors::ExternCrateNameWithDashesSugg { dashes },
 });
@@ -1133,7 +1133,7 @@ impl<'a> Parser<'a> {
 ItemKind::Const(box ConstItem { ty, expr, .. }) => {
 let const_span = Some(span.with_hi(ident.span.lo()))
 .filter(|span| span.can_be_used_for_suggestions());
-self.sess.emit_err(errors::ExternItemCannotBeConst {
+self.dcx().emit_err(errors::ExternItemCannotBeConst {
 ident_span: ident.span,
 const_span,
 });
@@ -1151,7 +1151,7 @@ impl<'a> Parser<'a> {
 // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
 let span = self.sess.source_map().guess_head_span(span);
 let descr = kind.descr();
-self.sess.emit_err(errors::BadItemKind { span, descr, ctx });
+self.dcx().emit_err(errors::BadItemKind { span, descr, ctx });
 None
 }

@@ -1182,10 +1182,11 @@ impl<'a> Parser<'a> {
 fn recover_const_mut(&mut self, const_span: Span) {
 if self.eat_keyword(kw::Mut) {
 let span = self.prev_token.span;
-self.sess.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
+self.dcx()
+.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
 } else if self.eat_keyword(kw::Let) {
 let span = self.prev_token.span;
-self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
+self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
 }
 }
@@ -1239,7 +1240,7 @@ impl<'a> Parser<'a> {

 if self.token.kind == TokenKind::Lt && self.may_recover() {
 let generics = self.parse_generics()?;
-self.sess.emit_err(errors::StaticWithGenerics { span: generics.span });
+self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
 }

 // Parse the type of a static item. That is, the `":" $ty` fragment.
@@ -1300,7 +1301,7 @@ impl<'a> Parser<'a> {
 if before_where_clause.has_where_token
 && let Some(expr) = &expr
 {
-self.sess.emit_err(errors::WhereClauseBeforeConstBody {
+self.dcx().emit_err(errors::WhereClauseBeforeConstBody {
 span: before_where_clause.span,
 name: ident.span,
 body: expr.span,
@@ -1386,7 +1387,7 @@ impl<'a> Parser<'a> {
 let err = errors::EnumStructMutuallyExclusive { span };
 if self.look_ahead(1, |t| t.is_ident()) {
 self.bump();
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 } else {
 return Err(self.dcx().create_err(err));
 }
@@ -1399,7 +1400,7 @@ impl<'a> Parser<'a> {

 // Possibly recover `enum Foo;` instead of `enum Foo {}`
 let (variants, _) = if self.token == TokenKind::Semi {
-self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
 self.bump();
 (thin_vec![], false)
 } else {
@@ -1779,7 +1780,7 @@ impl<'a> Parser<'a> {
 seen_comma = true;
 }
 if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
-self.sess.emit_err(err);
+self.dcx().emit_err(err);
 } else {
 if !seen_comma {
 let sp = previous_span.shrink_to_hi();
@@ -1894,13 +1895,13 @@ impl<'a> Parser<'a> {
 self.expect_field_ty_separator()?;
 let ty = self.parse_ty_for_field_def()?;
 if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
-self.sess.emit_err(errors::SingleColonStructType { span: self.token.span });
+self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
 }
 if self.token.kind == token::Eq {
 self.bump();
 let const_expr = self.parse_expr_anon_const()?;
 let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
-self.sess.emit_err(errors::EqualsStructDefault { span: sp });
+self.dcx().emit_err(errors::EqualsStructDefault { span: sp });
 }
 Ok(FieldDef {
 span: lo.to(self.prev_token.span),
@@ -2041,7 +2042,7 @@ impl<'a> Parser<'a> {
 return IsMacroRulesItem::Yes { has_bang: true };
 } else if self.look_ahead(1, |t| (t.is_ident())) {
 // macro_rules foo
-self.sess.emit_err(errors::MacroRulesMissingBang {
+self.dcx().emit_err(errors::MacroRulesMissingBang {
 span: macro_rules_span,
 hi: macro_rules_span.shrink_to_hi(),
 });
@@ -2069,7 +2070,7 @@ impl<'a> Parser<'a> {
 if self.eat(&token::Not) {
 // Handle macro_rules! foo!
 let span = self.prev_token.span;
-self.sess.emit_err(errors::MacroNameRemoveBang { span });
+self.dcx().emit_err(errors::MacroNameRemoveBang { span });
 }

 let body = self.parse_delim_args()?;
@@ -2089,9 +2090,9 @@ impl<'a> Parser<'a> {
 let vstr = pprust::vis_to_string(vis);
 let vstr = vstr.trim_end();
 if macro_rules {
-self.sess.emit_err(errors::MacroRulesVisibility { span: vis.span, vis: vstr });
+self.dcx().emit_err(errors::MacroRulesVisibility { span: vis.span, vis: vstr });
 } else {
-self.sess.emit_err(errors::MacroInvocationVisibility { span: vis.span, vis: vstr });
+self.dcx().emit_err(errors::MacroInvocationVisibility { span: vis.span, vis: vstr });
 }
 }

@@ -2137,7 +2138,7 @@ impl<'a> Parser<'a> {
 let kw_token = self.token.clone();
 let kw_str = pprust::token_to_string(&kw_token);
 let item = self.parse_item(ForceCollect::No)?;
-self.sess.emit_err(errors::NestedAdt {
+self.dcx().emit_err(errors::NestedAdt {
 span: kw_token.span,
 item: item.unwrap().span,
 kw_str,
@@ -2236,7 +2237,7 @@ impl<'a> Parser<'a> {
 // If we see `for Ty ...` then user probably meant `impl` item.
 if self.token.is_keyword(kw::For) {
 old_err.cancel();
-return Err(self.sess.create_err(errors::FnTypoWithImpl { fn_span }));
+return Err(self.dcx().create_err(errors::FnTypoWithImpl { fn_span }));
 } else {
 return Err(old_err);
 }
@@ -2281,7 +2282,7 @@ impl<'a> Parser<'a> {
 let _ = self.parse_expr()?;
 self.expect_semi()?; // `;`
 let span = eq_sp.to(self.prev_token.span);
-self.sess.emit_err(errors::FunctionBodyEqualsExpr {
+self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
 span,
 sugg: errors::FunctionBodyEqualsExprSugg { eq: eq_sp, semi: self.prev_token.span },
 });
@@ -2396,7 +2397,7 @@ impl<'a> Parser<'a> {

 if let Some(CoroutineKind::Async { span, .. }) = coroutine_kind {
 if span.is_rust_2015() {
-self.sess.emit_err(errors::AsyncFnIn2015 {
+self.dcx().emit_err(errors::AsyncFnIn2015 {
 span,
 help: errors::HelpUseLatestEdition::new(),
 });
@@ -2589,7 +2590,7 @@ impl<'a> Parser<'a> {
 && !self.token.is_keyword(kw::For)
 {
 // recover from missing argument list, e.g. `fn main -> () {}`
-self.sess
+self.dcx()
 .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
 return Ok(ThinVec::new());
 }
@@ -2721,7 +2722,7 @@ impl<'a> Parser<'a> {
 };
 // Recover for the grammar `*self`, `*const self`, and `*mut self`.
 let recover_self_ptr = |this: &mut Self| {
-self.sess.emit_err(errors::SelfArgumentPointer { span: this.token.span });
+this.dcx().emit_err(errors::SelfArgumentPointer { span: this.token.span });

 Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
 };
@@ -601,7 +601,7 @@ impl<'a> Parser<'a> {
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
self.sess.emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
self.bump();
return true;
}

@@ -1423,7 +1423,8 @@ impl<'a> Parser<'a> {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

let path_str = pprust::path_to_string(&path);
self.sess.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
self.dcx()
.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

Ok(())
}

@@ -1449,7 +1450,7 @@ impl<'a> Parser<'a> {
Err(Some(lit)) => match lit.kind {
ast::LitKind::Err => None,
_ => {
self.sess.emit_err(NonStringAbiLiteral { span: lit.span });
self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
None
}
},

@@ -241,7 +241,7 @@ impl<'a> Parser<'a> {
Some(TopLevelOrPatternNotAllowedSugg::WrapInParens { span, pat })
};

let mut err = self.sess.create_err(match syntax_loc {
let mut err = self.dcx().create_err(match syntax_loc {
PatternLocation::LetBinding => {
TopLevelOrPatternNotAllowed::LetBinding { span, sub }
}

@@ -268,7 +268,7 @@ impl<'a> Parser<'a> {
// a leading `||` probably doesn't indicate an or-pattern attempt, so we handle that
// separately.
if let token::OrOr = self.token.kind {
self.sess.emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
self.dcx().emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
self.bump();
}

@@ -286,7 +286,7 @@ impl<'a> Parser<'a> {
EatOrResult::TrailingVert
} else if matches!(self.token.kind, token::OrOr) {
// Found `||`; Recover and pretend we parsed `|`.
self.sess.emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
self.bump();
EatOrResult::AteOr
} else if self.eat(&token::BinOp(token::Or)) {

@@ -321,7 +321,7 @@ impl<'a> Parser<'a> {
match (is_end_ahead, &self.token.kind) {
(true, token::BinOp(token::Or) | token::OrOr) => {
// A `|` or possibly `||` token shouldn't be here. Ban it.
self.sess.emit_err(TrailingVertNotAllowed {
self.dcx().emit_err(TrailingVertNotAllowed {
span: self.token.span,
start: lo,
token: self.token.clone(),

@@ -349,7 +349,7 @@ impl<'a> Parser<'a> {
if self.token.is_keyword(kw::Let) && self.look_ahead(1, |tok| tok.can_begin_pattern()) {
self.bump();
self.sess.emit_err(RemoveLet { span: lo });
self.dcx().emit_err(RemoveLet { span: lo });
lo = self.token.span;
}

@@ -390,7 +390,7 @@ impl<'a> Parser<'a> {
// Suggest `box ref`.
let span = self.prev_token.span.to(self.token.span);
self.bump();
self.sess.emit_err(SwitchRefBoxOrder { span });
self.dcx().emit_err(SwitchRefBoxOrder { span });
}
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();

@@ -493,7 +493,7 @@ impl<'a> Parser<'a> {
self.bump(); // `...`

// The user probably mistook `...` for a rest pattern `..`.
self.sess.emit_err(DotDotDotRestPattern { span: lo });
self.dcx().emit_err(DotDotDotRestPattern { span: lo });
PatKind::Rest
}

@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
// The RHS is now the full pattern.
*sub = Some(lhs);

self.sess.emit_err(PatternOnWrongSideOfAt {
self.dcx().emit_err(PatternOnWrongSideOfAt {
whole_span,
whole_pat: pprust::pat_to_string(&rhs),
pattern: lhs_span,

@@ -536,7 +536,7 @@ impl<'a> Parser<'a> {
} else {
// The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
rhs.kind = PatKind::Wild;
self.sess.emit_err(ExpectedBindingLeftOfAt {
self.dcx().emit_err(ExpectedBindingLeftOfAt {
whole_span,
lhs: lhs.span,
rhs: rhs.span,

@@ -558,7 +558,7 @@ impl<'a> Parser<'a> {
_ => return,
}

self.sess
self.dcx()
.emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(pat) });
}

@@ -568,7 +568,7 @@ impl<'a> Parser<'a> {
if let token::Lifetime(name) = self.token.kind {
self.bump(); // `'a`

self.sess
self.dcx()
.emit_err(UnexpectedLifetimeInPattern { span: self.prev_token.span, symbol: name });
}

@@ -602,7 +602,7 @@ impl<'a> Parser<'a> {
let mut_span = self.prev_token.span;

if self.eat_keyword(kw::Ref) {
self.sess.emit_err(RefMutOrderIncorrect { span: mut_span.to(self.prev_token.span) });
self.dcx().emit_err(RefMutOrderIncorrect { span: mut_span.to(self.prev_token.span) });
return self.parse_pat_ident(BindingAnnotation::REF_MUT, syntax_loc);
}

@@ -656,7 +656,7 @@ impl<'a> Parser<'a> {
/// Error on `mut $pat` where `$pat` is not an ident.
fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
self.sess.emit_err(if changed_any_binding {
self.dcx().emit_err(if changed_any_binding {
InvalidMutInPattern::NestedIdent {
span: lo.to(pat.span),
pat: pprust::pat_to_string(pat),

@@ -674,7 +674,7 @@ impl<'a> Parser<'a> {
return;
}

self.sess.emit_err(RepeatedMutInPattern { span: lo.to(self.prev_token.span) });
self.dcx().emit_err(RepeatedMutInPattern { span: lo.to(self.prev_token.span) });
}

/// Parse macro invocation

@@ -760,14 +760,14 @@ impl<'a> Parser<'a> {
let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
}

self.sess.emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
self.dcx().emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
}
token::Gt if no_space => {
let after_pat = span.with_hi(span.hi() - rustc_span::BytePos(1)).shrink_to_hi();
self.sess.emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
self.dcx().emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
}
_ => {
self.sess.emit_err(InclusiveRangeNoEnd { span });
self.dcx().emit_err(InclusiveRangeNoEnd { span });
}
}
}

@@ -780,7 +780,7 @@ impl<'a> Parser<'a> {
let end = self.parse_pat_range_end()?;
if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
*syn = RangeSyntax::DotDotEq;
self.sess.emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
self.dcx().emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
}
Ok(PatKind::Range(None, Some(end), re))
}

@@ -854,7 +854,7 @@ impl<'a> Parser<'a> {
&& self.check_noexpect(&token::Lt)
&& self.look_ahead(1, |t| t.can_begin_type())
{
return Err(self.sess.create_err(GenericArgsInPatRequireTurbofishSyntax {
return Err(self.dcx().create_err(GenericArgsInPatRequireTurbofishSyntax {
span: self.token.span,
suggest_turbofish: self.token.span.shrink_to_lo(),
}));

@@ -942,7 +942,7 @@ impl<'a> Parser<'a> {
if self.isnt_pattern_start() {
let descr = super::token_descr(&self.token);
self.sess.emit_err(errors::BoxNotPat {
self.dcx().emit_err(errors::BoxNotPat {
span: self.token.span,
kw: box_span,
lo: box_span.shrink_to_lo(),

@@ -1170,7 +1170,7 @@ impl<'a> Parser<'a> {
}

let token_str = pprust::token_to_string(&self.token);
self.sess.emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
self.dcx().emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
}

fn parse_pat_field(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, PatField> {

@@ -175,7 +175,7 @@ impl<'a> Parser<'a> {
.filter_map(|segment| segment.args.as_ref())
.map(|arg| arg.span())
.collect::<Vec<_>>();
parser.sess.emit_err(errors::GenericsInPath { span });
parser.dcx().emit_err(errors::GenericsInPath { span });
}
};

@@ -246,7 +246,7 @@ impl<'a> Parser<'a> {
&& self.look_ahead(1, |token| self.token.span.hi() == token.span.lo())
{
self.bump(); // bump past the colon
self.sess.emit_err(PathSingleColon {
self.dcx().emit_err(PathSingleColon {
span: self.prev_token.span,
type_ascription: self
.sess

@@ -350,7 +350,7 @@ impl<'a> Parser<'a> {
&& self.look_ahead(1, |tok| tok.kind == token::DotDot)
{
self.bump();
self.sess
self.dcx()
.emit_err(errors::BadReturnTypeNotationDotDot { span: self.token.span });
self.bump();
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;

@@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
if self.eat_noexpect(&token::RArrow) {
let lo = self.prev_token.span;
let ty = self.parse_ty()?;
self.sess
self.dcx()
.emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
}

@@ -535,7 +535,7 @@ impl<'a> Parser<'a> {
// i.e. no multibyte characters, in this range.
let span = lo
.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into()));
self.sess.emit_err(errors::UnmatchedAngle {
self.dcx().emit_err(errors::UnmatchedAngle {
span,
plural: snapshot.unmatched_angle_bracket_count > 1,
});

@@ -678,7 +678,7 @@ impl<'a> Parser<'a> {
c.into()
}
Some(GenericArg::Lifetime(lt)) => {
self.sess.emit_err(errors::AssocLifetime { span, lifetime: lt.ident.span });
self.dcx().emit_err(errors::AssocLifetime { span, lifetime: lt.ident.span });
self.mk_ty(span, ast::TyKind::Err).into()
}
None => {

@@ -66,7 +66,7 @@ impl<'a> Parser<'a> {
if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
self.bump();
let mut_let_span = lo.to(self.token.span);
self.sess.emit_err(errors::InvalidVariableDeclaration {
self.dcx().emit_err(errors::InvalidVariableDeclaration {
span: mut_let_span,
sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
});

@@ -140,7 +140,7 @@ impl<'a> Parser<'a> {
let bl = self.parse_block()?;
// Destructuring assignment ... else.
// This is not allowed, but point it out in a nice way.
self.sess.emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
self.dcx().emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
}
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {

@@ -233,12 +233,12 @@ impl<'a> Parser<'a> {
&& let attrs @ [.., last] = &*attrs
{
if last.is_doc_comment() {
self.sess.emit_err(errors::DocCommentDoesNotDocumentAnything {
self.dcx().emit_err(errors::DocCommentDoesNotDocumentAnything {
span: last.span,
missing_comma: None,
});
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
self.sess.emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
self.dcx().emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
}
}
}

@@ -258,7 +258,8 @@ impl<'a> Parser<'a> {
TrailingToken::None,
))
})?;
self.sess.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
self.dcx()
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
Ok(stmt)
}

@@ -286,7 +287,7 @@ impl<'a> Parser<'a> {
let lo = self.prev_token.span;

if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
self.bump();
}

@@ -385,7 +386,7 @@ impl<'a> Parser<'a> {
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.is_lazy() {
self.sess.emit_err(errors::InvalidExpressionInLetElse {
self.dcx().emit_err(errors::InvalidExpressionInLetElse {
span: init.span,
operator: op.node.as_str(),
sugg: errors::WrapExpressionInParentheses {

@@ -399,7 +400,7 @@ impl<'a> Parser<'a> {
fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
if let Some(trailing) = classify::expr_trailing_brace(init) {
self.sess.emit_err(errors::InvalidCurlyInLetElse {
self.dcx().emit_err(errors::InvalidCurlyInLetElse {
span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
sugg: errors::WrapExpressionInParentheses {
left: trailing.span.shrink_to_lo(),

@@ -414,7 +415,7 @@ impl<'a> Parser<'a> {
let eq_consumed = match self.token.kind {
token::BinOpEq(..) => {
// Recover `let x <op>= 1` as `let x = 1`
self.sess
self.dcx()
.emit_err(errors::CompoundAssignmentExpressionInLet { span: self.token.span });
self.bump();
true

@@ -698,7 +699,7 @@ impl<'a> Parser<'a> {
match self.parse_expr_labeled(label, false) {
Ok(labeled_expr) => {
e.delay_as_bug();
self.sess.emit_err(MalformedLoopLabel {
self.dcx().emit_err(MalformedLoopLabel {
span: label.ident.span,
correct_label: label.ident,
});

@@ -213,7 +213,7 @@ impl<'a> Parser<'a> {
// Don't `eat` to prevent `=>` from being added as an expected token which isn't
// actually expected and could only confuse users
self.bump();
self.sess.emit_err(ReturnTypesUseThinArrow { span: self.prev_token.span });
self.dcx().emit_err(ReturnTypesUseThinArrow { span: self.prev_token.span });
let ty = self.parse_ty_common(
allow_plus,
AllowCVariadic::No,

@@ -288,7 +288,7 @@ impl<'a> Parser<'a> {
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
let kind =
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?;
let mut err = self.sess.create_err(errors::TransposeDynOrImpl {
let mut err = self.dcx().create_err(errors::TransposeDynOrImpl {
span: kw.span,
kw: kw.name.as_str(),
sugg: errors::TransposeDynOrImplSugg {

@@ -335,7 +335,7 @@ impl<'a> Parser<'a> {
AllowCVariadic::No => {
// FIXME(Centril): Should we just allow `...` syntactically
// anywhere in a type and use semantic restrictions instead?
self.sess.emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
self.dcx().emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
TyKind::Err
}
}

@@ -426,7 +426,7 @@ impl<'a> Parser<'a> {
let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
let bounds = self.parse_generic_bounds_common(allow_plus)?;
if lt_no_plus {
self.sess.emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}

@@ -460,7 +460,7 @@ impl<'a> Parser<'a> {
fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
let span = self.prev_token.span;
self.sess.emit_err(ExpectedMutOrConstInRawPointerType {
self.dcx().emit_err(ExpectedMutOrConstInRawPointerType {
span,
after_asterisk: span.shrink_to_hi(),
});

@@ -523,7 +523,7 @@ impl<'a> Parser<'a> {
} else {
(None, String::new())
};
self.sess.emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });
self.dcx().emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });

opt_lifetime = Some(self.expect_lifetime());
}

@@ -533,7 +533,7 @@ impl<'a> Parser<'a> {
{
// We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
let span = and_span.to(self.look_ahead(1, |t| t.span));
self.sess.emit_err(DynAfterMut { span });
self.dcx().emit_err(DynAfterMut { span });

// Recovery
mutbl = Mutability::Mut;

@@ -587,10 +587,10 @@ impl<'a> Parser<'a> {
// If we ever start to allow `const fn()`, then update
// feature gating for `#![feature(const_extern_fn)]` to
// cover it.
self.sess.emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
self.dcx().emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
}
if let Some(ast::CoroutineKind::Async { span, .. }) = coroutine_kind {
self.sess.emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
self.dcx().emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
}
// FIXME(gen_blocks): emit a similar error for `gen fn()`
let decl_span = span_start.to(self.token.span);

@@ -634,7 +634,7 @@ impl<'a> Parser<'a> {
None
};

self.sess.emit_err(FnPtrWithGenerics { span: generics.span, sugg });
self.dcx().emit_err(FnPtrWithGenerics { span: generics.span, sugg });
params.append(&mut lifetimes);
Ok(())
}

@@ -647,7 +647,7 @@ impl<'a> Parser<'a> {
if let token::Ident(sym, _) = t.kind {
// parse pattern with "'a Sized" we're supposed to give suggestion like
// "'a + Sized"
self.sess.emit_err(errors::MissingPlusBounds {
self.dcx().emit_err(errors::MissingPlusBounds {
span: self.token.span,
hi: self.token.span.shrink_to_hi(),
sym,

@@ -739,7 +739,7 @@ impl<'a> Parser<'a> {
{
if self.token.is_keyword(kw::Dyn) {
// Account for `&dyn Trait + dyn Other`.
self.sess.emit_err(InvalidDynKeyword { span: self.token.span });
self.dcx().emit_err(InvalidDynKeyword { span: self.token.span });
self.bump();
}
bounds.push(self.parse_generic_bound()?);

@@ -813,14 +813,14 @@ impl<'a> Parser<'a> {
match modifiers.constness {
BoundConstness::Never => {}
BoundConstness::Maybe(span) => {
self.sess.emit_err(errors::TildeConstLifetime { span });
self.dcx().emit_err(errors::TildeConstLifetime { span });
}
}

match modifiers.polarity {
BoundPolarity::Positive => {}
BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => {
self.sess.emit_err(errors::ModifierLifetime {
self.dcx().emit_err(errors::ModifierLifetime {
span,
sigil: modifiers.polarity.as_str(),
});

@@ -839,7 +839,7 @@ impl<'a> Parser<'a> {
(None, String::new())
};

self.sess.emit_err(errors::ParenthesizedLifetime { span, sugg, snippet });
self.dcx().emit_err(errors::ParenthesizedLifetime { span, sugg, snippet });
Ok(())
}

@@ -860,7 +860,7 @@ impl<'a> Parser<'a> {
} else if self.eat_keyword(kw::Const) {
let span = self.prev_token.span;
self.sess.gated_spans.gate(sym::const_trait_impl, span);
self.sess.emit_err(errors::ConstMissingTilde { span, start: span.shrink_to_lo() });
self.dcx().emit_err(errors::ConstMissingTilde { span, start: span.shrink_to_lo() });

BoundConstness::Maybe(span)
} else {

@@ -960,7 +960,7 @@ impl<'a> Parser<'a> {
let bounds = vec![];
self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.sess.emit_err(errors::IncorrectParensTraitBounds {
self.dcx().emit_err(errors::IncorrectParensTraitBounds {
span: vec![lo, self.prev_token.span],
sugg: errors::IncorrectParensTraitBoundsSugg {
wrong_span: leading_token.span.shrink_to_hi().to(lo),

@@ -984,7 +984,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
match self.parse_fn_decl(|_| false, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) {
Ok(decl) => {
self.sess.emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span });
self.dcx().emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span });
Some(ast::Path {
span: fn_token_span.to(self.prev_token.span),
segments: thin_vec![ast::PathSegment {

|
|||
if let Delimiter::Parenthesis = delim {
|
||||
return;
|
||||
}
|
||||
sess.emit_err(errors::MetaBadDelim {
|
||||
sess.dcx.emit_err(errors::MetaBadDelim {
|
||||
span: span.entire(),
|
||||
sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
|
||||
});
|
||||
|
@ -115,7 +115,7 @@ pub fn check_cfg_attr_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimi
|
|||
if let Delimiter::Parenthesis = delim {
|
||||
return;
|
||||
}
|
||||
sess.emit_err(errors::CfgAttrBadDelim {
|
||||
sess.dcx.emit_err(errors::CfgAttrBadDelim {
|
||||
span: span.entire(),
|
||||
sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
|
||||
});
|
||||
|
|
|
@ -364,13 +364,14 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
|
|||
}
|
||||
|
||||
let token::Lit { kind, symbol, suffix, .. } = lit;
|
||||
let dcx = &sess.dcx;
|
||||
match err {
|
||||
// `LexerError` is an error, but it was already reported
|
||||
// by lexer, so here we don't report it the second time.
|
||||
LitError::LexerError => {}
|
||||
LitError::InvalidSuffix => {
|
||||
if let Some(suffix) = suffix {
|
||||
sess.emit_err(InvalidLiteralSuffix { span, kind: kind.descr(), suffix });
|
||||
dcx.emit_err(InvalidLiteralSuffix { span, kind: kind.descr(), suffix });
|
||||
}
|
||||
}
|
||||
LitError::InvalidIntSuffix => {
|
||||
|
@ -378,11 +379,11 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
|
|||
let suf = suf.as_str();
|
||||
if looks_like_width_suffix(&['i', 'u'], suf) {
|
||||
// If it looks like a width, try to be helpful.
|
||||
sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
|
||||
dcx.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
|
||||
} else if let Some(fixed) = fix_base_capitalisation(symbol.as_str(), suf) {
|
||||
sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
|
||||
dcx.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
|
||||
} else {
|
||||
sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
|
||||
dcx.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
|
||||
}
|
||||
}
|
||||
LitError::InvalidFloatSuffix => {
|
||||
|
@ -390,16 +391,16 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
|
|||
let suf = suf.as_str();
|
||||
if looks_like_width_suffix(&['f'], suf) {
|
||||
// If it looks like a width, try to be helpful.
|
||||
sess.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
|
||||
dcx.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
|
||||
} else {
|
||||
sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
|
||||
dcx.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
|
||||
}
|
||||
}
|
||||
LitError::NonDecimalFloat(base) => {
|
||||
match base {
|
||||
16 => sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
|
||||
8 => sess.emit_err(OctalFloatLiteralNotSupported { span }),
|
||||
2 => sess.emit_err(BinaryFloatLiteralNotSupported { span }),
|
||||
16 => dcx.emit_err(HexadecimalFloatLiteralNotSupported { span }),
|
||||
8 => dcx.emit_err(OctalFloatLiteralNotSupported { span }),
|
||||
2 => dcx.emit_err(BinaryFloatLiteralNotSupported { span }),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
}
|
||||
|
@ -411,13 +412,13 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
|
|||
16 => format!("{max:#x}"),
|
||||
_ => format!("{max}"),
|
||||
};
|
||||
sess.emit_err(IntLiteralTooLarge { span, limit });
|
||||
dcx.emit_err(IntLiteralTooLarge { span, limit });
|
||||
}
|
||||
LitError::NulInCStr(range) => {
|
||||
let lo = BytePos(span.lo().0 + range.start as u32 + 2);
|
||||
let hi = BytePos(span.lo().0 + range.end as u32 + 2);
|
||||
let span = span.with_lo(lo).with_hi(hi);
|
||||
sess.emit_err(NulInCStr { span });
|
||||
dcx.emit_err(NulInCStr { span });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,7 +14,7 @@ use rustc_data_structures::sync::{AppendOnlyVec, Lock, Lrc};
use rustc_errors::{emitter::SilentEmitter, DiagCtxt};
use rustc_errors::{
fallback_fluent_bundle, Diagnostic, DiagnosticBuilder, DiagnosticId, DiagnosticMessage,
ErrorGuaranteed, FatalAbort, IntoDiagnostic, Level, MultiSpan, StashKey,
MultiSpan, StashKey,
};
use rustc_feature::{find_feature_issue, GateIssue, UnstableFeatures};
use rustc_span::edition::Edition;

@@ -108,7 +108,7 @@ pub fn feature_err_issue(
}
}

let mut err = sess.create_err(FeatureGateError { span, explain: explain.into() });
let mut err = sess.dcx.create_err(FeatureGateError { span, explain: explain.into() });
add_feature_diagnostics_for_issue(&mut err, sess, feature, issue, false);
err
}

@@ -316,74 +316,4 @@ impl ParseSess {
// AppendOnlyVec, so we resort to this scheme.
self.proc_macro_quoted_spans.iter_enumerated()
}

#[track_caller]
pub fn create_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> DiagnosticBuilder<'a> {
err.into_diagnostic(&self.dcx, Level::Error { lint: false })
}

#[track_caller]
pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
self.create_err(err).emit()
}

#[track_caller]
pub fn create_warning<'a>(
&'a self,
warning: impl IntoDiagnostic<'a, ()>,
) -> DiagnosticBuilder<'a, ()> {
warning.into_diagnostic(&self.dcx, Level::Warning(None))
}

#[track_caller]
pub fn emit_warning<'a>(&'a self, warning: impl IntoDiagnostic<'a, ()>) {
self.create_warning(warning).emit()
}

#[track_caller]
pub fn create_note<'a>(
&'a self,
note: impl IntoDiagnostic<'a, ()>,
) -> DiagnosticBuilder<'a, ()> {
note.into_diagnostic(&self.dcx, Level::Note)
}

#[track_caller]
pub fn emit_note<'a>(&'a self, note: impl IntoDiagnostic<'a, ()>) {
self.create_note(note).emit()
}

#[track_caller]
pub fn create_fatal<'a>(
&'a self,
fatal: impl IntoDiagnostic<'a, FatalAbort>,
) -> DiagnosticBuilder<'a, FatalAbort> {
fatal.into_diagnostic(&self.dcx, Level::Fatal)
}

#[track_caller]
pub fn emit_fatal<'a>(&'a self, fatal: impl IntoDiagnostic<'a, FatalAbort>) -> ! {
self.create_fatal(fatal).emit()
}

#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_err(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_> {
self.dcx.struct_err(msg)
}

#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_warn(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
self.dcx.struct_warn(msg)
}

#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_fatal(
&self,
msg: impl Into<DiagnosticMessage>,
) -> DiagnosticBuilder<'_, FatalAbort> {
self.dcx.struct_fatal(msg)
}
}

@@ -371,7 +371,7 @@ impl Session {
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_err(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_> {
self.parse_sess.struct_err(msg)
self.dcx().struct_err(msg)
}
#[track_caller]
#[rustc_lint_diagnostics]

@@ -461,7 +461,7 @@ impl Session {
}
#[track_caller]
pub fn create_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> DiagnosticBuilder<'a> {
self.parse_sess.create_err(err)
self.dcx().create_err(err)
}
#[track_caller]
pub fn create_feature_err<'a>(

@@ -469,7 +469,7 @@ impl Session {
err: impl IntoDiagnostic<'a>,
feature: Symbol,
) -> DiagnosticBuilder<'a> {
let mut err = self.parse_sess.create_err(err);
let mut err = self.dcx().create_err(err);
if err.code.is_none() {
err.code(error_code!(E0658));
}

@@ -478,40 +478,40 @@ impl Session {
}
#[track_caller]
pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
self.parse_sess.emit_err(err)
self.dcx().emit_err(err)
}
#[track_caller]
pub fn create_warning<'a>(
&'a self,
err: impl IntoDiagnostic<'a, ()>,
) -> DiagnosticBuilder<'a, ()> {
self.parse_sess.create_warning(err)
self.dcx().create_warning(err)
}
#[track_caller]
pub fn emit_warning<'a>(&'a self, warning: impl IntoDiagnostic<'a, ()>) {
self.parse_sess.emit_warning(warning)
self.dcx().emit_warning(warning)
}
#[track_caller]
pub fn create_note<'a>(
&'a self,
note: impl IntoDiagnostic<'a, ()>,
) -> DiagnosticBuilder<'a, ()> {
self.parse_sess.create_note(note)
self.dcx().create_note(note)
}
#[track_caller]
pub fn emit_note<'a>(&'a self, note: impl IntoDiagnostic<'a, ()>) {
self.parse_sess.emit_note(note)
self.dcx().emit_note(note)
}
#[track_caller]
pub fn create_fatal<'a>(
&'a self,
fatal: impl IntoDiagnostic<'a, FatalAbort>,
) -> DiagnosticBuilder<'a, FatalAbort> {
self.parse_sess.create_fatal(fatal)
self.dcx().create_fatal(fatal)
}
#[track_caller]
pub fn emit_fatal<'a>(&'a self, fatal: impl IntoDiagnostic<'a, FatalAbort>) -> ! {
self.parse_sess.emit_fatal(fatal)
self.dcx().emit_fatal(fatal)
}
#[inline]
pub fn err_count(&self) -> usize {
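
The call-site edits above all follow one pattern: the removed ParseSess forwarding helpers are replaced by going to the DiagCtxt directly. A minimal sketch of that migration, assuming a `&ParseSess` named `sess`, a parser with a `dcx()` accessor, and a hypothetical `ExampleDiag` session diagnostic (illustrative only, not part of this commit):

// Before: the ParseSess wrapper forwarded to its DiagCtxt.
// sess.emit_err(ExampleDiag { span });

// After: free functions holding a `&ParseSess` use the `dcx` field directly,
// optionally hoisting it once at the top of the function:
let dcx = &sess.dcx;
dcx.emit_err(ExampleDiag { span });

// Parser methods go through the accessor instead:
// self.dcx().emit_err(ExampleDiag { span });

// Session keeps thin #[track_caller] wrappers that now call self.dcx(),
// so caller locations are still reported correctly in tests.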