Rollup merge of #65376 - Centril:syntax-extractions-1, r=petrochenkov

syntax: misc extractions

Part of https://github.com/rust-lang/rust/pull/65324. r? @petrochenkov

Commit d24c66fba3
38 changed files with 406 additions and 389 deletions
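
The recurring change in the hunks below is a mechanical import migration: `ParseSess` (together with the `GatedSpans` it carries) moves out of the `syntax::parse` module into the new `syntax::sess` module, so every consumer switches paths. A minimal before/after sketch of the pattern; the surrounding imports differ per file:

    // before this PR
    use syntax::parse::ParseSess;    // or: use crate::parse::ParseSess; inside libsyntax

    // after this PR
    use syntax::sess::ParseSess;     // or: use crate::sess::ParseSess; inside libsyntax

Two smaller extractions ride along: the custom-discriminant feature-gate check moves from the parser into the feature-gate visitor, and literal-error reporting moves from free functions in the literal module onto `Parser` methods; both appear in their own hunks further down.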

@@ -67,11 +67,11 @@ use syntax::errors;
use syntax::ext::base::SpecialDerives;
use syntax::ext::hygiene::ExpnId;
use syntax::print::pprust;
use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned};
use syntax::symbol::{kw, sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::parse::token::{self, Nonterminal, Token};
use syntax::parse::ParseSess;
use syntax::sess::ParseSess;
use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned};
use syntax::symbol::{kw, sym, Symbol};
use syntax::visit::{self, Visitor};
use syntax_pos::Span;

@@ -1,10 +1,10 @@
use rustc_target::spec::abi::Abi;
use syntax::ast;
use syntax::source_map::{SourceMap, Spanned};
use syntax::parse::ParseSess;
use syntax::print::pp::{self, Breaks};
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
use syntax::print::pprust::{self, Comments, PrintState};
use syntax::sess::ParseSess;
use syntax::symbol::kw;
use syntax::util::parser::{self, AssocOp, Fixity};
use syntax_pos::{self, BytePos, FileName};

@@ -16,8 +16,9 @@ use syntax;
use syntax::ast::{self, IntTy, UintTy, MetaItemKind};
use syntax::source_map::{FileName, FilePathMapping};
use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION};
use syntax::parse::{ParseSess, new_parser_from_source_str};
use syntax::parse::new_parser_from_source_str;
use syntax::parse::token;
use syntax::sess::ParseSess;
use syntax::symbol::{sym, Symbol};
use syntax::feature_gate::UnstableFeatures;
use syntax::source_map::SourceMap;

@@ -28,7 +28,7 @@ use syntax::ext::allocator::AllocatorKind;
use syntax::feature_gate::{self, AttributeType};
use syntax::json::JsonEmitter;
use syntax::source_map;
use syntax::parse::{self, ParseSess};
use syntax::sess::ParseSess;
use syntax::symbol::Symbol;
use syntax_pos::{MultiSpan, Span};
use crate::util::profiling::{SelfProfiler, SelfProfilerRef};

@@ -1159,7 +1159,7 @@ fn build_session_(
    );
    let target_cfg = config::build_target_config(&sopts, &span_diagnostic);

    let parse_sess = parse::ParseSess::with_span_handler(
    let parse_sess = ParseSess::with_span_handler(
        span_diagnostic,
        source_map,
    );

@@ -9,7 +9,7 @@ use std::ops;
use syntax::symbol::{Symbol, sym};
use syntax::ast::{MetaItem, MetaItemKind, NestedMetaItem, LitKind};
use syntax::parse::ParseSess;
use syntax::sess::ParseSess;
use syntax::feature_gate::Features;

use syntax_pos::Span;

@@ -14,7 +14,7 @@ use std::io::prelude::*;
use syntax::source_map::{SourceMap, FilePathMapping};
use syntax::parse::lexer;
use syntax::parse::token::{self, Token};
use syntax::parse;
use syntax::sess::ParseSess;
use syntax::symbol::{kw, sym};
use syntax_pos::{Span, FileName};

@@ -33,7 +33,7 @@ pub fn render_with_highlighting(
        class, tooltip).unwrap();
    }

    let sess = parse::ParseSess::new(FilePathMapping::empty());
    let sess = ParseSess::new(FilePathMapping::empty());
    let fm = sess.source_map().new_source_file(
        FileName::Custom(String::from("rustdoc-highlighting")),
        src.to_owned(),
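
Both rustdoc hunks just above build the parsing session directly, so after this change the type simply comes from the new module. A minimal sketch of that call sequence, assuming the surrounding libsyntax APIs (`FilePathMapping::empty`, `FileName::Custom`, `new_source_file`) behave exactly as the hunk shows:

    use syntax::sess::ParseSess;
    use syntax::source_map::FilePathMapping;
    use syntax_pos::FileName;

    fn register_snippet(src: &str) {
        // Build a session with an empty path mapping, then register the text as an
        // in-memory source file, mirroring the rustdoc highlighting hunk above.
        let sess = ParseSess::new(FilePathMapping::empty());
        let _file = sess.source_map().new_source_file(
            FileName::Custom(String::from("rustdoc-highlighting")),
            src.to_owned(),
        );
    }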

@@ -1,6 +1,7 @@
use errors::Applicability;
use syntax::parse::lexer::{StringReader as Lexer};
use syntax::parse::{ParseSess, token};
use syntax::parse::token;
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
use syntax_pos::{InnerSpan, FileName};

@@ -394,7 +394,7 @@ pub fn make_test(s: &str,
    // Uses libsyntax to parse the doctest and find if there's a main fn and the extern
    // crate already is included.
    let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || {
        use crate::syntax::{parse::{self, ParseSess}, source_map::FilePathMapping};
        use crate::syntax::{parse, sess::ParseSess, source_map::FilePathMapping};
        use errors::emitter::EmitterWriter;
        use errors::Handler;

@@ -4,8 +4,8 @@ use crate::ast::{self, Attribute, MetaItem, NestedMetaItem};
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::ext::base::ExtCtxt;
use crate::feature_gate::{Features, GatedCfg};
use crate::parse::ParseSess;
use crate::print::pprust;
use crate::sess::ParseSess;

use errors::{Applicability, Handler};
use syntax_pos::hygiene::Transparency;

@@ -16,9 +16,10 @@ use crate::mut_visit::visit_clobber;
use crate::source_map::{BytePos, Spanned, DUMMY_SP};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::parser::Parser;
use crate::parse::{ParseSess, PResult};
use crate::parse::PResult;
use crate::parse::token::{self, Token};
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::symbol::{sym, Symbol};
use crate::ThinVec;
use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};

@@ -10,8 +10,9 @@ use crate::attr;
use crate::ast;
use crate::edition::Edition;
use crate::mut_visit::*;
use crate::parse::{token, ParseSess};
use crate::parse::token;
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::symbol::sym;
use crate::util::map_in_place::MapInPlace;

@@ -5,9 +5,10 @@ use crate::edition::Edition;
use crate::ext::expand::{self, AstFragment, Invocation};
use crate::ext::hygiene::ExpnId;
use crate::mut_visit::{self, MutVisitor};
use crate::parse::{self, parser, ParseSess, DirectoryOwnership};
use crate::parse::{self, parser, DirectoryOwnership};
use crate::parse::token;
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::symbol::{kw, sym, Ident, Symbol};
use crate::{ThinVec, MACRO_ARGUMENTS};
use crate::tokenstream::{self, TokenStream};

@@ -892,7 +893,7 @@ pub struct ExpansionData {
/// when a macro expansion occurs, the resulting nodes have the `backtrace()
/// -> expn_data` of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
    pub parse_sess: &'a parse::ParseSess,
    pub parse_sess: &'a ParseSess,
    pub ecfg: expand::ExpansionConfig<'a>,
    pub root_path: PathBuf,
    pub resolver: &'a mut dyn Resolver,

@@ -901,7 +902,7 @@ pub struct ExtCtxt<'a> {
}

impl<'a> ExtCtxt<'a> {
    pub fn new(parse_sess: &'a parse::ParseSess,
    pub fn new(parse_sess: &'a ParseSess,
               ecfg: expand::ExpansionConfig<'a>,
               resolver: &'a mut dyn Resolver)
               -> ExtCtxt<'a> {

@@ -935,7 +936,7 @@ impl<'a> ExtCtxt<'a> {
        parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
    }
    pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
    pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
    pub fn parse_sess(&self) -> &'a ParseSess { self.parse_sess }
    pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
    pub fn call_site(&self) -> Span {
        self.current_expansion.id.expn_data().call_site

@@ -15,6 +15,7 @@ use crate::parse::token;
use crate::parse::parser::Parser;
use crate::print::pprust;
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::symbol::{sym, Symbol};
use crate::tokenstream::{TokenStream, TokenTree};
use crate::visit::Visitor;

@@ -109,7 +109,7 @@ use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::ext::mbe::{KleeneToken, TokenTree};
use crate::parse::token::TokenKind;
use crate::parse::token::{DelimToken, Token};
use crate::parse::ParseSess;
use crate::sess::ParseSess;
use crate::symbol::{kw, sym};

use rustc_data_structures::fx::FxHashMap;

@@ -76,10 +76,11 @@ use TokenTreeOrTokenTreeSlice::*;

use crate::ast::{Ident, Name};
use crate::ext::mbe::{self, TokenTree};
use crate::parse::{Directory, ParseSess, PResult};
use crate::parse::{Directory, PResult};
use crate::parse::parser::{Parser, PathStyle};
use crate::parse::token::{self, DocComment, Nonterminal, Token};
use crate::print::pprust;
use crate::sess::ParseSess;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{DelimSpan, TokenStream};

@@ -14,8 +14,9 @@ use crate::feature_gate::Features;
use crate::parse::parser::Parser;
use crate::parse::token::TokenKind::*;
use crate::parse::token::{self, NtTT, Token};
use crate::parse::{Directory, ParseSess};
use crate::parse::Directory;
use crate::print::pprust;
use crate::sess::ParseSess;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};

@@ -2,8 +2,8 @@ use crate::ast;
use crate::ext::mbe::macro_parser;
use crate::ext::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
use crate::parse::token::{self, Token};
use crate::parse::ParseSess;
use crate::print::pprust;
use crate::sess::ParseSess;
use crate::symbol::kw;
use crate::tokenstream;

@@ -1,8 +1,9 @@
use crate::ast;
use crate::ext::base::ExtCtxt;
use crate::parse::{self, token, ParseSess};
use crate::parse::{self, token};
use crate::parse::lexer::comments;
use crate::print::pprust;
use crate::sess::ParseSess;
use crate::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};

use errors::Diagnostic;

@@ -9,8 +9,8 @@ use super::active::Features;

use crate::ast;
use crate::attr::AttributeTemplate;
use crate::sess::ParseSess;
use crate::symbol::{Symbol, sym};
use crate::parse::ParseSess;

use syntax_pos::Span;
use rustc_data_structures::fx::FxHashMap;

@@ -5,14 +5,14 @@ use super::builtin_attrs::{AttributeGate, BUILTIN_ATTRIBUTE_MAP};

use crate::ast::{
    self, AssocTyConstraint, AssocTyConstraintKind, NodeId, GenericParam, GenericParamKind,
    PatKind, RangeEnd,
    PatKind, RangeEnd, VariantData,
};
use crate::attr::{self, check_builtin_attribute};
use crate::source_map::Spanned;
use crate::edition::{ALL_EDITIONS, Edition};
use crate::visit::{self, FnKind, Visitor};
use crate::parse::{token, ParseSess};
use crate::parse::parser::Parser;
use crate::parse::token;
use crate::sess::ParseSess;
use crate::symbol::{Symbol, sym};
use crate::tokenstream::TokenTree;

@@ -246,6 +246,51 @@ impl<'a> PostExpansionVisitor<'a> {
            Abi::System => {}
        }
    }

    fn maybe_report_invalid_custom_discriminants(&self, variants: &[ast::Variant]) {
        let has_fields = variants.iter().any(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => true,
            VariantData::Unit(..) => false,
        });

        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => false,
            VariantData::Unit(..) => true,
        })
        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
        .collect::<Vec<_>>();

        if !discriminant_spans.is_empty() && has_fields {
            let mut err = feature_err(
                self.parse_sess,
                sym::arbitrary_enum_discriminant,
                discriminant_spans.clone(),
                crate::feature_gate::GateIssue::Language,
                "custom discriminant values are not allowed in enums with tuple or struct variants",
            );
            for sp in discriminant_spans {
                err.span_label(sp, "disallowed custom discriminant");
            }
            for variant in variants.iter() {
                match &variant.data {
                    VariantData::Struct(..) => {
                        err.span_label(
                            variant.span,
                            "struct variant defined here",
                        );
                    }
                    VariantData::Tuple(..) => {
                        err.span_label(
                            variant.span,
                            "tuple variant defined here",
                        );
                    }
                    VariantData::Unit(..) => {}
                }
            }
            err.emit();
        }
    }
}

impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {

@@ -353,7 +398,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {

        let has_feature = self.features.arbitrary_enum_discriminant;
        if !has_feature && !i.span.allows_unstable(sym::arbitrary_enum_discriminant) {
            Parser::maybe_report_invalid_custom_discriminants(self.parse_sess, &variants);
            self.maybe_report_invalid_custom_discriminants(&variants);
        }
    }
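
The hunk above moves the custom-discriminant check off `Parser` and onto the feature-gate visitor itself, so the visitor now calls its own method rather than a parser associated function; the guarded behaviour is unchanged. For illustration, an enum of roughly this shape is what the check rejects without the `arbitrary_enum_discriminant` feature (the quoted labels come from the hunk above):

    // error: custom discriminant values are not allowed in enums with
    //        tuple or struct variants
    enum Enum {
        Unit = 3,          // "disallowed custom discriminant"
        Tuple(u16),        // "tuple variant defined here"
        Struct { a: u8 },  // "struct variant defined here"
    }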

@@ -147,6 +147,7 @@ pub mod ptr;
pub mod show_span;
pub use syntax_pos::edition;
pub use syntax_pos::symbol;
pub mod sess;
pub mod tokenstream;
pub mod visit;

@@ -1,9 +1,8 @@
use crate::ast::{
    self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
};
use crate::feature_gate::feature_err;
use crate::parse::{SeqSep, PResult, Parser, ParseSess};
use crate::parse::{SeqSep, PResult, Parser};
use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
use crate::parse::token::{self, TokenKind};
use crate::print::pprust;

@@ -715,55 +714,7 @@ impl<'a> Parser<'a> {
        }
    }

    crate fn maybe_report_invalid_custom_discriminants(
        sess: &ParseSess,
        variants: &[ast::Variant],
    ) {
        let has_fields = variants.iter().any(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => true,
            VariantData::Unit(..) => false,
        });

        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => false,
            VariantData::Unit(..) => true,
        })
        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
        .collect::<Vec<_>>();

        if !discriminant_spans.is_empty() && has_fields {
            let mut err = feature_err(
                sess,
                sym::arbitrary_enum_discriminant,
                discriminant_spans.clone(),
                crate::feature_gate::GateIssue::Language,
                "custom discriminant values are not allowed in enums with tuple or struct variants",
            );
            for sp in discriminant_spans {
                err.span_label(sp, "disallowed custom discriminant");
            }
            for variant in variants.iter() {
                match &variant.data {
                    VariantData::Struct(..) => {
                        err.span_label(
                            variant.span,
                            "struct variant defined here",
                        );
                    }
                    VariantData::Tuple(..) => {
                        err.span_label(
                            variant.span,
                            "tuple variant defined here",
                        );
                    }
                    VariantData::Unit(..) => {}
                }
            }
            err.emit();
        }
    }

    crate fn maybe_recover_from_bad_type_plus(
    pub(super) fn maybe_recover_from_bad_type_plus(
        &mut self,
        allow_plus: bool,
        ty: &Ty,

@@ -1,5 +1,5 @@
use crate::parse::ParseSess;
use crate::parse::token::{self, Token, TokenKind};
use crate::sess::ParseSess;
use crate::symbol::{sym, Symbol};
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};

@@ -1,14 +1,10 @@
//! Code related to parsing literals.

use crate::ast::{self, Lit, LitKind};
use crate::parse::parser::Parser;
use crate::parse::PResult;
use crate::parse::token::{self, Token, TokenKind};
use crate::print::pprust;
use crate::parse::token::{self, Token};
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{TokenStream, TokenTree};

use errors::{Applicability, Handler};
use log::debug;
use rustc_data_structures::sync::Lrc;
use syntax_pos::Span;

@@ -28,72 +24,6 @@ crate enum LitError {
    IntTooLarge,
}

impl LitError {
    fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
        let token::Lit { kind, suffix, .. } = lit;
        match *self {
            // `NotLiteral` is not an error by itself, so we don't report
            // it and give the parser opportunity to try something else.
            LitError::NotLiteral => {}
            // `LexerError` *is* an error, but it was already reported
            // by lexer, so here we don't report it the second time.
            LitError::LexerError => {}
            LitError::InvalidSuffix => {
                expect_no_suffix(
                    diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
                );
            }
            LitError::InvalidIntSuffix => {
                let suf = suffix.expect("suffix error with no suffix").as_str();
                if looks_like_width_suffix(&['i', 'u'], &suf) {
                    // If it looks like a width, try to be helpful.
                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
                    diag.struct_span_err(span, &msg)
                        .help("valid widths are 8, 16, 32, 64 and 128")
                        .emit();
                } else {
                    let msg = format!("invalid suffix `{}` for integer literal", suf);
                    diag.struct_span_err(span, &msg)
                        .span_label(span, format!("invalid suffix `{}`", suf))
                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
                        .emit();
                }
            }
            LitError::InvalidFloatSuffix => {
                let suf = suffix.expect("suffix error with no suffix").as_str();
                if looks_like_width_suffix(&['f'], &suf) {
                    // If it looks like a width, try to be helpful.
                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
                    diag.struct_span_err(span, &msg)
                        .help("valid widths are 32 and 64")
                        .emit();
                } else {
                    let msg = format!("invalid suffix `{}` for float literal", suf);
                    diag.struct_span_err(span, &msg)
                        .span_label(span, format!("invalid suffix `{}`", suf))
                        .help("valid suffixes are `f32` and `f64`")
                        .emit();
                }
            }
            LitError::NonDecimalFloat(base) => {
                let descr = match base {
                    16 => "hexadecimal",
                    8 => "octal",
                    2 => "binary",
                    _ => unreachable!(),
                };
                diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
                    .span_label(span, "not supported")
                    .emit();
            }
            LitError::IntTooLarge => {
                diag.struct_span_err(span, "integer literal is too large")
                    .emit();
            }
        }
    }
}

impl LitKind {
    /// Converts literal token into a semantic literal.
    fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {

@@ -254,7 +184,7 @@ impl LitKind {

impl Lit {
    /// Converts literal token into an AST literal.
    fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
    crate fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
        Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
    }

@@ -296,99 +226,6 @@ impl Lit {
    }
}

impl<'a> Parser<'a> {
    /// Matches `lit = true | false | token_lit`.
    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
        let mut recovered = None;
        if self.token == token::Dot {
            // Attempt to recover `.4` as `0.4`.
            recovered = self.look_ahead(1, |next_token| {
                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
                        = next_token.kind {
                    if self.token.span.hi() == next_token.span.lo() {
                        let s = String::from("0.") + &symbol.as_str();
                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                    }
                }
                None
            });
            if let Some(token) = &recovered {
                self.bump();
                self.diagnostic()
                    .struct_span_err(token.span, "float literals must have an integer part")
                    .span_suggestion(
                        token.span,
                        "must have an integer part",
                        pprust::token_to_string(token),
                        Applicability::MachineApplicable,
                    )
                    .emit();
            }
        }

        let token = recovered.as_ref().unwrap_or(&self.token);
        match Lit::from_token(token) {
            Ok(lit) => {
                self.bump();
                Ok(lit)
            }
            Err(LitError::NotLiteral) => {
                let msg = format!("unexpected token: {}", self.this_token_descr());
                Err(self.span_fatal(token.span, &msg))
            }
            Err(err) => {
                let (lit, span) = (token.expect_lit(), token.span);
                self.bump();
                err.report(&self.sess.span_diagnostic, lit, span);
                // Pack possible quotes and prefixes from the original literal into
                // the error literal's symbol so they can be pretty-printed faithfully.
                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                let symbol = Symbol::intern(&suffixless_lit.to_string());
                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
            }
        }
    }
}

crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
    if let Some(suf) = suffix {
        let mut err = if kind == "a tuple index" &&
            [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
            // #59553: warn instead of reject out of hand to allow the fix to percolate
            // through the ecosystem when people fix their macros
            let mut err = diag.struct_span_warn(
                sp,
                &format!("suffixes on {} are invalid", kind),
            );
            err.note(&format!(
                "`{}` is *temporarily* accepted on tuple index fields as it was \
                    incorrectly accepted on stable for a few releases",
                suf,
            ));
            err.help(
                "on proc macros, you'll want to use `syn::Index::from` or \
                    `proc_macro::Literal::*_unsuffixed` for code that will desugar \
                    to tuple field access",
            );
            err.note(
                "for more context, see https://github.com/rust-lang/rust/issues/60210",
            );
            err
        } else {
            diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
        };
        err.span_label(sp, format!("invalid suffix `{}`", suf));
        err.emit();
    }
}

// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
    s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
}

fn strip_underscores(symbol: Symbol) -> Symbol {
    // Do not allocate a new string unless necessary.
    let s = symbol.as_str();

@@ -1,26 +1,20 @@
//! The main parser interface.

use crate::ast::{self, CrateConfig, NodeId};
use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;
use crate::ast;
use crate::parse::parser::{Parser, emit_unclosed_delims};
use crate::parse::token::{Nonterminal, TokenKind};
use crate::tokenstream::{self, TokenStream, TokenTree};
use crate::print::pprust;
use crate::symbol::Symbol;
use crate::sess::ParseSess;

use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder};
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::{Lrc, Lock, Once};
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
use syntax_pos::edition::Edition;
use syntax_pos::hygiene::ExpnId;
use rustc_data_structures::sync::Lrc;
use syntax_pos::{Span, SourceFile, FileName};

use std::borrow::Cow;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::str;

use log::info;

@@ -46,112 +40,6 @@ pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
#[cfg(target_arch = "x86_64")]
static_assert_size!(PResult<'_, bool>, 16);

/// Collected spans during parsing for places where a certain feature was
/// used and should be feature gated accordingly in `check_crate`.
#[derive(Default)]
pub struct GatedSpans {
    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
    pub let_chains: Lock<Vec<Span>>,
    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
    pub async_closure: Lock<Vec<Span>>,
    /// Spans collected for gating `yield e?` expressions (`generators` gate).
    pub yields: Lock<Vec<Span>>,
    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
    pub or_patterns: Lock<Vec<Span>>,
    /// Spans collected for gating `const_extern_fn`, e.g. `const extern fn foo`.
    pub const_extern_fn: Lock<Vec<Span>>,
}

/// Info about a parsing session.
pub struct ParseSess {
    pub span_diagnostic: Handler,
    pub unstable_features: UnstableFeatures,
    pub config: CrateConfig,
    pub edition: Edition,
    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
    pub raw_identifier_spans: Lock<Vec<Span>>,
    /// Used to determine and report recursive module inclusions.
    included_mod_stack: Lock<Vec<PathBuf>>,
    source_map: Lrc<SourceMap>,
    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
    /// Contains the spans of block expressions that could have been incomplete based on the
    /// operation token that followed it, but that the parser cannot identify without further
    /// analysis.
    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
    pub injected_crate_name: Once<Symbol>,
    pub gated_spans: GatedSpans,
}

impl ParseSess {
    pub fn new(file_path_mapping: FilePathMapping) -> Self {
        let cm = Lrc::new(SourceMap::new(file_path_mapping));
        let handler = Handler::with_tty_emitter(
            ColorConfig::Auto,
            true,
            None,
            Some(cm.clone()),
        );
        ParseSess::with_span_handler(handler, cm)
    }

    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
        Self {
            span_diagnostic: handler,
            unstable_features: UnstableFeatures::from_environment(),
            config: FxHashSet::default(),
            edition: ExpnId::root().expn_data().edition,
            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
            raw_identifier_spans: Lock::new(Vec::new()),
            included_mod_stack: Lock::new(vec![]),
            source_map,
            buffered_lints: Lock::new(vec![]),
            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
            injected_crate_name: Once::new(),
            gated_spans: GatedSpans::default(),
        }
    }

    #[inline]
    pub fn source_map(&self) -> &SourceMap {
        &self.source_map
    }

    pub fn buffer_lint<S: Into<MultiSpan>>(&self,
        lint_id: BufferedEarlyLintId,
        span: S,
        id: NodeId,
        msg: &str,
    ) {
        self.buffered_lints.with_lock(|buffered_lints| {
            buffered_lints.push(BufferedEarlyLint{
                span: span.into(),
                id,
                msg: msg.into(),
                lint_id,
            });
        });
    }

    /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
    /// parser to continue parsing the following operation as part of the same expression.
    pub fn expr_parentheses_needed(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        span: Span,
        alt_snippet: Option<String>,
    ) {
        if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
            err.span_suggestion(
                span,
                "parentheses are required to parse this as an expression",
                format!("({})", snippet),
                Applicability::MachineApplicable,
            );
        }
    }
}

#[derive(Clone)]
pub struct Directory<'a> {
    pub path: Cow<'a, Path>,

@@ -15,12 +15,13 @@ use crate::ast::{
    self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident,
    IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety,
};
use crate::parse::{ParseSess, PResult, Directory, DirectoryOwnership, SeqSep, literal, token};
use crate::parse::{PResult, Directory, DirectoryOwnership, SeqSep};
use crate::parse::lexer::UnmatchedBrace;
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::{Token, TokenKind, DelimToken};
use crate::parse::token::{self, Token, TokenKind, DelimToken};
use crate::print::pprust;
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::source_map::respan;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};

@@ -637,10 +638,6 @@ impl<'a> Parser<'a> {
        }
    }

    fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
        literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
    }

    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
    /// `<` and continue. If `<-` is seen, replaces it with a single `<`
    /// and continue. If a `<` is not seen, returns false.

@@ -1366,7 +1363,7 @@ impl<'a> Parser<'a> {
            ],
            Applicability::MaybeIncorrect,
        ).span_suggestion(
            self.sess.source_map.next_point(self.prev_span),
            self.sess.source_map().next_point(self.prev_span),
            "add a semicolon",
            ';'.to_string(),
            Applicability::MaybeIncorrect,
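
In the last hunk above the parser stops reaching into `self.sess.source_map` as a field and calls `self.sess.source_map()` instead. In the relocated `ParseSess` (the new `src/libsyntax/sess.rs` later in this diff) the `SourceMap` handle is a private field exposed through an `#[inline]` accessor, and the parser code is no longer inside the defining module, which is presumably why this call site changes. A small self-contained sketch of that encapsulation pattern, with illustrative names rather than the real libsyntax types:

    use std::sync::Arc;

    struct SourceMap { files: Vec<String> }

    struct Session {
        // Kept private: outside this module, callers cannot touch the field directly.
        source_map: Arc<SourceMap>,
    }

    impl Session {
        // Callers go through the accessor instead, mirroring `self.sess.source_map()`.
        #[inline]
        fn source_map(&self) -> &SourceMap {
            &self.source_map
        }
    }

    fn main() {
        let sess = Session { source_map: Arc::new(SourceMap { files: Vec::new() }) };
        assert_eq!(sess.source_map().files.len(), 0);
    }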

@@ -1,17 +1,17 @@
use super::{
    Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode,
    SeqSep, TokenExpectType,
};
use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode};
use super::{SemiColonMode, SeqSep, TokenExpectType};
use super::pat::{GateOr, PARAM_EXPECTED};

use crate::parse::literal::LitError;

use crate::ast::{
    self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
    Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field,
    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit,
};
use crate::maybe_recover_from_interpolated_ty_qpath;
use crate::parse::classify;
use crate::parse::token::{self, Token};
use crate::parse::token::{self, Token, TokenKind};
use crate::parse::diagnostics::Error;
use crate::print::pprust;
use crate::ptr::P;

@@ -20,6 +20,7 @@ use crate::symbol::{kw, sym};
use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};

use errors::Applicability;
use syntax_pos::Symbol;
use std::mem;
use rustc_data_structures::thin_vec::ThinVec;

@@ -1072,6 +1073,165 @@ impl<'a> Parser<'a> {
        self.maybe_recover_from_bad_qpath(expr, true)
    }

    /// Matches `lit = true | false | token_lit`.
    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
        let mut recovered = None;
        if self.token == token::Dot {
            // Attempt to recover `.4` as `0.4`.
            recovered = self.look_ahead(1, |next_token| {
                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
                        = next_token.kind {
                    if self.token.span.hi() == next_token.span.lo() {
                        let s = String::from("0.") + &symbol.as_str();
                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                    }
                }
                None
            });
            if let Some(token) = &recovered {
                self.bump();
                self.struct_span_err(token.span, "float literals must have an integer part")
                    .span_suggestion(
                        token.span,
                        "must have an integer part",
                        pprust::token_to_string(token),
                        Applicability::MachineApplicable,
                    )
                    .emit();
            }
        }

        let token = recovered.as_ref().unwrap_or(&self.token);
        match Lit::from_token(token) {
            Ok(lit) => {
                self.bump();
                Ok(lit)
            }
            Err(LitError::NotLiteral) => {
                let msg = format!("unexpected token: {}", self.this_token_descr());
                Err(self.span_fatal(token.span, &msg))
            }
            Err(err) => {
                let (lit, span) = (token.expect_lit(), token.span);
                self.bump();
                self.error_literal_from_token(err, lit, span);
                // Pack possible quotes and prefixes from the original literal into
                // the error literal's symbol so they can be pretty-printed faithfully.
                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                let symbol = Symbol::intern(&suffixless_lit.to_string());
                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
            }
        }
    }

    fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) {
        // Checks if `s` looks like i32 or u1234 etc.
        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
            s.len() > 1
                && s.starts_with(first_chars)
                && s[1..].chars().all(|c| c.is_ascii_digit())
        }

        let token::Lit { kind, suffix, .. } = lit;
        match err {
            // `NotLiteral` is not an error by itself, so we don't report
            // it and give the parser opportunity to try something else.
            LitError::NotLiteral => {}
            // `LexerError` *is* an error, but it was already reported
            // by lexer, so here we don't report it the second time.
            LitError::LexerError => {}
            LitError::InvalidSuffix => {
                self.expect_no_suffix(
                    span,
                    &format!("{} {} literal", kind.article(), kind.descr()),
                    suffix,
                );
            }
            LitError::InvalidIntSuffix => {
                let suf = suffix.expect("suffix error with no suffix").as_str();
                if looks_like_width_suffix(&['i', 'u'], &suf) {
                    // If it looks like a width, try to be helpful.
                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
                    self.struct_span_err(span, &msg)
                        .help("valid widths are 8, 16, 32, 64 and 128")
                        .emit();
                } else {
                    let msg = format!("invalid suffix `{}` for integer literal", suf);
                    self.struct_span_err(span, &msg)
                        .span_label(span, format!("invalid suffix `{}`", suf))
                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
                        .emit();
                }
            }
            LitError::InvalidFloatSuffix => {
                let suf = suffix.expect("suffix error with no suffix").as_str();
                if looks_like_width_suffix(&['f'], &suf) {
                    // If it looks like a width, try to be helpful.
                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
                    self.struct_span_err(span, &msg)
                        .help("valid widths are 32 and 64")
                        .emit();
                } else {
                    let msg = format!("invalid suffix `{}` for float literal", suf);
                    self.struct_span_err(span, &msg)
                        .span_label(span, format!("invalid suffix `{}`", suf))
                        .help("valid suffixes are `f32` and `f64`")
                        .emit();
                }
            }
            LitError::NonDecimalFloat(base) => {
                let descr = match base {
                    16 => "hexadecimal",
                    8 => "octal",
                    2 => "binary",
                    _ => unreachable!(),
                };
                self.struct_span_err(span, &format!("{} float literal is not supported", descr))
                    .span_label(span, "not supported")
                    .emit();
            }
            LitError::IntTooLarge => {
                self.struct_span_err(span, "integer literal is too large")
                    .emit();
            }
        }
    }

    pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
        if let Some(suf) = suffix {
            let mut err = if kind == "a tuple index"
                && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
            {
                // #59553: warn instead of reject out of hand to allow the fix to percolate
                // through the ecosystem when people fix their macros
                let mut err = self.sess.span_diagnostic.struct_span_warn(
                    sp,
                    &format!("suffixes on {} are invalid", kind),
                );
                err.note(&format!(
                    "`{}` is *temporarily* accepted on tuple index fields as it was \
                        incorrectly accepted on stable for a few releases",
                    suf,
                ));
                err.help(
                    "on proc macros, you'll want to use `syn::Index::from` or \
                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
                        to tuple field access",
                );
                err.note(
                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
                );
                err
            } else {
                self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
            };
            err.span_label(sp, format!("invalid suffix `{}`", suf));
            err.emit();
        }
    }

    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
    crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
        maybe_whole_expr!(self);
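
The hunk above is the destination of the code removed from the literal module earlier in this diff: `parse_lit` plus its error reporting now live as inherent `Parser` methods (`error_literal_from_token`, `expect_no_suffix`) instead of `LitError::report` and free functions. The diagnostics themselves are unchanged; as an illustration, inputs of the following shapes exercise the messages quoted in the hunk (kept as comments so the snippet stays compilable):

    fn main() {
        // let a = .4;        // error: float literals must have an integer part
        //                    // machine-applicable suggestion: `0.4`
        // let b = 1i12;      // error: invalid width `12` for integer literal
        //                    // help: valid widths are 8, 16, 32, 64 and 128
        // let c = 1.0f16;    // error: invalid width `16` for float literal (in this era)
        //                    // help: valid widths are 32 and 64
        // let d = 0b101.010; // error: binary float literal is not supported
        let ok = 0.4_f64;     // the recovered/valid spelling compiles fine
        println!("{}", ok);
    }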

@@ -6,10 +6,11 @@ use crate::attr;
use crate::source_map::{self, SourceMap, Spanned};
use crate::parse::token::{self, BinOpToken, DelimToken, Nonterminal, Token, TokenKind};
use crate::parse::lexer::comments;
use crate::parse::{self, ParseSess};
use crate::parse;
use crate::print::pp::{self, Breaks};
use crate::print::pp::Breaks::{Consistent, Inconsistent};
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::symbol::{kw, sym};
use crate::tokenstream::{self, TokenStream, TokenTree};

src/libsyntax/sess.rs (new file, 124 lines)

@@ -0,0 +1,124 @@
//! Contains `ParseSess` which holds state living beyond what one `Parser` might.
//! It also serves as an input to the parser itself.

use crate::ast::{CrateConfig, NodeId};
use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;

use errors::{Applicability, Handler, ColorConfig, DiagnosticBuilder};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_data_structures::sync::{Lrc, Lock, Once};
use syntax_pos::{Symbol, Span, MultiSpan};
use syntax_pos::edition::Edition;
use syntax_pos::hygiene::ExpnId;

use std::path::PathBuf;
use std::str;

/// Collected spans during parsing for places where a certain feature was
/// used and should be feature gated accordingly in `check_crate`.
#[derive(Default)]
crate struct GatedSpans {
    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
    crate let_chains: Lock<Vec<Span>>,
    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
    crate async_closure: Lock<Vec<Span>>,
    /// Spans collected for gating `yield e?` expressions (`generators` gate).
    crate yields: Lock<Vec<Span>>,
    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
    crate or_patterns: Lock<Vec<Span>>,
    /// Spans collected for gating `const_extern_fn`, e.g. `const extern fn foo`.
    crate const_extern_fn: Lock<Vec<Span>>,
}

/// Info about a parsing session.
pub struct ParseSess {
    pub span_diagnostic: Handler,
    crate unstable_features: UnstableFeatures,
    pub config: CrateConfig,
    pub edition: Edition,
    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
    pub raw_identifier_spans: Lock<Vec<Span>>,
    /// Used to determine and report recursive module inclusions.
    pub(super) included_mod_stack: Lock<Vec<PathBuf>>,
    source_map: Lrc<SourceMap>,
    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
    /// Contains the spans of block expressions that could have been incomplete based on the
    /// operation token that followed it, but that the parser cannot identify without further
    /// analysis.
    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
    pub injected_crate_name: Once<Symbol>,
    crate gated_spans: GatedSpans,
}

impl ParseSess {
    pub fn new(file_path_mapping: FilePathMapping) -> Self {
        let cm = Lrc::new(SourceMap::new(file_path_mapping));
        let handler = Handler::with_tty_emitter(
            ColorConfig::Auto,
            true,
            None,
            Some(cm.clone()),
        );
        ParseSess::with_span_handler(handler, cm)
    }

    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
        Self {
            span_diagnostic: handler,
            unstable_features: UnstableFeatures::from_environment(),
            config: FxHashSet::default(),
            edition: ExpnId::root().expn_data().edition,
            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
            raw_identifier_spans: Lock::new(Vec::new()),
            included_mod_stack: Lock::new(vec![]),
            source_map,
            buffered_lints: Lock::new(vec![]),
            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
            injected_crate_name: Once::new(),
            gated_spans: GatedSpans::default(),
        }
    }

    #[inline]
    pub fn source_map(&self) -> &SourceMap {
        &self.source_map
    }

    pub fn buffer_lint(
        &self,
        lint_id: BufferedEarlyLintId,
        span: impl Into<MultiSpan>,
        id: NodeId,
        msg: &str,
    ) {
        self.buffered_lints.with_lock(|buffered_lints| {
            buffered_lints.push(BufferedEarlyLint{
                span: span.into(),
                id,
                msg: msg.into(),
                lint_id,
            });
        });
    }

    /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
    /// parser to continue parsing the following operation as part of the same expression.
    pub fn expr_parentheses_needed(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        span: Span,
        alt_snippet: Option<String>,
    ) {
        if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
            err.span_suggestion(
                span,
                "parentheses are required to parse this as an expression",
                format!("({})", snippet),
                Applicability::MachineApplicable,
            );
        }
    }
}
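
Besides relocating the type, the new `src/libsyntax/sess.rs` above tightens visibility (`GatedSpans`, its fields, and a couple of `ParseSess` fields become `crate`-visible rather than `pub`) and keeps the collect-now, check-later design: parsing only records spans and buffered lints behind `Lock`s, and the feature-gate and lint passes read them afterwards. A minimal self-contained sketch of that pattern using only standard-library types; the names are illustrative, not the libsyntax API:

    use std::sync::Mutex;

    /// Stand-in for a source span: (start, end) byte offsets.
    type Span = (u32, u32);

    #[derive(Default)]
    struct GatedSpans {
        // During parsing, uses of gated syntax are only recorded here.
        or_patterns: Mutex<Vec<Span>>,
    }

    fn parse(gated: &GatedSpans) {
        // The parser pushes a span whenever it sees the gated syntax...
        gated.or_patterns.lock().unwrap().push((10, 24));
    }

    fn check_crate(gated: &GatedSpans, feature_enabled: bool) {
        // ...and a later pass decides whether to report it.
        if !feature_enabled {
            for span in gated.or_patterns.lock().unwrap().iter() {
                eprintln!("error: or-patterns are experimental (span {:?})", span);
            }
        }
    }

    fn main() {
        let gated = GatedSpans::default();
        parse(&gated);
        check_crate(&gated, false);
    }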

@@ -1,7 +1,8 @@
use crate::{ast, panictry};
use crate::parse::{ParseSess, PResult, source_file_to_stream};
use crate::parse::{PResult, source_file_to_stream};
use crate::parse::new_parser_from_source_str;
use crate::parse::parser::Parser;
use crate::sess::ParseSess;
use crate::source_map::{SourceMap, FilePathMapping};
use crate::tokenstream::TokenStream;
use crate::with_default_globals;

@@ -3,7 +3,8 @@
use syntax::ast::{self, AttrItem, AttrStyle};
use syntax::attr::mk_attr;
use syntax::panictry;
use syntax::parse::{self, token, ParseSess};
use syntax::parse::{self, token};
use syntax::sess::ParseSess;
use syntax_pos::FileName;

pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {

@@ -190,8 +190,8 @@ use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
use syntax::source_map::respan;
use syntax::util::map_in_place::MapInPlace;
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::symbol::{Symbol, kw, sym};
use syntax::parse::ParseSess;
use syntax_pos::{Span};

use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};

@@ -6,9 +6,9 @@ use syntax::attr;
use syntax::ext::base::ExtCtxt;
use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::ext::proc_macro::is_proc_macro_attr;
use syntax::parse::ParseSess;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};

@@ -3,8 +3,8 @@ use syntax::edition::Edition;
use syntax::ext::expand::ExpansionConfig;
use syntax::ext::hygiene::AstPass;
use syntax::ext::base::{ExtCtxt, Resolver};
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::symbol::{Ident, Symbol, kw, sym};
use syntax_pos::DUMMY_SP;

@@ -10,8 +10,8 @@ use syntax::ext::base::{ExtCtxt, Resolver};
use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::feature_gate::Features;
use syntax::mut_visit::{*, ExpectOne};
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::source_map::respan;
use syntax::symbol::{sym, Symbol};
use syntax_pos::{Span, DUMMY_SP};

@@ -10,9 +10,10 @@ extern crate syntax;
use syntax::ast::*;
use syntax::attr::*;
use syntax::ast;
use syntax::sess::ParseSess;
use syntax::source_map::{FilePathMapping, FileName};
use syntax::parse;
use syntax::parse::{ParseSess, PResult};
use syntax::parse::PResult;
use syntax::parse::new_parser_from_source_str;
use syntax::parse::parser::Parser;
use syntax::parse::token;

@@ -7,8 +7,9 @@
extern crate syntax;

use std::path::Path;
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
use syntax::parse::{self, ParseSess};
use syntax::parse;

#[path = "mod_dir_simple/test.rs"]
mod gravy;

@@ -24,14 +24,14 @@ extern crate syntax;

use rustc_data_structures::thin_vec::ThinVec;
use syntax::ast::*;
use syntax::sess::ParseSess;
use syntax::source_map::{Spanned, DUMMY_SP, FileName};
use syntax::source_map::FilePathMapping;
use syntax::mut_visit::{self, MutVisitor, visit_clobber};
use syntax::parse::{self, ParseSess};
use syntax::parse;
use syntax::print::pprust;
use syntax::ptr::P;

fn parse_expr(ps: &ParseSess, src: &str) -> Option<P<Expr>> {
    let src_as_string = src.to_string();