parser: Cleanup LazyTokenStream

Avoid some clones by using a named struct instead of a closure.

parent ffe52882ed
commit d0c63bccc5

4 changed files with 77 additions and 87 deletions
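Annotation (not part of the commit): the change replaces a boxed, cloneable `FnOnce` closure with a named struct implementing a small object-safe trait, so cloning a `LazyTokenStream` only bumps a refcount instead of cloning captured state. A minimal, self-contained sketch of that pattern; `MakeStream`, `Lazy`, and friends are illustrative names, not rustc types:

```rust
use std::sync::Arc;

// Object-safe trait: a `&self` method, no `FnOnce` or `Clone` bound required.
trait MakeStream: Send + Sync {
    fn make(&self) -> String;
}

// Named struct capturing exactly the state a closure would have captured.
struct MakeStreamImpl {
    base: String,
    repeat: usize,
}

impl MakeStream for MakeStreamImpl {
    fn make(&self) -> String {
        // Reads the captured state by reference on each call; nothing is cloned.
        self.base.repeat(self.repeat)
    }
}

// Cloning only bumps the `Arc` refcount; the captured state is shared.
#[derive(Clone)]
struct Lazy(Arc<dyn MakeStream>);

fn main() {
    let lazy = Lazy(Arc::new(MakeStreamImpl { base: "ab".into(), repeat: 3 }));
    let lazy2 = lazy.clone(); // cheap: no deep copy of `base`
    assert_eq!(lazy.0.make(), "ababab");
    assert_eq!(lazy2.0.make(), "ababab");
}
```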
@@ -22,7 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_span::{Span, DUMMY_SP};
 use smallvec::{smallvec, SmallVec};
 
-use std::{iter, mem};
+use std::{fmt, iter, mem};
 
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
@@ -120,72 +120,51 @@ where
     }
 }
 
-// A cloneable callback which produces a `TokenStream`. Each clone
-// of this should produce the same `TokenStream`
-pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream {
-    // Workaround for the fact that `Clone` is not object-safe
-    fn clone_it(&self) -> Box<dyn CreateTokenStream>;
+pub trait CreateTokenStream: sync::Send + sync::Sync {
+    fn create_token_stream(&self) -> TokenStream;
 }
 
-impl<F: 'static + Clone + sync::Send + sync::Sync + FnOnce() -> TokenStream> CreateTokenStream
-    for F
-{
-    fn clone_it(&self) -> Box<dyn CreateTokenStream> {
-        Box::new(self.clone())
+impl CreateTokenStream for TokenStream {
+    fn create_token_stream(&self) -> TokenStream {
+        self.clone()
     }
 }
 
-impl Clone for Box<dyn CreateTokenStream> {
-    fn clone(&self) -> Self {
-        let val: &(dyn CreateTokenStream) = &**self;
-        val.clone_it()
-    }
-}
-
-/// A lazy version of `TokenStream`, which may defer creation
+/// A lazy version of `TokenStream`, which defers creation
 /// of an actual `TokenStream` until it is needed.
-pub type LazyTokenStream = Lrc<LazyTokenStreamInner>;
-
+/// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub enum LazyTokenStreamInner {
-    Lazy(Box<dyn CreateTokenStream>),
-    Ready(TokenStream),
-}
+pub struct LazyTokenStream(Lrc<Box<dyn CreateTokenStream>>);
 
-impl std::fmt::Debug for LazyTokenStreamInner {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(),
-            LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(),
-        }
+impl LazyTokenStream {
+    pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
+        LazyTokenStream(Lrc::new(Box::new(inner)))
+    }
+
+    pub fn create_token_stream(&self) -> TokenStream {
+        self.0.create_token_stream()
     }
 }
 
-impl LazyTokenStreamInner {
-    pub fn into_token_stream(&self) -> TokenStream {
-        match self {
-            // Note that we do not cache this. If this ever becomes a performance
-            // problem, we should investigate wrapping `LazyTokenStreamInner`
-            // in a lock
-            LazyTokenStreamInner::Lazy(cb) => (cb.clone())(),
-            LazyTokenStreamInner::Ready(stream) => stream.clone(),
-        }
+impl fmt::Debug for LazyTokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt("LazyTokenStream", f)
     }
 }
 
-impl<S: Encoder> Encodable<S> for LazyTokenStreamInner {
+impl<S: Encoder> Encodable<S> for LazyTokenStream {
     fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
         panic!("Attempted to encode LazyTokenStream");
     }
 }
 
-impl<D: Decoder> Decodable<D> for LazyTokenStreamInner {
+impl<D: Decoder> Decodable<D> for LazyTokenStream {
     fn decode(_d: &mut D) -> Result<Self, D::Error> {
         panic!("Attempted to decode LazyTokenStream");
    }
 }
 
-impl<CTX> HashStable<CTX> for LazyTokenStreamInner {
+impl<CTX> HashStable<CTX> for LazyTokenStream {
     fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
         panic!("Attempted to compute stable hash for LazyTokenStream");
     }
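Annotation (not part of the commit): the deleted `clone_it` method and the manual `impl Clone for Box<dyn CreateTokenStream>` are the standard workaround for `Clone` not being object-safe. A self-contained sketch of that workaround, with illustrative names rather than rustc's types, showing what the new `&self`-based trait makes unnecessary:

```rust
// `Clone` is not object-safe, so cloning a boxed callback must go through a
// hand-written `clone_it` method instead.
trait Callback {
    fn call(&self) -> String;
    fn clone_it(&self) -> Box<dyn Callback>; // manual stand-in for `Clone`
}

impl<F: 'static + Clone + Fn() -> String> Callback for F {
    fn call(&self) -> String {
        self()
    }
    fn clone_it(&self) -> Box<dyn Callback> {
        Box::new(self.clone())
    }
}

impl Clone for Box<dyn Callback> {
    fn clone(&self) -> Self {
        // Reborrow as `&dyn Callback` so we dispatch to `clone_it`.
        let inner: &dyn Callback = &**self;
        inner.clone_it()
    }
}

fn main() {
    let cb: Box<dyn Callback> = Box::new(|| "hello".to_string());
    let cb2 = cb.clone(); // goes through `clone_it`
    assert_eq!(cb.call(), cb2.call());
}
```

With a named struct holding the captured state, the whole indirection disappears: a `&self` method is object-safe, and the `Lrc` wrapper supplies cheap cloning.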
@@ -4,12 +4,11 @@ use rustc_ast::attr::HasAttrs;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
 use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::map_in_place::MapInPlace;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
 use rustc_feature::{Feature, Features, State as FeatureState};
 use rustc_feature::{
@@ -303,7 +302,7 @@ impl<'a> StripUnconfigured<'a> {
 
         // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
         // for `attr` when we expand it to `#[attr]`
-        let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
+        let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
         if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
             panic!("Bad tokens for attribute {:?}", attr);
         }
@@ -313,16 +312,16 @@ impl<'a> StripUnconfigured<'a> {
                     DelimSpan::from_single(pound_token.span()),
                     DelimToken::Bracket,
                     item.tokens
-                        .clone()
+                        .as_ref()
                         .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
-                        .into_token_stream(),
+                        .create_token_stream(),
                 );
 
                 let mut attr = attr::mk_attr_from_item(attr.style, item, span);
-                attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
+                attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
                     (pound_token, Spacing::Alone),
                     (bracket_group, Spacing::Alone),
-                ]))));
+                ])));
                 self.process_cfg_attr(attr)
             })
             .collect()
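Annotation (not part of the commit): call sites that used the old `LazyTokenStreamInner::Ready(stream)` variant become `LazyTokenStream::new(stream)`, because `TokenStream` itself now implements `CreateTokenStream` by cloning itself. A minimal sketch of treating an eager value as a trivial producer, with stand-in types:

```rust
trait Produce {
    fn produce(&self) -> Vec<u8>;
}

// An eager value is a producer that simply clones itself; this plays the
// role the dedicated `Ready` enum variant used to play.
impl Produce for Vec<u8> {
    fn produce(&self) -> Vec<u8> {
        self.clone()
    }
}

fn main() {
    let ready: Box<dyn Produce> = Box::new(vec![1u8, 2, 3]);
    assert_eq!(ready.produce(), vec![1, 2, 3]);
}
```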
@@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream
     // came from. Here we attempt to extract these lossless token streams
     // before we fall back to the stringification.
 
-    let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
+    let convert_tokens =
+        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
-        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
             // matchers in `macro_rules!` macros. When we start collecting
             // tokens for attributes on statements, we will need to prepend
             // attributes here
-            convert_tokens(stmt.tokens.clone())
+            convert_tokens(&stmt.tokens)
         }
-        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()),
-        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()),
+        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
+        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()),
-        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()),
-        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
+        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
+        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
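Annotation (not part of the commit): `convert_tokens` now borrows the `Option` instead of taking it by value, so every `Nonterminal` arm drops its `.clone()` of the `LazyTokenStream`. A small sketch of the `Option::as_ref` pattern involved, with stand-in types:

```rust
#[derive(Clone, Debug, PartialEq)]
struct Stream(Vec<u32>);

struct LazyStream(Stream);

impl LazyStream {
    fn create_stream(&self) -> Stream {
        self.0.clone()
    }
}

// Before: `tokens.map(...)` consumed the Option, forcing call sites to pass
// `field.clone()`. After: `as_ref()` turns `&Option<T>` into `Option<&T>`,
// so only the final Stream is materialized.
fn convert(tokens: &Option<LazyStream>) -> Option<Stream> {
    tokens.as_ref().map(|t| t.create_stream())
}

fn main() {
    let field = Some(LazyStream(Stream(vec![1, 2, 3])));
    let stream = convert(&field);
    assert_eq!(stream, Some(Stream(vec![1, 2, 3])));
    assert!(field.is_some()); // `field` was only borrowed
}
```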
@@ -604,7 +605,7 @@ fn prepend_attrs(
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.clone().into_token_stream();
+    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
         return Some(tokens);
     }
@@ -617,9 +618,9 @@ fn prepend_attrs(
         );
         builder.push(
             attr.tokens
-                .clone()
+                .as_ref()
                 .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
-                .into_token_stream(),
+                .create_token_stream(),
         );
     }
     builder.push(tokens);
@@ -16,8 +16,8 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
-        let mut cursor_snapshot = self.token_cursor.clone();
+        let cursor_snapshot = self.token_cursor.clone();
 
         let ret = f(self)?;
 
-        let new_calls = self.token_cursor.num_next_calls;
-        let num_calls = new_calls - cursor_snapshot.num_next_calls;
-        let desugar_doc_comments = self.desugar_doc_comments;
-
         // We didn't capture any tokens
+        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
         if num_calls == 0 {
             return Ok((ret, None));
         }
@@ -1220,27 +1217,41 @@ impl<'a> Parser<'a> {
         //
         // This also makes `Parser` very cheap to clone, since
         // there is no intermediate collection buffer to clone.
-        let lazy_cb = move || {
-            // The token produced by the final call to `next` or `next_desugared`
-            // was not actually consumed by the callback. The combination
-            // of chaining the initial token and using `take` produces the desired
-            // result - we produce an empty `TokenStream` if no calls were made,
-            // and omit the final token otherwise.
-            let tokens = std::iter::once(start_token)
-                .chain((0..num_calls).map(|_| {
-                    if desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    }
-                }))
-                .take(num_calls);
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
 
-            make_token_stream(tokens)
+                make_token_stream(tokens)
+            }
+        }
+
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            desugar_doc_comments: self.desugar_doc_comments,
         };
-        let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
-
-        Ok((ret, Some(stream)))
+        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
 
 /// `::{` or `::*`
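Annotation (not part of the commit): `LazyTokenStreamImpl` captures a cursor snapshot plus a call count and replays the parse on demand, so nothing is buffered while parsing. A hedged sketch of that capture-and-replay idea with toy types (rustc's real `TokenCursor` and token types are more involved):

```rust
#[derive(Clone)]
struct Cursor {
    tokens: Vec<char>,
    pos: usize,
}

impl Cursor {
    fn next_tok(&mut self) -> char {
        let t = self.tokens.get(self.pos).copied().unwrap_or('$'); // '$' ~ EOF
        self.pos += 1;
        t
    }
}

// Named struct capturing what the old closure captured: the first token, a
// snapshot of the cursor, and how many tokens the callback consumed.
struct LazyTokensImpl {
    start_token: char,
    cursor_snapshot: Cursor,
    num_calls: usize,
}

impl LazyTokensImpl {
    fn create_tokens(&self) -> Vec<char> {
        // Replay from the snapshot. The chain could yield `num_calls + 1`
        // items, so `take(num_calls)` omits the final, never-consumed token
        // (and, by iterator laziness, never even reads it from the cursor).
        let mut cursor = self.cursor_snapshot.clone();
        std::iter::once(self.start_token)
            .chain((0..self.num_calls).map(move |_| cursor.next_tok()))
            .take(self.num_calls)
            .collect()
    }
}

fn main() {
    // Snapshot taken after 'f' was consumed as the start token.
    let snapshot = Cursor { tokens: "fn main".chars().collect(), pos: 1 };
    let lazy = LazyTokensImpl { start_token: 'f', cursor_snapshot: snapshot, num_calls: 3 };
    assert_eq!(lazy.create_tokens(), vec!['f', 'n', ' ']);
    // Replays are repeatable because each call clones the snapshot.
    assert_eq!(lazy.create_tokens(), vec!['f', 'n', ' ']);
}
```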