migrate mbe to the new rowan
parent d402974aa0
commit 08fd402ef2
4 changed files with 27 additions and 36 deletions
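All four files change for the same reason: in the new rowan, syntax nodes and tokens are owned, cheaply clonable values rather than references tied to the lifetime of the tree. AST accessors such as token_tree() now return owned nodes (so call sites pass &node), Parse::tree() returns an owned tree, and ra_syntax::TreeArc<T> together with the trailing .to_owned() calls disappears. A minimal sketch of the resulting call-site shape, assuming the post-migration APIs visible in the hunks below; ast_to_token_tree and MacroRules are items of this crate and are taken to be in scope, and the function name is made up:

use ra_syntax::{ast, AstNode};

// Post-migration shape of the test helpers in this diff: every node is owned,
// so the caller borrows it explicitly when handing it to ast_to_token_tree.
fn rules_from_source(source_file: &ast::SourceFile) -> crate::MacroRules {
    let macro_definition =
        source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

    // token_tree() now returns an owned ast::TokenTree instead of a reference.
    let (definition_tt, _) =
        ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
    crate::MacroRules::parse(&definition_tt).unwrap()
}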
@@ -599,7 +599,8 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) =
+            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
@@ -611,7 +612,8 @@ mod tests {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) =
+            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
 
         expand_rule(&rules.rules[0], &invocation_tt)
     }

@@ -179,7 +179,8 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) =
+            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
         parse(&definition_tt)
     }
 
@@ -2,7 +2,7 @@ use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
 use ra_parser::{ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};

@@ -116,8 +116,6 @@ impl TokenMap {
 /// and strips the ending `*/`
 /// And then quote the string, which is needed to convert to `tt::Literal`
 fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
-    use ast::AstToken;
-
     let prefix_len = comment.prefix().len();
     let mut text = &comment.text()[prefix_len..];
 
@@ -132,9 +130,8 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
     text.into()
 }
 
-fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt::TokenTree>> {
-    use ast::AstToken;
-    let comment = ast::Comment::cast(*token)?;
+fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+    let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;
 
     // Make `doc="\" Comments\""

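The doc-comment hunks show the token side of the same migration: SyntaxToken no longer carries a lifetime and is no longer Copy, so convert_doc_comment clones the token instead of dereferencing it, and the AstToken trait moves from function-local use statements into the module-level ra_syntax import. A hedged sketch of the new token handling, reusing doc_comment_text from this file; the helper name is hypothetical:

use ra_syntax::{ast, AstToken, SmolStr, SyntaxToken};

// Hypothetical helper illustrating the owned-token API assumed by this hunk.
fn doc_comment_as_text(token: &SyntaxToken) -> Option<SmolStr> {
    // SyntaxToken is an owned value in the new rowan; cloning it is cheap.
    let comment = ast::Comment::cast(token.clone())?;
    let _doc_kind = comment.kind().doc?; // only doc comments qualify
    Some(doc_comment_text(&comment))
}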
@@ -245,7 +242,7 @@ fn convert_tt(
                 }
             }
             SyntaxElement::Node(node) => {
-                let child = convert_tt(token_map, global_offset, node)?.into();
+                let child = convert_tt(token_map, global_offset, &node)?.into();
                 token_trees.push(child);
             }
         };

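Nodes follow the same rule inside convert_tt: a SyntaxElement::Node now carries an owned SyntaxNode, so the recursive call borrows it. A sketch of that match arm in isolation; the wrapper name and parameter types are assumptions inferred from the call shape in this hunk, not the actual signature:

use ra_syntax::{SyntaxElement, TextUnit};

// Assumed wrapper around the recursion step of convert_tt (hypothetical name).
fn convert_child(
    token_map: &mut TokenMap,
    global_offset: TextUnit,
    token_trees: &mut Vec<tt::TokenTree>,
    child: SyntaxElement,
) -> Option<()> {
    match child {
        SyntaxElement::Token(_token) => { /* token conversion elided */ }
        SyntaxElement::Node(node) => {
            // `node` is owned here; convert_tt takes it by reference and
            // returns a tt::Subtree that converts into a tt::TokenTree.
            let child = convert_tt(token_map, global_offset, &node)?.into();
            token_trees.push(child);
        }
    };
    Some(())
}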
@@ -37,8 +37,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
     let macro_invocation =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
-    let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+    let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
     let rules = crate::MacroRules::parse(&definition_tt).unwrap();
     let expansion = rules.expand(&invocation_tt).unwrap();
     assert_eq!(

@@ -53,7 +53,7 @@ pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
     crate::MacroRules::parse(&definition_tt).unwrap()
 }
 
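Put together, the helpers now run the whole round trip on owned nodes: parse the fixture, locate the macro_rules! call, lower its token tree, parse the rules, and expand a separate invocation. A condensed sketch of that flow under the post-migration API; the function name is made up, and ast_to_token_tree and MacroRules are this crate's items, assumed in scope:

use ra_syntax::{ast, AstNode};

fn parse_and_expand(definition: &str, invocation: &str) -> tt::Subtree {
    // SourceFile::parse returns a Parse value; tree() now yields an owned tree,
    // so the MacroCall found inside it can outlive the closure's locals.
    let find_call = |text: &str| {
        let source_file = ast::SourceFile::parse(text).tree();
        source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap()
    };

    let (definition_tt, _) =
        ast_to_token_tree(&find_call(definition).token_tree().unwrap()).unwrap();
    let (invocation_tt, _) =
        ast_to_token_tree(&find_call(invocation).token_tree().unwrap()).unwrap();

    let rules = MacroRules::parse(&definition_tt).unwrap();
    rules.expand(&invocation_tt).unwrap()
}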
@@ -62,34 +62,25 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
     let macro_invocation =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+    let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
 
     rules.expand(&invocation_tt).unwrap()
 }
 
-pub(crate) fn expand_to_items(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::MacroItems> {
+pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_items(&expanded).unwrap().tree().to_owned()
+    token_tree_to_macro_items(&expanded).unwrap().tree()
 }
 
 #[allow(unused)]
-pub(crate) fn expand_to_stmts(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::MacroStmts> {
+pub(crate) fn expand_to_stmts(rules: &MacroRules, invocation: &str) -> ast::MacroStmts {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned()
+    token_tree_to_macro_stmts(&expanded).unwrap().tree()
 }
 
-pub(crate) fn expand_to_expr(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::Expr> {
+pub(crate) fn expand_to_expr(rules: &MacroRules, invocation: &str) -> ast::Expr {
     let expanded = expand(rules, invocation);
-    token_tree_to_expr(&expanded).unwrap().tree().to_owned()
+    token_tree_to_expr(&expanded).unwrap().tree()
 }
 
 pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
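With tree() returning an owned AST node, the ra_syntax::TreeArc<T> return types and the trailing .to_owned() calls are gone, and the multi-line signatures collapse back onto one line. A small usage sketch against the migrated helpers; the test body and its expected dump are hypothetical:

use ra_syntax::{ast, AstNode};

fn check_item_expansion(definition: &str, invocation: &str, expected_dump: &str) {
    let rules = create_rules(definition);
    // expand_to_items now returns ast::MacroItems by value, no TreeArc wrapper.
    let items: ast::MacroItems = expand_to_items(&rules, invocation);
    assert_eq!(items.syntax().debug_dump().trim(), expected_dump.trim());
}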
@@ -97,7 +88,7 @@ pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
     let wrapped = format!("wrap_macro!( {} )", text);
     let wrapped = ast::SourceFile::parse(&wrapped);
     let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
+    let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
     wrapped.delimiter = tt::Delimiter::None;
 
     wrapped

@@ -164,8 +155,8 @@ pub(crate) fn assert_expansion(
 
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree().to_owned();
-            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree().to_owned();
+            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),

@@ -174,8 +165,8 @@ pub(crate) fn assert_expansion(
         }
 
         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree().to_owned();
+            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),

@@ -419,7 +410,7 @@ fn test_expand_to_item_list() {
         ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_macro_items(&expansion).unwrap().tree().to_owned();
+    let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
     assert_eq!(
         tree.syntax().debug_dump().trim(),
         r#"

@@ -537,7 +528,7 @@ fn test_tt_to_stmts() {
     );
 
     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
+    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
 
     assert_eq!(
         stmts.syntax().debug_dump().trim(),