proc-macro-srv: Reimplement token trees via ropes

Replace the `tt`/`syntax-bridge` based `TopSubtree` and `TokenStream` machinery
in `proc-macro-srv` with a self-contained `crate::tt::TokenStream` that stores
`proc_macro::bridge` token trees directly behind an `Arc`, so streams clone
cheaply and can be handed to the bridge without converting between tree
representations. Parsing now goes through `rustc_lexer` directly, the `tt` and
`syntax-bridge` dependencies are dropped, and the test expectations are updated
for the new debug output.
diff --git a/Cargo.lock b/Cargo.lock
index c1dbe6a..22d41fc 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1849,9 +1849,7 @@
"proc-macro-test",
"ra-ap-rustc_lexer",
"span",
- "syntax-bridge",
"temp-dir",
- "tt",
]
[[package]]
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index d037e71..23734b5 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -18,8 +18,6 @@
memmap2.workspace = true
temp-dir.workspace = true
-tt.workspace = true
-syntax-bridge.workspace = true
paths.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
@@ -38,8 +36,9 @@
proc-macro-test.path = "./proc-macro-test"
[features]
+default = ["sysroot-abi"]
sysroot-abi = []
-in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
+in-rust-tree = ["sysroot-abi"]
[lints]
workspace = true
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index c8513a1..0176868 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -12,8 +12,7 @@
use paths::{Utf8Path, Utf8PathBuf};
use crate::{
- PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros,
- server_impl::TopSubtree,
+ PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros, tt::TokenStream,
};
pub(crate) struct Expander {
@@ -40,18 +39,18 @@
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, PanicMessage>
+ ) -> Result<TokenStream<S>, PanicMessage>
where
<S::Server as bridge::server::Types>::TokenStream: Default,
{
self.inner
.proc_macros
- .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
}
pub(crate) fn list_macros(&self) -> impl Iterator<Item = (&str, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/dylib/proc_macros.rs b/crates/proc-macro-srv/src/dylib/proc_macros.rs
index 9b5721e..0b29a1d 100644
--- a/crates/proc-macro-srv/src/dylib/proc_macros.rs
+++ b/crates/proc-macro-srv/src/dylib/proc_macros.rs
@@ -2,7 +2,7 @@
use proc_macro::bridge;
-use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, tt::TokenStream};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -17,18 +17,13 @@
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, crate::PanicMessage> {
- let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
-
- let parsed_attributes = attributes
- .map_or_else(crate::server_impl::TokenStream::default, |attr| {
- crate::server_impl::TokenStream::with_subtree(attr)
- });
+ ) -> Result<TokenStream<S>, crate::PanicMessage> {
+ let parsed_attributes = attribute.unwrap_or_default();
for proc_macro in &self.0 {
match proc_macro {
@@ -38,35 +33,29 @@
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
parsed_attributes,
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
_ => continue,
}
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index cb97882..f4decb7 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -28,6 +28,7 @@
mod dylib;
mod server_impl;
+mod tt;
use std::{
collections::{HashMap, hash_map::Entry},
@@ -43,8 +44,6 @@
use span::Span;
use temp_dir::TempDir;
-use crate::server_impl::TokenStream;
-
pub use crate::server_impl::token_id::SpanId;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
@@ -81,12 +80,12 @@
env: &[(String, String)],
current_dir: Option<impl AsRef<Path>>,
macro_name: &str,
- macro_body: tt::TopSubtree<S>,
- attribute: Option<tt::TopSubtree<S>>,
+ macro_body: tt::TokenStream<S>,
+ attribute: Option<tt::TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<Vec<tt::TokenTree<S>>, PanicMessage> {
+ ) -> Result<tt::TokenStream<S>, PanicMessage> {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
message: Some(format!("failed to load macro: {err}")),
@@ -102,15 +101,7 @@
.name(macro_name.to_owned())
.spawn_scoped(s, move || {
expander
- .expand(
- macro_name,
- server_impl::TopSubtree(macro_body.0.into_vec()),
- attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())),
- def_site,
- call_site,
- mixed_site,
- )
- .map(|tt| tt.0)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
});
match thread.unwrap().join() {
Ok(res) => res,
@@ -157,8 +148,8 @@
}
}
-pub trait ProcMacroSrvSpan: Copy + Send {
- type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
+pub trait ProcMacroSrvSpan: Copy + Send + Sync {
+ type Server: proc_macro::bridge::server::Server<TokenStream = crate::tt::TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 32ad327..8d509fd 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -5,122 +5,15 @@
//! we could provide any TokenStream implementation.
//! The original idea from fedochet is using proc-macro2 as backend,
//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use std::fmt;
-
-use intern::Symbol;
-use proc_macro::bridge;
-
-mod token_stream;
-pub use token_stream::TokenStream;
pub mod rust_analyzer_span;
pub mod token_id;
-use tt::Spacing;
-
-#[derive(Clone)]
-pub(crate) struct TopSubtree<S>(pub(crate) Vec<tt::TokenTree<S>>);
-
-impl<S: Copy + fmt::Debug> fmt::Debug for TopSubtree<S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&tt::TokenTreesView::new(&self.0), f)
- }
-}
-
-impl<S: Copy> TopSubtree<S> {
- pub(crate) fn top_subtree(&self) -> &tt::Subtree<S> {
- let tt::TokenTree::Subtree(subtree) = &self.0[0] else {
- unreachable!("the first token tree is always the top subtree");
- };
- subtree
- }
-
- pub(crate) fn from_bridge(group: bridge::Group<TokenStream<S>, S>) -> Self {
- let delimiter = delim_to_internal(group.delimiter, group.span);
- let mut tts =
- group.stream.map(|it| it.token_trees).unwrap_or_else(|| Vec::with_capacity(1));
- tts.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: tts.len() as u32 }));
- TopSubtree(tts)
- }
-}
-
-fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
- let kind = match d {
- proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
- proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
- proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
- proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
- };
- tt::Delimiter { open: span.open, close: span.close, kind }
-}
-
-fn delim_to_external<S>(d: tt::Delimiter<S>) -> proc_macro::Delimiter {
- match d.kind {
- tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
- tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
- tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
- tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
- }
-}
-
-#[allow(unused)]
-fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
- match spacing {
- proc_macro::Spacing::Alone => Spacing::Alone,
- proc_macro::Spacing::Joint => Spacing::Joint,
- }
-}
-
-#[allow(unused)]
-fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
- match spacing {
- Spacing::Alone | Spacing::JointHidden => proc_macro::Spacing::Alone,
- Spacing::Joint => proc_macro::Spacing::Joint,
- }
-}
-
-fn literal_kind_to_external(kind: tt::LitKind) -> bridge::LitKind {
- match kind {
- tt::LitKind::Byte => bridge::LitKind::Byte,
- tt::LitKind::Char => bridge::LitKind::Char,
- tt::LitKind::Integer => bridge::LitKind::Integer,
- tt::LitKind::Float => bridge::LitKind::Float,
- tt::LitKind::Str => bridge::LitKind::Str,
- tt::LitKind::StrRaw(r) => bridge::LitKind::StrRaw(r),
- tt::LitKind::ByteStr => bridge::LitKind::ByteStr,
- tt::LitKind::ByteStrRaw(r) => bridge::LitKind::ByteStrRaw(r),
- tt::LitKind::CStr => bridge::LitKind::CStr,
- tt::LitKind::CStrRaw(r) => bridge::LitKind::CStrRaw(r),
- tt::LitKind::Err(_) => bridge::LitKind::ErrWithGuar,
- }
-}
-
-fn literal_kind_to_internal(kind: bridge::LitKind) -> tt::LitKind {
- match kind {
- bridge::LitKind::Byte => tt::LitKind::Byte,
- bridge::LitKind::Char => tt::LitKind::Char,
- bridge::LitKind::Str => tt::LitKind::Str,
- bridge::LitKind::StrRaw(r) => tt::LitKind::StrRaw(r),
- bridge::LitKind::ByteStr => tt::LitKind::ByteStr,
- bridge::LitKind::ByteStrRaw(r) => tt::LitKind::ByteStrRaw(r),
- bridge::LitKind::CStr => tt::LitKind::CStr,
- bridge::LitKind::CStrRaw(r) => tt::LitKind::CStrRaw(r),
- bridge::LitKind::Integer => tt::LitKind::Integer,
- bridge::LitKind::Float => tt::LitKind::Float,
- bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()),
- }
-}
-
pub(super) fn literal_from_str<Span: Copy>(
s: &str,
span: Span,
-) -> Result<bridge::Literal<Span, Symbol>, ()> {
- use proc_macro::bridge::LitKind;
+) -> Result<proc_macro::bridge::Literal<Span, intern::Symbol>, ()> {
use rustc_lexer::{LiteralKind, Token, TokenKind};
-
let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
@@ -142,98 +35,5 @@
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
- LiteralKind::RawStr { n_hashes } => (
- LitKind::StrRaw(n_hashes.unwrap_or_default()),
- 2 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawByteStr { n_hashes } => (
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawCStr { n_hashes } => (
- LitKind::CStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- };
-
- let (lit, suffix) = s.split_at(suffix_start as usize);
- let lit = &lit[start_offset..lit.len() - end_offset];
- let suffix = match suffix {
- "" | "_" => None,
- suffix => Some(Symbol::intern(suffix)),
- };
-
- Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span })
-}
-
-pub(super) fn from_token_tree<Span: Copy>(
- tree: bridge::TokenTree<TokenStream<Span>, Span, Symbol>,
-) -> TokenStream<Span> {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = TopSubtree::from_bridge(group);
- TokenStream { token_trees: group.0 }
- }
-
- bridge::TokenTree::Ident(ident) => {
- let text = ident.sym;
- let ident: tt::Ident<Span> = tt::Ident {
- sym: text,
- span: ident.span,
- is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
- };
- let leaf = tt::Leaf::from(ident);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
-
- bridge::TokenTree::Literal(literal) => {
- let mut token_trees = Vec::new();
- let mut symbol = literal.symbol;
- if matches!(
- literal.kind,
- proc_macro::bridge::LitKind::Integer | proc_macro::bridge::LitKind::Float
- ) && symbol.as_str().starts_with('-')
- {
- token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-',
- })));
- symbol = Symbol::intern(&symbol.as_str()[1..]);
- }
- let literal = tt::Literal {
- symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- token_trees.push(tree);
- TokenStream { token_trees }
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = tt::Punct {
- char: p.ch as char,
- spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
- }
+ Ok(crate::tt::literal_from_lexer(s, span, kind, suffix_start))
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index a1863ef..0d44cbb 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -11,12 +11,11 @@
use intern::Symbol;
use proc_macro::bridge::{self, server};
-use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
-use tt::{TextRange, TextSize};
+use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span, TextRange, TextSize};
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
+use crate::server_impl::literal_from_str;
-type TokenStream = crate::server_impl::TokenStream<Span>;
+type TokenStream = crate::tt::TokenStream<Span>;
pub struct FreeFunctions;
@@ -77,11 +76,12 @@
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
+
fn from_token_tree(
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
- from_token_tree(tree)
+ TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -98,14 +98,15 @@
base: Option<Self::TokenStream>,
trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => TokenStream::new(trees),
}
- for tree in trees {
- builder.push(self.from_token_tree(tree));
- }
- builder.build()
}
fn concat_streams(
@@ -113,23 +114,18 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
+ stream
}
fn into_trees(
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream.into_bridge(&mut |first, second| {
- server::Span::join(self, first, second).unwrap_or(first)
- })
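+        // The stream already stores bridge token trees, so simply hand out a copy.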
+ (*stream.0).clone()
}
}
@@ -305,97 +301,3 @@
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use span::{EditionedFileId, FileId, SyntaxContext};
-
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Brace,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- kind: tt::LitKind::Str,
- symbol: Symbol::intern("string"),
- suffix: None,
- span,
- })),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {\"string\"}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span,
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", span).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", span).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", span).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span,
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index 91e70ea..d637aeb 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -5,7 +5,7 @@
use intern::Symbol;
use proc_macro::bridge::{self, server};
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
+use crate::server_impl::literal_from_str;
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanId(pub u32);
@@ -17,7 +17,7 @@
}
type Span = SpanId;
-type TokenStream = crate::server_impl::TokenStream<Span>;
+type TokenStream = crate::tt::TokenStream<Span>;
pub struct FreeFunctions;
@@ -70,7 +70,7 @@
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
- from_token_tree(tree)
+ TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -82,14 +82,15 @@
base: Option<Self::TokenStream>,
trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => TokenStream::new(trees),
}
- for tree in trees {
- builder.push(self.from_token_tree(tree));
- }
- builder.build()
}
fn concat_streams(
@@ -97,22 +98,18 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
+ stream
}
fn into_trees(
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- // Can't join with `SpanId`.
- stream.into_bridge(&mut |first, _second| first)
+ (*stream.0).clone()
}
}
@@ -207,73 +204,3 @@
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Brace,
- },
- len: 0,
- }),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span: SpanId(0),
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", SpanId(0)).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees[0..2] == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", SpanId(0)).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", SpanId(0)).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
deleted file mode 100644
index c5019a5..0000000
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-//! TokenStream implementation used by sysroot ABI
-
-use proc_macro::bridge;
-
-use crate::server_impl::{TopSubtree, delim_to_external, literal_kind_to_external};
-
-#[derive(Clone)]
-pub struct TokenStream<S> {
- pub(super) token_trees: Vec<tt::TokenTree<S>>,
-}
-
-// #[derive(Default)] would mean that `S: Default`.
-impl<S> Default for TokenStream<S> {
- fn default() -> Self {
- Self { token_trees: Default::default() }
- }
-}
-
-impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("TokenStream")
- .field("token_trees", &tt::TokenTreesView::new(&self.token_trees))
- .finish()
- }
-}
-
-impl<S: Copy> TokenStream<S> {
- pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
- let delimiter_kind = subtree.top_subtree().delimiter.kind;
- let mut token_trees = subtree.0;
- if delimiter_kind == tt::DelimiterKind::Invisible {
- token_trees.remove(0);
- }
- TokenStream { token_trees }
- }
-
- pub(crate) fn into_subtree(mut self, call_site: S) -> TopSubtree<S>
- where
- S: Copy,
- {
- self.token_trees.insert(
- 0,
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: call_site,
- close: call_site,
- kind: tt::DelimiterKind::Invisible,
- },
- len: self.token_trees.len() as u32,
- }),
- );
- TopSubtree(self.token_trees)
- }
-
- pub(super) fn is_empty(&self) -> bool {
- self.token_trees.is_empty()
- }
-
- pub(crate) fn into_bridge(
- self,
- join_spans: &mut dyn FnMut(S, S) -> S,
- ) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
- let mut result = Vec::new();
- let mut iter = self.token_trees.into_iter();
- while let Some(tree) = iter.next() {
- match tree {
- tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
- result.push(bridge::TokenTree::Ident(bridge::Ident {
- sym: ident.sym,
- is_raw: ident.is_raw.yes(),
- span: ident.span,
- }))
- }
- // Note, we do not have to assemble our `-` punct and literal split into a single
- // negative bridge literal here. As the proc-macro docs state
- // > Literals created from negative numbers might not survive round-trips through
- // > TokenStream or strings and may be broken into two tokens (- and positive
- // > literal).
- tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
- result.push(bridge::TokenTree::Literal(bridge::Literal {
- span: lit.span,
- kind: literal_kind_to_external(lit.kind),
- symbol: lit.symbol,
- suffix: lit.suffix,
- }))
- }
- tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
- result.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: punct.char as u8,
- joint: punct.spacing == tt::Spacing::Joint,
- span: punct.span,
- }))
- }
- tt::TokenTree::Subtree(subtree) => {
- result.push(bridge::TokenTree::Group(bridge::Group {
- delimiter: delim_to_external(subtree.delimiter),
- stream: if subtree.len == 0 {
- None
- } else {
- Some(TokenStream {
- token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
- })
- },
- span: bridge::DelimSpan {
- open: subtree.delimiter.open,
- close: subtree.delimiter.close,
- entire: join_spans(subtree.delimiter.open, subtree.delimiter.close),
- },
- }))
- }
- }
- }
- result
- }
-}
-
-pub(super) struct TokenStreamBuilder<S> {
- acc: TokenStream<S>,
-}
-
-/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
-pub(super) mod token_stream_impls {
-
- use core::fmt;
-
- use super::{TokenStream, TopSubtree};
-
- /// Attempts to break the string into tokens and parse those tokens into a token stream.
- /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
- /// or characters not existing in the language.
- /// All tokens in the parsed stream get `Span::call_site()` spans.
- ///
- /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
- /// change these errors into `LexError`s later.
- impl<S: Copy + fmt::Debug> TokenStream<S> {
- pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
- let subtree = syntax_bridge::parse_to_token_tree_static_span(
- span::Edition::CURRENT_FIXME,
- call_site,
- src,
- )
- .ok_or_else(|| format!("lexing error: {src}"))?;
-
- Ok(TokenStream::with_subtree(TopSubtree(subtree.0.into_vec())))
- }
- }
-
- #[allow(clippy::to_string_trait_impl)]
- impl<S> ToString for TokenStream<S> {
- fn to_string(&self) -> String {
- ::tt::pretty(&self.token_trees)
- }
- }
-}
-
-impl<S: Copy> TokenStreamBuilder<S> {
- pub(super) fn push(&mut self, stream: TokenStream<S>) {
- self.acc.token_trees.extend(stream.token_trees)
- }
-
- pub(super) fn build(self) -> TokenStream<S> {
- self.acc
- }
-}
-
-impl<S: Copy> Default for TokenStreamBuilder<S> {
- fn default() -> Self {
- Self { acc: TokenStream::default() }
- }
-}
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index d4f9976..cfc1d86 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -12,23 +12,15 @@
"DeriveEmpty",
r#"struct S;"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
-
-
-
- SUBTREE $$ 1 1"#]],
+ IDENT 1 struct
+ IDENT 1 S
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 ; [alone]
+ "#]],
);
}
@@ -36,35 +28,37 @@
fn test_derive_error() {
assert_expand(
"DeriveError",
- r#"struct S;"#,
+ r#"struct S { field: u32 }"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
+ IDENT 1 struct
+ IDENT 1 S
+ GROUP {} 1 1 1
+ IDENT 1 field
+ PUNCT 1 : [alone]
+ IDENT 1 u32
-
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[derive(DeriveError)] struct S ; 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ GROUP {} 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@22..23#ROOT2024 42:Root[0000, 0]@9..23#ROOT2024
+ IDENT 42:Root[0000, 0]@11..16#ROOT2024 field
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 : [alone]
+ IDENT 42:Root[0000, 0]@18..21#ROOT2024 u32
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[derive(DeriveError)] struct S ; 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@64..65#ROOT2024 42:Root[0000, 0]@14..65#ROOT2024
+ LITER 42:Root[0000, 0]@15..64#ROOT2024 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 42:Root[0000, 0]@65..66#ROOT2024 ; [alone]
+ "#]],
);
}
@@ -74,45 +68,41 @@
"fn_like_noop",
r#"ident, 0, 1, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
+ "#]],
);
}
@@ -122,29 +112,25 @@
"fn_like_clone_tokens",
r#"ident, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
+ "#]],
);
}
@@ -154,21 +140,17 @@
"fn_like_clone_tokens",
"r#async",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT r#async 1
+ IDENT 1 r#async
-
- SUBTREE $$ 1 1
- IDENT r#async 1"#]],
+ IDENT 1 r#async
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
+ "#]],
);
}
@@ -178,23 +160,19 @@
"fn_like_span_join",
"foo bar",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT foo 1
- IDENT bar 1
+ IDENT 1 foo
+ IDENT 1 bar
-
- SUBTREE $$ 1 1
- IDENT r#joined 1"#]],
+ IDENT 1 r#joined
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT foo 42:Root[0000, 0]@0..3#ROOT2024
- IDENT bar 42:Root[0000, 0]@8..11#ROOT2024
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 foo
+ IDENT 42:Root[0000, 0]@8..11#ROOT2024 bar
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#joined 42:Root[0000, 0]@0..11#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..11#ROOT2024 r#joined
+ "#]],
);
}
@@ -204,29 +182,25 @@
"fn_like_span_ops",
"set_def_site resolved_at_def_site start_span",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT set_def_site 1
- IDENT resolved_at_def_site 1
- IDENT start_span 1
+ IDENT 1 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
-
- SUBTREE $$ 1 1
- IDENT set_def_site 0
- IDENT resolved_at_def_site 1
- IDENT start_span 1"#]],
+ IDENT 0 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 42:Root[0000, 0]@0..12#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..44#ROOT2024
+ IDENT 42:Root[0000, 0]@0..12#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..44#ROOT2024 start_span
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 41:Root[0000, 0]@0..150#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..34#ROOT2024"#]],
+ IDENT 41:Root[0000, 0]@0..150#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..34#ROOT2024 start_span
+ "#]],
);
}
@@ -236,51 +210,39 @@
"fn_like_mk_literals",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
- SUBTREE $$ 1 1
- LITERAL ByteStr byte_string 1
- LITERAL Char c 1
- LITERAL Str string 1
- LITERAL Str -string 1
- LITERAL CStr cstring 1
- LITERAL Float 3.14f64 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f64 1
- LITERAL Float 3.14 1
- PUNCH - [alone] 1
- LITERAL Float 3.14 1
- LITERAL Integer 123i64 1
- PUNCH - [alone] 1
- LITERAL Integer 123i64 1
- LITERAL Integer 123 1
- PUNCH - [alone] 1
- LITERAL Integer 123 1"#]],
+ LITER 1 ByteStr byte_string
+ LITER 1 Char c
+ LITER 1 Str string
+ LITER 1 Str -string
+ LITER 1 CStr cstring
+ LITER 1 Float 3.14f64
+ LITER 1 Float -3.14f64
+ LITER 1 Float 3.14
+ LITER 1 Float -3.14
+ LITER 1 Integer 123i64
+ LITER 1 Integer -123i64
+ LITER 1 Integer 123
+ LITER 1 Integer -123
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL ByteStr byte_string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Char c 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str -string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL CStr cstring 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 ByteStr byte_string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Char c
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str -string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 CStr cstring
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123
+ "#]],
);
}
@@ -290,21 +252,17 @@
"fn_like_mk_idents",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
- SUBTREE $$ 1 1
- IDENT standard 1
- IDENT r#raw 1"#]],
+ IDENT 1 standard
+ IDENT 1 r#raw
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT standard 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#raw 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 standard
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 r#raw
+ "#]],
);
}
@@ -314,97 +272,93 @@
"fn_like_clone_tokens",
r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
expect![[r#"
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Err(()) "suffixed"suffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
-
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Str suffixedsuffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1"#]],
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Err(()) "suffixed"suffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Str suffixedsuffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024"#]],
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
+ "#]],
);
}
@@ -414,61 +368,57 @@
"fn_like_clone_tokens",
r###"-1u16, - 2_u32, -3.14f32, - 2.7"###,
expect![[r#"
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
-
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1"#]],
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024"#]],
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
+ "#]],
);
}
@@ -482,37 +432,37 @@
r#"mod m {}"#,
r#"some arguments"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT mod 1
- IDENT m 1
- SUBTREE {} 1 1
+ IDENT 1 mod
+ IDENT 1 m
+ GROUP {} 1 1 1
- SUBTREE $$ 1 1
- IDENT some 1
- IDENT arguments 1
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[attr_error(some arguments)] mod m {} 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 some
+ IDENT 1 arguments
+
+
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT mod 42:Root[0000, 0]@0..3#ROOT2024
- IDENT m 42:Root[0000, 0]@4..5#ROOT2024
- SUBTREE {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 mod
+ IDENT 42:Root[0000, 0]@4..5#ROOT2024 m
+ GROUP {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@6..8#ROOT2024
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT some 42:Root[0000, 0]@0..4#ROOT2024
- IDENT arguments 42:Root[0000, 0]@5..14#ROOT2024
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[attr_error(some arguments)] mod m {} 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..4#ROOT2024 some
+ IDENT 42:Root[0000, 0]@5..14#ROOT2024 arguments
+
+
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@56..57#ROOT2024 42:Root[0000, 0]@14..57#ROOT2024
+ LITER 42:Root[0000, 0]@15..56#ROOT2024 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 42:Root[0000, 0]@57..58#ROOT2024 ; [alone]
+ "#]],
);
}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index f5a76e3..52b2849 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,31 +1,25 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use span::{EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
-use tt::TextRange;
+use span::{
+ EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange,
+};
-use crate::{EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path};
+use crate::{
+ EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, tt::TokenStream,
+};
-fn parse_string(call_site: SpanId, src: &str) -> crate::server_impl::TokenStream<SpanId> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+fn parse_string(call_site: SpanId, src: &str) -> TokenStream<SpanId> {
+ TokenStream::from_str(src, call_site).unwrap()
}
fn parse_string_spanned(
anchor: SpanAnchor,
call_site: SyntaxContext,
src: &str,
-) -> crate::server_impl::TokenStream<Span> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+) -> TokenStream<Span> {
+ TokenStream::from_str(src, Span { range: TextRange::default(), anchor, ctx: call_site })
+ .unwrap()
}
pub fn assert_expand(
@@ -60,16 +54,18 @@
let def_site = SpanId(0);
let call_site = SpanId(1);
let mixed_site = SpanId(2);
- let input_ts = parse_string(call_site, input).into_subtree(call_site);
- let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site));
+ let input_ts = parse_string(call_site, input);
+ let attr_ts = attr.map(|attr| parse_string(call_site, attr));
let input_ts_string = format!("{input_ts:?}");
let attr_ts_string = attr_ts.as_ref().map(|it| format!("{it:?}"));
let res =
expander.expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site).unwrap();
expect.assert_eq(&format!(
- "{input_ts_string}\n\n{}\n\n{res:?}",
- attr_ts_string.unwrap_or_default()
+ "{input_ts_string}{}{}{}",
+ if attr_ts_string.is_some() { "\n\n" } else { "" },
+ attr_ts_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
));
let def_site = Span {
@@ -90,17 +86,18 @@
};
let mixed_site = call_site;
- let fixture =
- parse_string_spanned(call_site.anchor, call_site.ctx, input).into_subtree(call_site);
- let attr = attr.map(|attr| {
- parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site)
- });
+ let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input);
+ let attr = attr.map(|attr| parse_string_spanned(call_site.anchor, call_site.ctx, attr));
let fixture_string = format!("{fixture:?}");
let attr_string = attr.as_ref().map(|it| format!("{it:?}"));
let res = expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site).unwrap();
- expect_spanned
- .assert_eq(&format!("{fixture_string}\n\n{}\n\n{res:#?}", attr_string.unwrap_or_default()));
+ expect_spanned.assert_eq(&format!(
+ "{fixture_string}{}{}{}",
+ if attr_string.is_some() { "\n\n" } else { "" },
+ attr_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
+ ));
}
pub(crate) fn list() -> Vec<String> {
diff --git a/crates/proc-macro-srv/src/tt.rs b/crates/proc-macro-srv/src/tt.rs
new file mode 100644
index 0000000..14cf7ca
--- /dev/null
+++ b/crates/proc-macro-srv/src/tt.rs
@@ -0,0 +1,805 @@
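+//! The proc-macro server's `TokenStream` implementation: token trees are kept
+//! in the `proc_macro::bridge` representation and parsed directly from source
+//! text with `rustc_lexer`.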
+use core::fmt;
+use std::sync::Arc;
+
+use intern::Symbol;
+use proc_macro::{Delimiter, bridge};
+use rustc_lexer::{DocStyle, LiteralKind};
+
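+/// A single token tree in the `proc_macro::bridge` representation,
+/// parameterized over the span type `S`.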
+pub type TokenTree<S> = bridge::TokenTree<TokenStream<S>, S, Symbol>;
+
+/// Trait that allows integration tests to parse token streams with dynamic span ranges.
+pub trait SpanLike {
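+    /// Returns a copy of `self` whose text range covers the given byte range of
+    /// the parsed source; span types that carry no range return themselves
+    /// unchanged.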
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self;
+}
+
+impl SpanLike for crate::SpanId {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for () {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for crate::Span {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self {
+ crate::Span {
+ range: span::TextRange::new(
+ span::TextSize::new(range.start as u32),
+ span::TextSize::new(range.end as u32),
+ ),
+ anchor: self.anchor,
+ ctx: self.ctx,
+ }
+ }
+}
+
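+/// A token stream backed by an `Arc`-shared vector of bridge token trees, so
+/// cloning a stream is cheap and does not copy the trees.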
+#[derive(Clone)]
+pub struct TokenStream<S>(pub(crate) Arc<Vec<TokenTree<S>>>);
+
+impl<S> Default for TokenStream<S> {
+ fn default() -> Self {
+ Self(Default::default())
+ }
+}
+
+impl<S> TokenStream<S> {
+ pub fn new(tts: Vec<TokenTree<S>>) -> TokenStream<S> {
+ TokenStream(Arc::new(tts))
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
+ pub fn get(&self, index: usize) -> Option<&TokenTree<S>> {
+ self.0.get(index)
+ }
+
+ pub fn iter(&self) -> TokenStreamIter<'_, S> {
+ TokenStreamIter::new(self)
+ }
+
+ pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree<S>> {
+ self.0.chunks(chunk_size)
+ }
+
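+    /// Lexes `s` with `rustc_lexer` and assembles the tokens into a `TokenStream`,
+    /// deriving per-token spans from `span` via [`SpanLike::derive_ranged`].
+    /// Returns an error message on unbalanced delimiters or unrecognizable tokens.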
+ pub fn from_str(s: &str, span: S) -> Result<Self, String>
+ where
+ S: SpanLike + Copy,
+ {
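+        // Parse with an explicit stack of open groups: each entry records the opening
+        // delimiter, the text range of the opening token, and the token trees collected
+        // so far. The bottom entry is an implicit `Delimiter::None` group for the top level.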
+ let mut groups = Vec::new();
+ groups.push((proc_macro::Delimiter::None, 0..0, vec![]));
+ let mut offset = 0;
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No).peekable();
+ while let Some(token) = tokens.next() {
+ let range = offset..offset + token.len as usize;
+ offset += token.len as usize;
+
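+            // A punctuation token is emitted as "joint" when the next lexer token is
+            // punct-like (operator, delimiter, or lifetime) with no whitespace in between.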
+ let mut is_joint = || {
+ tokens.peek().is_some_and(|token| {
+ matches!(
+ token.kind,
+ rustc_lexer::TokenKind::RawLifetime
+ | rustc_lexer::TokenKind::GuardedStrPrefix
+ | rustc_lexer::TokenKind::Lifetime { .. }
+ | rustc_lexer::TokenKind::Semi
+ | rustc_lexer::TokenKind::Comma
+ | rustc_lexer::TokenKind::Dot
+ | rustc_lexer::TokenKind::OpenParen
+ | rustc_lexer::TokenKind::CloseParen
+ | rustc_lexer::TokenKind::OpenBrace
+ | rustc_lexer::TokenKind::CloseBrace
+ | rustc_lexer::TokenKind::OpenBracket
+ | rustc_lexer::TokenKind::CloseBracket
+ | rustc_lexer::TokenKind::At
+ | rustc_lexer::TokenKind::Pound
+ | rustc_lexer::TokenKind::Tilde
+ | rustc_lexer::TokenKind::Question
+ | rustc_lexer::TokenKind::Colon
+ | rustc_lexer::TokenKind::Dollar
+ | rustc_lexer::TokenKind::Eq
+ | rustc_lexer::TokenKind::Bang
+ | rustc_lexer::TokenKind::Lt
+ | rustc_lexer::TokenKind::Gt
+ | rustc_lexer::TokenKind::Minus
+ | rustc_lexer::TokenKind::And
+ | rustc_lexer::TokenKind::Or
+ | rustc_lexer::TokenKind::Plus
+ | rustc_lexer::TokenKind::Star
+ | rustc_lexer::TokenKind::Slash
+ | rustc_lexer::TokenKind::Percent
+ | rustc_lexer::TokenKind::Caret
+ )
+ })
+ };
+
+ let Some((open_delim, _, tokenstream)) = groups.last_mut() else {
+ return Err("Unbalanced delimiters".to_owned());
+ };
+ match token.kind {
+ rustc_lexer::TokenKind::OpenParen => {
+ groups.push((proc_macro::Delimiter::Parenthesis, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseParen if *open_delim != Delimiter::Parenthesis => {
+                    return Err("Unexpected ')'".to_owned());
+ }
+ rustc_lexer::TokenKind::CloseParen => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(bridge::Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: bridge::DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBrace => {
+ groups.push((proc_macro::Delimiter::Brace, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBrace if *open_delim != Delimiter::Brace => {
+                    return Err("Unexpected '}'".to_owned());
+ }
+ rustc_lexer::TokenKind::CloseBrace => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(bridge::Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: bridge::DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBracket => {
+ groups.push((proc_macro::Delimiter::Bracket, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBracket if *open_delim != Delimiter::Bracket => {
+                    return Err("Unexpected ']'".to_owned());
+ }
+ rustc_lexer::TokenKind::CloseBracket => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(bridge::Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: bridge::DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::LineComment { doc_style: None }
+ | rustc_lexer::TokenKind::BlockComment { doc_style: None, terminated: _ } => {
+ continue;
+ }
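+                // Doc comments are desugared into `#[doc = "..."]` attribute token trees
+                // (`#![doc = "..."]` for inner doc comments), like the compiler's parser does.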
+ rustc_lexer::TokenKind::LineComment { doc_style: Some(doc_style) } => {
+                    let text = &s[range.start + 3..range.end];
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'#',
+ joint: false,
+ span,
+ }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'!',
+ joint: false,
+ span,
+ }));
+ }
+ tokenstream.push(bridge::TokenTree::Group(bridge::Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'=',
+ joint: false,
+ span,
+ }),
+ bridge::TokenTree::Literal(bridge::Literal {
+ kind: bridge::LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: bridge::DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::BlockComment { doc_style: Some(doc_style), terminated } => {
+                    let text =
+                        &s[range.start + 3..if terminated { range.end - 2 } else { range.end }];
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'#',
+ joint: false,
+ span,
+ }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'!',
+ joint: false,
+ span,
+ }));
+ }
+ tokenstream.push(bridge::TokenTree::Group(bridge::Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'=',
+ joint: false,
+ span,
+ }),
+ bridge::TokenTree::Literal(bridge::Literal {
+ kind: bridge::LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: bridge::DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::Whitespace => continue,
+ rustc_lexer::TokenKind::Frontmatter { .. } => unreachable!(),
+ rustc_lexer::TokenKind::Unknown => return Err("Unknown token".to_owned()),
+ rustc_lexer::TokenKind::UnknownPrefix => return Err("Unknown prefix".to_owned()),
+ rustc_lexer::TokenKind::UnknownPrefixLifetime => {
+ return Err("Unknown lifetime prefix".to_owned());
+ }
+                // FIXME: This should be an error on edition >= 2024, but I don't think the
+                // proc-macro server currently has a way to fetch the edition (and whose
+                // edition would even apply here?).
+ rustc_lexer::TokenKind::GuardedStrPrefix => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: s.as_bytes()[range.start],
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: s.as_bytes()[range.start + 1],
+ joint: is_joint(),
+ span: span.derive_ranged(range.start + 1..range.end),
+ }))
+ }
+ rustc_lexer::TokenKind::Ident => {
+ tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::InvalidIdent => return Err("Invalid identifier".to_owned()),
+ rustc_lexer::TokenKind::RawIdent => {
+ let range = range.start + 2..range.end;
+ tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+ tokenstream.push(bridge::TokenTree::Literal(literal_from_lexer(
+ &s[range.clone()],
+ span.derive_ranged(range),
+ kind,
+ suffix_start,
+ )))
+ }
+ rustc_lexer::TokenKind::RawLifetime => {
+ let range = range.start + 1 + 2..range.end;
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ if starts_with_number {
+ return Err("Lifetime cannot start with a number".to_owned());
+ }
+ let range = range.start + 1..range.end;
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Semi => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b';',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Comma => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b',',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Dot => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'.',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::At => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'@',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Pound => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'#',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Tilde => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'~',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Question => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'?',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Colon => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b':',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Dollar => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'$',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Eq => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'=',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Bang => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'!',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Lt => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'<',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Gt => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'>',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Minus => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'-',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::And => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'&',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Or => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'|',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Plus => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'+',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Star => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'*',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Slash => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'/',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Caret => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'^',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Percent => {
+ tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ ch: b'%',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Eof => break,
+ }
+ }
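+        // Only the implicit `Delimiter::None` root group may remain at this point; any
+        // other leftover group means an opening delimiter was never closed.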
+ if let Some((Delimiter::None, _, tokentrees)) = groups.pop()
+ && groups.is_empty()
+ {
+ Ok(TokenStream::new(tokentrees))
+ } else {
+ Err("Mismatched token groups".to_owned())
+ }
+ }
+}
+
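+/// Re-emits the stream as (approximately) parseable Rust source, printing a trailing
+/// space after identifiers, after punctuation that is not marked joint, and after
+/// non-string literals.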
+impl<S> fmt::Display for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ for tt in self.0.iter() {
+ display_token_tree(tt, f)?;
+ }
+ Ok(())
+ }
+}
+
+fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match tt {
+ bridge::TokenTree::Group(bridge::Group { delimiter, stream, span: _ }) => {
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ if let Some(stream) = stream {
+ write!(f, "{stream}")?;
+ }
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ }
+ bridge::TokenTree::Punct(bridge::Punct { ch, joint, span: _ }) => {
+            write!(f, "{}{}", *ch as char, if *joint { "" } else { " " })?
+ }
+ bridge::TokenTree::Ident(bridge::Ident { sym, is_raw, span: _ }) => {
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym} ")?;
+ }
+ bridge::TokenTree::Literal(lit) => {
+ display_fmt_literal(lit, f)?;
+ let joint = match lit.kind {
+ bridge::LitKind::Str
+ | bridge::LitKind::StrRaw(_)
+ | bridge::LitKind::ByteStr
+ | bridge::LitKind::ByteStrRaw(_)
+ | bridge::LitKind::CStr
+ | bridge::LitKind::CStrRaw(_) => true,
+ _ => false,
+ };
+ if !joint {
+ write!(f, " ")?;
+ }
+ }
+ }
+ Ok(())
+}
+
+fn display_fmt_literal<S>(
+ literal: &bridge::Literal<S, Symbol>,
+ f: &mut fmt::Formatter<'_>,
+) -> fmt::Result {
+ match literal.kind {
+ bridge::LitKind::Byte => write!(f, "b'{}'", literal.symbol),
+ bridge::LitKind::Char => write!(f, "'{}'", literal.symbol),
+ bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => {
+ write!(f, "{}", literal.symbol)
+ }
+ bridge::LitKind::Str => write!(f, "\"{}\"", literal.symbol),
+ bridge::LitKind::ByteStr => write!(f, "b\"{}\"", literal.symbol),
+ bridge::LitKind::CStr => write!(f, "c\"{}\"", literal.symbol),
+ bridge::LitKind::StrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ bridge::LitKind::ByteStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ bridge::LitKind::CStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ }?;
+ if let Some(suffix) = &literal.suffix {
+ write!(f, "{suffix}")?;
+ }
+ Ok(())
+}
+
+impl<S: fmt::Debug> fmt::Debug for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ debug_token_stream(self, 0, f)
+ }
+}
+
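+/// Writes each token tree on its own line, indented by two spaces per nesting level;
+/// this is the textual form the expect tests snapshot.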
+fn debug_token_stream<S: fmt::Debug>(
+ ts: &TokenStream<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ for tt in ts.0.iter() {
+ debug_token_tree(tt, depth, f)?;
+ }
+ Ok(())
+}
+
+fn debug_token_tree<S: fmt::Debug>(
+ tt: &TokenTree<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ write!(f, "{:indent$}", "", indent = depth * 2)?;
+ match tt {
+ bridge::TokenTree::Group(bridge::Group { delimiter, stream, span }) => {
+ writeln!(
+ f,
+ "GROUP {}{} {:#?} {:#?} {:#?}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "$",
+ },
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "$",
+ },
+ span.open,
+ span.close,
+ span.entire,
+ )?;
+ if let Some(stream) = stream {
+ debug_token_stream(stream, depth + 1, f)?;
+ }
+ return Ok(());
+ }
+ bridge::TokenTree::Punct(bridge::Punct { ch, joint, span }) => write!(
+ f,
+ "PUNCT {span:#?} {} {}",
+ *ch as char,
+ if *joint { "[joint]" } else { "[alone]" }
+ )?,
+ bridge::TokenTree::Ident(bridge::Ident { sym, is_raw, span }) => {
+ write!(f, "IDENT {span:#?} ")?;
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym}")?;
+ }
+ bridge::TokenTree::Literal(bridge::Literal { kind, symbol, suffix, span }) => write!(
+ f,
+ "LITER {span:#?} {kind:?} {symbol}{} ",
+ match suffix {
+ Some(suffix) => suffix.clone(),
+ None => Symbol::intern(""),
+ }
+ )?,
+ }
+ writeln!(f)
+}
+
+impl<S: Copy> TokenStream<S> {
+    /// Pushes `tt` onto the end of the stream.
+    /// Uses `Arc::make_mut` to avoid cloning the backing vector when it is uniquely owned.
+ pub fn push_tree(&mut self, tt: TokenTree<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+ vec_mut.push(tt);
+ }
+
+    /// Appends all token trees of `stream` onto the end of this stream.
+    /// Uses `Arc::make_mut` to avoid cloning the backing vector when it is uniquely owned.
+ pub fn push_stream(&mut self, stream: TokenStream<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+
+ let stream_iter = stream.0.iter().cloned();
+
+ vec_mut.extend(stream_iter);
+ }
+}
+
+impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
+ fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(iter: I) -> Self {
+ TokenStream::new(iter.into_iter().collect::<Vec<TokenTree<S>>>())
+ }
+}
+
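+/// A by-reference iterator over the token trees of a `TokenStream` that also supports
+/// peeking via [`TokenStreamIter::peek`].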
+#[derive(Clone)]
+pub struct TokenStreamIter<'t, S> {
+ stream: &'t TokenStream<S>,
+ index: usize,
+}
+
+impl<'t, S> TokenStreamIter<'t, S> {
+ fn new(stream: &'t TokenStream<S>) -> Self {
+ TokenStreamIter { stream, index: 0 }
+ }
+
+    // Peeking could be done via `Peekable`, but most users of this iterator need to peek,
+    // so providing it here is simpler and avoids wrapping in `Peekable` at every use site.
+ pub fn peek(&self) -> Option<&'t TokenTree<S>> {
+ self.stream.0.get(self.index)
+ }
+}
+
+impl<'t, S> Iterator for TokenStreamIter<'t, S> {
+ type Item = &'t TokenTree<S>;
+
+ fn next(&mut self) -> Option<&'t TokenTree<S>> {
+ self.stream.0.get(self.index).map(|tree| {
+ self.index += 1;
+ tree
+ })
+ }
+}
+
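+/// Converts a literal token from `rustc_lexer` into a bridge literal: the quotes,
+/// prefix, and raw-string hashes are stripped from the symbol, and any suffix starting
+/// at `suffix_start` is split off.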
+pub(super) fn literal_from_lexer<Span>(
+ s: &str,
+ span: Span,
+ kind: rustc_lexer::LiteralKind,
+ suffix_start: u32,
+) -> bridge::Literal<Span, Symbol> {
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (bridge::LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (bridge::LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (bridge::LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (bridge::LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (bridge::LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (bridge::LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (bridge::LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ bridge::LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ bridge::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ bridge::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ };
+
+ let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
+ let suffix = match suffix {
+ "" | "_" => None,
+ suffix => Some(Symbol::intern(suffix)),
+ };
+
+ bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn roundtrip() {
+ let token_stream = TokenStream::from_str("struct T {\"string\"}", ()).unwrap();
+ assert_eq!(token_stream.to_string(), "struct T {\"string\"}");
+ }
+}
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index f9a547f..ea07522 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -826,58 +826,6 @@
}
}
-impl<S> Literal<S> {
- pub fn display_no_minus(&self) -> impl fmt::Display {
- struct NoMinus<'a, S>(&'a Literal<S>);
- impl<S> fmt::Display for NoMinus<'_, S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let symbol =
- self.0.symbol.as_str().strip_prefix('-').unwrap_or(self.0.symbol.as_str());
- match self.0.kind {
- LitKind::Byte => write!(f, "b'{symbol}'"),
- LitKind::Char => write!(f, "'{symbol}'"),
- LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{symbol}"),
- LitKind::Str => write!(f, "\"{symbol}\""),
- LitKind::ByteStr => write!(f, "b\"{symbol}\""),
- LitKind::CStr => write!(f, "c\"{symbol}\""),
- LitKind::StrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::ByteStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::CStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- }?;
- if let Some(suffix) = &self.0.suffix {
- write!(f, "{suffix}")?;
- }
- Ok(())
- }
- }
- NoMinus(self)
- }
-}
-
impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {