Would this proc macro help with understanding how the Rust tokenizer works, and with writing proc macros without needing dependencies?
Rust · 2025-11-16 · surdeus · 7

#[proc_macro_attribute]
/// Attribute proc macro that expands to a sequence of `println!` statements
/// narrating every token tree in `input` (then in `attribute`): groups with
/// their delimiters and open/close spans, plus the span, kind, and text of
/// every literal, identifier, and punctuation token.
///
/// NOTE(review): the annotated item is consumed and NOT re-emitted, and the
/// generated `println!` statements are only valid where statements are
/// allowed — applying this attribute to an ordinary item definition will not
/// compile. That matches the original behaviour; this is a learning tool for
/// inspecting what the compiler hands a proc macro, not a production macro.
///
/// NOTE(review): `Span::line/column/file/start/end` were only recently
/// stabilized (previously nightly `proc_macro_span`) — confirm the crate's
/// minimum supported Rust version.
pub fn debug_parse(attribute: TokenStream, input: TokenStream) -> TokenStream {
    /// Build the token stream for a single `println!(<args>)` invocation.
    /// This replaces five near-identical hand-rolled builders in the
    /// original; `args` is the comma-separated token list inside the parens.
    fn println_call(args: Vec<TokenTree>) -> TokenStream {
        [
            TokenTree::Ident(Ident::new("println", Span::call_site())),
            TokenTree::Punct(Punct::new('!', proc_macro::Spacing::Joint)),
            TokenTree::Group(Group::new(
                proc_macro::Delimiter::Parenthesis,
                args.into_iter().collect(),
            )),
        ]
        .into_iter()
        .collect()
    }

    /// `"…"` string-literal token.
    fn lit_str(s: &str) -> TokenTree {
        TokenTree::Literal(Literal::string(s))
    }

    /// `,` argument-separator token.
    fn comma() -> TokenTree {
        TokenTree::Punct(Punct::new(',', proc_macro::Spacing::Alone))
    }

    /// `;` statement terminator as a one-token stream.
    fn semicolon() -> TokenStream {
        [TokenTree::Punct(Punct::new(';', proc_macro::Spacing::Alone))]
            .into_iter()
            .collect()
    }

    /// `println!` reporting a span as:
    /// line, column, file, (start line, start column), (end line, end column).
    fn println_span(span: Span) -> TokenStream {
        /// Emit `(line, column)` for a span endpoint — prints as a tuple
        /// via the `{:?}` placeholders in the format string below.
        fn line_column(at: Span) -> TokenTree {
            TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                [
                    TokenTree::Literal(Literal::usize_suffixed(at.line())),
                    comma(),
                    TokenTree::Literal(Literal::usize_suffixed(at.column())),
                ]
                .into_iter()
                .collect(),
            ))
        }
        println_call(vec![
            lit_str("{}, {}, {}, {:?}, {:?}, span"),
            comma(),
            TokenTree::Literal(Literal::usize_suffixed(span.line())),
            comma(),
            TokenTree::Literal(Literal::usize_suffixed(span.column())),
            comma(),
            lit_str(span.file().as_str()),
            comma(),
            line_column(span.start()),
            comma(),
            line_column(span.end()),
        ])
    }

    /// Recursively emit `println!;` statement pairs describing `tree` and,
    /// for groups, every token nested inside it.
    fn walk(tree: TokenTree, out: &mut TokenStream) {
        match tree {
            TokenTree::Group(group) => {
                out.extend(println_call(vec![lit_str("Group encountered")]));
                out.extend(semicolon());
                // The string is used as the println! format string, so the
                // brace delimiter must be escaped ("{{}}" prints as "{}").
                let delim = match group.delimiter() {
                    Delimiter::Brace => "{{}}",
                    Delimiter::Bracket => "[]",
                    Delimiter::Parenthesis => "()",
                    Delimiter::None => "00", // invisible delimiter
                };
                out.extend(println_call(vec![lit_str(delim)]));
                out.extend(semicolon());
                // Whole-group span plus the spans of the two delimiters.
                out.extend(println_span(group.span()));
                out.extend(semicolon());
                out.extend(println_span(group.span_open()));
                out.extend(semicolon());
                out.extend(println_span(group.span_close()));
                out.extend(semicolon());
                for inner in group.stream() {
                    walk(inner, out);
                }
                out.extend(println_call(vec![lit_str("Group ended")]));
                out.extend(semicolon());
            }
            TokenTree::Literal(lit) => {
                out.extend(println_span(lit.span()));
                out.extend(semicolon());
                // The literal token itself is spliced in as the argument,
                // so it prints with its source formatting.
                out.extend(println_call(vec![
                    lit_str("{}, literal"),
                    comma(),
                    TokenTree::Literal(lit),
                ]));
                out.extend(semicolon());
            }
            TokenTree::Ident(ident) => {
                out.extend(println_span(ident.span()));
                out.extend(semicolon());
                out.extend(println_call(vec![
                    lit_str("{}, ident"),
                    comma(),
                    lit_str(ident.to_string().as_str()),
                ]));
                out.extend(semicolon());
            }
            TokenTree::Punct(punct) => {
                out.extend(println_span(punct.span()));
                out.extend(semicolon());
                let spacing = match punct.spacing() {
                    proc_macro::Spacing::Joint => "joint",
                    proc_macro::Spacing::Alone => "alone",
                };
                out.extend(println_call(vec![
                    lit_str("{}, punct {}"),
                    comma(),
                    lit_str(punct.as_char().to_string().as_str()),
                    comma(),
                    lit_str(spacing),
                ]));
                out.extend(semicolon());
            }
        }
    }

    let mut token_stream = TokenStream::new();
    // Item tokens first, then a marker line, then the attribute arguments —
    // same order as the original.
    for tree in input {
        walk(tree, &mut token_stream);
    }
    token_stream.extend(println_call(vec![lit_str("Attribute now")]));
    token_stream.extend(semicolon());
    for tree in attribute {
        walk(tree, &mut token_stream);
    }
    token_stream
}
I know I should put this in a repo or something, but does it help with understanding proc macros when applied to structs, enums, etc.? It needs a bit of polishing, of course, but is it useful — or useless, given that syn is the main dependency most people use for proc macros?
Also, this blog post helped me write this proc macro.
1 post - 1 participant
🏷️ Rust_feed