Commit 8317733 ("WIP")
1 parent cba9415 commit 8317733

File tree: library/proc_macro/src/quote.rs, tests/ui/proc-macro/quote/auxiliary/basic.rs
2 files changed: +243 -75 lines changed

library/proc_macro/src/quote.rs

Lines changed: 213 additions & 75 deletions
@@ -4,6 +4,7 @@
 //! This quasiquoter uses macros 2.0 hygiene to reliably access
 //! items from `proc_macro`, to build a `proc_macro::TokenStream`.
 
+use core::iter::Peekable;
 use crate::{
     BitOr, Delimiter, Group, Ident, Literal, Punct, Spacing, Span, ToTokens, TokenStream, TokenTree,
 };
@@ -203,6 +204,8 @@ macro_rules! minimal_quote_tt {
     (_) => { Ident::new("_", Span::def_site()) };
     ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
     ($lit:literal) => { stringify!($lit).parse::<Literal>().unwrap() };
+    ($lifetime:lifetime) => { stringify!($lifetime).parse::<Lifetime>().unwrap() };
+    ($other:tt) => { stringify!($other).parse().unwrap() };
 }
 
 macro_rules! minimal_quote_ts {
@@ -328,9 +331,36 @@ macro_rules! minimal_quote_ts {
             [c.0, c.1].into_iter().collect::<TokenStream>()
         }
     };
-    ($t:t) => { TokenTree::from(minimal_quote_tt!($t)) };
+    ($t:tt) => { TokenTree::from(minimal_quote_tt!($t)) };
 }
 
+// macro_rules! minimal_quote_sep {}
+// macro_rules! quote_token {
+//     ($ident:ident) => {
+//         Ident::new(stringify!($ident), Span::def_site());
+//     };
+
+//     (::) => {
+//         {
+//             let mut c = (
+//                 TokenTree::from(Punct::new(':', Spacing::Joint)),
+//                 TokenTree::from(Punct::new(':', Spacing::Alone))
+//             );
+//             c.0.set_span(Span::def_site());
+//             c.1.set_span(Span::def_site());
+//             [c.0, c.1].into_iter().collect::<TokenStream>()
+//         }
+//     };
+
+//     (( $($inner:tt)* )) => {
+//         $crate::__private::push_group(
+//             &mut $tokens,
+//             $crate::__private::Delimiter::Parenthesis,
+//             $crate::quote!($($inner)*),
+//         );
+//     };
+// }
+
 /// Simpler version of the real `quote!` macro, implemented solely
 /// through `macro_rules`, for bootstrapping the real implementation
 /// (see the `quote` function), which does not have access to the
@@ -368,84 +398,41 @@ pub fn quote(stream: TokenStream) -> TokenStream {
         if after_dollar {
             after_dollar = false;
             match tree {
-                TokenTree::Group(tt) => {
+                TokenTree::Group(ref tt) => {
                     // Handles repetition by expanding `$( CONTENTS ) SEP_OPT *` to `{ REP_EXPANDED }`.
                     let contents = tt.stream();
+                    consume_dollar_group(contents.clone(), &mut iter).to_tokens(&mut tokens); // Would prefer to reference `iter` directly here, though.
+                    continue;
 
                     // The `*` token is also consumed here.
-                    let sep_opt: Option<Punct> = match (iter.next(), iter.peek()) {
-                        (Some(TokenTree::Punct(sep)), Some(TokenTree::Punct(star)))
-                            if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
-                        {
-                            iter.next();
-                            Some(sep)
-                        }
-                        (Some(TokenTree::Punct(star)), _) if star.as_char() == '*' => None,
-                        _ => panic!("`$(...)` must be followed by `*` in `quote!`"),
-                    };
-
-                    let mut rep_expanded = TokenStream::new();
-
-                    // Append setup code for a `while`, where recursively quoted `CONTENTS`
-                    // and `SEP_OPT` are repeatedly processed, to `REP_EXPANDED`.
-                    let meta_vars = collect_meta_vars(contents.clone());
-                    minimal_quote!(
-                        use crate::ext::*;
-                        (@ if sep_opt.is_some() {
-                            minimal_quote!(let mut _i = 0usize;)
-                        } else {
-                            minimal_quote!(();)
-                        })
-                        let has_iter = crate::ThereIsNoIteratorInRepetition;
-                    )
-                    .to_tokens(&mut rep_expanded);
-                    for meta_var in &meta_vars {
-                        minimal_quote!(
-                            #[allow(unused_mut)]
-                            let (mut (@ meta_var), i) = (@ meta_var).quote_into_iter();
-                            let has_iter = has_iter | i;
-                        )
-                        .to_tokens(&mut rep_expanded);
-                    }
-                    minimal_quote!(let _: crate::HasIterator = has_iter;)
-                        .to_tokens(&mut rep_expanded);
-
-                    // Append the `while` to `REP_EXPANDED`.
-                    let mut while_body = TokenStream::new();
-                    for meta_var in &meta_vars {
-                        minimal_quote!(
-                            let (@ meta_var) = match (@ meta_var).next() {
-                                Some(_x) => crate::RepInterp(_x),
-                                None => break,
-                            };
-                        )
-                        .to_tokens(&mut while_body);
-                    }
-                    minimal_quote!(
-                        (@ if let Some(sep) = sep_opt {
-                            minimal_quote!(
-                                if _i > 0 {
-                                    (@ minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Punct(crate::Punct::new(
-                                        (@ TokenTree::from(Literal::character(sep.as_char()))),
-                                        (@ minimal_quote!(crate::Spacing::Alone)),
-                                    )), &mut ts);))
-                                }
-                                _i += 1;
-                            )
-                        } else {
-                            minimal_quote!(();)
-                        })
-                        (@ quote(contents.clone())).to_tokens(&mut ts);
-                    )
-                    .to_tokens(&mut while_body);
-                    rep_expanded.extend(vec![
-                        TokenTree::Ident(Ident::new("while", Span::call_site())),
-                        TokenTree::Ident(Ident::new("true", Span::call_site())),
-                        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
-                    ]);
-
-                    minimal_quote!((@ TokenTree::Group(Group::new(Delimiter::Brace, rep_expanded)))).to_tokens(&mut tokens);
-                    continue;
+                    // let sep_opt: Option<Punct> = match (iter.next(), iter.peek()) {
+                    // //     (Some(TokenTree::Ident(sep)), Some(TokenTree::Punct(star)))
+                    // //         if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
+                    // //     {
+                    // //         iter.next();
+                    // //         Some(sep)
+                    // //     }
+                    // //     (Some(TokenTree::Punct(sep)), Some(TokenTree::Punct(star)))
+                    // //         if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
+                    // //     {
+                    // //         iter.next();
+                    // //         Some(sep)
+                    // //     }
+                    // //     (Some(TokenTree::Punct(sep)), Some(TokenTree::Punct(star)))
+                    // //         if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
+                    // //     {
+                    // //         iter.next();
+                    // //         Some(sep)
+                    // //     }
+                    //     (Some(TokenTree::Punct(sep)), Some(TokenTree::Punct(star)))
+                    //         if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
+                    //     {
+                    //         iter.next();
+                    //         Some(sep)
+                    //     }
+                    //     (Some(TokenTree::Punct(star)), _) if star.as_char() == '*' => None,
+                    //     x => panic!("`$(...)` must be followed by `*` in `quote!`: {:?}", x),
+                    // };
                 }
                 TokenTree::Ident(_) => {
                     minimal_quote!(crate::ToTokens::to_tokens(&(@ tree), &mut ts);)
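Note: the block removed in the hunk above built the repetition expansion inline; its leading comment describes the idea: `$( CONTENTS ) SEP_OPT *` is rewritten into a block that drains the interpolated iterators and emits the separator only between elements. The following is a minimal, runnable sketch of that control flow using plain strings instead of token streams; the function and variable names (`interleave_with_sep`, `items`, `out`) are illustrative only and not part of the patch, and the `"a << b"` expectation mirrors the commented-out assertion in `test_aaa` further below.

// A sketch of the generated repetition's control flow: drain the iterator,
// emit the separator only when `_i > 0`, then append the contents.
fn interleave_with_sep(items: Vec<&str>, sep: Option<&str>) -> String {
    let mut out = String::new();
    let mut iter = items.into_iter();
    let mut _i = 0usize; // the real expansion only declares this when a separator exists
    loop {
        // mirrors `let $var = match $var.next() { Some(_x) => RepInterp(_x), None => break };`
        let item = match iter.next() {
            Some(x) => x,
            None => break,
        };
        // mirrors `if _i > 0 { SEP.to_tokens(&mut ts); } _i += 1;`
        if _i > 0 {
            if let Some(s) = sep {
                out.push(' ');
                out.push_str(s);
            }
        }
        _i += 1;
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(item);
    }
    out
}

fn main() {
    // matches the commented-out expectation in `test_aaa`
    assert_eq!(interleave_with_sep(vec!["a", "b"], Some("<<")), "a << b");
    assert_eq!(interleave_with_sep(vec!["a", "b"], None), "a b");
}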
@@ -556,6 +543,157 @@ fn collect_meta_vars(content_stream: TokenStream) -> Vec<Ident> {
     vars
 }
 
+fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl Iterator<Item = TokenTree>>) -> TokenStream {
+    let mut tokens = crate::TokenStream::new();
+
+    let mut sep_cand = Vec::new();
+    // #[allow(unused_assignments)]
+    // let mut sep_opt: Option<TokenStream> = None;
+
+    let mut current_contents: TokenStream = content_stream;
+
+    // If the `*` binds to a `$`-group inside the separator, treat this group as a plain
+    // TokenStream. (It is handled with `after_dollar = false`, so a re-invocation is enough.)
+    // Otherwise, handle this group as a repetition.
+    loop {
+        match (iter.next(), iter.peek()) { // *=*
+
+            // 1. Consume the star.
+            // A `*` that is not part of `*=`: nesting this might be cleaner.
+            (Some(TokenTree::Punct(star)), opt)
+                if star.as_char() == '*' && !matches!(opt, Some(TokenTree::Punct(_))) => {
+                eprintln!("DEBUG1");
+                let sep_opt: Option<TokenStream> = (!sep_cand.is_empty()).then(|| sep_cand.into_iter().collect::<TokenStream>());
+
+                expand_dollar_group(current_contents.clone(), sep_opt).to_tokens(&mut tokens);
+                break;
+
+            }
+            (Some(TokenTree::Punct(star)), Some(TokenTree::Punct(not_assign)))
+                if star.as_char() == '*' && !(star.spacing() == Spacing::Joint && not_assign.as_char() == '=') => {
+                eprintln!("DEBUG1");
+                let sep_opt: Option<TokenStream> = (!sep_cand.is_empty()).then(|| sep_cand.into_iter().collect::<TokenStream>());
+
+                // let t = sep_cand.into_iter().collect::<TokenStream>();
+                // sep_opt = Some(quote_token!(sep_cand.to_tokens()));
+                // sep_opt = Some(minimal_quote_ts!(t).into()); // XXX TOO AUTHORITATIVE
+                // sep_opt = Some(minimal_quote_ts!((@ t)).into()); // XXX TOO AUTHORITATIVE; recursive resolution inside SEP comes later. Safer to run it through a decl macro for checking.
+                expand_dollar_group(current_contents.clone(), sep_opt).to_tokens(&mut tokens);
+                break;
+            }
+
+            // 2. If another $group appears before the star, emit the current $group literally
+            //    first, then switch to processing from the next $group.
+            (Some(TokenTree::Punct(dollar)), Some(TokenTree::Group(next_group)))
+                if dollar.as_char() == '$' => {
+                // At this point it is certain the current $group has no `*`.
+                minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens); // $
+                // quote([tree].into_iter().collect::<TokenStream>()).to_tokens(&mut tokens); // group
+                // quote(stream.clone()).to_to\
+                // XXX sep_cand still needs handling here.
+
+                // Switch to processing from the next $group.
+                eprintln!("DEBUG3: XXXXXXXXXXXXXXXXX100");
+                // consume_dollar_group(next_group.stream().clone(), &mut iter).to_tokens(&mut tokens); // Would prefer to reference `iter` directly here, though.
+                current_contents = next_group.stream().clone();
+                sep_cand = Vec::new();
+
+                // minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens); // $
+                // quote([tree].into_iter().collect::<TokenStream>()).to_tokens(&mut tokens); // group
+                // quote(stream.clone()).to_tokens(&mut tokens); // rest stream
+                // continue 'outer;
+            }
+
+            // Add a separator candidate. A group is only allowed once, though, and a punct
+            // sequence is also accepted only once, and only for a known operator. This is
+            // probably better handled by a decl macro: together with sep_cand we can look at
+            // up to three tokens, which should be enough to decide.
+            // !matches!(x, TokenTree::Group(_))
+            (Some(x), _) =>
+            {
+                eprintln!("DEBUG2: XXXXXXXXXXXXXXXXX");
+                sep_cand.push(x.clone());
+
+                // XXX Keep a candidate set and narrow it down; once it is empty, just process sep_cand and break?
+            }
+
+            // Handle the current $group without a star. If we got here from the next $group,
+            // it has to be appended to the token stream again.
+            (_, _) =>
+            {
+                eprintln!("DEBUG3: XXXXXXXXXXXXXXXXX");
+                minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens); // $
+                // quote([tree].into_iter().collect::<TokenStream>()).to_tokens(&mut tokens); // group
+                // sep_opt
+                // continue 'outer;
+            }
+        }
+
+    }
+
+    return tokens;
+
+    // expand_dollar_group(content_stream, &mut iter).to_tokens(&mut tokens);
+}
+
+fn expand_dollar_group(contents: TokenStream, sep_opt: Option<TokenStream>) -> TokenStream {
+    let mut tokens = crate::TokenStream::new();
+
+    let mut rep_expanded = TokenStream::new();
+
+    // Append setup code for a `while`, where recursively quoted `CONTENTS`
+    // and `SEP_OPT` are repeatedly processed, to `REP_EXPANDED`.
+    let meta_vars = collect_meta_vars(contents.clone());
+    minimal_quote!(
+        use crate::ext::*;
+        (@ if sep_opt.is_some() {
+            minimal_quote!(let mut _i = 0usize;)
+        } else {
+            minimal_quote!(();)
+        })
+        let has_iter = crate::ThereIsNoIteratorInRepetition;
+    )
+    .to_tokens(&mut rep_expanded);
+    for meta_var in &meta_vars {
+        minimal_quote!(
+            #[allow(unused_mut)]
+            let (mut (@ meta_var), i) = (@ meta_var).quote_into_iter();
+            let has_iter = has_iter | i;
+        )
+        .to_tokens(&mut rep_expanded);
+    }
+    minimal_quote!(let _: crate::HasIterator = has_iter;)
+        .to_tokens(&mut rep_expanded);
+
+    // Append the `while` to `REP_EXPANDED`.
+    let mut while_body = TokenStream::new();
+    for meta_var in &meta_vars {
+        minimal_quote!(
+            let (@ meta_var) = match (@ meta_var).next() {
+                Some(_x) => crate::RepInterp(_x),
+                None => break,
+            };
+        )
+        .to_tokens(&mut while_body);
+    }
+    minimal_quote!(
+        (@ if let Some(sep) = sep_opt {
+            minimal_quote!(
+                if _i > 0 {
+                    (@ quote(sep)).to_tokens(&mut ts);
+                }
+                _i += 1;
+            )
+        } else {
+            minimal_quote!(();)
+        })
+        (@ quote(contents.clone())).to_tokens(&mut ts);
+    )
+    .to_tokens(&mut while_body); // Pushing into `ts` here is suspicious.
+    rep_expanded.extend(vec![
+        TokenTree::Ident(Ident::new("while", Span::call_site())),
+        TokenTree::Ident(Ident::new("true", Span::call_site())),
+        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
+    ]);
+
+    minimal_quote!((@ TokenTree::Group(Group::new(Delimiter::Brace, rep_expanded)))).to_tokens(&mut tokens);
+    return tokens;
+}
+
 /// Quote a `Span` into a `TokenStream`.
 /// This is needed to implement a custom quoter.
 #[unstable(feature = "proc_macro_quote", issue = "54722")]
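Note: the `consume_dollar_group` function added above scans the tokens that follow a `$( ... )` group, collecting separator candidates until it reaches the repetition `*`, while taking care not to mistake the `*` of a `*=` operator for the repetition marker. Below is a rough, runnable illustration of that classification only, not the patch itself: it is written against the external `proc_macro2` crate (same token API, but constructible outside a macro invocation), and `split_separator` is a hypothetical helper name.

use proc_macro2::{Spacing, TokenStream, TokenTree};

// Splits "separator tokens, then `*`" into (separator tokens, star_found).
// A joint-spaced `*` followed by `=` is the `*=` operator, so it stays in the
// separator instead of being treated as the repetition star.
fn split_separator(stream: TokenStream) -> (Vec<TokenTree>, bool) {
    let mut sep = Vec::new();
    let mut iter = stream.into_iter().peekable();
    while let Some(tt) = iter.next() {
        if let TokenTree::Punct(ref p) = tt {
            let part_of_star_eq = p.spacing() == Spacing::Joint
                && matches!(iter.peek(), Some(TokenTree::Punct(q)) if q.as_char() == '=');
            if p.as_char() == '*' && !part_of_star_eq {
                return (sep, true); // bare `*`: the repetition marker
            }
        }
        sep.push(tt); // anything else is a separator candidate
    }
    (sep, false) // no repetition star found
}

fn main() {
    // `$($iter) << *` leaves `<< *` after the group: `<<` is the separator.
    let (sep, star) = split_separator("<< *".parse().unwrap());
    assert!(star);
    assert_eq!(sep.len(), 2); // `<` (joint) and `<` (alone)

    // `$($iter) *= *`: the first `*` belongs to `*=`, the final `*` is the marker.
    let (sep, star) = split_separator("*= *".parse().unwrap());
    assert!(star);
    assert_eq!(sep.len(), 2); // `*` and `=`
}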

tests/ui/proc-macro/quote/auxiliary/basic.rs

Lines changed: 30 additions & 0 deletions
@@ -11,6 +11,9 @@ use proc_macro::*;
 
 #[proc_macro]
 pub fn run_tests(_: TokenStream) -> TokenStream {
+    test_aaa();
+    test_bbb();
+
     test_quote_impl();
     test_substitution();
     test_iter();
@@ -50,6 +53,33 @@ pub fn run_tests(_: TokenStream) -> TokenStream {
     TokenStream::new()
 }
 
+// ./x.py test tests/ui/proc-macro/quote/auxiliary/basic.rs
+// ./x.py test tests/ui/proc-macro/quote
+fn test_aaa() {
+    let iter = vec!["a", "b"].into_iter();
+    let tokens = quote!($($iter) << *);
+    // quote!($($iter)=*);
+    // quote!($($iter)'static*);
+    quote!($($iter)<<*;let);
+
+    eprintln!("{}", tokens);
+
+    // let expected = "a << b";
+
+    // assert_eq!(expected, tokens.to_string());
+}
+
+fn test_bbb() {
+    let iter = vec!["a", "b"].into_iter();
+    let j = "j";
+    let tokens = quote!($$ j $($$ j $iter)*);
+    eprintln!("{}", tokens);
+
+    // let expected = "j a b";
+
+    // assert_eq!(expected, tokens.to_string());
+}
+
 // Based on https://github.com/dtolnay/quote/blob/0245506323a3616daa2ee41c6ad0b871e4d78ae4/tests/test.rs
 //
 // FIXME(quote):
