
Commit f0dd1ac

committed: WIP
1 parent 8317733 commit f0dd1ac

File tree

1 file changed: +138, -9 lines changed

library/proc_macro/src/quote.rs

Lines changed: 138 additions & 9 deletions
@@ -393,7 +393,7 @@ pub fn quote(stream: TokenStream) -> TokenStream {
     let mut after_dollar = false;

     let mut tokens = crate::TokenStream::new();
-    let mut iter = stream.into_iter().peekable();
+    let mut iter: Peekable<crate::token_stream::IntoIter> = stream.into_iter().peekable();
     while let Some(tree) = iter.next() {
         if after_dollar {
             after_dollar = false;
@@ -402,6 +402,7 @@ pub fn quote(stream: TokenStream) -> TokenStream {
                 // Handles repetition by expanding `$( CONTENTS ) SEP_OPT *` to `{ REP_EXPANDED }`.
                 let contents = tt.stream();
                 consume_dollar_group(contents.clone(), &mut iter).to_tokens(&mut tokens); // I'd like to just borrow `iter` directly, though.
+                eprintln!("DEBUG1 {:?}", iter.clone().collect::<TokenStream>());
                 continue;

                 // The `*` token is also consumed here.
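For reference, the repetition being implemented here, `$( CONTENTS ) SEP_OPT *`, mirrors the separator-aware repetition of dtolnay's quote crate (the same crate this patch cites later for its separator table). A minimal sketch of the intended behaviour, assuming the external `quote` and `proc-macro2` crates rather than this file's internal types:

// Sketch only: uses the external quote/proc-macro2 crates, not library/proc_macro.
use proc_macro2::TokenStream;
use quote::quote;

fn main() {
    // `items` plays the role of a `$item` metavariable binding.
    let items: Vec<u32> = vec![1, 2, 3];
    // `#( #items ),*` repeats its contents once per element and inserts the
    // `,` separator between repetitions, i.e. the stream contains `1u32, 2u32, 3u32`.
    let ts: TokenStream = quote! { #( #items ),* };
    println!("{ts}");
}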
@@ -543,7 +544,8 @@ fn collect_meta_vars(content_stream: TokenStream) -> Vec<Ident> {
     vars
 }

-fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl Iterator<Item = TokenTree>>) -> TokenStream {
+// fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl Iterator<Item = TokenTree>>) -> TokenStream {
+fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<crate::token_stream::IntoIter>) -> TokenStream {
     let mut tokens = crate::TokenStream::new();

     let mut sep_cand = Vec::new();
@@ -552,6 +554,8 @@ fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl It

     let mut current_contents: TokenStream = content_stream;

+    let mut flag = false;
+
     // If the `*` binds to the $-GROUP inside the sep, treat this Group as a plain TokenStream (it is processed with after_dollar = false, so a recursive call is fine).
     // Otherwise, handle this Group as a repetition.
     loop {
@@ -572,6 +576,7 @@ fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl It
                 if star.as_char() == '*' && !(star.spacing() == Spacing::Joint && not_assign.as_char() == '=') => {
                 eprintln!("DEBUG1");
                 let sep_opt: Option<TokenStream> = (!sep_cand.is_empty()).then(|| sep_cand.into_iter().collect::<TokenStream>());
+                // let sep_opt: Option<TokenStream> = (!sep_cand.is_empty()).then(|| minimal_quote_ts!((@ sep_cand.into_iter().collect::<TokenStream>())).into());

                 // let t = sep_cand.into_iter().collect::<TokenStream>();
                 // sep_opt = Some(quote_token!(sep_cand.to_tokens()));
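The guard above distinguishes a repetition `*` from the `*` of a `*=` operator via `Spacing`: inside `*=` the `*` punct is lexed with `Spacing::Joint` and is immediately followed by `=`, while a standalone `*` is `Spacing::Alone`. A minimal check of that lexing behaviour, assuming the external `proc-macro2` crate so it can run outside a proc-macro context:

use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    // In `a *= b`, the `*` is glued to the following `=`, so it is Joint.
    let tokens: Vec<TokenTree> = "a *= b".parse::<TokenStream>().unwrap().into_iter().collect();
    if let TokenTree::Punct(star) = &tokens[1] {
        assert_eq!(star.as_char(), '*');
        assert_eq!(star.spacing(), Spacing::Joint);
    }

    // In `a * b`, the `*` stands alone, so it is Alone.
    let tokens: Vec<TokenTree> = "a * b".parse::<TokenStream>().unwrap().into_iter().collect();
    if let TokenTree::Punct(star) = &tokens[1] {
        assert_eq!(star.spacing(), Spacing::Alone);
    }
}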
@@ -595,6 +600,7 @@ fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl It
                 // consume_dollar_group(next_group.stream().clone(), &mut iter).to_tokens(&mut tokens); // I'd like to just borrow `iter` directly, though.
                 current_contents = next_group.stream().clone();
                 sep_cand = Vec::new();
+                flag = false;

                 // minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens); // $
                 // quote([tree].into_iter().collect::<TokenStream>()).to_tokens(&mut tokens); // group
@@ -604,22 +610,43 @@ fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl It

             // Add a sep candidate. However, a group is only allowed once, and a punct sequence is only accepted once, when it is a known operator. This would probably be better handled with a decl macro; together with sep_cand we can look at up to three tokens, which should be enough.
             // !matches!(x, TokenTree::Group(_))
-            (Some(x), _) =>
+            (Some(x), _) if !flag =>
             {
                 eprintln!("DEBUG2: XXXXXXXXXXXXXXXXX");
                 sep_cand.push(x.clone());
+                flag = judge(sep_cand.as_slice());

                 // XXX Keep a candidate set and narrow it down; once it becomes empty, process sep_cand and just break?
+                // Once sep_cand matches something, set the flag so we never enter this arm again.
             }

-            // Handle the current $Group without a star. If we get here for the next $Group, it has to be re-attached to the token stream.
-            (_, _) =>
+            // Handle the current $Group without a star.
+            // If we get here for the next $Group, it has to be re-attached to the token stream. Probably doesn't happen.
+            (x, _) =>
             {
                 eprintln!("DEBUG3: XXXXXXXXXXXXXXXXX");
-                minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens); // $
-                // quote([tree].into_iter().collect::<TokenStream>()).to_tokens(&mut tokens); // group
-                // sep_opt
-                // continue 'outer;
+
+                minimal_quote!((@ TokenTree::from(Punct::new('$', Spacing::Joint)))).to_tokens(&mut tokens);
+
+                eprintln!("DEBUG3.1: XXXXXXXXXXXXXXXXX");
+
+                quote(
+                    [TokenTree::Group(Group::new(Delimiter::Brace, current_contents.clone()))].into_iter().collect::<TokenStream>()
+                ).to_tokens(&mut tokens);
+                eprintln!("DEBUG3.2: XXXXXXXXXXXXXXXXX");
+
+                let sep_opt: Option<TokenStream> = (!sep_cand.is_empty()).then(|| sep_cand.clone().into_iter().collect::<TokenStream>());
+                sep_opt.into_iter().for_each(|sep| {
+                    quote(sep).to_tokens(&mut tokens);
+                });
+
+                let mut new_stream = x.into_iter().collect::<TokenStream>();
+                new_stream.extend(iter.by_ref());
+                *iter = new_stream.into_iter().peekable();
+
+                eprintln!("DEBUG3.3: XXXXXXXXXXXXXXXXX");
+
+                break
             }
         }

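The fallback arm above hands back the over-consumed lookahead: it collects `x` plus the rest of `iter` into a fresh stream and assigns a new `Peekable` through `*iter`, which is also why the parameter type was changed from `impl Iterator` to the concrete `Peekable<crate::token_stream::IntoIter>` (a differently typed iterator cannot be written back through an `impl Trait` parameter). A minimal, self-contained sketch of the same put-back trick using a plain `Vec` iterator in place of the token-stream types (`put_back` is a hypothetical helper, not part of this patch):

use std::iter::Peekable;
use std::vec::IntoIter;

// Hypothetical helper: re-materialize the iterator with an over-consumed item in front.
fn put_back(leftover: i32, iter: &mut Peekable<IntoIter<i32>>) {
    let mut items = vec![leftover];
    items.extend(iter.by_ref()); // drain whatever is left
    *iter = items.into_iter().peekable(); // swap the rebuilt iterator back in
}

fn main() {
    let mut iter = vec![1, 2, 3, 4].into_iter().peekable();
    let first = iter.next().unwrap(); // consumed as lookahead, then not needed
    put_back(first, &mut iter);
    assert_eq!(iter.collect::<Vec<_>>(), vec![1, 2, 3, 4]);
}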
@@ -630,6 +657,108 @@ fn consume_dollar_group(content_stream: TokenStream, iter: &mut Peekable<impl It
     // expand_dollar_group(content_stream, &mut iter).to_tokens(&mut tokens);
 }

+// https://github.com/dtolnay/quote/blob/master/src/lib.rs#L1014
+fn judge(ts: &[TokenTree]) -> bool {
+    match ts {
+        [TokenTree::Ident(_)] => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == ':' && t2.as_char() == ':' => true,
+        [TokenTree::Group(_)] => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '#' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == ',' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '.' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == ';' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == ':' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '+' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '+' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '&' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '&' && t2.as_char() == '&' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '&' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '@' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '!' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '^' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '^' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '/' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '/' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '.' && t2.as_char() == '.' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2), TokenTree::Punct(t3)]
+            if t1.as_char() == '.' && t2.as_char() == '.' && t3.as_char() == '.' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2), TokenTree::Punct(t3)]
+            if t1.as_char() == '.' && t2.as_char() == '.' && t3.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '=' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '>' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '>' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '<' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '<' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '*' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '!' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '|' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '|' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '|' && t2.as_char() == '|' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '?' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '-' && t2.as_char() == '>' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '<' && t2.as_char() == '-' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '%' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '%' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '=' && t2.as_char() == '>' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '<' && t2.as_char() == '<' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2), TokenTree::Punct(t3)]
+            if t1.as_char() == '<' && t2.as_char() == '<' && t3.as_char() == '=' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '>' && t2.as_char() == '>' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2), TokenTree::Punct(t3)]
+            if t1.as_char() == '>' && t2.as_char() == '>' && t3.as_char() == '=' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '*' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '-' => true,
+        [TokenTree::Punct(t1), TokenTree::Punct(t2)]
+            if t1.as_char() == '-' && t2.as_char() == '=' => true,
+        [TokenTree::Punct(single_quote), TokenTree::Ident(_)]
+            if single_quote.as_char() == '\'' => true,
+        [TokenTree::Punct(t1)]
+            if t1.as_char() == '_' => true,
+        [_x] => true,
+        _ => false,
+    }
+}
+
 fn expand_dollar_group(contents: TokenStream, sep_opt: Option<TokenStream>) -> TokenStream {
     let mut tokens = crate::TokenStream::new();

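`judge` is a shape table for separator candidates, following the punctuation list in dtolnay's quote (see the URL at the top of the function). A minimal usage sketch, assuming the external `proc-macro2` crate and a hypothetical `is_known_separator` that restates only a few rows of the table so it can run as an ordinary binary:

use proc_macro2::{TokenStream, TokenTree};

// Hypothetical, trimmed-down restatement of `judge`: accepts a single ident,
// a lone `,`, a `::`, or a `=>` as a separator candidate.
fn is_known_separator(ts: &[TokenTree]) -> bool {
    match ts {
        [TokenTree::Ident(_)] => true,
        [TokenTree::Punct(p)] if p.as_char() == ',' => true,
        [TokenTree::Punct(a), TokenTree::Punct(b)]
            if a.as_char() == ':' && b.as_char() == ':' => true,
        [TokenTree::Punct(a), TokenTree::Punct(b)]
            if a.as_char() == '=' && b.as_char() == '>' => true,
        _ => false,
    }
}

fn tokens(src: &str) -> Vec<TokenTree> {
    src.parse::<TokenStream>().unwrap().into_iter().collect()
}

fn main() {
    assert!(is_known_separator(&tokens(",")));
    assert!(is_known_separator(&tokens("::")));
    assert!(is_known_separator(&tokens("=>")));
    assert!(!is_known_separator(&tokens(", ,"))); // two separators in a row is not a known shape
}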