@@ -26,7 +26,7 @@ use tokenstream::{self, TokenTree};
 use util::small_vector::SmallVector;
 
 use std::cell::RefCell;
-use std::collections::{HashMap};
+use std::collections::{HashMap, HashSet};
 use std::collections::hash_map::{Entry};
 
 struct ParserAnyMacro<'a> {
@@ -366,7 +366,7 @@ fn check_lhs_firsts(cx: &ExtCtxt, lhs: &TokenTree, lhs_: &TokenTree)
     match (lhs, lhs_) {
         (&TokenTree::Delimited(_, ref tta),
          &TokenTree::Delimited(_, ref ttb)) =>
-            check_matcher_firsts(cx, &tta.tts, &ttb.tts),
+            check_matcher_firsts(cx, &tta.tts, &ttb.tts, &mut HashSet::new()),
         _ => cx.span_bug(lhs.get_span(), "malformed macro lhs")
     }
 }
@@ -593,7 +593,8 @@ enum AnalysisResult {
     Error
 }
 
-fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree])
+fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree],
+                        visited_spans: &mut HashSet<(Span, Span)>)
                         -> AnalysisResult {
     use self::AnalysisResult::*;
     let mut need_disambiguation = false;
@@ -611,7 +612,13 @@ fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree])
     // matches A will never match B or vice-versa
     // * we find a case that is too complex to handle and reject it
     // * we reach the end of the macro
-    for ((idx_a, ta), tb) in ma.iter().enumerate().zip(mb.iter()) {
+    for ((idx_a, ta), (idx_b, tb)) in ma.iter().enumerate().zip(mb.iter().enumerate()) {
+        if visited_spans.contains(&(ta.get_span(), tb.get_span())) {
+            break
+        }
+
+        visited_spans.insert((ta.get_span(), tb.get_span()));
+
         if match_same_input(ta, tb) {
             continue;
         }
@@ -625,6 +632,96 @@ fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree])
         // not tt, ident, or block (that is, either A or B could match several
         // token trees), we cannot know where we should continue the analysis.
         match (ta, tb) {
+            (&TokenTree::Sequence(sp_a, ref seq_a), &TokenTree::Sequence(sp_b, ref seq_b)) => {
+                let new_tt_a = TokenTree::Sequence(sp_a, tokenstream::SequenceRepetition { op: tokenstream::KleeneOp::ZeroOrMore, ..seq_a.clone() });
+                let mut new_a = seq_a.tts.iter().map(|x| x.clone()).collect::<Vec<_>>();
+                if let Some(ref sep) = seq_a.separator { new_a.push(TokenTree::Token(sp_a, sep.clone())) };
+                new_a.push(new_tt_a);
+                new_a.extend(ma[idx_a + 1..].iter().map(|x| x.clone()));
+
+                let new_tt_b = TokenTree::Sequence(sp_b, tokenstream::SequenceRepetition { op: tokenstream::KleeneOp::ZeroOrMore, ..seq_b.clone() });
+                let mut new_b = seq_b.tts.iter().map(|x| x.clone()).collect::<Vec<_>>();
+                if let Some(ref sep) = seq_b.separator { new_b.push(TokenTree::Token(sp_b, sep.clone())) };
+                new_b.push(new_tt_b);
+                new_b.extend(mb[idx_b + 1..].iter().map(|x| x.clone()));
+
+                let mut ret = match check_matcher_firsts(cx, &new_a, &mb[idx_b..], visited_spans) {
+                    Error => return Error,
+                    ret => ret
+                };
+
+                match check_matcher_firsts(cx, &ma[idx_a..], &new_b, visited_spans) {
+                    Error => return Error,
+                    Unsure => ret = Unsure,
+                    Ok => ()
+                };
+
+                if seq_a.op == tokenstream::KleeneOp::ZeroOrMore {
+                    match check_matcher_firsts(cx, &ma[idx_a + 1..], &mb[idx_b..], visited_spans) {
+                        Error => return Error,
+                        Unsure => ret = Unsure,
+                        Ok => ()
+                    };
+                }
+
+                if seq_b.op == tokenstream::KleeneOp::ZeroOrMore {
+                    match check_matcher_firsts(cx, &ma[idx_a..], &mb[idx_b + 1..], visited_spans) {
+                        Error => return Error,
+                        Unsure => ret = Unsure,
+                        Ok => ()
+                    };
+                }
+
+                return ret;
+            }
+
+            (&TokenTree::Sequence(sp, ref seq), _) => {
+                // unroll 1 step.
+                let new_tt = TokenTree::Sequence(sp, tokenstream::SequenceRepetition { op: tokenstream::KleeneOp::ZeroOrMore, ..seq.clone() });
+                let mut new_a = seq.tts.iter().map(|x| x.clone()).collect::<Vec<_>>();
+                if let Some(ref sep) = seq.separator { new_a.push(TokenTree::Token(sp, sep.clone())) };
+                new_a.push(new_tt);
+                new_a.extend(ma[idx_a + 1..].iter().map(|x| x.clone()));
+                let mut ret = match check_matcher_firsts(cx, &new_a, &mb[idx_b..], visited_spans) {
+                    Error => return Error,
+                    ret => ret
+                };
+
+                if seq.op == tokenstream::KleeneOp::ZeroOrMore {
+                    match check_matcher_firsts(cx, &ma[idx_a + 1..], &mb[idx_b..], visited_spans) {
+                        Error => return Error,
+                        Unsure => ret = Unsure,
+                        Ok => ()
+                    };
+                }
+
+                return ret;
+            }
+
+            (_, &TokenTree::Sequence(sp, ref seq)) => {
+                // unroll 1 step.
+                let new_tt = TokenTree::Sequence(sp, tokenstream::SequenceRepetition { op: tokenstream::KleeneOp::ZeroOrMore, ..seq.clone() });
+                let mut new_b = seq.tts.iter().map(|x| x.clone()).collect::<Vec<_>>();
+                if let Some(ref sep) = seq.separator { new_b.push(TokenTree::Token(sp, sep.clone())) };
+                new_b.push(new_tt);
+                new_b.extend(mb[idx_b + 1..].iter().map(|x| x.clone()));
+                let mut ret = match check_matcher_firsts(cx, &ma[idx_a..], &new_b, visited_spans) {
+                    Error => return Error,
+                    ret => ret
+                };
+
+                if seq.op == tokenstream::KleeneOp::ZeroOrMore {
+                    match check_matcher_firsts(cx, &ma[idx_a..], &mb[idx_b + 1..], visited_spans) {
+                        Error => return Error,
+                        Unsure => ret = Unsure,
+                        Ok => ()
+                    };
+
+                }
+
+                return ret;
+            }
+
             (&TokenTree::Token(_, MatchNt(_, nta)),
              &TokenTree::Token(_, MatchNt(_, ntb))) =>
                 if !(nt_is_single_tt(nta) && nt_is_single_tt(ntb)) {
@@ -638,12 +735,6 @@ fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree])
             // followed by the end of the macro invocation, then we can accept
             // it.
 
-            (&TokenTree::Sequence(_, _), _) |
-            (_, &TokenTree::Sequence(_, _)) =>
-                return if only_simple_tokens(&ma[idx_a..]) && !need_disambiguation {
-                    Unsure
-                } else { Error },
-
             (_, &TokenTree::Token(_, MatchNt(_, nt))) if !nt_is_single_tt(nt) =>
                 return if only_simple_tokens(&ma[idx_a..]) && !need_disambiguation {
                     Unsure
@@ -714,7 +805,7 @@ fn check_matcher_firsts(cx: &ExtCtxt, ma: &[TokenTree], mb: &[TokenTree])
             (&TokenTree::Delimited(_, ref d1),
              &TokenTree::Delimited(_, ref d2)) => {
                 // they have the same delim. as above.
-                match check_matcher_firsts(cx, &d1.tts, &d2.tts) {
+                match check_matcher_firsts(cx, &d1.tts, &d2.tts, visited_spans) {
                     Ok => return Ok,
                     Unsure => continue,
                     Error => {
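The new `visited_spans` set is what keeps the analysis from looping forever once sequences are unrolled one step against each other. Below is a minimal, self-contained sketch of that idea, not the rustc code: the token-tree type (`Tt`), the `overlap` function, and its prefix-overlap semantics are illustrative assumptions, but the mechanism is the same — memoize the pairs of positions already compared and bail out when a pair comes around again.

// Minimal sketch, NOT the rustc types: `Tt` stands in for TokenTree and
// `overlap` for check_matcher_firsts. It only models the termination
// mechanism: unroll a repetition one step, and memoize visited pairs.

use std::collections::HashSet;

#[derive(Clone, PartialEq, Eq, Hash)]
enum Tt {
    Tok(char),    // a single concrete token
    Seq(Vec<Tt>), // a zero-or-more repetition of the inner token trees, i.e. $(...)*
}

// Could the two matchers accept a common prefix? `visited` plays the role of
// `visited_spans`: once a pair of matcher states has been analysed,
// re-reaching it (which happens when two repetitions are unrolled against
// each other) stops the recursion instead of looping forever.
fn overlap(a: &[Tt], b: &[Tt], visited: &mut HashSet<(Vec<Tt>, Vec<Tt>)>) -> bool {
    if !visited.insert((a.to_vec(), b.to_vec())) {
        // Pair already seen higher up the call stack: nothing new to learn.
        return false;
    }
    match (a.first(), b.first()) {
        // One side ran out: everything compared so far agreed.
        (None, _) | (_, None) => true,
        // Two concrete tokens: they must be equal, then move on.
        (Some(Tt::Tok(x)), Some(Tt::Tok(y))) => x == y && overlap(&a[1..], &b[1..], visited),
        // A repetition on the left: either it matches nothing, or it matches
        // its body once and then the repetition again (one-step unrolling,
        // the same rewriting the patch performs with a ZeroOrMore sequence).
        (Some(Tt::Seq(body)), _) => {
            let mut unrolled = body.clone();
            unrolled.extend_from_slice(a); // body ++ the Seq itself ++ the rest
            overlap(&a[1..], b, visited) || overlap(&unrolled, b, visited)
        }
        // Symmetric case for a repetition on the right.
        (_, Some(Tt::Seq(body))) => {
            let mut unrolled = body.clone();
            unrolled.extend_from_slice(b);
            overlap(a, &b[1..], visited) || overlap(a, &unrolled, visited)
        }
    }
}

fn main() {
    // `$(a)* b` vs `$(a)* c`: without the visited set, unrolling the two
    // repetitions against each other would recurse on the same pair forever.
    let lhs = vec![Tt::Seq(vec![Tt::Tok('a')]), Tt::Tok('b')];
    let rhs = vec![Tt::Seq(vec![Tt::Tok('a')]), Tt::Tok('c')];
    println!("overlap: {}", overlap(&lhs, &rhs, &mut HashSet::new()));
}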