@@ -11,11 +11,13 @@ use crate::char_stream::CharStream;
 use crate::dfa::ScopeExt;
 use crate::errors::{ANTLRError, FailedPredicateError, InputMisMatchError, NoViableAltError, RecognitionError};
 use crate::interval_set::IntervalSet;
-use crate::parser::Parser;
-use crate::parser_rule_context::{ParserRuleContext, ParserRuleContextType};
+use crate::parser::{Parser, ParserNodeType};
+use crate::parser_rule_context::ParserRuleContext;
+use crate::rule_context::{CustomRuleContext, RuleContext};
 use crate::token::{OwningToken, Token, TOKEN_DEFAULT_CHANNEL, TOKEN_EOF, TOKEN_EPSILON, TOKEN_INVALID_TYPE};
 use crate::token_factory::TokenFactory;
 use crate::transition::RuleTransition;
+use crate::tree::Tree;
 use crate::utils::escape_whitespaces;

 /// The interface for defining strategies to deal with syntax errors encountered
@@ -41,16 +43,16 @@ pub trait ErrorStrategy<'a, T: Parser<'a>> {
     fn report_match(&mut self, recognizer: &mut T);
 }

-pub struct DefaultErrorStrategy<'input, TF: TokenFactory<'input> + 'input> {
+pub struct DefaultErrorStrategy<'input, Ctx: ParserNodeType<'input>> {
     error_recovery_mode: bool,
     last_error_index: isize,
     last_error_states: Option<IntervalSet>,
     next_tokens_state: isize,
-    next_tokens_ctx: Option<ParserRuleContextType<'input, TF>>,
+    next_tokens_ctx: Option<Rc<Ctx::Type>>,
 }


-impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF> {
+impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> {
     pub fn new() -> Self {
         Self {
             error_recovery_mode: false,
@@ -61,17 +63,17 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         }
     }

-    fn begin_error_condition<T: Parser<'input, TF = TF>>(&mut self, _recognizer: &T) {
+    fn begin_error_condition<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, _recognizer: &T) {
         self.error_recovery_mode = true;
     }

-    fn end_error_condition<T: Parser<'input, TF = TF>>(&mut self, _recognizer: &T) {
+    fn end_error_condition<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, _recognizer: &T) {
         self.error_recovery_mode = false;
         self.last_error_index = -1;
         self.last_error_states = None;
     }

-    fn report_no_viable_alternative<T: Parser<'input, TF = TF>>(&self, recognizer: &mut T, e: &NoViableAltError) -> String {
+    fn report_no_viable_alternative<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &mut T, e: &NoViableAltError) -> String {
         let input = if e.start_token.token_type == TOKEN_EOF {
             "<EOF>".to_owned()
         } else {
@@ -81,19 +83,19 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         format!("no viable alternative at input '{}'", input)
     }

-    fn report_input_mismatch<T: Parser<'input, TF = TF>>(&self, recognizer: &T, e: &InputMisMatchError) -> String {
+    fn report_input_mismatch<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &T, e: &InputMisMatchError) -> String {
         format!("mismatched input {} expecting {}",
                 self.get_token_error_display(&e.base.offending_token),
                 e.base.get_expected_tokens(recognizer).to_token_string(recognizer.get_vocabulary()))
     }

-    fn report_failed_predicate<T: Parser<'input, TF = TF>>(&self, recognizer: &T, e: &FailedPredicateError) -> String {
+    fn report_failed_predicate<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &T, e: &FailedPredicateError) -> String {
         format!("rule {} {}",
                 recognizer.get_rule_names()[recognizer.get_parser_rule_context().get_rule_index()],
                 e.base.message)
     }

-    fn report_unwanted_token<T: Parser<'input, TF = TF>>(&mut self, recognizer: &mut T) {
+    fn report_unwanted_token<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, recognizer: &mut T) {
         if self.in_error_recovery_mode(recognizer) {
             return;
         }
@@ -108,7 +110,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         recognizer.notify_error_listeners(msg, Some(t), None);
     }

-    fn report_missing_token<T: Parser<'input, TF = TF>>(&mut self, recognizer: &mut T) {
+    fn report_missing_token<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, recognizer: &mut T) {
         if self.in_error_recovery_mode(recognizer) {
             return;
         }
@@ -126,13 +128,13 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         recognizer.notify_error_listeners(msg, Some(t), None);
     }

-    fn single_token_insertion<T: Parser<'input, TF = TF>>(&mut self, recognizer: &mut T) -> bool {
+    fn single_token_insertion<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, recognizer: &mut T) -> bool {
         let current_token = recognizer.get_input_stream_mut().la(1);

         let atn = recognizer.get_interpreter().atn();
         let current_state = atn.states[recognizer.get_state() as usize].as_ref();
         let next = current_state.get_transitions().first().unwrap().get_target();
-        let expect_at_ll2 = atn.next_tokens_in_ctx(
+        let expect_at_ll2 = atn.next_tokens_in_ctx::<'input, Ctx>(
             atn.states[next].as_ref(),
             Some(recognizer.get_parser_rule_context().deref()),
         );
@@ -143,7 +145,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         false
     }

-    fn single_token_deletion<'a, T: Parser<'input, TF = TF>>(&mut self, recognizer: &'a mut T) -> Option<&'a (TF::Tok)> {
+    fn single_token_deletion<'a, T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, recognizer: &'a mut T) -> Option<&'a <T::TF as TokenFactory<'input>>::Tok> {
         let next_token_type = recognizer.get_input_stream_mut().la(2);
         let expecting = self.get_expected_tokens(recognizer);
         // println!("expecting {}", expecting.to_token_string(recognizer.get_vocabulary()));
@@ -157,7 +159,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         None
     }

-    fn get_missing_symbol<T: Parser<'input, TF = TF>>(&self, recognizer: &mut T) -> TF::Tok {
+    fn get_missing_symbol<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &mut T) -> <T::TF as TokenFactory<'input>>::Tok {
         let expected = self.get_expected_tokens(recognizer);
         let expected_token_type = expected.get_min().unwrap_or(TOKEN_INVALID_TYPE);
         let token_text = if expected_token_type == TOKEN_EOF {
@@ -189,7 +191,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         // .modify_with(|it| it.text = token_text)
     }

-    fn get_expected_tokens<T: Parser<'input, TF = TF>>(&self, recognizer: &T) -> IntervalSet {
+    fn get_expected_tokens<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &T) -> IntervalSet {
         recognizer.get_expected_tokens()
     }

@@ -202,7 +204,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         format!("'{}'", escape_whitespaces(s, false))
     }

-    fn get_error_recovery_set<T: Parser<'input, TF = TF>>(&self, recognizer: &T) -> IntervalSet {
+    fn get_error_recovery_set<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &T) -> IntervalSet {
         let atn = recognizer.get_interpreter().atn();
         let mut ctx = Some(recognizer.get_parser_rule_context().clone());
         let mut recover_set = IntervalSet::new();
@@ -220,7 +222,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
         return recover_set;
     }

-    fn consume_until<T: Parser<'input, TF = TF>>(&mut self, recognizer: &mut T, set: &IntervalSet) {
+    fn consume_until<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&mut self, recognizer: &mut T, set: &IntervalSet) {
         let mut ttype = recognizer.get_input_stream_mut().la(1);
         while ttype != TOKEN_EOF && !set.contains(ttype) {
             recognizer.consume(self);
@@ -229,7 +231,7 @@ impl<'input, TF: TokenFactory<'input> + 'input> DefaultErrorStrategy<'input, TF>
     }
 }

-impl<'a, T: Parser<'a>> ErrorStrategy<'a, T> for DefaultErrorStrategy<'a, T::TF> {
+impl<'a, T: Parser<'a>> ErrorStrategy<'a, T> for DefaultErrorStrategy<'a, T::Node> {
     fn reset(&mut self, _recognizer: &mut T) {
         unimplemented!()
     }
@@ -367,12 +369,12 @@ myparser.err_handler = BailErrorStrategy::new();

 [`ParserRuleContext.exception`]: todo
 */
-pub struct BailErrorStrategy<'input, TF: TokenFactory<'input>>(DefaultErrorStrategy<'input, TF>);
+pub struct BailErrorStrategy<'input, Ctx: ParserNodeType<'input>>(DefaultErrorStrategy<'input, Ctx>);

-impl<'input, TF: TokenFactory<'input>> BailErrorStrategy<'input, TF> {
+impl<'input, Ctx: ParserNodeType<'input>> BailErrorStrategy<'input, Ctx> {
     pub fn new() -> Self { Self(DefaultErrorStrategy::new()) }

-    fn process_error<T: Parser<'input, TF = TF>>(&self, recognizer: &mut T, e: &ANTLRError) -> ANTLRError {
+    fn process_error<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>(&self, recognizer: &mut T, e: &ANTLRError) -> ANTLRError {
         let mut ctx = recognizer.get_parser_rule_context().clone();
         let _: Option<()> = try {
             loop {
@@ -401,7 +403,7 @@ impl Display for ParseCancelledError {
     }
 }

-impl<'a, T: Parser<'a>> ErrorStrategy<'a, T> for BailErrorStrategy<'a, T::TF> {
+impl<'a, T: Parser<'a>> ErrorStrategy<'a, T> for BailErrorStrategy<'a, T::Node> {
     fn reset(&mut self, recognizer: &mut T) { self.0.reset(recognizer) }

     fn recover_inline(&mut self, recognizer: &mut T) -> Result<<T::TF as TokenFactory<'a>>::Tok, ANTLRError> {
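For context on what this change means for calling code, here is a minimal sketch. It assumes the surrounding crate is `antlr_rust` with this file exposed as its `error_strategy` module, and that `Parser<'input>` bounds its `Node` associated type by `ParserNodeType<'input>` (which the blanket impls in this diff require). The helper functions are hypothetical and only illustrate that the strategy types are now keyed by the parser's node type (`T::Node`) instead of its token factory (`T::TF`).

```rust
// Sketch only, not part of the diff. Module paths and helper names are
// assumptions; only the generic structure mirrors this change.
use antlr_rust::error_strategy::{BailErrorStrategy, DefaultErrorStrategy};
use antlr_rust::parser::Parser;

// Hypothetical helper: before this change the type would have been
// `DefaultErrorStrategy<'input, T::TF>`; now it is named via `T::Node`.
fn default_strategy_for<'input, T: Parser<'input>>() -> DefaultErrorStrategy<'input, T::Node> {
    DefaultErrorStrategy::new()
}

// Same pattern for the bailing variant shown in the doc example
// (`myparser.err_handler = BailErrorStrategy::new();`).
fn bail_strategy_for<'input, T: Parser<'input>>() -> BailErrorStrategy<'input, T::Node> {
    BailErrorStrategy::new()
}
```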