@@ -216,6 +216,30 @@ struct TokenCursorFrame {
     open_delim: bool,
     tree_cursor: tokenstream::Cursor,
     close_delim: bool,
+    last_token: LastToken,
+}
+
+/// This is used in `TokenCursorFrame` above to track tokens that are consumed
+/// by the parser, and then that's transitively used to record the tokens that
+/// each parsed AST item is created with.
+///
+/// Right now this has two states, either collecting tokens or not collecting
+/// tokens. If we're collecting tokens we just save everything off into a local
+/// `Vec`. Eventually, though, this should save tokens from the original token
+/// stream and just use slicing of token streams to avoid creating a whole
+/// new vector.
+///
+/// The second state is where we're passively not recording tokens, but the last
+/// token is still tracked for when we want to start recording tokens. This
+/// "last token" means that when we start recording tokens we'll want to ensure
+/// that this, the first token, is included in the output.
+///
+/// You can find some more example usage of this in the `collect_tokens` method
+/// on the parser.
+#[derive(Clone)]
+enum LastToken {
+    Collecting(Vec<TokenTree>),
+    Was(Option<TokenTree>),
 }
 
 impl TokenCursorFrame {
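Aside, not part of the diff: the two states described in the doc comment above can be exercised in isolation. The sketch below is a minimal standalone model; `Tok`, `Cursor`, and the driver in `main` are invented stand-ins rather than `syntax`-crate types, and only the `match` in `Cursor::next` mirrors the one this change adds to `TokenCursor::next` below.

// Minimal model of the `LastToken` idea (illustrative only).
#[derive(Clone, Debug, PartialEq)]
struct Tok(&'static str);

enum LastToken {
    // Every token yielded by the cursor is appended to this buffer.
    Collecting(Vec<Tok>),
    // Only the most recently yielded token is remembered.
    Was(Option<Tok>),
}

struct Cursor {
    tokens: std::vec::IntoIter<Tok>,
    last_token: LastToken,
}

impl Cursor {
    fn next(&mut self) -> Option<Tok> {
        let tok = self.tokens.next()?;
        // Same shape as the match added to `TokenCursor::next` below.
        match self.last_token {
            LastToken::Collecting(ref mut v) => v.push(tok.clone()),
            LastToken::Was(ref mut t) => *t = Some(tok.clone()),
        }
        Some(tok)
    }
}

fn main() {
    let mut cursor = Cursor {
        tokens: vec![Tok("fn"), Tok("foo"), Tok("(")].into_iter(),
        last_token: LastToken::Was(None),
    };
    // Passive state: only the last token (`fn`) is remembered.
    assert_eq!(cursor.next(), Some(Tok("fn")));

    // Switching to collection carries the remembered token into the buffer,
    // so the "first token" ends up in the recorded output, as the doc
    // comment requires.
    let seed: Vec<Tok> = match cursor.last_token {
        LastToken::Was(ref mut t) => t.take().into_iter().collect(),
        LastToken::Collecting(_) => unreachable!(),
    };
    cursor.last_token = LastToken::Collecting(seed);

    assert_eq!(cursor.next(), Some(Tok("foo")));
    assert_eq!(cursor.next(), Some(Tok("(")));
    match cursor.last_token {
        LastToken::Collecting(v) => assert_eq!(v, vec![Tok("fn"), Tok("foo"), Tok("(")]),
        LastToken::Was(_) => unreachable!(),
    }
}

The `Was` state exists precisely so that collection can begin after the parser has already pulled one token of lookahead without losing that token.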
@@ -226,6 +250,7 @@ impl TokenCursorFrame {
             open_delim: delimited.delim == token::NoDelim,
             tree_cursor: delimited.stream().into_trees(),
             close_delim: delimited.delim == token::NoDelim,
+            last_token: LastToken::Was(None),
         }
     }
 }
@@ -250,6 +275,11 @@ impl TokenCursor {
                 return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
             };
 
+            match self.frame.last_token {
+                LastToken::Collecting(ref mut v) => v.push(tree.clone()),
+                LastToken::Was(ref mut t) => *t = Some(tree.clone()),
+            }
+
             match tree {
                 TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
                 TokenTree::Delimited(sp, ref delimited) => {
@@ -1209,7 +1239,20 @@ impl<'a> Parser<'a> {
     /// Parse the items in a trait declaration
     pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
         maybe_whole!(self, NtTraitItem, |x| x);
-        let mut attrs = self.parse_outer_attributes()?;
+        let attrs = self.parse_outer_attributes()?;
+        let (mut item, tokens) = self.collect_tokens(|this| {
+            this.parse_trait_item_(at_end, attrs)
+        })?;
+        // See `parse_item` for why this clause is here.
+        if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+            item.tokens = Some(tokens);
+        }
+        Ok(item)
+    }
+
+    fn parse_trait_item_(&mut self,
+                         at_end: &mut bool,
+                         mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
         let lo = self.span;
 
         let (name, node) = if self.eat_keyword(keywords::Type) {
@@ -1304,6 +1347,7 @@ impl<'a> Parser<'a> {
             attrs: attrs,
             node: node,
             span: lo.to(self.prev_span),
+            tokens: None,
         })
     }
 
@@ -4653,7 +4697,7 @@ impl<'a> Parser<'a> {
             node: node,
             vis: vis,
             span: span,
-            tokens: None, // TODO: fill this in
+            tokens: None,
         })
     }
 
@@ -4709,8 +4753,21 @@ impl<'a> Parser<'a> {
     /// Parse an impl item.
    pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
         maybe_whole!(self, NtImplItem, |x| x);
+        let attrs = self.parse_outer_attributes()?;
+        let (mut item, tokens) = self.collect_tokens(|this| {
+            this.parse_impl_item_(at_end, attrs)
+        })?;
+
+        // See `parse_item` for why this clause is here.
+        if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+            item.tokens = Some(tokens);
+        }
+        Ok(item)
+    }
 
-        let mut attrs = self.parse_outer_attributes()?;
+    fn parse_impl_item_(&mut self,
+                        at_end: &mut bool,
+                        mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
         let lo = self.span;
         let vis = self.parse_visibility(false)?;
         let defaultness = self.parse_defaultness()?;
@@ -4742,7 +4799,8 @@ impl<'a> Parser<'a> {
             vis: vis,
             defaultness: defaultness,
             attrs: attrs,
-            node: node
+            node: node,
+            tokens: None,
         })
     }
 
@@ -6018,9 +6076,71 @@ impl<'a> Parser<'a> {
         Ok(None)
     }
 
+    fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
+        where F: FnOnce(&mut Self) -> PResult<'a, R>
+    {
+        // Record all tokens we parse when parsing this item.
+        let mut tokens = Vec::new();
+        match self.token_cursor.frame.last_token {
+            LastToken::Collecting(_) => {
+                panic!("cannot collect tokens recursively yet")
+            }
+            LastToken::Was(ref mut last) => tokens.extend(last.take()),
+        }
+        self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
+        let prev = self.token_cursor.stack.len();
+        let ret = f(self);
+        let last_token = if self.token_cursor.stack.len() == prev {
+            &mut self.token_cursor.frame.last_token
+        } else {
+            &mut self.token_cursor.stack[prev].last_token
+        };
+        let mut tokens = match *last_token {
+            LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
+            LastToken::Was(_) => panic!("our vector went away?"),
+        };
+
+        // If we're not at EOF our current token wasn't actually consumed by
+        // `f`, but it'll still be in our list that we pulled out. In that case
+        // put it back.
+        if self.token == token::Eof {
+            *last_token = LastToken::Was(None);
+        } else {
+            *last_token = LastToken::Was(tokens.pop());
+        }
+
+        Ok((ret?, tokens.into_iter().collect()))
+    }
+
     pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
         let attrs = self.parse_outer_attributes()?;
-        self.parse_item_(attrs, true, false)
+
+        let (ret, tokens) = self.collect_tokens(|this| {
+            this.parse_item_(attrs, true, false)
+        })?;
+
+        // Once we've parsed an item and recorded the tokens we got while
+        // parsing we may want to store `tokens` into the item we're about to
+        // return. Note, though, that we specifically didn't capture tokens
+        // related to outer attributes. The `tokens` field here may later be
+        // used with procedural macros to convert this item back into a token
+        // stream, but during expansion we may be removing attributes as we go
+        // along.
+        //
+        // If we've got inner attributes then the `tokens` we've got above holds
+        // these inner attributes. If an inner attribute is expanded we won't
+        // actually remove it from the token stream, so we'll just keep yielding
+        // it (bad!). To work around this case for now we just avoid recording
+        // `tokens` if we detect any inner attributes. This should help keep
+        // expansion correct, but we should fix this bug one day!
+        Ok(ret.map(|item| {
+            item.map(|mut i| {
+                if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+                    i.tokens = Some(tokens);
+                }
+                i
+            })
+        }))
     }
 
     fn parse_path_list_items(&mut self) -> PResult<'a, Vec<ast::PathListItem>> {
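Aside, not part of the diff: a compact model of the `collect_tokens` protocol above, written against made-up `Tok` and `Parser` types rather than the real parser API (it also omits the delimiter-stack bookkeeping done via `prev`). It shows the three steps the new method performs: seed the buffer with the current lookahead token, run the closure while the cursor is in `Collecting` mode, then hand the still-unconsumed lookahead token back so the caller sees an untouched parser.

// Toy model of `collect_tokens` (illustrative only, not rustc's API).
#[derive(Clone, Debug, PartialEq)]
enum Tok { Ident(&'static str), Semi, Eof }

enum LastToken {
    Collecting(Vec<Tok>),
    Was(Option<Tok>),
}

struct Parser {
    stream: Vec<Tok>,
    pos: usize,
    token: Tok, // one token of lookahead, like `self.token` in the real parser
    last_token: LastToken,
}

impl Parser {
    fn new(stream: Vec<Tok>) -> Parser {
        let mut p = Parser { stream, pos: 0, token: Tok::Eof, last_token: LastToken::Was(None) };
        p.bump();
        p
    }

    // Advance the lookahead token, recording it the way `TokenCursor::next` does.
    fn bump(&mut self) {
        let next = self.stream.get(self.pos).cloned().unwrap_or(Tok::Eof);
        self.pos += 1;
        match self.last_token {
            LastToken::Collecting(ref mut v) => v.push(next.clone()),
            LastToken::Was(ref mut t) => *t = Some(next.clone()),
        }
        self.token = next;
    }

    fn collect_tokens<F, R>(&mut self, f: F) -> (R, Vec<Tok>)
        where F: FnOnce(&mut Self) -> R
    {
        // Step 1: seed the buffer with the current lookahead token; it belongs
        // to whatever `f` is about to parse.
        let mut tokens = Vec::new();
        match self.last_token {
            LastToken::Collecting(_) => panic!("cannot collect tokens recursively yet"),
            LastToken::Was(ref mut last) => tokens.extend(last.take()),
        }
        self.last_token = LastToken::Collecting(tokens);

        // Step 2: run the closure while every bumped token is recorded.
        let ret = f(self);

        let mut tokens = match self.last_token {
            LastToken::Collecting(ref mut v) => std::mem::replace(v, Vec::new()),
            LastToken::Was(_) => panic!("our vector went away?"),
        };
        // Step 3: the new lookahead token was recorded but not consumed by
        // `f`, so give it back for the next caller (unless we hit EOF).
        self.last_token = if self.token == Tok::Eof {
            LastToken::Was(None)
        } else {
            LastToken::Was(tokens.pop())
        };
        (ret, tokens)
    }
}

fn main() {
    // Stream: `foo ; bar`. Parse one "statement" (ident plus semicolon) and
    // record exactly the tokens it consumed.
    let mut p = Parser::new(vec![Tok::Ident("foo"), Tok::Semi, Tok::Ident("bar")]);
    let ((), recorded) = p.collect_tokens(|p| { p.bump(); p.bump(); });
    assert_eq!(recorded, vec![Tok::Ident("foo"), Tok::Semi]);
    assert_eq!(p.token, Tok::Ident("bar")); // lookahead preserved for the caller
}

In the real method the `prev` stack-length check decides whether the frame holding the `Collecting` buffer is still the current one after `f` returns; the toy has no delimiter frames, so that step disappears.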