Commit ad11b0b
Simplify LazyAttrTokenStream.
This commit does the following.
- Changes it from `Lrc<Box<dyn ToAttrTokenStream>>` to `Lrc<LazyAttrTokenStreamInner>`.
- Reworks `LazyAttrTokenStreamImpl` as `LazyAttrTokenStreamInner`, which is a two-variant enum.
- Removes the `ToAttrTokenStream` trait and the two impls of it.
1 parent 88aa7ad commit ad11b0b
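The heart of the change is the representation swap: a reference-counted trait object behind an extra `Box` becomes a reference-counted two-variant enum, making the "already produced" and "produce on demand" cases explicit while dropping one heap indirection and the vtable. Below is a minimal sketch of the pattern outside rustc, using `Arc` in place of rustc's `Lrc`; every type here is a simplified stand-in, not the real compiler type:

```rust
use std::sync::Arc;

// Simplified stand-in for rustc's `AttrTokenStream`.
#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

// Old shape: `Lrc<Box<dyn ToAttrTokenStream>>` -- two heap indirections
// and dynamic dispatch on every access.
#[allow(dead_code)]
trait ToAttrTokenStream {
    fn to_attr_token_stream(&self) -> AttrTokenStream;
}
#[allow(dead_code)]
struct OldLazy(Arc<Box<dyn ToAttrTokenStream>>);

// New shape: `Lrc<LazyAttrTokenStreamInner>` -- one allocation, and the
// two behaviours become explicit enum variants.
enum Inner {
    Direct(AttrTokenStream),
    Pending { num_calls: u32 }, // the real variant snapshots parser state
}

#[derive(Clone)]
struct NewLazy(Arc<Inner>);

impl NewLazy {
    fn new_direct(stream: AttrTokenStream) -> NewLazy {
        NewLazy(Arc::new(Inner::Direct(stream)))
    }

    fn to_attr_token_stream(&self) -> AttrTokenStream {
        match &*self.0 {
            Inner::Direct(stream) => stream.clone(),
            // The real `Pending` arm replays a saved token cursor instead.
            Inner::Pending { num_calls } => {
                AttrTokenStream(vec![format!("<replay {num_calls} tokens>")])
            }
        }
    }
}

fn main() {
    let lazy = NewLazy::new_direct(AttrTokenStream(vec!["struct".into(), "Foo".into()]));
    println!("{:?}", lazy.to_attr_token_stream());

    let pending = NewLazy(Arc::new(Inner::Pending { num_calls: 2 }));
    println!("{:?}", pending.to_attr_token_stream());
}
```

Since both variants sit behind the same `Lrc`, cloning the stream stays cheap either way; only `to_attr_token_stream` has to distinguish the two cases.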

4 files changed, +128 −106 lines changed

compiler/rustc_ast/src/mut_visit.rs

+1 −1

@@ -744,7 +744,7 @@ fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyAttrTokenStre
         if let Some(lazy_tts) = lazy_tts {
             let mut tts = lazy_tts.to_attr_token_stream();
             visit_attr_tts(&mut tts, vis);
-            *lazy_tts = LazyAttrTokenStream::new(tts);
+            *lazy_tts = LazyAttrTokenStream::new_direct(tts);
         }
     }
 }

compiler/rustc_ast/src/tokenstream.rs

+119 −96

@@ -107,25 +107,30 @@ where
     }
 }
 
-pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
-    fn to_attr_token_stream(&self) -> AttrTokenStream;
-}
-
-impl ToAttrTokenStream for AttrTokenStream {
-    fn to_attr_token_stream(&self) -> AttrTokenStream {
-        self.clone()
-    }
-}
-
-/// A lazy version of [`TokenStream`], which defers creation
-/// of an actual `TokenStream` until it is needed.
-/// `Box` is here only to reduce the structure size.
+/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
+/// `AttrTokenStream` until it is needed.
 #[derive(Clone)]
-pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
+pub struct LazyAttrTokenStream(Lrc<LazyAttrTokenStreamInner>);
 
 impl LazyAttrTokenStream {
-    pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
-        LazyAttrTokenStream(Lrc::new(Box::new(inner)))
+    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Direct(stream)))
+    }
+
+    pub fn new_pending(
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: bool,
+        replace_ranges: Box<[ReplaceRange]>,
+    ) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Pending {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            break_last_token,
+            replace_ranges,
+        }))
     }
 
     pub fn to_attr_token_stream(&self) -> AttrTokenStream {
@@ -174,90 +179,108 @@ impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
 /// attributes get inserted into the proper place in the token stream.
 pub type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 
-// Produces a `TokenStream` on-demand. Using `cursor_snapshot` and `num_calls`,
-// we can reconstruct the `TokenStream` seen by the callback. This allows us to
-// avoid producing a `TokenStream` if it is never needed - for example, a
-// captured `macro_rules!` argument that is never passed to a proc macro. In
-// practice token stream creation happens rarely compared to calls to
-// `collect_tokens` (see some statistics in #78736), so we are doing as little
-// up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since there is no intermediate
-// collection buffer to clone.
-pub struct LazyAttrTokenStreamImpl {
-    pub start_token: (Token, Spacing),
-    pub cursor_snapshot: TokenCursor,
-    pub num_calls: u32,
-    pub break_last_token: bool,
-    pub replace_ranges: Box<[ReplaceRange]>,
+enum LazyAttrTokenStreamInner {
+    // The token stream has already been produced.
+    Direct(AttrTokenStream),
+
+    // Produces a `TokenStream` on-demand. Using `cursor_snapshot` and `num_calls`,
+    // we can reconstruct the `TokenStream` seen by the callback. This allows us to
+    // avoid producing a `TokenStream` if it is never needed - for example, a
+    // captured `macro_rules!` argument that is never passed to a proc macro. In
+    // practice token stream creation happens rarely compared to calls to
+    // `collect_tokens` (see some statistics in #78736), so we are doing as little
+    // up-front work as possible.
+    //
+    // This also makes `Parser` very cheap to clone, since there is no intermediate
+    // collection buffer to clone.
+    Pending {
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: bool,
+        replace_ranges: Box<[ReplaceRange]>,
+    },
 }
 
-impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
+impl LazyAttrTokenStreamInner {
     fn to_attr_token_stream(&self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
-            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.replace_ranges.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut replace_ranges = self.replace_ranges.to_vec();
-            replace_ranges.sort_by_key(|(range, _)| range.start);
-
-            #[cfg(debug_assertions)]
-            {
-                for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
-                    assert!(
-                        range.end <= next_range.start || range.end >= next_range.end,
-                        "Replace ranges should either be disjoint or nested: \
-                        ({:?}, {:?}) ({:?}, {:?})",
-                        range,
-                        tokens,
-                        next_range,
-                        next_tokens,
-                    );
-                }
-            }
+        match self {
+            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
+            LazyAttrTokenStreamInner::Pending {
+                start_token,
+                cursor_snapshot,
+                num_calls,
+                break_last_token,
+                replace_ranges,
+            } => {
+                // The token produced by the final call to `{,inlined_}next`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the
+                // desired result - we produce an empty `TokenStream` if no
+                // calls were made, and omit the final token otherwise.
+                let mut cursor_snapshot = cursor_snapshot.clone();
+                let tokens = iter::once(FlatToken::Token(start_token.clone()))
+                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
+                    .take(*num_calls as usize);
+
+                if replace_ranges.is_empty() {
+                    make_attr_token_stream(tokens, *break_last_token)
+                } else {
+                    let mut tokens: Vec<_> = tokens.collect();
+                    let mut replace_ranges = replace_ranges.to_vec();
+                    replace_ranges.sort_by_key(|(range, _)| range.start);
+
+                    #[cfg(debug_assertions)]
+                    {
+                        for [(range, tokens), (next_range, next_tokens)] in
+                            replace_ranges.array_windows()
+                        {
+                            assert!(
+                                range.end <= next_range.start || range.end >= next_range.end,
+                                "Replace ranges should either be disjoint or nested: \
+                                ({:?}, {:?}) ({:?}, {:?})",
+                                range,
+                                tokens,
+                                next_range,
+                                next_tokens,
+                            );
+                        }
+                    }
 
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If we have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both the `#[cfg(FALSE)]
-            // field: bool` and the entire `#[cfg(FALSE)] struct Foo {
-            // #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any replace range which encloses
-            // another replace range will capture the *replaced* tokens for the
-            // inner range, not the original tokens.
-            for (range, target) in replace_ranges.into_iter().rev() {
-                assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-
-                // Replace the tokens in range with zero or one
-                // `FlatToken::AttrsTarget`s, plus enough `FlatToken::Empty`s
-                // to fill up the rest of the range. This keeps the total
-                // length of `tokens` constant throughout the replacement
-                // process, allowing us to use all of the `ReplaceRanges`
-                // entries without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (range.start as usize)..(range.end as usize),
-                    target
-                        .into_iter()
-                        .map(|target| FlatToken::AttrsTarget(target))
-                        .chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)),
-                );
+                    // Process the replace ranges, starting from the highest
+                    // start position and working our way back. If we have
+                    // tokens like:
+                    //
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // Then we will generate replace ranges for both the
+                    // `#[cfg(FALSE)] field: bool` and the entire
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // By starting processing from the replace range with the
+                    // greatest start position, we ensure that any replace
+                    // range which encloses another replace range will capture
+                    // the *replaced* tokens for the inner range, not the
+                    // original tokens.
+                    for (range, target) in replace_ranges.into_iter().rev() {
+                        assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
+
+                        // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
+                        // plus enough `FlatToken::Empty`s to fill up the rest of the range. This
+                        // keeps the total length of `tokens` constant throughout the replacement
+                        // process, allowing us to use all of the `ReplaceRanges` entries without
+                        // adjusting indices.
+                        let target_len = target.is_some() as usize;
+                        tokens.splice(
+                            (range.start as usize)..(range.end as usize),
+                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
+                                iter::repeat(FlatToken::Empty).take(range.len() - target_len),
+                            ),
+                        );
+                    }
+                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
+                }
             }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -1025,7 +1048,7 @@ mod size_asserts {
     static_assert_size!(AttrTokenStream, 8);
     static_assert_size!(AttrTokenTree, 32);
     static_assert_size!(LazyAttrTokenStream, 8);
-    static_assert_size!(LazyAttrTokenStreamImpl, 96);
+    static_assert_size!(LazyAttrTokenStreamInner, 96);
     static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
     static_assert_size!(TokenStream, 8);
     static_assert_size!(TokenTree, 32);
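The subtlest part of the `Pending` arm is the splice that keeps `tokens` at a constant length, so the indices in the remaining `ReplaceRange` entries stay valid. The same technique in isolation, with a hypothetical `Tok` enum standing in for `FlatToken` (illustrative only, not rustc's API):

```rust
use std::iter;

// Hypothetical stand-in for `FlatToken`.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Token(char),
    Target, // stands in for `FlatToken::AttrsTarget`
    Empty,  // stands in for `FlatToken::Empty`
}

fn main() {
    let mut tokens: Vec<Tok> = "abcdef".chars().map(Tok::Token).collect();

    // Replace tokens[1..4] with one `Target` plus enough `Empty`s to pad
    // the range back out, so the total length never changes and any other
    // recorded ranges keep pointing at the right positions.
    let range = 1usize..4;
    let target = Some(Tok::Target);
    let target_len = target.is_some() as usize;
    tokens.splice(
        range.clone(),
        target.into_iter().chain(iter::repeat(Tok::Empty).take(range.len() - target_len)),
    );

    assert_eq!(tokens.len(), 6); // length preserved: b, c, d -> Target, Empty, Empty
    println!("{tokens:?}");
}
```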

compiler/rustc_expand/src/config.rs

+3 −3

@@ -160,7 +160,7 @@ impl<'a> StripUnconfigured<'a> {
         if self.config_tokens {
             if let Some(Some(tokens)) = node.tokens_mut() {
                 let attr_stream = tokens.to_attr_token_stream();
-                *tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream));
+                *tokens = LazyAttrTokenStream::new_direct(self.configure_tokens(&attr_stream));
             }
         }
     }
@@ -190,7 +190,7 @@ impl<'a> StripUnconfigured<'a> {
                 target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
 
                 if self.in_cfg(&target.attrs) {
-                    target.tokens = LazyAttrTokenStream::new(
+                    target.tokens = LazyAttrTokenStream::new_direct(
                         self.configure_tokens(&target.tokens.to_attr_token_stream()),
                     );
                     Some(AttrTokenTree::AttrsTarget(target))
@@ -335,7 +335,7 @@ impl<'a> StripUnconfigured<'a> {
         } else {
             vec![AttrTokenTree::Token(pound_token, Spacing::JointHidden), bracket_group]
         };
-        let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
+        let tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::new(trees)));
        let attr = attr::mk_attr_from_item(
             &self.sess.psess.attr_id_generator,
             item,
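All three call sites above follow the same force-transform-rewrap pattern: materialize the lazy stream, rewrite it, and store the concrete result back with `new_direct`, since after configuration there is nothing left to defer. A self-contained sketch of that pattern with simplified stand-in types (not rustc's own):

```rust
use std::sync::Arc;

#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

enum Inner {
    Direct(AttrTokenStream),
}

#[derive(Clone)]
struct LazyAttrTokenStream(Arc<Inner>);

impl LazyAttrTokenStream {
    fn new_direct(stream: AttrTokenStream) -> Self {
        LazyAttrTokenStream(Arc::new(Inner::Direct(stream)))
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        match &*self.0 {
            Inner::Direct(stream) => stream.clone(),
        }
    }
}

// Force the lazy stream, transform it (here: drop cfg'd-out tokens),
// and rewrap the concrete result with `new_direct`.
fn configure_tokens(tokens: &mut LazyAttrTokenStream) {
    let stream = tokens.to_attr_token_stream();
    let configured =
        AttrTokenStream(stream.0.into_iter().filter(|t| t != "#[cfg(FALSE)]").collect());
    *tokens = LazyAttrTokenStream::new_direct(configured);
}

fn main() {
    let mut toks = LazyAttrTokenStream::new_direct(AttrTokenStream(vec![
        "#[cfg(FALSE)]".into(),
        "struct".into(),
        "Foo".into(),
    ]));
    configure_tokens(&mut toks);
    println!("{:?}", toks.to_attr_token_stream()); // ["struct", "Foo"]
}
```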

compiler/rustc_parse/src/parser/attr_wrapper.rs

+5 −6

@@ -1,7 +1,6 @@
 use super::{Capturing, ForceCollect, Parser, TrailingToken};
 use rustc_ast::token;
-use rustc_ast::tokenstream::ReplaceRange;
-use rustc_ast::tokenstream::{AttrsTarget, LazyAttrTokenStream, LazyAttrTokenStreamImpl};
+use rustc_ast::tokenstream::{AttrsTarget, LazyAttrTokenStream, ReplaceRange};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
@@ -225,13 +224,13 @@ impl<'a> Parser<'a> {
                 .collect()
         };
 
-        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
+        let tokens = LazyAttrTokenStream::new_pending(
             start_token,
-            num_calls,
             cursor_snapshot,
-            break_last_token: self.break_last_token,
+            num_calls,
+            self.break_last_token,
             replace_ranges,
-        });
+        );
 
         // If we support tokens and don't already have them, store the newly captured tokens.
         if let Some(target_tokens @ None) = ret.tokens_mut() {
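One detail worth noting in this hunk: the old code passed the captured state as a struct literal, where field names make ordering irrelevant, whereas `new_pending` takes positional arguments, which is why `cursor_snapshot` and `num_calls` trade places to match the new parameter order. A tiny illustration of named versus positional initialization, with hypothetical stand-in types:

```rust
// Hypothetical stand-ins, not rustc's types.
struct Pending {
    num_calls: u32,
    break_last_token: bool,
}

// Positional constructor: the argument order is part of the contract.
fn new_pending(num_calls: u32, break_last_token: bool) -> Pending {
    Pending { num_calls, break_last_token }
}

fn main() {
    // Struct literal: fields may be written in any order.
    let a = Pending { break_last_token: false, num_calls: 3 };
    let b = new_pending(3, false);
    assert_eq!(a.num_calls, b.num_calls);
    assert_eq!(a.break_last_token, b.break_last_token);
}
```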
