Auto merge of rust-lang#96210 - nnethercote:speed-up-TokenCursor, r=petrochenkov

Speed up `TokenCursor`

Plus a few related clean-ups.

r? `@petrochenkov`
bors committed Apr 21, 2022
2 parents 1dec35a + 643e9f7 commit b04c532
Showing 4 changed files with 118 additions and 140 deletions.
27 changes: 12 additions & 15 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -94,16 +94,6 @@ impl TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 
-    /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(span: DelimSpan, delim: DelimToken) -> TokenTree {
-        TokenTree::token(token::OpenDelim(delim), span.open)
-    }
-
-    /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(span: DelimSpan, delim: DelimToken) -> TokenTree {
-        TokenTree::token(token::CloseDelim(delim), span.close)
-    }
-
     pub fn uninterpolate(self) -> TokenTree {
         match self {
             TokenTree::Token(token) => TokenTree::Token(token.uninterpolate().into_owned()),
@@ -585,13 +575,20 @@ impl Cursor {
         Cursor { stream, index: 0 }
     }
 
+    #[inline]
     pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
-        if self.index < self.stream.len() {
+        self.stream.0.get(self.index).map(|tree| {
             self.index += 1;
-            Some(self.stream.0[self.index - 1].clone())
-        } else {
-            None
-        }
+            tree.clone()
+        })
+    }
+
+    #[inline]
+    pub fn next_with_spacing_ref(&mut self) -> Option<&TreeAndSpacing> {
+        self.stream.0.get(self.index).map(|tree| {
+            self.index += 1;
+            tree
+        })
     }
 
     pub fn index(&self) -> usize {
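One part of the speed-up shown in this hunk is replacing an explicit bounds check followed by a panicking index and a mandatory clone with `slice::get(..).map(..)`, plus adding an inlined, reference-returning variant so callers that only need to look at the next tree avoid the clone. The following is a minimal, self-contained sketch of that pattern; `Tree` and `SimpleCursor` are illustrative stand-ins, not the real rustc types.

#[derive(Clone, Debug)]
struct Tree(u32);

struct SimpleCursor {
    stream: Vec<Tree>,
    index: usize,
}

impl SimpleCursor {
    // Old style: explicit bounds check, then a panicking index plus a clone.
    fn next_old(&mut self) -> Option<Tree> {
        if self.index < self.stream.len() {
            self.index += 1;
            Some(self.stream[self.index - 1].clone())
        } else {
            None
        }
    }

    // New style: `get` performs the single bounds check and the closure
    // advances the index only when an element is actually present.
    // Note: the closure only touches `self.index`, which the 2021 edition's
    // disjoint closure captures allow alongside the borrow of `self.stream`.
    #[inline]
    fn next_new(&mut self) -> Option<Tree> {
        self.stream.get(self.index).map(|tree| {
            self.index += 1;
            tree.clone()
        })
    }

    // Reference-returning variant, mirroring `next_with_spacing_ref`,
    // for callers that do not need an owned clone.
    #[inline]
    fn next_ref(&mut self) -> Option<&Tree> {
        self.stream.get(self.index).map(|tree| {
            self.index += 1;
            tree
        })
    }
}

fn main() {
    let mut c = SimpleCursor { stream: vec![Tree(1), Tree(2)], index: 0 };
    assert_eq!(c.next_new().map(|t| t.0), Some(1));
    assert_eq!(c.next_ref().map(|t| t.0), Some(2));
    assert!(c.next_old().is_none());
}

The `_ref` variant leaves the clone-or-not decision to the caller, which is the main reason a hot token-consuming loop can get cheaper.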
17 changes: 6 additions & 11 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -100,21 +100,16 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
 
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
-        // The token produced by the final call to `{,inlined_}next` or
-        // `{,inlined_}next_desugared` was not actually consumed by the
-        // callback. The combination of chaining the initial token and using
-        // `take` produces the desired result - we produce an empty
-        // `TokenStream` if no calls were made, and omit the final token
-        // otherwise.
+        // The token produced by the final call to `{,inlined_}next` was not
+        // actually consumed by the callback. The combination of chaining the
+        // initial token and using `take` produces the desired result - we
+        // produce an empty `TokenStream` if no calls were made, and omit the
+        // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                 .chain((0..self.num_calls).map(|_| {
-                    let token = if cursor_snapshot.desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    };
+                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
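This hunk reflects one of the related clean-ups: instead of branching between two cursor methods (`next` vs `next_desugared`) at every call site, the desugaring mode is passed as a boolean to a single `next` method. Below is a sketch of that call-site shape under assumed stand-in types; `MockCursor` and `Tok` are illustrative, not the real parser types.

#[derive(Clone, Debug, PartialEq)]
struct Tok(&'static str);

struct MockCursor {
    desugar_doc_comments: bool,
    tokens: Vec<Tok>,
    pos: usize,
}

impl MockCursor {
    // After the change: one entry point that takes the desugaring mode as a
    // parameter instead of exposing `next` and `next_desugared` separately.
    fn next(&mut self, desugar_doc_comments: bool) -> Tok {
        let tok = self.tokens.get(self.pos).cloned().unwrap_or(Tok("<eof>"));
        self.pos += 1;
        if desugar_doc_comments && tok.0.starts_with("///") {
            // Stand-in for doc-comment desugaring.
            Tok("#[doc]")
        } else {
            tok
        }
    }
}

fn main() {
    let mut cursor = MockCursor {
        desugar_doc_comments: true,
        tokens: vec![Tok("fn"), Tok("/// docs")],
        pos: 0,
    };
    // Old call-site shape:
    //     let token = if cursor.desugar_doc_comments {
    //         cursor.next_desugared()
    //     } else {
    //         cursor.next()
    //     };
    // New call-site shape:
    let t1 = cursor.next(cursor.desugar_doc_comments);
    let t2 = cursor.next(cursor.desugar_doc_comments);
    assert_eq!(t1, Tok("fn"));
    assert_eq!(t2, Tok("#[doc]"));
}

Folding the flag into the method keeps the snapshot-replay loop to a single call per token instead of a per-iteration branch between two methods at the call site.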
(The remaining two changed files are not shown here.)
