Auto merge of #36527 - nnethercote:last_token_kind, r=jseyfried
Optimize the parser's last token handling.

The parser currently makes a heap copy of the last token in four cases:
identifiers, paths, doc comments, and commas. The identifier and
path cases are unused, and for doc comments and commas we only
need to record their presence, not their value.

This commit consolidates the last token handling and avoids the
unnecessary copies by replacing `last_token`, `last_token_eof`, and
`last_token_interpolated` with a new field `last_token_kind`. This
simplifies the parser slightly and speeds up parsing on some files by
3--4%.
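
To make the change concrete, here is a minimal sketch of the technique; `Token` is a stand-in type rather than libsyntax's real token, and `classify` is a hypothetical helper mirroring the `match` that the patch adds to `bump` in the diff below:

```rust
// Minimal sketch, not the real libsyntax types: classify the token once into a
// payload-free enum and compare that later, instead of cloning the token into
// a Box for possible error recovery.
enum Token {
    DocComment(String),
    Comma,
    Interpolated(String),
    Ident(String),
    Eof,
}

#[derive(PartialEq)]
enum LastTokenKind {
    DocComment,
    Comma,
    Interpolated,
    Eof,
    Other,
}

// Hypothetical helper mirroring the `match` the patch adds to `bump`.
fn classify(tok: &Token) -> LastTokenKind {
    match *tok {
        Token::DocComment(..) => LastTokenKind::DocComment,
        Token::Comma => LastTokenKind::Comma,
        Token::Interpolated(..) => LastTokenKind::Interpolated,
        Token::Eof => LastTokenKind::Eof,
        _ => LastTokenKind::Other,
    }
}

fn main() {
    let last = classify(&Token::DocComment("/// hi".to_string()));
    // Later checks only ask which kind the previous token was, never for its
    // payload, so nothing needs to be copied onto the heap.
    assert!(last == LastTokenKind::DocComment);
    assert!(last != LastTokenKind::Comma);
}
```
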
bors authored Sep 18, 2016
2 parents 0b03ba1 + 8075d54 commit f39039e
Showing 1 changed file with 39 additions and 43 deletions.
82 changes: 39 additions & 43 deletions src/libsyntax/parse/parser.rs
@@ -237,6 +237,15 @@ fn maybe_append(mut lhs: Vec<Attribute>, rhs: Option<Vec<Attribute>>)
     lhs
 }
 
+#[derive(PartialEq)]
+enum LastTokenKind {
+    DocComment,
+    Comma,
+    Interpolated,
+    Eof,
+    Other,
+}
+
 /* ident is handled by common.rs */
 
 pub struct Parser<'a> {
@@ -248,10 +257,8 @@ pub struct Parser<'a> {
     /// the span of the prior token:
     pub last_span: Span,
     pub cfg: CrateConfig,
-    /// the previous token or None (only stashed sometimes).
-    pub last_token: Option<Box<token::Token>>,
-    last_token_interpolated: bool,
-    last_token_eof: bool,
+    /// the previous token kind
+    last_token_kind: LastTokenKind,
     pub buffer: [TokenAndSpan; 4],
     pub buffer_start: isize,
     pub buffer_end: isize,
@@ -362,9 +369,7 @@ impl<'a> Parser<'a> {
             token: tok0.tok,
             span: span,
             last_span: span,
-            last_token: None,
-            last_token_interpolated: false,
-            last_token_eof: false,
+            last_token_kind: LastTokenKind::Other,
             buffer: [
                 placeholder.clone(),
                 placeholder.clone(),
@@ -500,7 +505,7 @@ impl<'a> Parser<'a> {
                                  expr: PResult<'a, P<Expr>>)
                                  -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
-            if self.last_token_interpolated {
+            if self.last_token_kind == LastTokenKind::Interpolated {
                 (self.last_span, e)
             } else {
                 (e.span, e)
@@ -520,21 +525,19 @@ impl<'a> Parser<'a> {
                 self.bug("ident interpolation not converted to real token");
             }
             _ => {
-                let last_token = self.last_token.clone().map(|t| *t);
-                Err(match last_token {
-                    Some(token::DocComment(_)) => self.span_fatal_help(self.last_span,
+                Err(if self.last_token_kind == LastTokenKind::DocComment {
+                    self.span_fatal_help(self.last_span,
                         "found a documentation comment that doesn't document anything",
                         "doc comments must come before what they document, maybe a comment was \
-                        intended with `//`?"),
-                    _ => {
+                        intended with `//`?")
+                    } else {
                         let mut err = self.fatal(&format!("expected identifier, found `{}`",
                                                           self.this_token_to_string()));
                         if self.token == token::Underscore {
                             err.note("`_` is a wildcard pattern, not an identifier");
                         }
                         err
-                    }
-                })
+                    })
             }
         }
     }
@@ -925,26 +928,22 @@ impl<'a> Parser<'a> {
 
     /// Advance the parser by one token
     pub fn bump(&mut self) {
-        if self.last_token_eof {
+        if self.last_token_kind == LastTokenKind::Eof {
             // Bumping after EOF is a bad sign, usually an infinite loop.
             self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
         }
 
-        if self.token == token::Eof {
-            self.last_token_eof = true;
-        }
-
         self.last_span = self.span;
-        // Stash token for error recovery (sometimes; clone is not necessarily cheap).
-        self.last_token = if self.token.is_ident() ||
-                          self.token.is_path() ||
-                          self.token.is_doc_comment() ||
-                          self.token == token::Comma {
-            Some(Box::new(self.token.clone()))
-        } else {
-            None
+
+        // Record last token kind for possible error recovery.
+        self.last_token_kind = match self.token {
+            token::DocComment(..) => LastTokenKind::DocComment,
+            token::Comma => LastTokenKind::Comma,
+            token::Interpolated(..) => LastTokenKind::Interpolated,
+            token::Eof => LastTokenKind::Eof,
+            _ => LastTokenKind::Other,
         };
-        self.last_token_interpolated = self.token.is_interpolated();
+
         let next = if self.buffer_start == self.buffer_end {
             self.reader.real_token()
         } else {
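
The hunk above also folds the old `last_token_eof` flag into the same enum, so the bump-past-EOF guard becomes an ordinary comparison. A hedged sketch with stand-in types and a plain `panic!` in place of the parser's real diagnostics:

```rust
// Hedged sketch, not the real parser: recording the kind of the consumed token
// is a branch and a store, and the EOF guard reads the same field.
enum Token { Comma, Eof, Other }

#[derive(PartialEq)]
enum LastTokenKind { Comma, Eof, Other }

struct Parser {
    token: Token,
    last_token_kind: LastTokenKind,
}

impl Parser {
    fn bump(&mut self) {
        if self.last_token_kind == LastTokenKind::Eof {
            // Bumping after EOF is a bad sign, usually an infinite loop.
            panic!("attempted to bump the parser past EOF (may be stuck in a loop)");
        }
        // Record the kind of the token being consumed; no clone, no Box.
        self.last_token_kind = match self.token {
            Token::Comma => LastTokenKind::Comma,
            Token::Eof => LastTokenKind::Eof,
            _ => LastTokenKind::Other,
        };
        // Stand-in for fetching the next token from the reader.
        self.token = Token::Eof;
    }
}

fn main() {
    let mut p = Parser { token: Token::Other, last_token_kind: LastTokenKind::Other };
    p.bump(); // consumes Other; the stand-in "reader" then yields Eof
    p.bump(); // consumes Eof, so last_token_kind becomes Eof
    assert!(p.last_token_kind == LastTokenKind::Eof);
    // A third bump would hit the guard above.
}
```
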
@@ -981,11 +980,10 @@ impl<'a> Parser<'a> {
                      lo: BytePos,
                      hi: BytePos) {
         self.last_span = mk_sp(self.span.lo, lo);
-        // It would be incorrect to just stash current token, but fortunately
-        // for tokens currently using `bump_with`, last_token will be of no
-        // use anyway.
-        self.last_token = None;
-        self.last_token_interpolated = false;
+        // It would be incorrect to record the kind of the current token, but
+        // fortunately for tokens currently using `bump_with`, the
+        // last_token_kind will be of no use anyway.
+        self.last_token_kind = LastTokenKind::Other;
         self.span = mk_sp(lo, hi);
         self.token = next;
         self.expected_tokens.clear();
@@ -2950,7 +2948,7 @@ impl<'a> Parser<'a> {
         self.expected_tokens.push(TokenType::Operator);
         while let Some(op) = AssocOp::from_token(&self.token) {
 
-            let lhs_span = if self.last_token_interpolated {
+            let lhs_span = if self.last_token_kind == LastTokenKind::Interpolated {
                 self.last_span
             } else {
                 lhs.span
@@ -4012,13 +4010,13 @@ impl<'a> Parser<'a> {
             None => {
                 let unused_attrs = |attrs: &[_], s: &mut Self| {
                     if attrs.len() > 0 {
-                        let last_token = s.last_token.clone().map(|t| *t);
-                        match last_token {
-                            Some(token::DocComment(_)) => s.span_err_help(s.last_span,
+                        if s.last_token_kind == LastTokenKind::DocComment {
+                            s.span_err_help(s.last_span,
                                 "found a documentation comment that doesn't document anything",
                                 "doc comments must come before what they document, maybe a \
-                                comment was intended with `//`?"),
-                            _ => s.span_err(s.span, "expected statement after outer attribute"),
+                                comment was intended with `//`?");
+                        } else {
+                            s.span_err(s.span, "expected statement after outer attribute");
                         }
                     }
                 };
@@ -4308,9 +4306,7 @@ impl<'a> Parser<'a> {
 
         let missing_comma = !lifetimes.is_empty() &&
                             !self.token.is_like_gt() &&
-                            self.last_token
-                                .as_ref().map_or(true,
-                                                 |x| &**x != &token::Comma);
+                            self.last_token_kind != LastTokenKind::Comma;
 
         if missing_comma {
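
With the kind enum, the check above no longer has to unwrap and dereference a boxed token via `map_or`; it is a single comparison. A tiny illustration with stand-in names:

```rust
// Illustration only, with stand-in names: the "missing trailing comma after
// lifetimes" test reduces to one enum comparison instead of unwrapping an
// Option<Box<Token>>.
#[derive(PartialEq)]
enum LastTokenKind { Comma, Other }

fn missing_comma(parsed_lifetimes: bool, at_gt: bool, last: &LastTokenKind) -> bool {
    parsed_lifetimes && !at_gt && *last != LastTokenKind::Comma
}

fn main() {
    assert!(missing_comma(true, false, &LastTokenKind::Other));
    assert!(!missing_comma(true, false, &LastTokenKind::Comma));
    assert!(!missing_comma(false, false, &LastTokenKind::Other));
}
```
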

