reduce the number of times tokens are cleaned of whitespace
sc1f committed Jun 16, 2020
1 parent 95ae70b commit d9250fd
Showing 2 changed files with 19 additions and 38 deletions.
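In short: whitespace cleaning now happens exactly once, inside the parser's `lex()` method, and lexer errors are only thrown when an expression is actually parsed, so autocomplete can lex incomplete input freely. The sketch below is a reading of the diff rather than the real source: the body of `clean_tokens`, the token type name "Whitespace", and the name of the parsing method are assumptions.

// A minimal sketch of the new flow, under the assumptions noted above.
const clean_tokens = tokens => tokens.filter(t => t.tokenType.name !== "Whitespace");

class ParserSketch {
    constructor(lexer, parser) {
        this._lexer = lexer;
        this._parser = parser;
    }

    // Tokenize and strip whitespace exactly once; do not throw here, so
    // callers such as the autocomplete path can lex partial or invalid input.
    lex(expression) {
        const result = this._lexer.tokenize(expression);
        result.tokens = clean_tokens(result.tokens);
        return result;
    }

    // Lexer errors only become fatal here, when the expression is parsed.
    parse(expression) {
        const lex_result = this.lex(expression);
        if (lex_result.errors.length > 0) {
            throw new Error(lex_result.errors[0].message);
        }
        this._parser.input = lex_result.tokens; // calling `parser.input` resets state
    }
}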
@@ -89,11 +89,6 @@ class PerspectiveComputedExpressionParser {
         this._check_initialized();
         const result = this._lexer.tokenize(expression);
 
-        if (result.errors.length > 0) {
-            let message = result.errors[0].message;
-            throw new Error(message);
-        }
-
         // Remove whitespace tokens
         result.tokens = clean_tokens(result.tokens);
 
@@ -110,6 +105,11 @@ class PerspectiveComputedExpressionParser {
         this._check_initialized();
         const lex_result = this.lex(expression);
 
+        if (lex_result.errors.length > 0) {
+            let message = lex_result.errors[0].message;
+            throw new Error(message);
+        }
+
         // calling `parser.input` resets state.
         this._parser.input = lex_result.tokens;
 
@@ -210,13 +210,13 @@ class PerspectiveComputedExpressionParser {
     }
 
     /**
-     * Return the last non-whitespace token from a lexer result, or undefined
-     * if there are no non-whitespace tokens or no tokens at all.
+     * Return the last token from a lexer result, or undefined if there are no
+     * tokens at all. Whitespace tokens are NOT removed before search.
      *
      * @param {ILexingResult} lexer_result
      */
     get_last_token(lexer_result) {
-        const tokens = clean_tokens(lexer_result.tokens);
+        const tokens = lexer_result.tokens;
         const last_idx = tokens.length - 1;
         if (last_idx >= 0) {
             return tokens[last_idx];
@@ -226,7 +226,7 @@
     /**
      * Look backwards through a list of tokens, checking whether each token is
      * of a type in the `types` array, stopping after `limit` tokens.
-     * Whitespace tokens are removed from the token list before the search.
+     * Whitespace tokens are NOT removed before search.
      *
      * @param {Array{TokenType}} types An array of token types to look through.
      * @param {ILexingResult} lexer_result A result from the lexer, containing
@@ -236,7 +236,7 @@
      * undefined, search all tokens.
      */
     get_last_token_with_types(types, lexer_result, limit) {
-        const tokens = clean_tokens(lexer_result.tokens);
+        const tokens = lexer_result.tokens;
         if (!limit || limit <= 0 || limit >= tokens.length) {
             limit = tokens.length;
         }
@@ -249,30 +249,6 @@
         }
     }
 
-    /**
-     * Look backwards through a list of tokens, checking whether each token is
-     * of the specific `name`, stopping after `limit` tokens. Whitespace tokens
-     * are removed from the token list before the search.
-     *
-     * @param {String} name A string name of a token to match.
-     * @param {ILexingResult} lexer_result A result from the lexer, containing
-     * valid tokens and errors.
-     * @param {Number} limit the number of tokens to search through before
-     * exiting or returning a valid result. If limit > tokens.length or is
-     * undefined, search all tokens.
-     */
-    get_last_token_with_name(name, lexer_result, limit) {
-        const tokens = clean_tokens(lexer_result.tokens);
-        if (!limit || limit <= 0 || limit >= tokens.length) {
-            limit = tokens.length;
-        }
-        for (let i = tokens.length - 1; i >= tokens.length - limit; i--) {
-            if (tokens[i].tokenType.name === name) {
-                return tokens[i];
-            }
-        }
-    }
-
     /**
      * Given a metadata object containing information about computed
      * functions, construct tokens and a vocabulary object for the parser.
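With `lex()` doing the cleaning, the helpers above (`get_last_token`, `get_last_token_with_types`) receive tokens that are already whitespace-free, which is why their own `clean_tokens` calls could be dropped. A hypothetical call-site sketch (the `parser` instance and the example expression are made up; `FunctionTokenType` is the token type referenced by the widget below):

const expression = '"Sales" + pow2("Profit")';   // hypothetical computed expression

// Whitespace is already stripped here, and no error is thrown for bad input.
const lex_result = parser.lex(expression);
const last_token = parser.get_last_token(lex_result);
const last_fn = parser.get_last_token_with_types([FunctionTokenType], lex_result);

// Code that needs whitespace tokens (e.g. rendering) bypasses lex() and
// calls the lexer directly instead.
const raw_result = parser._lexer.tokenize(expression);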
@@ -99,6 +99,8 @@ class ComputedExpressionWidget extends HTMLElement {
      * @param {String} expression
      */
     render_expression(expression) {
+        // Call `tokenize()` and not `lex()`, as `lex` cleans whitespace
+        // tokens and we need whitespace tokens to render the expressions.
         const lex_result = this._computed_expression_parser._lexer.tokenize(expression);
 
         // Track a sorted array of integer offsets into the expression, and
@@ -231,7 +233,10 @@
         // Show autocomplete OR error, but not both
         this._clear_error();
         this._disable_save_button();
-        const lex_result = this._computed_expression_parser._lexer.tokenize(expression);
 
+        // Generate a list of tokens from the expression, cleaning out
+        // whitespace tokens and without throwing any errors.
+        const lex_result = this._computed_expression_parser.lex(expression);
+
         // Check if the expression has a fragment of a column name,
         // i.e. if it's been opened with a quote but not closed
@@ -271,8 +276,8 @@
         // Filter down those suggestions by an input type, if possible
         let input_types, match_types;
 
-        // Go to the last function or operator token - not necessarily the
-        // last token, and get the input types from it.
+        // Go to the last function or operator token present in the
+        // entire expression, and use it to calculate input types.
         const last_function_or_operator = this._computed_expression_parser.get_last_token_with_types([FunctionTokenType, OperatorTokenType], lex_result);
 
         if (last_function_or_operator) {
@@ -478,7 +483,7 @@
 
             this._expression_editor._edit_area.innerText = final_value;
         } else {
-            if (!last_word_is_column_name && (last_word[last_word.length - 1] === '"' || last_word[last_word.length - 1] === '"')) {
+            if (!last_word_is_column_name && (last_word[last_word.length - 1] === '"' || last_word[last_word.length - 1] === "'")) {
                 // Remove the last quote in strings like `pow2("
                 const stripped_last = this._expression_editor._edit_area.innerText.substring(0, this._expression_editor._edit_area.innerText.length - 1);
                 this._expression_editor._edit_area.innerText = stripped_last;
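The final hunk is a small fix that appears unrelated to the whitespace work: the old condition compared the last character against a double quote twice, so a trailing single quote was never stripped. A minimal illustration of the corrected check (the helper name is made up):

// Hypothetical helper showing the corrected predicate.
const ends_with_quote = word => {
    const last_char = word[word.length - 1];
    return last_char === '"' || last_char === "'";
};

ends_with_quote('pow2("');   // true, as before
ends_with_quote("pow2('");   // true now; previously false, since both checks used '"'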
