Skip to content

Commit

Permalink
[flake8-pie] Reuse parsed tokens (PIE800) (#15438)
Browse files Browse the repository at this point in the history
## Summary

Follow-up to #15394. See the review comment on #15394.

## Test Plan

`cargo nextest run` and `cargo insta test`.
  • Loading branch information
InSyncWithFoo authored Jan 13, 2025
1 parent 347ab5b commit 70c3be8
Showing 1 changed file with 16 additions and 13 deletions.
29 changes: 16 additions & 13 deletions crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_spread.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, ViolationMetadata};
use ruff_python_ast::{self as ast, Expr};
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_python_parser::{TokenKind, Tokens};
use ruff_text_size::{Ranged, TextSize};

use crate::checkers::ast::Checker;
use crate::Locator;

/// ## What it does
/// Checks for unnecessary dictionary unpacking operators (`**`).
Expand Down Expand Up @@ -54,7 +53,7 @@ pub(crate) fn unnecessary_spread(checker: &mut Checker, dict: &ast::ExprDict) {
// inside a dict.
if let Expr::Dict(inner) = value {
let mut diagnostic = Diagnostic::new(UnnecessarySpread, value.range());
if let Some(fix) = unnecessary_spread_fix(inner, prev_end, checker.locator()) {
if let Some(fix) = unnecessary_spread_fix(inner, prev_end, checker.tokens()) {
diagnostic.set_fix(fix);
}
checker.diagnostics.push(diagnostic);
Expand All @@ -68,24 +67,27 @@ pub(crate) fn unnecessary_spread(checker: &mut Checker, dict: &ast::ExprDict) {
fn unnecessary_spread_fix(
dict: &ast::ExprDict,
prev_end: TextSize,
locator: &Locator,
tokens: &Tokens,
) -> Option<Fix> {
// Find the `**` token preceding the spread.
let doublestar = SimpleTokenizer::starts_at(prev_end, locator.contents())
.find(|tok| matches!(tok.kind(), SimpleTokenKind::DoubleStar))?;
let doublestar = tokens
.after(prev_end)
.iter()
.find(|tok| matches!(tok.kind(), TokenKind::DoubleStar))?;

if let Some(last) = dict.iter_values().last() {
// Ex) `**{a: 1, b: 2}`
let mut edits = vec![];
let mut open_parens: u32 = 0;

for tok in SimpleTokenizer::starts_at(doublestar.end(), locator.contents()).skip_trivia() {
for tok in tokens.after(doublestar.end()) {
match tok.kind() {
SimpleTokenKind::LParen => {
kind if kind.is_trivia() => {}
TokenKind::Lpar => {
edits.push(Edit::range_deletion(tok.range()));
open_parens += 1;
}
SimpleTokenKind::LBrace => {
TokenKind::Lbrace => {
edits.push(Edit::range_deletion(tok.range()));
break;
}
Expand All @@ -97,22 +99,23 @@ fn unnecessary_spread_fix(
}

let mut found_r_curly = false;
for tok in SimpleTokenizer::starts_at(last.end(), locator.contents()).skip_trivia() {
for tok in tokens.after(last.end()) {
if found_r_curly && open_parens == 0 {
break;
}

match tok.kind() {
SimpleTokenKind::Comma => {
kind if kind.is_trivia() => {}
TokenKind::Comma => {
edits.push(Edit::range_deletion(tok.range()));
}
SimpleTokenKind::RParen => {
TokenKind::Rpar => {
if found_r_curly {
edits.push(Edit::range_deletion(tok.range()));
open_parens -= 1;
}
}
SimpleTokenKind::RBrace => {
TokenKind::Rbrace => {
edits.push(Edit::range_deletion(tok.range()));
found_r_curly = true;
}
Expand Down

0 comments on commit 70c3be8

Please sign in to comment.