ruff: enable ruff isort and other rules (#470)
* ruff: enable ruff isort and other rules

* Update pyproject.toml
skshetry authored Dec 7, 2023
1 parent cfb565e commit 14dc0be
Showing 44 changed files with 275 additions and 202 deletions.
13 changes: 0 additions & 13 deletions .github/dependabot.yml
@@ -7,23 +7,10 @@ updates:
interval: "weekly"
labels:
- "maintenance"
# Update via cruft
ignore:
- dependency-name: "mkdocs*"
- dependency-name: "pytest*"
- dependency-name: "pylint"
- dependency-name: "mypy"

- directory: "/"
package-ecosystem: "github-actions"
schedule:
interval: "weekly"
labels:
- "maintenance"
# Update via cruft
ignore:
- dependency-name: "actions/checkout"
- dependency-name: "actions/setup-python"
- dependency-name: "pypa/gh-action-pypi-publish"
- dependency-name: "codecov/codecov-action"
- dependency-name: "peter-evans/create-pull-request"
10 changes: 5 additions & 5 deletions .github/workflows/benchmark.yml
@@ -21,18 +21,18 @@ jobs:
env:
PYTEST_BENCHMARK_STORAGE: file://${{ github.workspace }}/.benchmarks
steps:
- name: Set up Python 3.10
uses: actions/setup-python@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: '3.10'
python-version: '3.11'

- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.base.sha }}
fetch-depth: 0
path: base

- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
path: pr
6 changes: 3 additions & 3 deletions .github/workflows/release.yml
@@ -21,10 +21,10 @@ jobs:
with:
fetch-depth: 0

- name: Set up Python 3.10
uses: actions/setup-python@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: '3.10'
python-version: '3.11'

- name: Upgrade pip and nox
run: |
4 changes: 2 additions & 2 deletions .github/workflows/tests.yml
@@ -32,7 +32,7 @@ jobs:
fetch-depth: 0

- name: Set up Python ${{ matrix.pyv }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.pyv }}

@@ -49,7 +49,7 @@ jobs:
run: nox -s tests-${{ matrix.nox_pyv || matrix.pyv }} -- --cov-report=xml

- name: Upload coverage report
uses: codecov/codecov-action@v3.1.4
uses: codecov/codecov-action@v3

- name: Build package
run: nox -s build
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
@@ -2,7 +2,7 @@ default_language_version:
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
@@ -20,18 +20,18 @@ repos:
- id: sort-simple-yaml
- id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.1.5'
rev: 'v0.1.7'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
- repo: https://github.com/codespell-project/codespell
rev: v2.2.5
rev: v2.2.6
hooks:
- id: codespell
additional_dependencies: ["tomli"]
- repo: https://github.com/asottile/pyupgrade
rev: v3.10.1
rev: v3.15.0
hooks:
- id: pyupgrade
args: [--py38-plus]
94 changes: 86 additions & 8 deletions pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "dvc-data"
description = "dvc data"
description = "DVC's data management subsystem"
readme = "README.rst"
license = {text = "Apache-2.0"}
authors = [{ name = "Iterative", email = "support@dvc.org" }]
@@ -44,12 +44,12 @@ all = [
"dvc-data[cli]",
]
tests = [
"pytest==7.2.0",
"pytest-sugar==0.9.6",
"pytest-cov==4.0.0",
"pytest-mock==3.10.0",
"pytest-benchmark==4.0.0",
"mypy==1.5.1",
"pytest<8,>=7",
"pytest-sugar",
"pytest-cov>=4.1.0",
"pytest-mock",
"pytest-benchmark",
"mypy==1.7.1",
"pytest-servers[s3]==0.1.3",
]
dev = [
@@ -62,7 +62,7 @@ dev = [
dvc-data = "dvc_data.__main__:main"

[tool.setuptools.package-data]
dvc_objects = ["py.typed"]
dvc_data = ["py.typed"]

[tool.setuptools.packages.find]
where = ["src"]
@@ -121,3 +121,81 @@ module = [

[tool.codespell]
ignore-words-list = "fo"

[tool.ruff]
ignore = [
"ISC001", # single-line-implicit-string-concatenation
"PLR2004", # magic-value-comparison
"PLW2901", # redefined-loop-name
"RET501", # unnecessary-return-none
"RET502", # implicit-return-value
"RET503", # implicit-return
"S101", # assert
"SIM105", # suppressible-exception
"SIM108", # if-else-block-instead-of-if-exp
"SIM117", # multiple-with-statements
]
select = [
"A", # flake8-buitlins
"ASYNC", # flake8-async
"B", # flake8-bugbear
"BLE", # flake8-blind-except
"C4", # flake8-comprehensions
"C90", # mccabe
"DTZ", # flake8-datetimez
"E", # pycodestyle - Error
"EXE", # flake8-executable
"F", # pyflakes
"FLY", # flynt-rules
"G", # flake8-logging-format
"I", # isort
"ICN", # flake8-import-conventions
"INP", # flake8-no-pep420
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"PERF101", # perflint
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PL", # pylint
"PT", # flake8-pytest-style
"PYI", # flake8-pyi
"Q", # flae8-quotes
"RET", # flake8-return
"RSE", # flake8-raise
"RUF", # ruff
"S", # flake8-bandit
"SIM", # flake8-simplify
"SLOT", # flake8-slots
"T10", # flake8-debugger
"T20", # flake8-print
"TCH", # flake8-type-checking
"TCH", # flake8-type-checking
"TID", # flake8-tidy-imports
"UP", # pyupgrade
"W", # pycodestyle - Warning
"YTT", # flake8-2020
]
show-source = true
show-fixes = true

[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
mark-parentheses = false
parametrize-names-type = "csv"

[tool.ruff.lint.flake8-tidy-imports]
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"funcy.cached_property" = {msg = "use `from dvc_data.utils import cached_property` instead."}
"functools.cached_property" = {msg = "use `from dvc_data.utils import cached_property` instead."}

[tool.ruff.lint.flake8-type-checking]
strict = true

[tool.ruff.lint.isort]
known-first-party = ["dvc_data"]

[tool.ruff.lint.pylint]
max-args = 10

[tool.ruff.per-file-ignores]
"src/dvc_data/cli.py" = ["T201", "B008"]
2 changes: 1 addition & 1 deletion src/dvc_data/__main__.py
@@ -5,7 +5,7 @@
def main(): # type: ignore[misc]
import sys

print(
print( # noqa: T201
"dvc-data could not run because the required "
"dependencies are not installed.\n"
"Please install it with: pip install 'dvc-data[cli]'"
21 changes: 10 additions & 11 deletions src/dvc_data/cli.py
@@ -13,7 +13,7 @@
from typing import List, cast

import click
import typer # pylint: disable=import-error
import typer
from attrs import asdict
from dvc_objects._tqdm import Tqdm
from dvc_objects.errors import ObjectFormatError
@@ -34,10 +34,9 @@
from dvc_data.hashfile.obj import HashFile
from dvc_data.hashfile.state import State
from dvc_data.hashfile.transfer import transfer as _transfer
from dvc_data.hashfile.tree import Tree
from dvc_data.hashfile.tree import Tree, merge
from dvc_data.hashfile.tree import du as _du
from dvc_data.hashfile.tree import merge
from dvc_data.repo import NotARepo, Repo
from dvc_data.repo import NotARepoError, Repo

install(show_locals=True, suppress=[typer, click])

@@ -224,9 +223,9 @@ def from_shortoid(odb: HashFileDB, oid: str) -> str:
def get_odb(**config):
try:
repo = Repo.discover()
except NotARepo as exc:
except NotARepoError as exc:
typer.echo(exc, err=True)
raise typer.Abort(1)
raise typer.Abort(1) # noqa: B904

if "state" not in config:
config.setdefault("state", State(root_dir=repo.root, tmp_dir=repo.tmp_dir))
@@ -324,7 +323,7 @@ def show(oid: str = typer.Argument(..., allow_dash=True)):
obj = load(odb, odb.get(oid).hash_info)
if isinstance(obj, Tree):
return _ls_tree(obj)
elif isinstance(obj, HashFile):
if isinstance(obj, HashFile):
return _cat_object(odb, obj.oid)
raise AssertionError(f"unknown object of type {type(obj)}")

@@ -429,14 +428,15 @@ def merge_tree(oid1: str, oid2: str, force: bool = False):
oid2 = from_shortoid(odb, oid2)
obj1 = load(odb, odb.get(oid1).hash_info)
obj2 = load(odb, odb.get(oid2).hash_info)
assert isinstance(obj1, Tree) and isinstance(obj2, Tree), "not a tree obj"
assert isinstance(obj1, Tree)
assert isinstance(obj2, Tree), "not a tree obj"

if not force:
# detect conflicts
d = _diff(obj1, obj2, odb)
modified = [
posixpath.join(*change.old.key)
for change in d.modified # pylint: disable=not-an-iterable
for change in d.modified
if change.old.key != ROOT
]
if modified:
@@ -478,7 +478,6 @@ def apply_op(odb, obj, application):
op = application["op"]
path = application["path"]
keys = tuple(path.split("/"))
# pylint: disable=protected-access
if op in ("add", "modify"):
new = tuple(application["to"].split("/"))
if op == "add" and new in obj._dict:
@@ -594,7 +593,7 @@ def checkout(
path: Path = typer.Argument(..., resolve_path=True),
relink: bool = False,
force: bool = False,
type: List[LinkEnum] = typer.Option(["copy"]), # pylint: disable=redefined-builtin
type: List[LinkEnum] = typer.Option(["copy"]), # noqa: A002
):
odb = get_odb(type=[t.value for t in type])
oid = from_shortoid(odb, oid)
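
For context on the `# noqa: B904` added in `get_odb` above: flake8-bugbear's B904 wants exceptions raised inside an `except` block to be chained explicitly. A minimal sketch of the un-suppressed form, reusing the names from this diff (illustrative only, not part of the commit):

import typer

from dvc_data.repo import NotARepoError, Repo


def discover_repo_or_abort() -> Repo:
    # same shape as get_odb()'s error handling, but chaining with
    # `from exc` instead of silencing B904 with a noqa comment
    try:
        return Repo.discover()
    except NotARepoError as exc:
        typer.echo(exc, err=True)
        raise typer.Abort(1) from exc
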
14 changes: 6 additions & 8 deletions src/dvc_data/fs.py
@@ -9,15 +9,15 @@
from dvc_objects.fs.callbacks import DEFAULT_CALLBACK
from fsspec import AbstractFileSystem

from dvc_data.hashfile.db import HashFileDB

from .utils import cached_property

if typing.TYPE_CHECKING:
from dvc_objects.fs.base import AnyFSPath, FileSystem
from dvc_objects.fs.callbacks import Callback
from dvc_objects.fs.path import Path

from dvc_data.hashfile.db import HashFileDB

from .hashfile.hash_info import HashInfo
from .index import DataIndex, DataIndexEntry, ObjectStorage

@@ -33,7 +33,7 @@ class FileInfo(NamedTuple):
fs_path: "AnyFSPath"


class DataFileSystem(AbstractFileSystem): # pylint:disable=abstract-method
class DataFileSystem(AbstractFileSystem):
root_marker = "/"

def __init__(self, index: "DataIndex", **kwargs: Any):
@@ -55,7 +55,7 @@ def _get_key(self, path: str) -> Tuple[str, ...]:
return ()

key = self.path.relparts(path, self.root_marker)
if key == (".",) or key == ("",):
if key in ((".",), ("",)):
key = ()

return key
@@ -108,9 +108,7 @@ def _cache_remote_file(
odb.add(path, fs, oid)
return odb.fs, odb.oid_to_path(oid)

def _open( # pylint: disable=arguments-differ
self, path: "AnyFSPath", **kwargs: Any
) -> "BinaryIO":
def _open(self, path: "AnyFSPath", **kwargs: Any) -> "BinaryIO":
typ, _, cache_storage, hi, fs, fspath = self._get_fs_path(path)

if kwargs.get("cache", False) and typ == "remote" and cache_storage:
@@ -156,7 +154,7 @@ def info(self, path: "AnyFSPath", **kwargs: Any):
info["name"] = path
return info

def get_file( # pylint: disable=arguments-differ
def get_file(
self,
rpath: "AnyFSPath",
lpath: "AnyFSPath",
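
The `HashFileDB` import moving under `TYPE_CHECKING` above is what the newly enabled `TCH` (flake8-type-checking) rules with `strict = true` drive: imports needed only for annotations are deferred to type-checking time. A minimal sketch of the pattern, with a hypothetical helper:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # evaluated by type checkers only, skipped at runtime
    from dvc_data.hashfile.db import HashFileDB


def describe_odb(odb: "HashFileDB") -> str:
    # hypothetical helper: the quoted annotation means HashFileDB is only
    # needed for static analysis, so the runtime import cost is avoided
    return f"object db: {odb!r}"
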
2 changes: 0 additions & 2 deletions src/dvc_data/hashfile/_ignore.py
@@ -5,8 +5,6 @@
if TYPE_CHECKING:
from dvc_objects.fs.base import AnyFSPath, FileSystem

# pylint: disable=unused-argument


class Ignore(Protocol):
def find(self, fs: "FileSystem", path: "AnyFSPath") -> Iterator["AnyFSPath"]:
