Skip to content

Commit

Permalink
dev: move from flake8,isort to ruff (#12375)
Browse files Browse the repository at this point in the history
  • Loading branch information
anshbansal authored Jan 17, 2025
1 parent 0c597d3 commit 436b74c
Show file tree
Hide file tree
Showing 55 changed files with 244 additions and 187 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/pre-commit-override.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,5 @@ repos:
name: smoke-test cypress Lint Fix
entry: ./gradlew :smoke-test:cypressLintFix
language: system
files: ^smoke-test/tests/cypress/.*$
files: ^smoke-test/tests/cypress/.*\.tsx$
pass_filenames: false
5 changes: 3 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Auto-generated by .github/scripts/generate_pre_commit.py at 2025-01-09 10:08:09 UTC
# Auto-generated by .github/scripts/generate_pre_commit.py at 2025-01-17 16:43:31 UTC
# Do not edit this file directly. Run the script to regenerate.
# Add additional hooks in .github/scripts/pre-commit-override.yaml
repos:
Expand Down Expand Up @@ -442,4 +442,5 @@ repos:
name: smoke-test cypress Lint Fix
entry: ./gradlew :smoke-test:cypressLintFix
language: system
files: ^smoke-test/tests/cypress/.*$
files: ^smoke-test/tests/cypress/.*\.tsx$
pass_filenames: false
2 changes: 1 addition & 1 deletion metadata-ingestion-modules/airflow-plugin/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def get_long_description():
*mypy_stubs,
"black==22.12.0",
"coverage>=5.1",
"ruff==0.9.1",
"ruff==0.9.2",
"mypy==1.10.1",
# pydantic 1.8.2 is incompatible with mypy 0.910.
# See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
Expand Down
2 changes: 1 addition & 1 deletion metadata-ingestion-modules/dagster-plugin/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def get_long_description():
"dagster-snowflake-pandas >= 0.11.0",
"black==22.12.0",
"coverage>=5.1",
"ruff==0.9.1",
"ruff==0.9.2",
"mypy>=1.4.0",
# pydantic 1.8.2 is incompatible with mypy 0.910.
# See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
Expand Down
6 changes: 2 additions & 4 deletions metadata-ingestion-modules/gx-plugin/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -55,16 +55,14 @@ task lint(type: Exec, dependsOn: installDev) {
commandLine 'bash', '-c',
"source ${venv_name}/bin/activate && set -x && " +
"black --check --diff src/ tests/ && " +
"isort --check --diff src/ tests/ && " +
"flake8 --count --statistics src/ tests/ && " +
"ruff check src/ tests/ && " +
"mypy --show-traceback --show-error-codes src/ tests/"
}
task lintFix(type: Exec, dependsOn: installDev) {
commandLine 'bash', '-x', '-c',
"source ${venv_name}/bin/activate && " +
"black src/ tests/ && " +
"isort src/ tests/ && " +
"flake8 src/ tests/ && " +
"ruff check --fix src/ tests/ && " +
"mypy src/ tests/"
}

Expand Down
51 changes: 47 additions & 4 deletions metadata-ingestion-modules/gx-plugin/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,50 @@ extend-exclude = '''
'''
include = '\.pyi?$'

[tool.isort]
indent = ' '
profile = 'black'
sections = 'FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER'
[tool.ruff.lint.isort]
combine-as-imports = true
known-first-party = ["datahub"]
extra-standard-library = ["__future__", "datahub.utilities._markupsafe_compat", "datahub.sql_parsing._sqlglot_patch"]
section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"]
force-sort-within-sections = false
force-wrap-aliases = false
split-on-trailing-comma = false
order-by-type = true
relative-imports-order = "closest-to-furthest"
force-single-line = false
single-line-exclusions = ["typing"]
length-sort = false
from-first = false
required-imports = []
classes = ["typing"]

[tool.ruff.lint]
select = [
"B",
"C90",
"E",
"F",
"I", # For isort
"TID",
]
ignore = [
# Ignore line length violations (handled by Black)
"E501",
# Ignore whitespace before ':' (matches Black)
"E203",
"E203",
# Allow usages of functools.lru_cache
"B019",
# Allow function call in argument defaults
"B008",
]

[tool.ruff.lint.mccabe]
max-complexity = 15

[tool.ruff.lint.flake8-tidy-imports]
# Disallow all relative imports.
ban-relative-imports = "all"

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
21 changes: 0 additions & 21 deletions metadata-ingestion-modules/gx-plugin/setup.cfg
Original file line number Diff line number Diff line change
@@ -1,24 +1,3 @@
[flake8]
max-complexity = 15
ignore =
# Ignore: line length issues, since black's formatter will take care of them.
E501,
# Ignore: 1 blank line required before class docstring.
D203,
# See https://stackoverflow.com/a/57074416.
W503,
# See https://github.com/psf/black/issues/315.
E203
exclude =
.git,
venv,
.tox,
__pycache__
per-file-ignores =
# imported but unused
__init__.py: F401
ban-relative-imports = true

[mypy]
plugins =
pydantic.mypy
Expand Down
5 changes: 1 addition & 4 deletions metadata-ingestion-modules/gx-plugin/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,10 +60,7 @@ def get_long_description():
*mypy_stubs,
"black==22.12.0",
"coverage>=5.1",
"flake8>=6.0.0",
"flake8-tidy-imports>=4.3.0",
"flake8-bugbear==23.3.12",
"isort>=5.7.0",
"ruff==0.9.2",
"mypy>=1.4.0",
# pydantic 1.8.2 is incompatible with mypy 0.910.
# See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,37 @@
import sys
import time
from dataclasses import dataclass
from datahub.utilities._markupsafe_compat import MARKUPSAFE_PATCHED
from datetime import timezone
from decimal import Decimal
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union

import datahub.emitter.mce_builder as builder
import packaging.version
from great_expectations.checkpoint.actions import ValidationAction
from great_expectations.core.batch import Batch
from great_expectations.core.batch_spec import (
RuntimeDataBatchSpec,
RuntimeQueryBatchSpec,
SqlAlchemyDatasourceBatchSpec,
)
from great_expectations.core.expectation_validation_result import (
ExpectationSuiteValidationResult,
)
from great_expectations.data_asset.data_asset import DataAsset
from great_expectations.data_context import AbstractDataContext
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
ValidationResultIdentifier,
)
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.execution_engine.sqlalchemy_execution_engine import (
SqlAlchemyExecutionEngine,
)
from great_expectations.validator.validator import Validator
from sqlalchemy.engine.base import Connection, Engine
from sqlalchemy.engine.url import make_url

import datahub.emitter.mce_builder as builder
from datahub.cli.env_utils import get_boolean_env_variable
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
Expand All @@ -35,31 +60,7 @@
from datahub.metadata.com.linkedin.pegasus2avro.common import DataPlatformInstance
from datahub.metadata.schema_classes import PartitionSpecClass, PartitionTypeClass
from datahub.sql_parsing.sqlglot_lineage import create_lineage_sql_parsed_result
from datahub.utilities._markupsafe_compat import MARKUPSAFE_PATCHED
from datahub.utilities.urns.dataset_urn import DatasetUrn
from great_expectations.checkpoint.actions import ValidationAction
from great_expectations.core.batch import Batch
from great_expectations.core.batch_spec import (
RuntimeDataBatchSpec,
RuntimeQueryBatchSpec,
SqlAlchemyDatasourceBatchSpec,
)
from great_expectations.core.expectation_validation_result import (
ExpectationSuiteValidationResult,
)
from great_expectations.data_asset.data_asset import DataAsset
from great_expectations.data_context import AbstractDataContext
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
ValidationResultIdentifier,
)
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.execution_engine.sqlalchemy_execution_engine import (
SqlAlchemyExecutionEngine,
)
from great_expectations.validator.validator import Validator
from sqlalchemy.engine.base import Connection, Engine
from sqlalchemy.engine.url import make_url

# TODO: move this and version check used in tests to some common module
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@

import packaging.version
import pytest
from freezegun import freeze_time
from great_expectations.data_context import FileDataContext

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.ingestion.sink.file import write_metadata_file
from datahub.testing.compare_metadata_json import assert_metadata_files_equal
from datahub.testing.docker_utils import wait_for_port
from freezegun import freeze_time
from great_expectations.data_context import FileDataContext

try:
from great_expectations import __version__ as GX_VERSION # type: ignore
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,6 @@

import pandas as pd
import pytest
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import (
AssertionInfoClass,
AssertionResultClass,
AssertionResultTypeClass,
AssertionRunEventClass,
AssertionRunStatusClass,
AssertionStdParameterClass,
AssertionStdParametersClass,
AssertionTypeClass,
BatchSpecClass,
DataPlatformInstanceClass,
DatasetAssertionInfoClass,
DatasetAssertionScopeClass,
PartitionSpecClass,
)
from great_expectations.core.batch import Batch, BatchDefinition, BatchRequest
from great_expectations.core.batch_spec import (
RuntimeDataBatchSpec,
Expand All @@ -46,6 +30,22 @@
)
from great_expectations.validator.validator import Validator

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import (
AssertionInfoClass,
AssertionResultClass,
AssertionResultTypeClass,
AssertionRunEventClass,
AssertionRunStatusClass,
AssertionStdParameterClass,
AssertionStdParametersClass,
AssertionTypeClass,
BatchSpecClass,
DataPlatformInstanceClass,
DatasetAssertionInfoClass,
DatasetAssertionScopeClass,
PartitionSpecClass,
)
from datahub_gx_plugin.action import DataHubValidationAction

logger = logging.getLogger(__name__)
Expand Down
6 changes: 2 additions & 4 deletions metadata-ingestion-modules/prefect-plugin/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -55,16 +55,14 @@ task lint(type: Exec, dependsOn: installDev) {
commandLine 'bash', '-c',
"source ${venv_name}/bin/activate && set -x && " +
"black --check --diff src/ tests/ && " +
"isort --check --diff src/ tests/ && " +
"flake8 --count --statistics src/ tests/ && " +
"ruff check src/ tests/ && " +
"mypy --show-traceback --show-error-codes src/ tests/"
}
task lintFix(type: Exec, dependsOn: installDev) {
commandLine 'bash', '-x', '-c',
"source ${venv_name}/bin/activate && " +
"black src/ tests/ && " +
"isort src/ tests/ && " +
"flake8 src/ tests/ && " +
"ruff check --fix src/ tests/ && " +
"mypy src/ tests/"
}

Expand Down
51 changes: 47 additions & 4 deletions metadata-ingestion-modules/prefect-plugin/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,50 @@ extend-exclude = '''
'''
include = '\.pyi?$'

[tool.isort]
indent = ' '
profile = 'black'
sections = 'FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER'
[tool.ruff.lint.isort]
combine-as-imports = true
known-first-party = ["datahub"]
extra-standard-library = ["__future__", "datahub.utilities._markupsafe_compat", "datahub.sql_parsing._sqlglot_patch"]
section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"]
force-sort-within-sections = false
force-wrap-aliases = false
split-on-trailing-comma = false
order-by-type = true
relative-imports-order = "closest-to-furthest"
force-single-line = false
single-line-exclusions = ["typing"]
length-sort = false
from-first = false
required-imports = []
classes = ["typing"]

[tool.ruff.lint]
select = [
"B",
"C90",
"E",
"F",
"I", # For isort
"TID",
]
ignore = [
# Ignore line length violations (handled by Black)
"E501",
# Ignore whitespace before ':' (matches Black)
"E203",
"E203",
# Allow usages of functools.lru_cache
"B019",
# Allow function call in argument defaults
"B008",
]

[tool.ruff.lint.mccabe]
max-complexity = 15

[tool.ruff.lint.flake8-tidy-imports]
# Disallow all relative imports.
ban-relative-imports = "all"

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
21 changes: 0 additions & 21 deletions metadata-ingestion-modules/prefect-plugin/setup.cfg
Original file line number Diff line number Diff line change
@@ -1,24 +1,3 @@
[flake8]
max-complexity = 15
ignore =
# Ignore: line length issues, since black's formatter will take care of them.
E501,
# Ignore: 1 blank line required before class docstring.
D203,
# See https://stackoverflow.com/a/57074416.
W503,
# See https://github.com/psf/black/issues/315.
E203
exclude =
.git,
venv,
.tox,
__pycache__
per-file-ignores =
# imported but unused
__init__.py: F401
ban-relative-imports = true

[mypy]
plugins =
sqlmypy,
Expand Down
4 changes: 1 addition & 3 deletions metadata-ingestion-modules/prefect-plugin/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,7 @@ def get_long_description():
*mypy_stubs,
"black==22.12.0",
"coverage>=5.1",
"flake8>=3.8.3",
"flake8-tidy-imports>=4.3.0",
"isort>=5.7.0",
"ruff==0.9.2",
"mypy>=1.4.0",
# pydantic 1.8.2 is incompatible with mypy 0.910.
# See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
Expand Down
Loading

0 comments on commit 436b74c

Please sign in to comment.