diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b5d69da2b..689e38cfd 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,7 +13,7 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.9
+    rev: v0.7.1
     hooks:
       - id: ruff
         files: datashader/
diff --git a/datashader/compiler.py b/datashader/compiler.py
index f714e4612..4034058b9 100644
--- a/datashader/compiler.py
+++ b/datashader/compiler.py
@@ -272,8 +272,7 @@ def traverse_aggregation(agg):
     """Yield a left->right traversal of an aggregation"""
     if isinstance(agg, summary):
         for a in agg.values:
-            for a2 in traverse_aggregation(a):
-                yield a2
+            yield from traverse_aggregation(a)
     else:
         yield agg

diff --git a/datashader/datashape/__init__.py b/datashader/datashape/__init__.py
index beb8c0f57..803216053 100644
--- a/datashader/datashape/__init__.py
+++ b/datashader/datashape/__init__.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import

 from . import lexer, parser  # noqa (API import)
 from .coretypes import *  # noqa (API import)
diff --git a/datashader/datashape/coretypes.py b/datashader/datashape/coretypes.py
index 5078faad0..2c4d9d5f3 100644
--- a/datashader/datashape/coretypes.py
+++ b/datashader/datashape/coretypes.py
@@ -2,7 +2,6 @@
 This defines the DataShape type system, with unified shape and data type.
 """

-from __future__ import print_function, division, absolute_import

 import ctypes
 import operator
@@ -26,7 +25,7 @@ class Type(type):
     _registry = {}

     def __new__(meta, name, bases, dct):
-        cls = super(Type, meta).__new__(meta, name, bases, dct)
+        cls = super(Type, meta).__new__(meta, name, bases, dct)  # noqa: UP008
         # Don't register abstract classes
         if not dct.get('abstract'):
             Type._registry[name] = cls
@@ -223,7 +222,7 @@ def __init__(self, tz=None):
         self.tz = tz

     def __str__(self):
-        basename = super(Time, self).__str__()
+        basename = super().__str__()
         if self.tz is None:
             return basename
         else:
@@ -244,7 +243,7 @@ def __init__(self, tz=None):
         self.tz = tz

     def __str__(self):
-        basename = super(DateTime, self).__str__()
+        basename = super().__str__()
         if self.tz is None:
             return basename
         else:
@@ -340,20 +339,20 @@ class Bytes(Unit):


 _canonical_string_encodings = {
-    u'A': u'A',
-    u'ascii': u'A',
-    u'U8': u'U8',
-    u'utf-8': u'U8',
-    u'utf_8': u'U8',
-    u'utf8': u'U8',
-    u'U16': u'U16',
-    u'utf-16': u'U16',
-    u'utf_16': u'U16',
-    u'utf16': u'U16',
-    u'U32': u'U32',
-    u'utf-32': u'U32',
-    u'utf_32': u'U32',
-    u'utf32': u'U32',
+    'A': 'A',
+    'ascii': 'A',
+    'U8': 'U8',
+    'utf-8': 'U8',
+    'utf_8': 'U8',
+    'utf8': 'U8',
+    'U16': 'U16',
+    'utf-16': 'U16',
+    'utf_16': 'U16',
+    'utf16': 'U16',
+    'U32': 'U32',
+    'utf-32': 'U32',
+    'utf_32': 'U32',
+    'utf32': 'U32',
 }


diff --git a/datashader/datashape/discovery.py b/datashader/datashape/discovery.py
index 3831812e1..93466639b 100644
--- a/datashader/datashape/discovery.py
+++ b/datashader/datashape/discovery.py
@@ -1,4 +1,3 @@
-from __future__ import print_function, division, absolute_import

 from collections import OrderedDict
 from datetime import datetime, date, time, timedelta
@@ -435,10 +434,7 @@ def descendents(d, x):
 try:
     from unittest.mock import Mock
 except ImportError:
-    try:
-        from mock import Mock
-    except ImportError:
-        pass
+    pass

 if Mock is not None:
     @dispatch(Mock)
diff --git a/datashader/datashape/internal_utils.py b/datashader/datashape/internal_utils.py
index e5188dd22..1f4d51895 100644
--- a/datashader/datashape/internal_utils.py
+++ b/datashader/datashape/internal_utils.py
@@ -4,7 +4,6 @@
 Do not import datashape modules into this module. See util.py in that case
 """

-from __future__ import print_function, division, absolute_import

 import keyword
 import re
@@ -76,7 +75,7 @@ def _toposort(edges):
     """
     incoming_edges = reverse_dict(edges)
     incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items())
-    S = set((v for v in edges if v not in incoming_edges))
+    S = {v for v in edges if v not in incoming_edges}
     L = []

     while S:
diff --git a/datashader/datashape/lexer.py b/datashader/datashape/lexer.py
index 23fd21e4e..7e8f4f87e 100644
--- a/datashader/datashape/lexer.py
+++ b/datashader/datashape/lexer.py
@@ -2,7 +2,6 @@
 Lexer for the datashape grammar.
 """

-from __future__ import absolute_import, division, print_function

 import re
 import ast
diff --git a/datashader/datashape/parser.py b/datashader/datashape/parser.py
index 06601cbec..68be0c9d8 100644
--- a/datashader/datashape/parser.py
+++ b/datashader/datashape/parser.py
@@ -2,7 +2,6 @@
 Parser for the datashape grammar.
 """

-from __future__ import absolute_import, division, print_function
 from . import lexer, error

 # TODO: Remove coretypes dependency, make 100% of interaction through
diff --git a/datashader/datashape/promote.py b/datashader/datashape/promote.py
index 2403a4482..5760a160b 100644
--- a/datashader/datashape/promote.py
+++ b/datashader/datashape/promote.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import numpy as np

 from datashader import datashape
diff --git a/datashader/datashape/tests/test_coretypes.py b/datashader/datashape/tests/test_coretypes.py
index eeb19f5d5..178c459be 100644
--- a/datashader/datashape/tests/test_coretypes.py
+++ b/datashader/datashape/tests/test_coretypes.py
@@ -337,7 +337,7 @@ def test_record_string():


 def test_record_with_unicode_name_as_numpy_dtype():
-    r = Record([(str('a'), 'int32')])
+    r = Record([('a', 'int32')])
     assert r.to_numpy_dtype() == np.dtype([('a', 'i4')])


@@ -651,7 +651,7 @@ def test_invalid_record_literal(invalid):
     [
         (['foo', b'\xc4\x87'.decode('utf8')], str),
         (['foo', 'bar'], str),
-        (list(u'ab'), str)
+        (list('ab'), str)
     ]
 )
 def test_unicode_record_names(names, typ):
diff --git a/datashader/datashape/tests/test_creation.py b/datashader/datashape/tests/test_creation.py
index e7872fe45..baa97f71a 100644
--- a/datashader/datashape/tests/test_creation.py
+++ b/datashader/datashape/tests/test_creation.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, division, print_function

 import ctypes
 import unittest
diff --git a/datashader/datashape/tests/test_discovery.py b/datashader/datashape/tests/test_discovery.py
index 1ca5a2e9e..dd4e5b10c 100644
--- a/datashader/datashape/tests/test_discovery.py
+++ b/datashader/datashape/tests/test_discovery.py
@@ -336,10 +336,7 @@ def test_lowest_common_dshape_varlen_strings():


 def test_discover_mock():
-    try:
-        from unittest.mock import Mock
-    except ImportError:
-        from mock import Mock
+    from unittest.mock import Mock

     # This used to segfault because we were sending mocks into numpy
     with pytest.raises(NotImplementedError):
diff --git a/datashader/datashape/tests/test_lexer.py b/datashader/datashape/tests/test_lexer.py
index e56e6a427..e961faad7 100644
--- a/datashader/datashape/tests/test_lexer.py
+++ b/datashader/datashape/tests/test_lexer.py
@@ -2,7 +2,6 @@
 Test the DataShape lexer.
 """

-from __future__ import absolute_import, division, print_function

 import unittest

@@ -81,9 +80,9 @@ def test_string(self):
         self.check_isolated_token("'test'", 'STRING', 'test')
         # Valid escaped characters
         self.check_isolated_token(r'"\"\b\f\n\r\t\ub155"', 'STRING',
-                                  u'"\b\f\n\r\t\ub155')
+                                  '"\b\f\n\r\t\ub155')
         self.check_isolated_token(r"'\'\b\f\n\r\t\ub155'", 'STRING',
-                                  u"'\b\f\n\r\t\ub155")
+                                  "'\b\f\n\r\t\ub155")
         # A sampling of invalid escaped characters
         self.check_failing_token(r'''"\'"''')
         self.check_failing_token(r"""'\"'""")
@@ -95,8 +94,8 @@ def test_string(self):
         self.check_failing_token(r"'\u123g'")
         self.check_failing_token(r"'\u123'")
         # Some unescaped and escapted unicode characters
-        self.check_isolated_token(u'"\uc548\ub155 \\uc548\\ub155"', 'STRING',
-                                  u'\uc548\ub155 \uc548\ub155')
+        self.check_isolated_token('"\uc548\ub155 \\uc548\\ub155"', 'STRING',
+                                  '\uc548\ub155 \uc548\ub155')

     def test_failing_tokens(self):
         self.check_failing_token('~')
diff --git a/datashader/datashape/tests/test_parser.py b/datashader/datashape/tests/test_parser.py
index 6258b72c4..7372923c9 100644
--- a/datashader/datashape/tests/test_parser.py
+++ b/datashader/datashape/tests/test_parser.py
@@ -2,7 +2,6 @@
 Test the DataShape parser.
 """

-from __future__ import absolute_import, division, print_function

 import unittest
 import pytest
@@ -162,8 +161,8 @@ def assertExpectedParse(ds_str, expected):
     # String parameter (positional)
     assertExpectedParse('unary["test"]', 'test')
     assertExpectedParse("unary['test']", 'test')
-    assertExpectedParse('unary["\\uc548\\ub155"]', u'\uc548\ub155')
-    assertExpectedParse(u'unary["\uc548\ub155"]', u'\uc548\ub155')
+    assertExpectedParse('unary["\\uc548\\ub155"]', '\uc548\ub155')
+    assertExpectedParse('unary["\uc548\ub155"]', '\uc548\ub155')
     # DataShape parameter (positional)
     assertExpectedParse('unary[int8]', ct.DataShape(ct.int8))
     assertExpectedParse('unary[X]', ct.DataShape(ct.TypeVar('X')))
@@ -185,8 +184,8 @@ def assertExpectedParse(ds_str, expected):
     # String parameter (keyword)
     assertExpectedParse('unary[blah="test"]', 'test')
     assertExpectedParse("unary[blah='test']", 'test')
-    assertExpectedParse('unary[blah="\\uc548\\ub155"]', u'\uc548\ub155')
-    assertExpectedParse(u'unary[blah="\uc548\ub155"]', u'\uc548\ub155')
+    assertExpectedParse('unary[blah="\\uc548\\ub155"]', '\uc548\ub155')
+    assertExpectedParse('unary[blah="\uc548\ub155"]', '\uc548\ub155')
     # DataShape parameter (keyword)
     assertExpectedParse('unary[blah=int8]', ct.DataShape(ct.int8))
     assertExpectedParse('unary[blah=X]', ct.DataShape(ct.TypeVar('X')))
diff --git a/datashader/datashape/type_symbol_table.py b/datashader/datashape/type_symbol_table.py
index 21a4b985c..8f733606e 100644
--- a/datashader/datashape/type_symbol_table.py
+++ b/datashader/datashape/type_symbol_table.py
@@ -2,7 +2,6 @@
 A symbol table object to hold types for the parser.
 """

-from __future__ import absolute_import, division, print_function

 import ctypes
 from itertools import chain
diff --git a/datashader/datashape/user.py b/datashader/datashape/user.py
index 0aa9b4764..39b623939 100644
--- a/datashader/datashape/user.py
+++ b/datashader/datashape/user.py
@@ -1,4 +1,3 @@
-from __future__ import print_function, division, absolute_import
 from .dispatch import dispatch
 from .coretypes import (
     CType, Date, DateTime, DataShape, Record, String, Time, Var, from_numpy, to_numpy_dtype)
diff --git a/datashader/datashape/util/__init__.py b/datashader/datashape/util/__init__.py
index 3556bcec2..bbde43355 100644
--- a/datashader/datashape/util/__init__.py
+++ b/datashader/datashape/util/__init__.py
@@ -1,4 +1,3 @@
-from __future__ import print_function, division, absolute_import

 from itertools import chain
 import operator
diff --git a/datashader/glyphs/trimesh.py b/datashader/glyphs/trimesh.py
index 5c6b3fba0..b2d88a116 100644
--- a/datashader/glyphs/trimesh.py
+++ b/datashader/glyphs/trimesh.py
@@ -18,7 +18,7 @@ class _PolygonLike(_PointLike):
       * interp (bool): Whether to interpolate (True), or to have one color per shape (False)
     """
     def __init__(self, x, y, z=None, weight_type=True, interp=True):
-        super(_PolygonLike, self).__init__(x, y)
+        super().__init__(x, y)
         if z is None:
             self.z = []
         else:
diff --git a/datashader/macros.py b/datashader/macros.py
index 742de999f..a602303e7 100644
--- a/datashader/macros.py
+++ b/datashader/macros.py
@@ -13,7 +13,7 @@ class NameVisitor(ast.NodeVisitor):
     NodeVisitor that builds a set of all of the named identifiers in an AST
     """
     def __init__(self, *args, **kwargs):
-        super(NameVisitor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.names = set()

     def visit_Name(self, node):
@@ -71,7 +71,7 @@ def __init__(self, starred_name, expand_names, *args, **kwargs):
             variable

         """
-        super(ExpandVarargTransformer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.starred_name = starred_name
         self.expand_names = expand_names

diff --git a/datashader/mpl_ext.py b/datashader/mpl_ext.py
index 9902b9e5f..2997f6354 100644
--- a/datashader/mpl_ext.py
+++ b/datashader/mpl_ext.py
@@ -92,7 +92,7 @@ def alpha_colormap(color, min_alpha=40, max_alpha=255, N=256):

 class EqHistNormalize(mpl.colors.Normalize):
     def __init__(self, vmin=None, vmax=None, clip=False, nbins=256 ** 2, ncolors=256):
-        super(EqHistNormalize, self).__init__(vmin, vmax, clip)
+        super().__init__(vmin, vmax, clip)
         self._nbins = nbins
         self._bin_edges = None
         self._ncolors = ncolors
@@ -164,15 +164,15 @@ def inverse(self, value):
         return np.interp([value], self._color_bins, self._bin_edges)[0]

     def autoscale(self, A):
-        super(EqHistNormalize, self).autoscale(A)
+        super().autoscale(A)
         self._bin_edges = self._binning(A, self._ncolors)

     def autoscale_None(self, A):
-        super(EqHistNormalize, self).autoscale_None(A)
+        super().autoscale_None(A)
         self._bin_edges = self._binning(A, self._ncolors)

     def scaled(self):
-        return super(EqHistNormalize, self).scaled() and self._bin_edges is not None
+        return super().scaled() and self._bin_edges is not None


 class DSArtist(_ImageBase):
diff --git a/datashader/reductions.py b/datashader/reductions.py
index c11e90823..ec9c6cae9 100644
--- a/datashader/reductions.py
+++ b/datashader/reductions.py
@@ -1105,7 +1105,7 @@ def uses_cuda_mutex(self) -> UsesCudaMutex:
         return UsesCudaMutex.Global

     def _build_append(self, dshape, schema, cuda, antialias, self_intersect):
-        return super(m2, self)._build_append(dshape, schema, cuda, antialias, self_intersect)
+        return super()._build_append(dshape, schema, cuda, antialias, self_intersect)

     def _build_create(self, required_dshape):
         return self._create_float64_zero
@@ -1263,7 +1263,7 @@ class count_cat(by):
     categories present.
     """
     def __init__(self, column):
-        super(count_cat, self).__init__(column, count())
+        super().__init__(column, count())


 class mean(Reduction):
diff --git a/datashader/tests/test_colors.py b/datashader/tests/test_colors.py
index 7711658cd..f70c38814 100644
--- a/datashader/tests/test_colors.py
+++ b/datashader/tests/test_colors.py
@@ -15,10 +15,8 @@ def test_hex_to_rgb():


 def test_rgb():
-    assert rgb(u'#FAFBFC') == (250, 251, 252)
     assert rgb('#FAFBFC') == (250, 251, 252)
     assert rgb('blue') == (0, 0, 255)
-    assert rgb(u'blue') == (0, 0, 255)
     assert rgb((255, 255, 255)) == (255, 255, 255)
     with pytest.raises(ValueError):
         rgb((255, 256, 255))
diff --git a/datashader/tests/test_geopandas.py b/datashader/tests/test_geopandas.py
index e7eafdd06..60c009bc9 100644
--- a/datashader/tests/test_geopandas.py
+++ b/datashader/tests/test_geopandas.py
@@ -14,6 +14,8 @@
     pytest.param(False, id="dask"),
 ]

+_extras = ["spatialpandas.dask", "dask_geopandas.backends", "dask_geopandas"]
+
 with contextlib.suppress(ImportError):
     import dask_geopandas

@@ -23,12 +25,12 @@

 @pytest.fixture(params=_backends)
 def dask_both(request):
-    with dask_switcher(query=request.param, extras=["spatialpandas.dask", "dask_geopandas.backends", "dask_geopandas"]): ...
+    with dask_switcher(query=request.param, extras=_extras): ...
     return request.param

 @pytest.fixture
 def dask_classic(request):
-    with dask_switcher(query=False, extras=["spatialpandas.dask", "dask_geopandas.backends", "dask_geopandas"]): ...
+    with dask_switcher(query=False, extras=_extras): ...

 try:
     import dask_geopandas
diff --git a/datashader/tests/test_mpl_ext.py b/datashader/tests/test_mpl_ext.py
index 06c08bc70..070931df6 100644
--- a/datashader/tests/test_mpl_ext.py
+++ b/datashader/tests/test_mpl_ext.py
@@ -16,8 +16,8 @@

 df = pd.DataFrame(
     {
-        "x": np.array(([0.0] * 10 + [1] * 10)),
-        "y": np.array(([0.0] * 5 + [1] * 5 + [0] * 5 + [1] * 5)),
+        "x": np.array([0.0] * 10 + [1] * 10),
+        "y": np.array([0.0] * 5 + [1] * 5 + [0] * 5 + [1] * 5),
     }
 )

diff --git a/datashader/tests/test_pandas.py b/datashader/tests/test_pandas.py
index 967750d5a..6faed6e8f 100644
--- a/datashader/tests/test_pandas.py
+++ b/datashader/tests/test_pandas.py
@@ -34,10 +34,10 @@ def _pandas():
     plusminus 0 -1 nan -3 4 -5 6 -7 8 -9 10 -11 12 -13 14 -15 16 -17 18 -19
     cat2 a b c d a b c d a b c d a b c d a b c d
     """
-    df_pd = pd.DataFrame({'x': np.array(([0.] * 10 + [1] * 10)),
-                          'y': np.array(([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5)),
-                          'log_x': np.array(([1.] * 10 + [10] * 10)),
-                          'log_y': np.array(([1.] * 5 + [10] * 5 + [1] * 5 + [10] * 5)),
+    df_pd = pd.DataFrame({'x': np.array([0.] * 10 + [1] * 10),
+                          'y': np.array([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5),
+                          'log_x': np.array([1.] * 10 + [10] * 10),
+                          'log_y': np.array([1.] * 5 + [10] * 5 + [1] * 5 + [10] * 5),
                           'i32': np.arange(20, dtype='i4'),
                           'i64': np.arange(20, dtype='i8'),
                           'f32': np.arange(20, dtype='f4'),
diff --git a/datashader/tests/test_pipeline.py b/datashader/tests/test_pipeline.py
index c8fbc5938..3128e18b5 100644
--- a/datashader/tests/test_pipeline.py
+++ b/datashader/tests/test_pipeline.py
@@ -6,8 +6,8 @@
 import datashader.transfer_functions as tf


-df = pd.DataFrame({'x': np.array(([0.] * 10 + [1] * 10)),
-                   'y': np.array(([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5)),
+df = pd.DataFrame({'x': np.array([0.] * 10 + [1] * 10),
+                   'y': np.array([0.] * 5 + [1] * 5 + [0] * 5 + [1] * 5),
                    'f64': np.arange(20, dtype='f8')})
 df.loc['f64', 2] = np.nan
diff --git a/datashader/tests/test_xarray.py b/datashader/tests/test_xarray.py
index 71f3f1f40..fa85a9150 100644
--- a/datashader/tests/test_xarray.py
+++ b/datashader/tests/test_xarray.py
@@ -14,10 +14,10 @@
 except ImportError:
     cupy = None

-xda = xr.DataArray(data=np.array(([1.] * 10 + [10] * 10)),
+xda = xr.DataArray(data=np.array([1.] * 10 + [10] * 10),
                    dims=('record'),
-                   coords={'x': xr.DataArray(np.array(([0.]*10 + [1]*10)), dims=('record')),
-                   'y': xr.DataArray(np.array(([0.]*5 + [1]*5 + [0]*5 + [1]*5)),
+                   coords={'x': xr.DataArray(np.array([0.]*10 + [1]*10), dims=('record')),
+                   'y': xr.DataArray(np.array([0.]*5 + [1]*5 + [0]*5 + [1]*5),
                        dims=('record')),
                    'i32': xr.DataArray(np.arange(20, dtype='i4'), dims=('record')),
                    'i64': xr.DataArray(np.arange(20, dtype='i8'), dims=('record')),
diff --git a/datashader/tiles.py b/datashader/tiles.py
index 524ab7f29..7f0969ac3 100644
--- a/datashader/tiles.py
+++ b/datashader/tiles.py
@@ -379,7 +379,7 @@ def tile_previewer(full_extent, tileset_url,

 class FileSystemTileRenderer(TileRenderer):
     def render(self, da, level):
-        for img, x, y, z in super(FileSystemTileRenderer, self).render(da, level):
+        for img, x, y, z in super().render(da, level):
             tile_file_name = '{}.{}'.format(y, self.tile_format.lower())
             tile_directory = os.path.join(self.output_location, str(z), str(x))
             output_file = os.path.join(tile_directory, tile_file_name)
@@ -396,15 +396,12 @@ def render(self, da, level):
         except ImportError:
             raise ImportError('install boto3 to enable rendering to S3')

-        try:
-            from urlparse import urlparse
-        except ImportError:
-            from urllib.parse import urlparse
+        from urllib.parse import urlparse

         s3_info = urlparse(self.output_location)
         bucket = s3_info.netloc
         client = boto3.client('s3')
-        for img, x, y, z in super(S3TileRenderer, self).render(da, level):
+        for img, x, y, z in super().render(da, level):
             tile_file_name = '{}.{}'.format(y, self.tile_format.lower())
             key = os.path.join(s3_info.path, str(z), str(x), tile_file_name).lstrip('/')
             output_buf = BytesIO()
diff --git a/pyproject.toml b/pyproject.toml
index 689fe4777..d6c0f5fea 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,6 +76,22 @@ ignore-words-list = "trough,thi,ser"

 [tool.ruff]
 line-length = 100
+fix = true
+
+[tool.ruff.lint]
+select = [
+    "E",
+    "F",
+    "UP",
+    "W",
+]
+ignore = [
+    "UP027", # unpacked-list-comprehension (deprecated)
+    # The following should be enabled in the future
+    "UP030", # format-literals
+    "UP031", # printf-string-formatting
+    "UP032", # f-string
+]

 [tool.ruff.lint.per-file-ignores]
 "test_mpl_ext.py" = ["E402"] # Module level import not at top of file