Commit c5c2483

pull????

F-Bk committed Oct 19, 2023
2 parents: f5247e0 + b084222

Showing 80 changed files with 1,086 additions and 385 deletions.
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,5 @@
repos:
- repo: https://github.com/psf/black
rev: stable
hooks:
- id: black
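
For context: with this configuration in place, a contributor would typically run pre-commit install once per clone and pre-commit run --all-files to apply black across the tree. Note that rev: stable tracks black's moving stable branch rather than pinning a specific release tag, so the hook follows black releases.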
20 changes: 13 additions & 7 deletions pyproject.toml
@@ -13,16 +13,22 @@ extend-exclude = '''
source = ["asammdf"]
omit = ["*/asammdf/gui/ui/*"]


[tool.isort]
force_sort_within_sections = true
order_by_type = false
profile = "black"
skip_glob = ["src/asammdf/gui/ui"]

[tool.cibuildwheel]
test-requires = "pytest"
test-command = "pytest {project}/test"
build-frontend = "build"
archs = ["auto64"] # only build for 64bit architectures
skip = "pp* *_ppc64le *_s390x *-musllinux* cp312-*" # skip pypy and irrelevant architectures

[tool.ruff]
select = [
"UP", # pyupgrade
"I", # isort
]
exclude = ["./src/asammdf/gui/ui"]
target-version = "py38"

[tool.ruff.isort]
known-first-party = ["asammdf"]
order-by-type = false
force-sort-within-sections = true
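
Together with the deleted run_black_and_isort.bat and the new run_black_and_ruff.bat below, this moves import sorting from standalone isort to ruff's isort-compatible "I" rules, with asammdf registered as the first-party package (which is what reorders the relative imports in the src/asammdf hunks below), while the "UP" rules apply pyupgrade-style modernisation for the py38 target. A minimal sketch of the pyupgrade side, illustrative only and with a hypothetical function name, not code from the repository:

    # Illustrative sketch only: the kind of rewrite ruff's "UP" (pyupgrade)
    # rules produce for target-version = "py38".
    from __future__ import annotations  # allows "dict | None" annotations on Python 3.8

    def shape_to_kargs(shape: tuple[int, ...]) -> dict | None:
        # Before the fix: "-> Union[dict, None]" and '"dim_{}".format(i)';
        # after "ruff check --fix": the PEP 604 union and the f-string below.
        kargs = {f"dim_{i}": dim for i, dim in enumerate(shape[::-1])}
        return kargs or None

The new batch script below applies exactly this pass via ruff check --fix ./src.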
2 changes: 1 addition & 1 deletion requirements.txt
@@ -6,4 +6,4 @@ pandas
typing_extensions
python-dateutil
isal; platform_machine == "x86_64" or platform_machine == "AMD64"
lxml<=4.9.2
lxml>=4.9.3
10 changes: 5 additions & 5 deletions requirements_exe_build.txt
@@ -5,11 +5,11 @@ numpy>=1.23.0
pandas
typing_extensions
isal; platform_machine == "x86_64" or platform_machine == "AMD64"
lxml<=4.9.2
lxml==4.9.3
natsort
psutil
PySide6==6.2.2
pyqtgraph==0.12.4
PySide6==6.6.0
pyqtgraph==0.13.3
QtPy==2.3.1
pyqtlet2
pyopengl
@@ -21,8 +21,8 @@ cChardet==2.1.5
chardet
cryptography
keyring
pyinstaller<6.0; sys_platform=="win32"
pyinstaller; sys_platform=="win32"
pyinstaller<6.0; sys_platform=="darwin"
pyinstaller==4.10; sys_platform=="linux"
pyinstaller<6.0; sys_platform=="linux"
scipy
sympy
7 changes: 0 additions & 7 deletions run_black_and_isort.bat

This file was deleted.

6 changes: 6 additions & 0 deletions run_black_and_ruff.bat
@@ -0,0 +1,6 @@
pip install -U black ruff && ^
ruff check --fix ./src && ^
ruff check --fix ./setup.py && ^
black --config pyproject.toml . && ^
black --config pyproject.toml asammdf.spec && ^
black --config pyproject.toml setup.py
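
This script takes over from the deleted run_black_and_isort.bat: import sorting now happens through ruff's "I" rules configured in pyproject.toml above, rather than a separate isort invocation, followed by black for formatting.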
11 changes: 6 additions & 5 deletions setup.py
@@ -104,18 +104,19 @@ def _get_ext_modules():
"export": [
"fastparquet",
"h5py",
"hdf5storage>=0.1.17",
"hdf5storage>=0.1.19",
"python-snappy",
],
"export_matlab_v5": "scipy",
"gui": [
"lxml<=4.9.2",
"lxml>=4.9.2",
"natsort",
"psutil",
"PySide6<=6.3.1",
"pyqtgraph>=0.12.4",
"pyqtlet2>=0.8.0",
"PySide6==6.6.0",
"pyqtgraph==0.13.3",
"pyqtlet2==0.9.3",
"packaging",
"QtPy==2.3.1",
],
"encryption": ["cryptography", "keyring"],
"symbolic_math": "sympy",
1 change: 0 additions & 1 deletion src/asammdf/__init__.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" asammdf is a parser and editor for ASAM MDF files """

import logging
5 changes: 2 additions & 3 deletions src/asammdf/blocks/conversion_utils.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
asammdf utility functions for channel conversions
"""
@@ -8,11 +7,11 @@
from copy import deepcopy
from typing import Any, Union

from ..types import ChannelConversionType
from . import v2_v3_blocks as v3b
from . import v2_v3_constants as v3c
from . import v4_blocks as v4b
from . import v4_constants as v4c
from ..types import ChannelConversionType

__all__ = ["conversion_transfer", "from_dict"]

@@ -379,7 +378,7 @@ def from_dict(conversion: dict[str, Any]) -> v4b.ChannelConversion:
return conversion


def to_dict(conversion: ChannelConversionType) -> Union[dict, None]:
def to_dict(conversion: ChannelConversionType) -> dict | None:
if not conversion:
return None

1 change: 0 additions & 1 deletion src/asammdf/blocks/mdf_v2.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" ASAM MDF version 2 file format module """

from __future__ import annotations
5 changes: 2 additions & 3 deletions src/asammdf/blocks/mdf_v3.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
""" ASAM MDF version 3 file format module """

from __future__ import annotations
@@ -44,11 +43,11 @@
from pandas import DataFrame
from typing_extensions import Literal, TypedDict

from . import v2_v3_constants as v23c
from .. import tool
from ..signal import Signal
from ..types import ChannelsType, CompressionType, RasterType, StrPathType
from ..version import __version__
from . import v2_v3_constants as v23c
from .conversion_utils import conversion_transfer
from .cutils import get_channel_raw_bytes
from .mdf_common import MDF_Common
@@ -1799,7 +1798,7 @@ def append(
sd_nr = len(component_samples)
kargs = {"sd_nr": sd_nr}
for i, dim in enumerate(shape[::-1]):
kargs["dim_{}".format(i)] = dim
kargs[f"dim_{i}"] = dim
parent_dep = ChannelDependency(**kargs)
new_gp_dep.append(parent_dep)

4 changes: 2 additions & 2 deletions src/asammdf/blocks/mdf_v4.py
@@ -75,8 +75,6 @@
from numpy.typing import NDArray
from pandas import DataFrame

from . import bus_logging_utils
from . import v4_constants as v4c
from .. import tool
from ..signal import Signal
from ..types import (
@@ -89,6 +87,8 @@
WritableBufferType,
)
from ..version import __version__
from . import bus_logging_utils
from . import v4_constants as v4c
from .conversion_utils import conversion_transfer
from .mdf_common import MDF_Common
from .options import get_global_option
3 changes: 1 addition & 2 deletions src/asammdf/blocks/source_utils.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
asammdf utility functions for source information
"""
@@ -7,11 +6,11 @@

from functools import lru_cache

from ..types import SourceType
from . import v2_v3_blocks as v3b
from . import v2_v3_constants as v3c
from . import v4_blocks as v4b
from . import v4_constants as v4c
from ..types import SourceType


class Source:
23 changes: 10 additions & 13 deletions src/asammdf/blocks/utils.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
asammdf utility functions and classes
"""
@@ -94,8 +93,6 @@ def detect(text: bytes) -> DetectDict:
from numpy.typing import NDArray
from pandas import Series

from . import v2_v3_constants as v3c
from . import v4_constants as v4c
from ..types import (
ChannelType,
DataGroupType,
@@ -104,6 +101,8 @@
ReadableBufferType,
StrPathType,
)
from . import v2_v3_constants as v3c
from . import v4_constants as v4c

UINT8_u = Struct("<B").unpack
UINT16_u = Struct("<H").unpack
@@ -1119,7 +1118,7 @@ def is_file_like(obj: object) -> bool:
return True


class UniqueDB(object):
class UniqueDB:
def __init__(self) -> None:
self._db = {}

@@ -1279,8 +1278,7 @@ def clear(self) -> None:
self.data_blocks_info_generator = None

def get_data_blocks(self) -> Iterator[DataBlockInfo]:
for blk in self.data_blocks:
yield blk
yield from self.data_blocks

while True:
try:
@@ -1294,8 +1292,7 @@ def get_signal_data_blocks(self, index: int) -> Iterator[SignalDataBlockInfo]:
signal_data = self.signal_data[index]
if signal_data is not None:
signal_data, signal_generator = signal_data
for blk in signal_data:
yield blk
yield from signal_data

while True:
try:
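
The two generator hunks above swap an explicit re-yield loop for yield from, which delegates iteration to the inner iterable. A tiny standalone equivalent, with hypothetical names and not repository code:

    def explicit(blocks):
        for blk in blocks:   # old pattern: re-yield every item one by one
            yield blk

    def delegating(blocks):
        yield from blocks    # same items, and also forwards send()/throw()

    assert list(explicit([1, 2, 3])) == list(delegating([1, 2, 3]))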
@@ -2322,7 +2319,7 @@ def load_channel_names_from_file(file_name, lab_section=""):
channels = load_dsp(file_name, flat=True)

elif extension == ".dspf":
with open(file_name, "r") as infile:
with open(file_name) as infile:
info = json.load(infile)

channels = []
@@ -2345,16 +2342,16 @@
channels = [name.split(";")[0] for name in channels]

elif extension == ".cfg":
with open(file_name, "r") as infile:
with open(file_name) as infile:
info = json.load(infile)
channels = info.get("selected_channels", [])
elif extension == ".txt":
try:
with open(file_name, "r") as infile:
with open(file_name) as infile:
info = json.load(infile)
channels = info.get("selected_channels", [])
except:
with open(file_name, "r") as infile:
with open(file_name) as infile:
channels = [line.strip() for line in infile.readlines()]
channels = [name for name in channels if name]

@@ -2363,7 +2360,7 @@

def load_lab(file):
sections = {}
with open(file, "r") as lab:
with open(file) as lab:
for line in lab:
line = line.strip()
if not line:
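
The repeated open(file_name, "r") to open(file_name) changes in this file are another pyupgrade-style cleanup: "r" is already the default text-read mode, so the explicit argument is redundant. The same pass drops the explicit object base class from UniqueDB, which is implicit on Python 3.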