Merge pull request #1349 from RandallPittmanOrSt/rwp_typing
Corrections and enhancements to type stubs
jswhit authored Oct 22, 2024
2 parents d181813 + 1004467 commit 14012a5
Showing 35 changed files with 593 additions and 405 deletions.
43 changes: 26 additions & 17 deletions .github/stubtest-allowlist
@@ -1,33 +1,42 @@
netCDF4.AccessModeOptions
netCDF4.CompressionLevelOptions
netCDF4.CompressionOptions
netCDF4.DatatypeOptions
netCDF4.DimensionsOptions
netCDF4.DiskFormatOptions
netCDF4.EndianOptions
netCDF4.FormatOptions
netCDF4.QuantizeOptions
netCDF4.CalendarOptions
netCDF4.ellipsis
netCDF4.RealTypeLiteral
netCDF4.ComplexTypeLiteral
netCDF4.NumericTypeLiteral
netCDF4.CharTypeLiteral
netCDF4.TypeLiteral
netCDF4.NumPyRealType
netCDF4.NumPyComplexType
netCDF4.NumPyNumericType
netCDF4.NetCDFUDTClass
netCDF4.AccessMode
netCDF4.CompressionLevel
netCDF4.CompressionType
netCDF4.DatatypeType
netCDF4.DimensionsType
netCDF4.DiskFormat
netCDF4.EndianType
netCDF4.Format
netCDF4.QuantizeMode
netCDF4.CalendarType
netCDF4.DateTimeArray
netCDF4.FiltersDict
netCDF4.SzipInfo
netCDF4.BloscInfo
netCDF4.BoolInt
netCDF4.GetSetItemKey
netCDF4.T_Datatype
netCDF4.T_DatatypeNC
netCDF4.Dataset.__dealloc
netCDF4.VarT
netCDF4.RealVarT
netCDF4.ComplexVarT
netCDF4.NumericVarT
netCDF4.Dimension.__reduce_cython__
netCDF4.Dimension.__setstate_cython__
netCDF4.Variable.auto_complex
netCDF4._netCDF4.Dataset.__dealloc
netCDF4.Variable.__iter__
netCDF4._netCDF4.Dimension.__reduce_cython__
netCDF4._netCDF4.Dimension.__setstate_cython__
netCDF4._netCDF4.NC_DISKLESS
netCDF4._netCDF4.NC_PERSIST
netCDF4._netCDF4.Variable.auto_complex
netCDF4._netCDF4.Variable.__iter__
netCDF4._netCDF4.__reduce_cython__
netCDF4._netCDF4.__setstate_cython__
netCDF4._netCDF4.__test__
netCDF4.utils.bytes
netCDF4.utils
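
The entries above are names that stubtest cannot match against the runtime module: the renamed stub-only aliases (Format, CompressionLevel, QuantizeMode, ...) plus Cython internals, typically checked with an invocation along the lines of "python -m mypy.stubtest netCDF4 --allowlist .github/stubtest-allowlist". Because these aliases exist only in the stubs, scripts that want to use them must guard the import, as the example files below do. A minimal sketch; the function name add_float_var and its defaults are illustrative, not part of this commit:

from typing import TYPE_CHECKING
import netCDF4

if TYPE_CHECKING:
    # Stub-only names: import them for the type checker, never at runtime.
    from netCDF4 import CompressionLevel, QuantizeMode

def add_float_var(ds: netCDF4.Dataset, name: str,
                  complevel: "CompressionLevel" = 4,
                  quantize_mode: "QuantizeMode" = "BitGroom") -> netCDF4.Variable:
    # Compressed, quantized float variable along an existing "time" dimension.
    return ds.createVariable(name, "f4", ("time",), zlib=True, complevel=complevel,
                             significant_digits=3, quantize_mode=quantize_mode)
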
2 changes: 1 addition & 1 deletion .github/workflows/build_latest.yml
@@ -58,7 +58,7 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
- name: Install netcdf4-python
run: |
2 changes: 1 addition & 1 deletion .github/workflows/build_master.yml
@@ -47,7 +47,7 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py mypy types-setuptools
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py mypy types-setuptools typing-extensions
- name: Install netcdf4-python
run: |
2 changes: 1 addition & 1 deletion .github/workflows/build_old.yml
@@ -59,7 +59,7 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py
pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
- name: Install netcdf4-python
run: |
4 changes: 2 additions & 2 deletions .github/workflows/cibuildwheel.yml
@@ -103,7 +103,7 @@ jobs:
CIBW_TEST_SKIP: "cp38-*_aarch64 cp39-*_aarch64 cp310-*_aarch64 cp311-*_aarch64"
CIBW_ENVIRONMENT: ${{ matrix.CIBW_ENVIRONMENT }}
CIBW_BEFORE_BUILD_MACOS: brew install hdf5 netcdf
CIBW_TEST_REQUIRES: pytest cython packaging
CIBW_TEST_REQUIRES: pytest cython packaging typing-extensions
CIBW_TEST_COMMAND: >
python -c "import netCDF4; print(f'netCDF4 v{netCDF4.__version__}')"
&& pytest -s -rxs -v {project}/test
@@ -155,7 +155,7 @@ jobs:
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >
delvewheel show {wheel}
&& delvewheel repair -w {dest_dir} {wheel}
CIBW_TEST_REQUIRES: pytest cython packaging
CIBW_TEST_REQUIRES: pytest cython packaging typing-extensions
CIBW_TEST_COMMAND: >
python -c "import netCDF4; print(f'netCDF4 v{netCDF4.__version__}')"
&& pytest -s -rxs -v {project}\\test
8 changes: 4 additions & 4 deletions .github/workflows/miniconda.yml
@@ -16,7 +16,7 @@ jobs:
os: [windows-latest, ubuntu-latest, macos-latest]
platform: [x64, x32]
exclude:
- os: macos-latest
- os: macos-latest
platform: x32
fail-fast: false
defaults:
@@ -35,7 +35,7 @@
init-shell: bash
create-args: >-
python=${{ matrix.python-version }}
numpy cython pip pytest hdf5 libnetcdf cftime zlib certifi
numpy cython pip pytest hdf5 libnetcdf cftime zlib certifi typing-extensions
--channel conda-forge
- name: Install netcdf4-python
@@ -69,7 +69,7 @@ jobs:
init-shell: bash
create-args: >-
python=${{ matrix.python-version }}
numpy cython pip pytest openmpi mpi4py hdf5=*=mpi* libnetcdf=*=mpi* cftime zlib certifi
numpy cython pip pytest openmpi mpi4py hdf5=*=mpi* libnetcdf=*=mpi* cftime zlib certifi typing-extensions
--channel conda-forge
- name: Install netcdf4-python with mpi
@@ -82,7 +82,7 @@
run: |
cd test && python run_all.py
cd ../examples
export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH"
export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH"
which mpirun
mpirun --version
mpirun -np 4 --oversubscribe python mpi_example.py # for openmpi
7 changes: 6 additions & 1 deletion examples/bench.py
@@ -1,9 +1,14 @@
# benchmark reads and writes, with and without compression.
# tests all four supported file formats.
from typing import TYPE_CHECKING, Any
from numpy.random.mtrand import uniform
import netCDF4
from timeit import Timer
import os, sys
if TYPE_CHECKING:
from netCDF4 import Format as NCFormat
else:
NCFormat = Any

# create an n1dim by n2dim by n3dim random array.
n1dim = 30
@@ -14,7 +19,7 @@
sys.stdout.write('reading and writing a %s by %s by %s by %s random array ..\n'%(n1dim,n2dim,n3dim,n4dim))
array = uniform(size=(n1dim,n2dim,n3dim,n4dim))

def write_netcdf(filename,zlib=False,least_significant_digit=None,format='NETCDF4'):
def write_netcdf(filename,zlib=False,least_significant_digit=None,format: NCFormat='NETCDF4'):
file = netCDF4.Dataset(filename,'w',format=format)
file.createDimension('n1', n1dim)
file.createDimension('n2', n2dim)
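
With the NCFormat annotation above, a type checker can reject an unsupported format string before the script runs, while runtime behaviour is unchanged because the alias falls back to Any. A self-contained sketch of the same pattern; the name make_empty_file is illustrative, not part of the diff:

from typing import TYPE_CHECKING, Any
import netCDF4

if TYPE_CHECKING:
    from netCDF4 import Format as NCFormat
else:
    NCFormat = Any

def make_empty_file(path: str, format: NCFormat = 'NETCDF4') -> netCDF4.Dataset:
    # Create an empty dataset in the requested on-disk format.
    return netCDF4.Dataset(path, 'w', format=format)

make_empty_file('ok.nc', format='NETCDF3_CLASSIC')   # accepted by mypy and at runtime
# make_empty_file('bad.nc', format='NETCDF5')        # mypy: not a valid Format literal (and a runtime error)
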
11 changes: 8 additions & 3 deletions examples/bench_compress.py
@@ -1,9 +1,15 @@
# benchmark reads and writes, with and without compression.
# tests all four supported file formats.
from typing import TYPE_CHECKING, Any
from numpy.random.mtrand import uniform
import netCDF4
import netCDF4.utils
from timeit import Timer
import os, sys
if TYPE_CHECKING:
from netCDF4 import CompressionLevel
else:
CompressionLevel = Any

# create an n1dim by n2dim by n3dim random array.
n1dim = 30
@@ -13,10 +19,9 @@
ntrials = 10
sys.stdout.write('reading and writing a %s by %s by %s by %s random array ..\n'%(n1dim,n2dim,n3dim,n4dim))
sys.stdout.write('(average of %s trials)\n' % ntrials)
array = netCDF4.utils._quantize(uniform(size=(n1dim,n2dim,n3dim,n4dim)),4) # type: ignore
array = netCDF4.utils._quantize(uniform(size=(n1dim,n2dim,n3dim,n4dim)),4)


def write_netcdf(filename,zlib=False,shuffle=False,complevel=6):
def write_netcdf(filename,zlib=False,shuffle=False,complevel: CompressionLevel = 6):
file = netCDF4.Dataset(filename,'w',format='NETCDF4')
file.createDimension('n1', n1dim)
file.createDimension('n2', n2dim)
7 changes: 6 additions & 1 deletion examples/bench_compress4.py
@@ -1,5 +1,6 @@
# benchmark reads and writes, with and without compression.
# tests all four supported file formats.
from typing import Literal
from numpy.random.mtrand import uniform
import netCDF4
from timeit import Timer
@@ -19,7 +20,11 @@
array = nc.variables['hgt'][0:n1dim,5,:,:]


def write_netcdf(filename,nsd,quantize_mode='BitGroom'):
def write_netcdf(
filename,
nsd,
quantize_mode: Literal["BitGroom", "BitRound", "GranularBitRound"] = "BitGroom"
):
file = netCDF4.Dataset(filename,'w',format='NETCDF4')
file.createDimension('n1', None)
file.createDimension('n3', n3dim)
11 changes: 8 additions & 3 deletions examples/bench_diskless.py
@@ -1,9 +1,14 @@
# benchmark reads and writes, with and without compression.
# tests all four supported file formats.
from typing import TYPE_CHECKING, Any, Literal
from numpy.random.mtrand import uniform
import netCDF4
from timeit import Timer
import os, sys
if TYPE_CHECKING:
from netCDF4 import Format as NCFormat
else:
NCFormat = Any

# create an n1dim by n2dim by n3dim random array.
n1dim = 30
@@ -14,7 +19,7 @@
sys.stdout.write('reading and writing a %s by %s by %s by %s random array ..\n'%(n1dim,n2dim,n3dim,n4dim))
array = uniform(size=(n1dim,n2dim,n3dim,n4dim))

def write_netcdf(filename,zlib=False,least_significant_digit=None,format='NETCDF4',closeit=False):
def write_netcdf(filename, zlib=False, least_significant_digit=None, format: NCFormat='NETCDF4',closeit=False):
file = netCDF4.Dataset(filename,'w',format=format,diskless=True,persist=True)
file.createDimension('n1', n1dim)
file.createDimension('n2', n2dim)
@@ -42,13 +47,13 @@ def read_netcdf(ncfile):
sys.stdout.write('writing took %s seconds\n' %\
repr(sum(t.repeat(ntrials,1))/ntrials))
# test reading.
ncfile = write_netcdf('test1.nc',format=format)
ncfile = write_netcdf('test1.nc',format=format) # type: ignore
t = Timer("read_netcdf(ncfile)","from __main__ import read_netcdf,ncfile")
sys.stdout.write('reading took %s seconds\n' %
repr(sum(t.repeat(ntrials,1))/ntrials))

# test diskless=True in nc_open
format='NETCDF3_CLASSIC'
format: Literal["NETCDF3_CLASSIC"] = 'NETCDF3_CLASSIC' # mypy should know this but it needs help...
trials=50
sys.stdout.write('test caching of file in memory on open for %s\n' % format)
sys.stdout.write('testing file format %s ...\n' % format)
26 changes: 11 additions & 15 deletions examples/mpi_example.py
@@ -1,27 +1,23 @@
# to run: mpirun -np 4 python mpi_example.py
import sys
from typing import Literal
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset

format: Literal[
'NETCDF4',
'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC',
'NETCDF3_64BIT_OFFSET',
'NETCDF3_64BIT_DATA'
]
if len(sys.argv) == 2:
format = sys.argv[1] # type: ignore
else:
format = 'NETCDF4_CLASSIC'

nc_format = 'NETCDF4_CLASSIC' if len(sys.argv) < 2 else sys.argv[1]

rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run)
if rank == 0:
print('Creating file with format {}'.format(format))
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info(), format=format)
print('Creating file with format {}'.format(nc_format))
nc = Dataset(
"parallel_test.nc",
"w",
parallel=True,
comm=MPI.COMM_WORLD,
info=MPI.Info(),
format=nc_format, # type: ignore # we'll assume it's OK
)
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
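
The type: ignore on format=nc_format is needed because sys.argv[1] is only known to be str. A purely illustrative alternative, not what the commit does, is to validate the value at runtime and narrow it with typing.cast, reusing the stub-only Format alias seen in the other examples:

import sys
from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    from netCDF4 import Format

VALID_FORMATS = ('NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC',
                 'NETCDF3_64BIT_OFFSET', 'NETCDF3_64BIT_DATA')

raw = sys.argv[1] if len(sys.argv) == 2 else 'NETCDF4_CLASSIC'
if raw not in VALID_FORMATS:
    sys.exit('unsupported format: %s' % raw)
nc_format = cast("Format", raw)  # safe after the runtime check; no ignore needed
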
6 changes: 6 additions & 0 deletions pyproject.toml
@@ -84,6 +84,12 @@ filterwarnings = [

[tool.mypy]
files = ["src/netCDF4"]
exclude = "utils.py"
check_untyped_defs = true
allow_redefinition = true
# next 2 lines workarounds for mypy dealing with type_guards.py
mypy_path = "test"
explicit_package_bases = true

[[tool.mypy.overrides]]
ignore_missing_imports = true
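
For context on check_untyped_defs = true: by default mypy skips the body of a function that has no annotations, so mistakes inside it only surface at runtime; with the option enabled the body is checked as well. A tiny illustration, not taken from the diff:

def report():
    # No annotations anywhere, so by default mypy does not analyze this body.
    count = len("abcd")           # inferred as int
    return "items: " + count      # with check_untyped_defs on: unsupported operand types for + (str and int)
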