
Commit 0636d87
Merge branch 'main' into use_latest_version_of_ndx_pose
h-mayorquin authored Jan 14, 2025
2 parents 8046d0d + fe5bfb4 commit 0636d87
Showing 36 changed files with 14 additions and 48 deletions.
7 changes: 7 additions & 0 deletions .gitignore
@@ -6,6 +6,13 @@ __pycache__/
 # C extensions
 *.so

+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+uv.lock
+
 # Distribution / packaging
 .Python
 build/
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -16,10 +16,12 @@ Small fixes should be here.
 ## Deprecations

 ## Bug Fixes
+* Temporarily set a ceiling for hdmf to avoid a chunking bug [PR #1175](https://github.com/catalystneuro/neuroconv/pull/1175)

 ## Features

 ## Improvements
+* Source validation is no longer performed when initializing interfaces or converters [PR #1168](https://github.com/catalystneuro/neuroconv/pull/1168)


 # v0.6.6 (December 20, 2024)
1 change: 0 additions & 1 deletion (LightningPose conversion example document)
@@ -23,7 +23,6 @@ Convert LightningPose pose estimation data to NWB using :py:class:`~neuroconv.da
 >>> labeled_video_file_path = str(folder_path / "labeled_videos/test_vid_labeled.mp4")
 >>> converter = LightningPoseConverter(file_path=file_path, original_video_file_path=original_video_file_path, labeled_video_file_path=labeled_video_file_path, verbose=False)
-Source data is valid!
 >>> metadata = converter.get_metadata()
 >>> # For data provenance we add the time zone information to the conversion
 >>> session_start_time = metadata["NWBFile"]["session_start_time"]
1 change: 0 additions & 1 deletion docs/conversion_examples_gallery/recording/spikeglx.rst
@@ -24,7 +24,6 @@ We can easily convert all data stored in the native SpikeGLX folder structure to
 >>>
 >>> folder_path = f"{ECEPHY_DATA_PATH}/spikeglx/Noise4Sam_g0"
 >>> converter = SpikeGLXConverterPipe(folder_path=folder_path)
-Source data is valid!
 >>> # Extract what metadata we can from the source files
 >>> metadata = converter.get_metadata()
 >>> # For data provenance we add the time zone information to the conversion
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -39,7 +39,7 @@ dependencies = [
     "PyYAML>=5.4",
     "scipy>=1.4.1",
     "h5py>=3.9.0",
-    "hdmf>=3.13.0",
+    "hdmf>=3.13.0,<=3.14.5",  # Chunking bug
     "hdmf_zarr>=0.7.0",
     "pynwb>=2.7.0",
     "pydantic>=2.0.0",
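For context, the new ceiling means dependency resolution will pick an hdmf release between 3.13.0 and 3.14.5 inclusive. A quick, hedged way to confirm what actually got installed (standard library only; the bound values are copied from the pin above):

# Check the installed hdmf version against the temporary ceiling.
from importlib.metadata import version

hdmf_version = version("hdmf")
print(f"hdmf {hdmf_version}")  # expected to satisfy >=3.13.0,<=3.14.5 under this pin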
2 changes: 0 additions & 2 deletions src/neuroconv/basedatainterface.py
@@ -68,8 +68,6 @@ def __init__(self, verbose: bool = False, **source_data):
         self.verbose = verbose
         self.source_data = source_data

-        self._validate_source_data(source_data=source_data, verbose=verbose)
-
     def get_metadata_schema(self) -> dict:
         """Retrieve JSON schema for metadata."""
         metadata_schema = load_dict_from_file(Path(__file__).parent / "schemas" / "base_metadata_schema.json")
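This removal (together with the matching change in nwbconverter.py below) is what the changelog entry for PR #1168 refers to: interfaces no longer validate their source data on construction. A user who wants the old eager check can still run it by hand. A minimal sketch, assuming the public get_source_schema() classmethod; the file path is hypothetical:

# Hedged sketch: validate source data explicitly before constructing an
# interface, since __init__ no longer does so.
from jsonschema import validate

from neuroconv.datainterfaces import SpikeGLXRecordingInterface

source_data = dict(file_path="Noise4Sam_g0_t0.imec0.ap.bin")  # hypothetical path

# Check the source data against the interface's published source schema...
validate(instance=source_data, schema=SpikeGLXRecordingInterface.get_source_schema())

# ...then construct the interface, which now skips the check.
interface = SpikeGLXRecordingInterface(**source_data)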

The remaining changed files are 26 deletions, chiefly the per-interface requirements.txt pins, plus two files shown only as "Empty file.". The deletions for which this page preserves a filename header:

3 changes: 0 additions & 3 deletions src/neuroconv/datainterfaces/behavior/sleap/requirements.txt
This file was deleted.

1 change: 0 additions & 1 deletion src/neuroconv/datainterfaces/ecephys/edf/requirements.txt
This file was deleted.

2 changes: 0 additions & 2 deletions src/neuroconv/datainterfaces/ecephys/requirements.txt
This file was deleted.

1 change: 0 additions & 1 deletion src/neuroconv/datainterfaces/icephys/abf/requirements.txt
This file was deleted.

1 change: 0 additions & 1 deletion src/neuroconv/datainterfaces/icephys/requirements.txt
This file was deleted.

2 changes: 0 additions & 2 deletions src/neuroconv/datainterfaces/ophys/miniscope/requirements.txt
This file was deleted.

1 change: 0 additions & 1 deletion src/neuroconv/datainterfaces/ophys/requirements.txt
This file was deleted.

2 changes: 0 additions & 2 deletions src/neuroconv/datainterfaces/ophys/tdt_fp/requirements.txt
This file was deleted.

1 change: 0 additions & 1 deletion src/neuroconv/datainterfaces/ophys/tiff/requirements.txt
This file was deleted.

2 changes: 0 additions & 2 deletions src/neuroconv/datainterfaces/text/excel/requirements.txt
This file was deleted.

Sixteen further deleted files appear without filename headers.

1 change: 0 additions & 1 deletion src/neuroconv/nwbconverter.py
@@ -80,7 +80,6 @@ def _validate_source_data(self, source_data: dict[str, dict], verbose: bool = True):
     def __init__(self, source_data: dict[str, dict], verbose: bool = True):
         """Validate source_data against source_schema and initialize all data interfaces."""
         self.verbose = verbose
-        self._validate_source_data(source_data=source_data, verbose=self.verbose)
         self.data_interface_objects = {
             name: data_interface(**source_data[name])
             for name, data_interface in self.data_interface_classes.items()
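The hunk header shows that _validate_source_data is still defined on NWBConverter, so a conversion script can reproduce the old eager check by calling it explicitly after construction. A sketch under that assumption; ExampleConverter and its source data are hypothetical:

# Hedged sketch: ExampleConverter is a hypothetical subclass; the explicit
# _validate_source_data call mirrors the line removed from __init__ above.
from neuroconv import NWBConverter
from neuroconv.datainterfaces import SpikeGLXRecordingInterface


class ExampleConverter(NWBConverter):
    """Hypothetical converter wrapping a single SpikeGLX recording interface."""

    data_interface_classes = dict(SpikeGLX=SpikeGLXRecordingInterface)


source_data = dict(SpikeGLX=dict(file_path="Noise4Sam_g0_t0.imec0.ap.bin"))  # hypothetical path

converter = ExampleConverter(source_data=source_data, verbose=True)  # no validation on init anymore
converter._validate_source_data(source_data=source_data, verbose=True)  # opt back in explicitly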
9 changes: 4 additions & 5 deletions tests/test_on_data/setup_paths.py
@@ -10,20 +10,19 @@


 # Load the configuration for the data tests

+project_root_path = Path(__file__).parent.parent.parent

 if os.getenv("CI"):
     LOCAL_PATH = Path(".")  # Must be set to "." for CI
     print("Running GIN tests on Github CI!")
 else:
     # Override LOCAL_PATH in the `gin_test_config.json` file to a point on your system that contains the dataset folder
     # Use DANDIHub at hub.dandiarchive.org for open, free use of data found in the /shared/catalystneuro/ directory
-    test_config_path = Path(__file__).parent / "gin_test_config.json"
+    test_config_path = project_root_path / "tests" / "test_on_data" / "gin_test_config.json"
     config_file_exists = test_config_path.exists()
     if not config_file_exists:
-
-        root = test_config_path.parent.parent
-        base_test_config_path = root / "base_gin_test_config.json"
+        base_test_config_path = project_root_path / "base_gin_test_config.json"

         test_config_path.parent.mkdir(parents=True, exist_ok=True)
         copy(src=base_test_config_path, dst=test_config_path)

@@ -40,4 +39,4 @@
 ECEPHY_DATA_PATH = LOCAL_PATH / "ephy_testing_data"
 OPHYS_DATA_PATH = LOCAL_PATH / "ophys_testing_data"

-TEXT_DATA_PATH = Path(__file__).parent.parent.parent / "tests" / "test_text"
+TEXT_DATA_PATH = project_root_path / "tests" / "test_text"
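The refactor's core idea is that one project_root_path, computed once, replaces repeated Path(__file__).parent chains. A small illustration of the path arithmetic; the checkout location is hypothetical:

# From tests/test_on_data/setup_paths.py, three .parent hops reach the repo root.
from pathlib import Path

module_path = Path("/home/user/neuroconv/tests/test_on_data/setup_paths.py")  # hypothetical
project_root_path = module_path.parent.parent.parent
print(project_root_path)  # /home/user/neuroconv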
