Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Data Objects #196

Merged
merged 78 commits into from
Nov 23, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
78 commits
Select commit Hold shift + click to select a range
d4fcb5c
Add data model
robertdstein Nov 17, 2022
95d2232
Apply data model to base processor
robertdstein Nov 17, 2022
aec5fd2
Clean up data model
robertdstein Nov 17, 2022
c33fa98
Transition some processors
robertdstein Nov 17, 2022
47e98be
Fix typo
robertdstein Nov 17, 2022
08346cd
Get ImageLoader working
robertdstein Nov 17, 2022
d654bf6
Mirror order in magic methods
robertdstein Nov 17, 2022
9440ef5
Update data model
robertdstein Nov 17, 2022
f85f5c2
Update autoastrometry
robertdstein Nov 17, 2022
a77eb6b
Update candidate processors
robertdstein Nov 17, 2022
55cdef4
Update database processors
robertdstein Nov 17, 2022
7ef80c7
Update photometry processors
robertdstein Nov 17, 2022
f14f547
update_dataset rather than batches
robertdstein Nov 17, 2022
d76a6dd
Update util processors
robertdstein Nov 17, 2022
b0dc5cf
Remove duplicated file
robertdstein Nov 18, 2022
0b023eb
Update Region_file to region_bool
robertdstein Nov 18, 2022
322ca64
Update gitignore
robertdstein Nov 18, 2022
4223446
Remove old image
robertdstein Nov 18, 2022
b206082
Clean up datasets
robertdstein Nov 18, 2022
62ddd95
Update calibration processors:
robertdstein Nov 18, 2022
32e9b7a
Substantially deverboseify Fritz
robertdstein Nov 18, 2022
6eb34dc
Add keys() to data
robertdstein Nov 18, 2022
dac6ee1
add maths
robertdstein Nov 18, 2022
d5a8963
Add string/maths to images
robertdstein Nov 18, 2022
91452c0
remove redundant imports
robertdstein Nov 18, 2022
fe7a76f
Bug fix
robertdstein Nov 18, 2022
b86d34f
Add ref img key
robertdstein Nov 18, 2022
35dc2d5
Bugfix typos
robertdstein Nov 18, 2022
0ed8ecf
Clean imports
robertdstein Nov 18, 2022
239e2fd
progress on imsubv2
robertdstein Nov 18, 2022
c2729a0
Fix typo
robertdstein Nov 18, 2022
f68e85c
Deprint
robertdstein Nov 18, 2022
40f77f5
Port summer test
robertdstein Nov 18, 2022
51293c5
Bugfix
robertdstein Nov 18, 2022
fa8a39d
Port wirc test
robertdstein Nov 18, 2022
3d175b7
Delete old code
robertdstein Nov 18, 2022
a271c68
Option to initialise without list
robertdstein Nov 18, 2022
fc395b3
Port test_errors
robertdstein Nov 18, 2022
9a503fc
Type hint calhunter
robertdstein Nov 18, 2022
3b3ba21
Rename Data to DataBlock class
robertdstein Nov 18, 2022
b3f8d19
add missing Image(DataBlock)
robertdstein Nov 18, 2022
7c42793
Wrapper up _load_raw_image()
robertdstein Nov 18, 2022
31cd052
Name ewrapper load_raw_data()
robertdstein Nov 18, 2022
85196ec
Dataset not DataSet
robertdstein Nov 18, 2022
b42f5e8
Get calhunter integrated
robertdstein Nov 18, 2022
73f6760
New dataset-compliant call of reduce images
robertdstein Nov 18, 2022
473d096
Add monitor test
robertdstein Nov 19, 2022
6fb36ec
Working reference.py
robertdstein Nov 19, 2022
65c5d57
Take pathlibs for open_fits
robertdstein Nov 19, 2022
6fd4a3a
Require dataset for reduce_images
robertdstein Nov 19, 2022
2dec63e
Deprotect protected variables
robertdstein Nov 19, 2022
d3399ce
dedebug
robertdstein Nov 19, 2022
90e6c57
Typehint imagehandler
robertdstein Nov 19, 2022
6ac6463
Path for zogy
robertdstein Nov 19, 2022
906e2a9
Update reference image naming
robertdstein Nov 21, 2022
94810ea
delete errant print
robertdstein Nov 21, 2022
d315f07
More text
robertdstein Nov 22, 2022
206ce6f
Use sci sextractor
robertdstein Nov 22, 2022
67d1083
Reference generator yields Path objects
robertdstein Nov 22, 2022
e45400c
Fix typo
robertdstein Nov 22, 2022
fe8fc65
Make imsub more readable
robertdstein Nov 22, 2022
ad23647
stop
robertdstein Nov 22, 2022
7f6d419
Update
robertdstein Nov 22, 2022
cac2f21
delete redundant imports
robertdstein Nov 23, 2022
9ecd59a
Fix memory loading
robertdstein Nov 23, 2022
d696e40
New tests version
robertdstein Nov 23, 2022
e9d9635
print vaks
robertdstein Nov 23, 2022
7dec8fe
more logging for debugging
virajkaram Nov 23, 2022
1cca253
add date to sci sextractor
virajkaram Nov 23, 2022
68969c1
Deverboesify
robertdstein Nov 23, 2022
1920e55
normal logging
robertdstein Nov 23, 2022
b16aa3b
Merge branch 'data' of github.com:winter-telescope/winterdrp into data
virajkaram Nov 23, 2022
639e1d0
Remove logging
robertdstein Nov 23, 2022
21a37b4
Update main
robertdstein Nov 23, 2022
382fece
Add ImageBatch for no caks
robertdstein Nov 23, 2022
ce37382
Add ImageBatch for no cals
robertdstein Nov 23, 2022
e012771
Fix logger statements
robertdstein Nov 23, 2022
0293b03
Fix typo
robertdstein Nov 23, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 20 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,21 @@
winterdrp/processors/astromatic/config/sex.config
.env
.env
__pycache__/
*.pyc
.DS_Store
.idea/

# Distribution / packaging
bin/
build/
develop-eggs/
dist/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
8 changes: 4 additions & 4 deletions tests/test_errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
from winterdrp.pipelines import get_pipeline
from winterdrp.errors import ImageNotFoundError
from datetime import datetime
from winterdrp.data import ImageBatch, Dataset

logger = logging.getLogger(__name__)

Expand All @@ -14,7 +14,7 @@

expected_error = {
'processor_name': 'winterdrp.processors.utils.image_loader',
'contents': [''],
'contents': [],
'known_error_bool': True,
'non_critical_bool': False
}
Expand All @@ -28,11 +28,11 @@ def setUp(self):
def test_pipeline(self):
self.logger.info("\n\n Testing summer pipeline \n\n")

res, errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=True)
res, errorstack = pipeline.reduce_images(Dataset(ImageBatch()), catch_all_errors=True)

errorstack.summarise_error_stack(verbose=True)

self.assertEqual(len(errorstack.failed_images), 1)
self.assertEqual(len(errorstack.failed_images), 0)
self.assertEqual(len(errorstack.noncritical_reports), 0)
self.assertEqual(len(errorstack.reports), 1)

Expand Down
2 changes: 0 additions & 2 deletions tests/test_monitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@

logger = logging.getLogger(__name__)

logging.getLogger("winterdrp").setLevel("DEBUG")

summer_cal_requirements = [
CalRequirement(target_name="bias", required_field="EXPTIME", required_values=["0.0"]),
CalRequirement(target_name="flat", required_field="FILTERID", required_values=["r"]),
Expand Down
33 changes: 4 additions & 29 deletions tests/test_summer_imsub_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
import unittest
import logging
from winterdrp.processors.utils.image_loader import ImageLoader
from winterdrp.processors.utils.image_selector import ImageSelector
from winterdrp.paths import base_name_key
from winterdrp.pipelines.summer.summer_pipeline import SummerPipeline
from winterdrp.downloader.get_test_data import get_test_data_dir
from winterdrp.pipelines.summer.blocks import subtract
from winterdrp.pipelines.summer.load_summer_image import load_proc_summer_image
from winterdrp.data import Dataset, ImageBatch

logger = logging.getLogger(__name__)


expected_values = {
'SCORSTD': 1.120988782614284,
'SCORMED': 0.0010565268947477073,
Expand All @@ -28,11 +22,11 @@ def setUp(self):
def test_pipeline(self):
self.logger.info("\n\n Testing summer pipeline \n\n")

res, errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)
res, errorstack = pipeline.reduce_images(Dataset([ImageBatch()]), catch_all_errors=False)

self.assertEqual(len(res[0][0]), 1)
self.assertEqual(len(res), 1)

header = res[0][1][0]
header = res[0][0].get_header()

for key, value in expected_values.items():
if isinstance(value, float):
Expand All @@ -42,22 +36,3 @@ def test_pipeline(self):
else:
raise TypeError(f"Type for value ({type(value)} is neither float not int.")


if __name__ == "__main__":

print("Calculating latest scorr metrics dictionary")

# Code to generate updated ZP dict of the results change

new_res, new_errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)

new_header = new_res[0][1][0]

new_exp = "expected_values = { \n"
for header_key in new_header.keys():
if "SCOR" in header_key:
new_exp += f' "{header_key}": {new_header[header_key]}, \n'
new_exp += "}"
print(new_exp)


27 changes: 4 additions & 23 deletions tests/test_summer_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import unittest
import logging
from winterdrp.pipelines import get_pipeline
from winterdrp.data import Dataset, ImageBatch

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -45,11 +46,11 @@ def setUp(self):
def test_pipeline(self):
self.logger.info("\n\n Testing summer pipeline \n\n")

res, errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)
res, errorstack = pipeline.reduce_images(Dataset([ImageBatch()]), catch_all_errors=False)

self.assertEqual(len(res[0][0]), 1)
self.assertEqual(len(res[0]), 1)

header = res[0][1][0]
header = res[0][0].get_header()

for key, value in expected_zp.items():
if isinstance(value, float):
Expand All @@ -58,23 +59,3 @@ def test_pipeline(self):
self.assertEqual(value, header[key])
else:
raise TypeError(f"Type for value ({type(value)} is neither float not int.")


if __name__ == "__main__":

print("Calculating latest ZP dictionary")

# Code to generate updated ZP dict of the results change

new_res, new_errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)

new_header = new_res[0][1][0]

new_exp = "expected_zp = { \n"
for header_key in new_header.keys():
if "ZP_" in header_key:
new_exp += f' "{header_key}": {new_header[header_key]}, \n'
new_exp += "}"
print(new_exp)


15 changes: 7 additions & 8 deletions tests/test_wirc_imsub_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
import unittest
from winterdrp.processors.utils.image_loader import ImageLoader
from winterdrp.processors.utils.image_selector import ImageSelector, ImageBatcher, ImageDebatcher
import logging
import os
from winterdrp.downloader.get_test_data import get_test_data_dir
from winterdrp.paths import base_name_key
from winterdrp.pipelines.wirc.load_wirc_image import load_raw_wirc_image
from winterdrp.pipelines.wirc.wirc_pipeline import WircPipeline
from astropy.io import fits
Expand All @@ -13,13 +11,14 @@
from winterdrp.pipelines.wirc.blocks import subtract
from winterdrp.pipelines.wirc.generator import wirc_reference_image_resampler, wirc_reference_sextractor, \
wirc_reference_psfex
from winterdrp.data import Dataset, ImageBatch


logger = logging.getLogger(__name__)

test_data_dir = get_test_data_dir()

ref_img_directory = os.path.join(test_data_dir, 'wirc/ref')
print(ref_img_directory)


def test_reference_image_generator(
Expand Down Expand Up @@ -49,8 +48,8 @@ def test_reference_image_generator(
),
Reference(
ref_image_generator=test_reference_image_generator,
ref_swarp_resampler=wirc_reference_image_resampler,
ref_sextractor=wirc_reference_sextractor,
swarp_resampler=wirc_reference_image_resampler,
sextractor=wirc_reference_sextractor,
ref_psfex=wirc_reference_psfex
),
] + subtract
Expand All @@ -68,11 +67,11 @@ def setUp(self):
def test_pipeline(self):
self.logger.info("\n\n Testing wirc imsub pipeline \n\n")

res, errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)
res, errorstack = pipeline.reduce_images(dataset=Dataset(ImageBatch()), catch_all_errors=False)

self.assertEqual(len(res), 1)

header = res[0][1][0]
header = res[0][0].get_header()

for key, value in expected_values.items():
if isinstance(value, float):
Expand All @@ -89,7 +88,7 @@ def test_pipeline(self):

# Code to generate updated ZP dict of the results change

new_res, new_errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)
new_res, new_errorstack = pipeline.reduce_images(catch_all_errors=False)

new_header = new_res[0][1][0]

Expand Down
23 changes: 3 additions & 20 deletions tests/test_wirc_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from winterdrp.pipelines.wirc.generator import wirc_astrometric_catalog_generator, wirc_photometric_catalog_generator
from winterdrp.processors.csvlog import CSVLog
from winterdrp.downloader.get_test_data import get_test_data_dir
from winterdrp.data import Dataset, ImageBatch

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -101,11 +102,11 @@ def setUp(self):
def test_pipeline(self):
self.logger.info("\n\n Testing wirc pipeline \n\n")

res, errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)
res, errorstack = pipeline.reduce_images(Dataset([ImageBatch()]), catch_all_errors=False)

self.assertEqual(len(res), 1)

header = res[0][1][0]
header = res[0][0].get_header()

for key, value in expected_zp.items():
if isinstance(value, float):
Expand All @@ -115,21 +116,3 @@ def test_pipeline(self):
else:
raise TypeError(f"Type for value ({type(value)} is neither float not int.")


if __name__ == "__main__":

print("Calculating latest ZP dictionary")

# Code to generate updated ZP dict of the results change

new_res, new_errorstack = pipeline.reduce_images([[[], []]], catch_all_errors=False)

new_header = new_res[0][1][0]

new_exp = "expected_zp = { \n"
for header_key in new_header.keys():
if "ZP_" in header_key:
new_exp += f' "{header_key}": {new_header[header_key]}, \n'
new_exp += "}"
print(new_exp)

6 changes: 3 additions & 3 deletions winterdrp/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
from astropy.time import Time
from astropy import units as u
from winterdrp.monitor.base_monitor import Monitor
from winterdrp.paths import base_raw_dir
from datetime import datetime
from winterdrp.data import Dataset, ImageBatch
from winterdrp.processors.utils import ImageLoader

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -167,7 +167,7 @@
night=night,
)

batches, errorstack = pipe.reduce_images([[[], []]], catch_all_errors=True)
batches, errorstack = pipe.reduce_images(Dataset([ImageBatch()]), catch_all_errors=True)
if args.postprocessconfig is not None:
post_config = [x for x in pipe.set_configuration(config) if isinstance(x, ImageLoader)][:1]
post_config += pipe.postprocess_configuration(
Expand All @@ -181,7 +181,7 @@
pipe.set_configuration(protected_key)

_, new_errorstack = pipe.reduce_images(
batches=[[[], []]],
Dataset([ImageBatch()]),
selected_configurations=protected_key,
catch_all_errors=True
)
Expand Down
3 changes: 3 additions & 0 deletions winterdrp/data/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from winterdrp.data.base_data import DataBlock, DataBatch, Dataset
from winterdrp.data.image_data import Image, ImageBatch
from winterdrp.data.source_data import SourceTable, SourceBatch
90 changes: 90 additions & 0 deletions winterdrp/data/base_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@

class DataBlock:
    """Abstract base for a single unit of pipeline data (e.g. one image).

    Subclasses must supply dictionary-style read/write access to their
    contents; the base class only defines the required interface.
    """

    def __getitem__(self, item):
        """Key-based read access; concrete subclasses must implement this."""
        raise NotImplementedError

    def __setitem__(self, key, value):
        """Key-based write access; concrete subclasses must implement this."""
        raise NotImplementedError


class DataBatch:
    """An ordered, list-like collection of :class:`DataBlock` objects.

    Supports indexing, assignment, iteration, length, and concatenation.
    ``get_batch`` is left abstract so that subclasses (e.g. image batches)
    can expose their contents with a concrete return type.
    """

    def __init__(self, batch: list[DataBlock] | DataBlock = None):
        # Accept a single block, a list of blocks, or nothing (empty batch).
        if batch is None:
            members = []
        elif isinstance(batch, DataBlock):
            members = [batch]
        else:
            members = batch
        self._batch = members

    def get_batch(self) -> list[DataBlock]:
        """Return the underlying list; concrete subclasses must implement."""
        raise NotImplementedError

    def append(self, item):
        """Add a single entry to the end of the batch."""
        self._batch.append(item)

    def __getitem__(self, item):
        return self._batch[item]

    def __setitem__(self, key, value):
        self._batch[key] = value

    def __add__(self, other):
        # Concatenation relies on the subclass-provided get_batch().
        combined = self._batch + other.get_batch()
        return DataBatch(combined)

    def __iadd__(self, other):
        # Extend in place so aliases of the internal list stay in sync.
        self._batch.extend(other.get_batch())
        return self

    def __len__(self):
        return len(self._batch)

    def __iter__(self):
        return iter(self._batch)


class Dataset:
    """An ordered collection of :class:`DataBatch` objects.

    A :class:`Dataset` is the top-level container passed through a pipeline:
    it groups batches, which in turn group individual data blocks. All
    batches in one dataset must share a single concrete type.
    """

    def __init__(
            self,
            batches: list[DataBatch] | DataBatch = None
    ):
        # Accept a single batch, a list of batches, or nothing (empty).
        if batches is None:
            batches = []
        elif isinstance(batches, DataBatch):
            batches = [batches]

        self._batches = batches

    def get_batches(self) -> list[DataBatch]:
        """Return the underlying list of batches."""
        return self._batches

    def append(self, batch: DataBatch):
        """Add a batch, enforcing that all batches share one concrete type."""
        if len(self._batches) > 0:
            # Keep the dataset homogeneous (e.g. all ImageBatch)
            assert type(self._batches[0]) == type(batch), \
                f"Cannot append {type(batch)} to a dataset of " \
                f"{type(self._batches[0])}"

        self._batches.append(batch)

    def __getitem__(self, item):
        return self._batches.__getitem__(item)

    def __setitem__(self, key, value):
        return self._batches.__setitem__(key, value)

    def __len__(self):
        return self._batches.__len__()

    def __add__(self, other):
        # Bugfix: this previously returned a DataBatch, so concatenating two
        # Datasets silently produced the wrong container type. Mirror
        # __iadd__ and return a Dataset.
        return Dataset(self._batches + other.get_batches())

    def __iadd__(self, other):
        self._batches += other.get_batches()
        return self

    def __iter__(self):
        return self._batches.__iter__()

Loading