From 5871e471ca3d43cacd9f329fa5403aef4ce6121d Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 20:08:03 -0800 Subject: [PATCH 01/12] Typehint error report --- poetry.lock | 67 ++++++++++++++++++++------------ winterdrp/errors/error_report.py | 5 ++- 2 files changed, 47 insertions(+), 25 deletions(-) diff --git a/poetry.lock b/poetry.lock index 819103954..435872f2d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -266,7 +266,7 @@ dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0 [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -321,6 +321,20 @@ category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +[[package]] +name = "comm" +version = "0.1.1" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +traitlets = ">5.3" + +[package.extras] +test = ["pytest"] + [[package]] name = "confluent-kafka" version = "1.9.2" @@ -678,7 +692,7 @@ python-versions = "*" [[package]] name = "ipykernel" -version = "6.17.1" +version = "6.19.0" description = "IPython Kernel for Jupyter" category = "main" optional = false @@ -686,6 +700,7 @@ python-versions = ">=3.8" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" debugpy = ">=1.0" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" @@ -695,11 +710,14 @@ packaging = "*" psutil = "*" pyzmq = ">=17" tornado = ">=6.1" -traitlets = ">=5.1.0" +traitlets = ">=5.4.0" [package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-cov", "pytest-timeout"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +typing = ["mypy (>=0.990)"] [[package]] name = "ipython" @@ -746,7 +764,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.2" +version = "8.0.3" description = "Jupyter interactive widgets" category = "main" optional = false @@ -1023,7 +1041,7 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.3" +version = "3.0.4" description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = false @@ -1311,14 +1329,11 @@ python-versions = ">=3.8" [[package]] name = "packaging" -version = "21.3" +version = "22.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pandas" @@ -2309,7 +2324,7 @@ python-versions = "*" [[package]] name = "widgetsnbextension" -version = "4.0.3" +version = "4.0.4" description = "Jupyter interactive widgets for Jupyter Notebook" category = "main" optional = false @@ -2479,8 +2494,8 @@ bleach = [ {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, ] certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = 
"certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] cffi = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, @@ -2564,6 +2579,10 @@ colorama = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +comm = [ + {file = "comm-0.1.1-py3-none-any.whl", hash = "sha256:788a4ec961956c1cb2b0ba3c21f2458ff5757bb2f552032b140787af88d670a3"}, + {file = "comm-0.1.1.tar.gz", hash = "sha256:f395ea64f4f261f35ffc2fbf80a62ec071375dac48cd3ea56092711e74dd063e"}, +] confluent-kafka = [ {file = "confluent-kafka-1.9.2.tar.gz", hash = "sha256:2fb97bd25d436bd59fe079885aa77a3a2f23cface9c6359d4700053665849262"}, {file = "confluent_kafka-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff08b9f978f8b37f2961614a68f9fdb4fabd10cdd940234e80200806d93a1c30"}, @@ -2934,8 +2953,8 @@ iniconfig = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ - {file = "ipykernel-6.17.1-py3-none-any.whl", hash = "sha256:3a9a1b2ad6dbbd5879855aabb4557f08e63fa2208bffed897f03070e2bb436f6"}, - {file = "ipykernel-6.17.1.tar.gz", hash = "sha256:e178c1788399f93a459c241fe07c3b810771c607b1fb064a99d2c5d40c90c5d4"}, + {file = "ipykernel-6.19.0-py3-none-any.whl", hash = "sha256:851aa3f9cbbec6918136ada733069a6709934b8106a743495070cf46917eb9a9"}, + {file = "ipykernel-6.19.0.tar.gz", hash = "sha256:7aabde9e201c4a8f43000f4be0d057f91df13b906ea646acd9047fcb85600b9b"}, ] ipython = [ {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, @@ -2946,8 +2965,8 @@ ipython-genutils = [ {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] ipywidgets = [ - {file = "ipywidgets-8.0.2-py3-none-any.whl", hash = "sha256:1dc3dd4ee19ded045ea7c86eb273033d238d8e43f9e7872c52d092683f263891"}, - {file = "ipywidgets-8.0.2.tar.gz", hash = "sha256:08cb75c6e0a96836147cbfdc55580ae04d13e05d26ffbc377b4e1c68baa28b1f"}, + {file = "ipywidgets-8.0.3-py3-none-any.whl", hash = "sha256:db7dd35fb1217636cbdbe0ba0bd2216d91a7695cb28b5c1dca17e62cd51378de"}, + {file = "ipywidgets-8.0.3.tar.gz", hash = "sha256:2ec50df8538a1d4ddd5d454830d010922ad1015e81ac23efb27c0908bbc1eece"}, ] isoduration = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, @@ -3015,8 +3034,8 @@ jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.3-py3-none-any.whl", hash = "sha256:6aa1bc0045470d54d76b9c0b7609a8f8f0087573bae25700a370c11f82cb38c8"}, - {file = "jupyterlab_widgets-3.0.3.tar.gz", hash = "sha256:c767181399b4ca8b647befe2d913b1260f51bf9d8ef9b7a14632d4c1a7b536bd"}, + {file = "jupyterlab_widgets-3.0.4-py3-none-any.whl", hash = 
"sha256:4c9275daa6d20fc96c3aea45756ece7110850d035b0b93a6a40e918016b927da"}, + {file = "jupyterlab_widgets-3.0.4.tar.gz", hash = "sha256:9a568e022b8bb53ab23291f6ddb52f8002b789c2c5763378cbc882be1d619be8"}, ] keyring = [ {file = "keyring-23.11.0-py3-none-any.whl", hash = "sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e"}, @@ -3281,8 +3300,8 @@ numpy = [ {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, + {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, ] pandas = [ {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273"}, @@ -4086,8 +4105,8 @@ wget = [ {file = "wget-3.2.zip", hash = "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061"}, ] widgetsnbextension = [ - {file = "widgetsnbextension-4.0.3-py3-none-any.whl", hash = "sha256:7f3b0de8fda692d31ef03743b598620e31c2668b835edbd3962d080ccecf31eb"}, - {file = "widgetsnbextension-4.0.3.tar.gz", hash = "sha256:34824864c062b0b3030ad78210db5ae6a3960dfb61d5b27562d6631774de0286"}, + {file = "widgetsnbextension-4.0.4-py3-none-any.whl", hash = "sha256:fa0e840719ec95dd2ec85c3a48913f1a0c29d323eacbcdb0b29bfed0cc6da678"}, + {file = "widgetsnbextension-4.0.4.tar.gz", hash = "sha256:44c69f18237af0f610557d6c1c7ef76853f5856a0e604c0a517f2320566bb775"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, diff --git a/winterdrp/errors/error_report.py b/winterdrp/errors/error_report.py index 2bc1c4193..8562c9be8 100644 --- a/winterdrp/errors/error_report.py +++ b/winterdrp/errors/error_report.py @@ -7,6 +7,7 @@ import logging import traceback from datetime import datetime +from pathlib import Path from winterdrp.errors.exceptions import BaseProcessorError, NoncriticalProcessingError @@ -18,7 +19,9 @@ class ErrorReport: Class representing a single error raised during processing """ - def __init__(self, error, processor_name, contents: list[str]): + def __init__( + self, error: Exception, processor_name: str, contents: list[str] | list[Path] + ): self.error = error self.processor_name = processor_name self.contents = contents From 5062dc128a90837f61159af7c242f87fc5dedd30 Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 20:08:44 -0800 Subject: [PATCH 02/12] Add linebreak --- winterdrp/data/image_data.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/winterdrp/data/image_data.py b/winterdrp/data/image_data.py index 6ad6fae24..46e68e811 100644 --- a/winterdrp/data/image_data.py +++ b/winterdrp/data/image_data.py @@ -1,6 +1,7 @@ """ -Module to specify the input data classes for :class:`winterdrp.processors.base_processor.ImageHandler` +Module to specify the input data classes for +:class:`winterdrp.processors.base_processor.ImageHandler` """ import logging From b63f27b26a649bb0a26e421aed094f9f1cfdd24d Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 20:34:07 -0800 Subject: 
[PATCH 03/12] lintify BaseProcessor --- winterdrp/processors/base_processor.py | 129 ++++++++++++++++++------- 1 file changed, 94 insertions(+), 35 deletions(-) diff --git a/winterdrp/processors/base_processor.py b/winterdrp/processors/base_processor.py index 71487d3f0..e0cb2b350 100644 --- a/winterdrp/processors/base_processor.py +++ b/winterdrp/processors/base_processor.py @@ -1,4 +1,6 @@ -import copy +""" +Module containing the :class:`~wintedrp.processors.BaseProcessor` +""" import datetime import getpass import hashlib @@ -10,7 +12,6 @@ from queue import Queue from threading import Thread -import astropy.io.fits import numpy as np from winterdrp.data import DataBatch, Dataset, Image, ImageBatch, SourceBatch @@ -38,34 +39,38 @@ class PrerequisiteError(ProcessorError): - pass + """ + An error raised if a processor requires another one as a prerequisite, + but that processor is not present + """ class NoCandidatesError(ProcessorError): - pass + """ + An error raised if a :class:`~wintedrp.processors.CandidateGenerator` produces + no candidates + """ -class BaseDPU: - def base_apply(self, dataset: Dataset) -> tuple[Dataset, ErrorStack]: - raise NotImplementedError() - - def generate_error_report( - self, exception: Exception, batch: DataBatch - ) -> ErrorReport: - return ErrorReport(exception, self.__module__, batch.get_raw_image_names()) - +class BaseProcessor: + """ + Base processor class, to be inherited from for all processors + """ -class BaseProcessor(BaseDPU): @property def base_key(self): + """ + Unique key for the processor, to be used e.g in processing history tracking + + :return: None + """ raise NotImplementedError max_n_cpu: int = max_n_cpu subclasses = {} - def __init__(self, *args, **kwargs): - + def __init__(self): self.night = None self.night_sub_dir = None self.preceding_steps = None @@ -77,14 +82,30 @@ def __init_subclass__(cls, **kwargs): cls.subclasses[cls.base_key] = cls def set_preceding_steps(self, previous_steps: list): + """ + Provides processor with the list of preceding processors, and saves this + + :param previous_steps: list of processors + :return: None + """ self.preceding_steps = previous_steps def set_night(self, night_sub_dir: str | int = ""): + """ + Sets the night subdirectory for the processor to read/write data + + :param night_sub_dir: String/int subdirectory for night + :return: None + """ self.night_sub_dir = night_sub_dir self.night = night_sub_dir.split("/")[-1] - @staticmethod - def update_dataset(dataset: Dataset) -> Dataset: + def generate_error_report( + self, exception: Exception, batch: DataBatch + ) -> ErrorReport: + return ErrorReport(exception, self.__module__, batch.get_raw_image_names()) + + def update_dataset(self, dataset: Dataset) -> Dataset: return dataset def check_prerequisites( @@ -93,9 +114,20 @@ def check_prerequisites( pass def clean_cache(self): + """ + Function to clean the internal cache filled by base_apply + + :return: None + """ self.passed_dataset = self.err_stack = None def base_apply(self, dataset: Dataset) -> tuple[Dataset, ErrorStack]: + """ + Core function to act on a dataset, and return an updated dataset + + :param dataset: Input dataset + :return: Updated dataset, and any caught errors + """ self.passed_dataset = Dataset() self.err_stack = ErrorStack() @@ -108,7 +140,7 @@ def base_apply(self, dataset: Dataset) -> tuple[Dataset, ErrorStack]: workers = [] - for i in range(n_cpu): + for _ in range(n_cpu): # Set up a worker thread to process database load worker = Thread(target=self.apply_to_batch, 
args=(watchdog_queue,)) worker.daemon = True @@ -116,7 +148,7 @@ def base_apply(self, dataset: Dataset) -> tuple[Dataset, ErrorStack]: workers.append(worker) - for i, batch in enumerate(dataset): + for batch in dataset: watchdog_queue.put(item=batch) watchdog_queue.join() @@ -128,35 +160,64 @@ def base_apply(self, dataset: Dataset) -> tuple[Dataset, ErrorStack]: return dataset, err_stack - def apply_to_batch(self, q): + def apply_to_batch(self, queue): + """ + Function to run self.apply on a batch in the queue, catch any errors, and then + update the internal cache with the results. + + :param queue: python threading queue + :return: None + """ while True: - batch = q.get() + batch = queue.get() try: batch = self.apply(batch) self.passed_dataset.append(batch) - except NoncriticalProcessingError as e: - err = self.generate_error_report(e, batch) + except NoncriticalProcessingError as exc: + err = self.generate_error_report(exc, batch) logger.error(err.generate_log_message()) self.err_stack.add_report(err) self.passed_dataset.append(batch) - except Exception as e: - err = self.generate_error_report(e, batch) + except Exception as exc: # pylint: disable=broad-except + err = self.generate_error_report(exc, batch) logger.error(err.generate_log_message()) self.err_stack.add_report(err) - q.task_done() + queue.task_done() def apply(self, batch: DataBatch): + """ + Function applying the processor to a + :class:`~winterdrp.data.base_data.DataBatch`. + Also updates the processing history. + + :param batch: input data batch + :return: updated data batch + """ batch = self._apply(batch) batch = self._update_processing_history(batch) return batch - def _apply(self, batch: DataBatch): + def _apply(self, batch: DataBatch) -> DataBatch: + """ + Core function to update the :class:`~winterdrp.data.base_data.DataBatch` + + :param batch: Input data batch + :return: updated data batch + """ raise NotImplementedError def _update_processing_history( self, batch: DataBatch, ) -> DataBatch: + """ + Function to update the processing history of each + :class:`~winterdrp.data.base_data.DataBlock` object in a + :class:`~winterdrp.data.base_data.DataBatch`. 
+ + :param batch: Input data batch + :return: Updated data batch + """ for i, data_block in enumerate(batch): data_block[proc_history_key] += self.base_key + "," data_block["REDUCER"] = getpass.getuser() @@ -227,11 +288,11 @@ def _apply_to_images( class ProcessorWithCache(BaseImageProcessor, ABC): def __init__( self, + *args, try_load_cache: bool = True, write_to_cache: bool = True, overwrite: bool = True, cache_sub_dir: str = cal_output_sub_dir, - *args, **kwargs, ): super().__init__(*args, **kwargs) @@ -272,13 +333,11 @@ def get_cache_file(self, images: ImageBatch) -> Image: logger.info(f"Loading cached file {path}") return self.open_fits(path) - else: - - image = self.make_image(images) + image = self.make_image(images) - if self.write_to_cache: - if np.sum([not exists, self.overwrite]) > 0: - self.save_fits(image, path) + if self.write_to_cache: + if np.sum([not exists, self.overwrite]) > 0: + self.save_fits(image, path) return image From 61e447dc56f2f70e86eb08f41763394aa9974fce Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 20:44:49 -0800 Subject: [PATCH 04/12] linitify send_email --- winterdrp/utils/send_email.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/winterdrp/utils/send_email.py b/winterdrp/utils/send_email.py index 7a1a6fb05..04899e103 100644 --- a/winterdrp/utils/send_email.py +++ b/winterdrp/utils/send_email.py @@ -1,3 +1,6 @@ +""" +Module containing gmail integration functions +""" import getpass import gzip import logging @@ -13,7 +16,7 @@ logger = logging.getLogger(__name__) -port = 465 # For SSL +GMAIL_PORT = 465 # For SSL def send_gmail( @@ -23,8 +26,21 @@ def send_gmail( email_sender: str = os.getenv("WATCHDOG_EMAIL"), email_password: str = os.getenv("WATCHDOG_EMAIL_PASSWORD"), attachments: str | list[str] = None, - autocompress: bool = True, + auto_compress: bool = True, ): + """ + Function to send an email to a list of recipients from a gmail account. 
+ + :param email_recipients: recipients for email + :param email_subject: subject for the email + :param email_text: Text to send + :param email_sender: Gmail to send from + :param email_password: Password for sender gmail account + :param attachments: Any files to attach + :param auto_compress: Boolean to compress large attachments before sending + :return: + """ + # pylint: disable=too-many-arguments # Create a text/plain message msg = MIMEMultipart() @@ -33,8 +49,6 @@ def send_gmail( if not isinstance(email_recipients, list): email_recipients = [email_recipients] - # # me == the sender's email address - # # you == the recipient's email address msg["Subject"] = email_subject msg["From"] = email_sender msg["To"] = ", ".join(email_recipients) @@ -54,17 +68,18 @@ def send_gmail( if not isinstance(file_path, Path): file_path = Path(file_path) - with open(file_path, "rb") as f: + with open(file_path, "rb") as attachment: if np.logical_and( - autocompress, file_path.stat().st_size > (1024 * 1024) + auto_compress, file_path.stat().st_size > (1024 * 1024) ): - data = gzip.compress(f.read()) + data = gzip.compress(attachment.read()) base_name += ".gzip" else: - data = f.read() + data = attachment.read() part = MIMEApplication(data, Name=base_name) + # After the file is closed part["Content-Disposition"] = f"attachment; filename={base_name}" msg.attach(part) @@ -80,6 +95,6 @@ def send_gmail( # Create a secure SSL context context = ssl.create_default_context() - with smtplib.SMTP_SSL("smtp.gmail.com", port, context=context) as server: + with smtplib.SMTP_SSL("smtp.gmail.com", GMAIL_PORT, context=context) as server: server.login(email_sender, email_password) server.send_message(msg) From dc48276f555fe80dc4ac400d7d5fc0676b7e5d96 Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 21:11:31 -0800 Subject: [PATCH 05/12] lintify image selector --- winterdrp/processors/utils/image_selector.py | 77 ++++++++++++++------ 1 file changed, 56 insertions(+), 21 deletions(-) diff --git a/winterdrp/processors/utils/image_selector.py b/winterdrp/processors/utils/image_selector.py index b1b2f2d4c..85ca8433f 100644 --- a/winterdrp/processors/utils/image_selector.py +++ b/winterdrp/processors/utils/image_selector.py @@ -1,9 +1,9 @@ +""" +Module containing processors and functions to select a subset of images from a batch +""" import logging -import astropy.io.fits -import numpy as np - -from winterdrp.data import DataBatch, Dataset, Image, ImageBatch +from winterdrp.data import Dataset, ImageBatch from winterdrp.errors import ProcessorError from winterdrp.processors.base_processor import BaseImageProcessor, CleanupProcessor @@ -11,7 +11,9 @@ class ParsingError(KeyError, ProcessorError): - pass + """ + Exception arising due to errors in parsing Image headers + """ def select_from_images( @@ -19,6 +21,15 @@ def select_from_images( key: str = "target", target_values: str | list[str] = "science", ) -> ImageBatch: + """ + Returns a subset of images in a batch with have values of equal to + a value in + + :param batch: image batch to sort + :param key: header key to filter on + :param target_values: accepted value(s) for key + :return: image batch containing the subset of images which pass + """ # Enforce string in list for later matching if not isinstance(target_values, list): @@ -28,32 +39,35 @@ def select_from_images( new_batch = ImageBatch() - for i, image in enumerate(batch): + for image in batch: try: if str(image[key]) in target_values: new_batch.append(image) - except KeyError as e: - logger.error(e) - 
raise ParsingError(e) + except KeyError as exc: + logger.error(exc) + raise ParsingError(exc) from exc return new_batch class ImageSelector(BaseImageProcessor, CleanupProcessor): + """ + Processor to only select a subset of images from a batch + """ base_key = "select" - def __init__(self, *args: tuple[str, str | list[str]], **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *args: tuple[str, str | list[str]]): + super().__init__() self.targets = args def __str__(self): reqs = [] - for x in self.targets: - if isinstance(x[1], list): - reqs.append(f"{x[0]} = {' or '.join(x[1])}") + for target in self.targets: + if isinstance(target[1], list): + reqs.append(f"{target[0]} = {' or '.join(target[1])}") else: - reqs.append(f"{x[0]} = {x[1]}") + reqs.append(f"{target[0]} = {target[1]}") return f"Processor to select images where {', and '.join(reqs)}" @@ -74,13 +88,23 @@ def _apply_to_images( def split_images_into_batches( images: ImageBatch, split_key: str | list[str] ) -> Dataset: + """ + Function to split a single :class:`~winterdrp.data.image_data.ImageBatch` object + into multiple :class:`~winterdrp.data.base_data.DataBatch` objects. + Each new batch will have the same value of . + Returns a dataset containing the new batches + + :param images: Image batch to split + :param split_key: Key to split batch + :return: Dataset containing new image batches + """ if isinstance(split_key, str): split_key = [split_key] - groups = dict() + groups = {} - for i, image in enumerate(images): + for image in images: uid = [] for key in split_key: @@ -88,7 +112,7 @@ def split_images_into_batches( uid = "_".join(uid) - if uid not in groups.keys(): + if uid not in groups: groups[uid] = [image] else: groups[uid] += [image] @@ -99,11 +123,15 @@ def split_images_into_batches( class ImageBatcher(BaseImageProcessor): + """ + Module to split :class:`~winterdrp.data.image_data.ImageBatch` object + into multiple :class:`~winterdrp.data.base_data.DataBatch` objects. + """ base_key = "batch" - def __init__(self, split_key: str | list[str], *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, split_key: str | list[str]): + super().__init__() self.split_key = split_key def __str__(self) -> str: @@ -113,7 +141,10 @@ def __str__(self) -> str: else: split = [self.split_key] - return f"Groups images into batches, with each batch having the same value of {' and '.join(split)}" + return ( + f"Groups images into batches, with each batch having " + f"the same value of {' and '.join(split)}" + ) def _apply_to_images( self, @@ -133,6 +164,10 @@ def update_dataset(self, dataset: Dataset) -> Dataset: class ImageDebatcher(BaseImageProcessor): + """ + Processor to group all incoming :class:`~winterdrp.data.image_data.ImageBatch` + objects into a single batch. + """ base_key = "debatch" From df995b6a77d8db0241f05fb94a67c8d10a6a20af Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 21:36:54 -0800 Subject: [PATCH 06/12] lintify sextractor --- .../astromatic/sextractor/sextractor.py | 132 ++++++++---------- 1 file changed, 56 insertions(+), 76 deletions(-) diff --git a/winterdrp/processors/astromatic/sextractor/sextractor.py b/winterdrp/processors/astromatic/sextractor/sextractor.py index d42bcdf0b..55a3dc459 100644 --- a/winterdrp/processors/astromatic/sextractor/sextractor.py +++ b/winterdrp/processors/astromatic/sextractor/sextractor.py @@ -1,9 +1,13 @@ +""" +Module to run +:func:`~winterdrp.processors.astromatic.sextractor.sourceextractor.run_sextractor_single + as a processor. 
+""" + import logging import os import shutil - -import astropy.io.fits -import numpy as np +from pathlib import Path from winterdrp.data import ImageBatch from winterdrp.paths import ( @@ -15,7 +19,6 @@ from winterdrp.processors.astromatic.sextractor.sourceextractor import ( default_saturation, parse_checkimage, - run_sextractor_dual, run_sextractor_single, ) from winterdrp.processors.base_processor import BaseImageProcessor @@ -24,9 +27,9 @@ logger = logging.getLogger(__name__) -sextractor_header_key = "SRCCAT" +SEXTRACTOR_HEADER_KEY = "SRCCAT" -sextractor_checkimg_keys = { +sextractor_checkimg_map = { "BACKGROUND": "BKGPT", "BACKGROUND_RMS": "BKGRMS", "MINIBACKGROUND": "MINIBKG", @@ -35,7 +38,12 @@ class Sextractor(BaseImageProcessor): + """ + Processor to run sextractor on images + """ + base_key = "sextractor" + # pylint: disable=too-many-instance-attributes def __init__( self, @@ -53,10 +61,9 @@ def __init__( cache: bool = False, mag_zp: float = None, write_regions_bool: bool = False, - *args, - **kwargs, ): - super(Sextractor, self).__init__(*args, **kwargs) + # pylint: disable=too-many-arguments + super().__init__() self.output_sub_dir = output_sub_dir self.config = config_path @@ -79,7 +86,12 @@ def __str__(self) -> str: f"and save detected sources to the '{self.output_sub_dir}' directory." ) - def get_sextractor_output_dir(self): + def get_sextractor_output_dir(self) -> str: + """ + Get the directory to output + + :return: output directory + """ return get_output_dir(self.output_sub_dir, self.night_sub_dir) def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: @@ -93,32 +105,18 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: for image in batch: - det_image, measure_image, det_header, measure_header = ( - None, - None, - None, - None, - ) - if self.gain is None: - if "GAIN" in image.keys(): - self.gain = image["GAIN"] - - # I think this is broken???? 
- # if self.dual: - # det_header = image[0] - # measure_header = image[1] - # det_image = image[0] - # measure_image = image[1] - # header = det_header - # data = det_image - # self.gain = measure_header["GAIN"] + if self.gain is None and "GAIN" in image.keys(): + self.gain = image["GAIN"] temp_path = get_temp_path(sextractor_out_dir, image[base_name_key]) + if not os.path.exists(temp_path): self.save_fits(image, temp_path) + temp_files = [temp_path] mask_path = None + if latest_mask_save_key in image.keys(): image_mask_path = os.path.join( sextractor_out_dir, image[latest_mask_save_key] @@ -129,13 +127,14 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: if os.path.exists(image_mask_path): shutil.copyfile(image_mask_path, temp_mask_path) mask_path = temp_mask_path - temp_files.append(mask_path) + temp_files.append(Path(mask_path)) else: mask_path = None if mask_path is None: mask_path = self.save_mask(image, temp_path) - temp_files.append(mask_path) + temp_files.append(Path(mask_path)) + output_cat = os.path.join( sextractor_out_dir, image[base_name_key].replace(".fits", ".cat") ) @@ -146,43 +145,23 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: image=os.path.join(sextractor_out_dir, image[base_name_key]), ) - if not self.dual: - output_cat, checkimage_name = run_sextractor_single( - img=temp_path, - config=self.config, - output_dir=sextractor_out_dir, - parameters_name=self.parameters_name, - filter_name=self.filter_name, - starnnw_name=self.starnnw_name, - saturation=self.saturation, - weight_image=mask_path, - verbose_type=self.verbose_type, - checkimage_name=self.checkimage_name, - checkimage_type=self.checkimage_type, - gain=self.gain, - catalog_name=output_cat, - ) - - if self.dual: - output_cat = run_sextractor_dual( - det_image=det_image, - measure_image=measure_image, - config=self.config, - output_dir=sextractor_out_dir, - parameters_name=self.parameters_name, - filter_name=self.filter_name, - starnnw_name=self.starnnw_name, - saturation=self.saturation, - weight_image=mask_path, - verbose_type=self.verbose_type, - checkimage_name=self.checkimage_name, - checkimage_type=self.checkimage_type, - gain=self.gain, - catalog_name=output_cat, - mag_zp=self.mag_zp, - ) + output_cat, checkimage_name = run_sextractor_single( + img=temp_path, + config=self.config, + output_dir=sextractor_out_dir, + parameters_name=self.parameters_name, + filter_name=self.filter_name, + starnnw_name=self.starnnw_name, + saturation=self.saturation, + weight_image=mask_path, + verbose_type=self.verbose_type, + checkimage_name=self.checkimage_name, + checkimage_type=self.checkimage_type, + gain=self.gain, + catalog_name=output_cat, + ) - logger.info(f"Cache save is {self.cache}") + logger.debug(f"Cache save is {self.cache}") if not self.cache: for temp_file in temp_files: os.remove(temp_file) @@ -190,25 +169,26 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: if self.write_regions: output_catalog = get_table_from_ldac(output_cat) - x, y = output_catalog["X_IMAGE"], output_catalog["Y_IMAGE"] + + x_coords = output_catalog["X_IMAGE"] + y_coords = output_catalog["Y_IMAGE"] + regions_path = output_cat + ".reg" + write_regions_file( regions_path=regions_path, - x_coords=x, - y_coords=y, + x_coords=x_coords, + y_coords=y_coords, system="image", region_radius=5, ) - image[sextractor_header_key] = os.path.join(sextractor_out_dir, output_cat) + image[SEXTRACTOR_HEADER_KEY] = os.path.join(sextractor_out_dir, output_cat) if len(checkimage_name) > 0: if 
isinstance(self.checkimage_type, str): self.checkimage_type = [self.checkimage_type] - for ind in range(len(self.checkimage_type)): - checkimg_type = self.checkimage_type[ind] - image[sextractor_checkimg_keys[checkimg_type]] = checkimage_name[ - ind - ] + for i, checkimg_type in enumerate(self.checkimage_type): + image[sextractor_checkimg_map[checkimg_type]] = checkimage_name[i] return batch From 6b79c363ab8ae2b183ad3ad66126f094b3e98d51 Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 22:31:54 -0800 Subject: [PATCH 07/12] remove bad arguments in blocks --- .pre-commit-config.yaml | 2 +- winterdrp/pipelines/summer/blocks.py | 20 ++++++++++++++------ winterdrp/pipelines/wirc/blocks.py | 10 ++++------ 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f56e782ad..d67478acc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: types: [python] args: [ - "--disable=C", + "--disable=C,fixme", "-rn", # Only display messages "-sn", # Don't display the score ] diff --git a/winterdrp/pipelines/summer/blocks.py b/winterdrp/pipelines/summer/blocks.py index 1672f421a..1bf7a6989 100644 --- a/winterdrp/pipelines/summer/blocks.py +++ b/winterdrp/pipelines/summer/blocks.py @@ -1,11 +1,12 @@ """ -Script containing the various :class:`~winterdrp.processors.base_processor.BaseProcessor` +Script containing the various +:class:`~winterdrp.processors.base_processor.BaseProcessor` lists which are used to build configurations for the :class:`~winterdrp.pipelines.summer.summer_pipeline.SummerPipeline`. """ from winterdrp.downloader.get_test_data import get_test_data_dir from winterdrp.paths import base_name_key, core_fields -from winterdrp.pipelines.summer.config import ( +from winterdrp.pipelines.summer.config import ( # summer_weight_path, DB_NAME, PIPELINE_NAME, SUMMER_PIXEL_SCALE, @@ -17,7 +18,6 @@ sextractor_photometry_config, summer_cal_requirements, summer_mask_path, - summer_weight_path, swarp_config_path, ) from winterdrp.pipelines.summer.config.schema import summer_schema_dir @@ -150,7 +150,8 @@ ImageSaver(output_dir_name="detrend", write_mask=True), Sextractor( output_sub_dir="sextractor", - weight_image=summer_weight_path, + # TODO: work out why this was ever here... + # weight_image=summer_weight_path, checkimage_name=None, checkimage_type=None, **sextractor_astrometry_config @@ -159,7 +160,11 @@ ref_catalog_generator=summer_astrometric_catalog_generator, scamp_config_path=scamp_path, ), - Swarp(swarp_config_path=swarp_config_path, imgpixsize=2400), + Swarp( + swarp_config_path=swarp_config_path, + # TODO: work out why this was ever here... + # imgpixsize=2400 + ), ImageSaver(output_dir_name="processed", write_mask=True), Sextractor( output_sub_dir="photprocess", @@ -168,7 +173,10 @@ ), PhotCalibrator(ref_catalog_generator=summer_photometric_catalog_generator), ImageSaver( - output_dir_name="processed", additional_headers=["PROCIMG"], write_mask=True + output_dir_name="processed", + # TODO: work out why this was ever here... 
+ # additional_headers=["PROCIMG"], + write_mask=True, ), HeaderEditor(edit_keys="procflag", values=1), DatabaseImageExporter( diff --git a/winterdrp/pipelines/wirc/blocks.py b/winterdrp/pipelines/wirc/blocks.py index 3b999da30..1ba0dfd8d 100644 --- a/winterdrp/pipelines/wirc/blocks.py +++ b/winterdrp/pipelines/wirc/blocks.py @@ -1,7 +1,9 @@ +""" +Module containing standard processing blocks for WIRC +""" import os from winterdrp.catalog.kowalski import PS1, TMASS -from winterdrp.paths import base_name_key, core_fields from winterdrp.pipelines.wirc.generator import ( wirc_astrometric_catalog_generator, wirc_photometric_catalog_generator, @@ -25,7 +27,6 @@ from winterdrp.processors.alert_packets.avro_alert import AvroPacketMaker from winterdrp.processors.astromatic import Scamp, Sextractor, Swarp from winterdrp.processors.astromatic.psfex import PSFex -from winterdrp.processors.astromatic.sextractor.sextractor import Sextractor from winterdrp.processors.autoastrometry import AutoAstrometry from winterdrp.processors.candidates.candidate_detector import DetectCandidates from winterdrp.processors.candidates.namer import CandidateNamer @@ -34,17 +35,14 @@ from winterdrp.processors.dark import DarkCalibrator from winterdrp.processors.database.database_exporter import DatabaseDataframeExporter from winterdrp.processors.database.database_importer import DatabaseHistoryImporter -from winterdrp.processors.database.postgres import get_colnames_from_schema from winterdrp.processors.flat import SkyFlatCalibrator from winterdrp.processors.mask import MaskPixels from winterdrp.processors.photcal import PhotCalibrator from winterdrp.processors.photometry.aperture_photometry import AperturePhotometry from winterdrp.processors.photometry.psf_photometry import PSFPhotometry from winterdrp.processors.reference import Reference -from winterdrp.processors.send_to_fritz import SendToFritz from winterdrp.processors.sky import NightSkyMedianCalibrator from winterdrp.processors.utils import ImageLoader, ImageSaver -from winterdrp.processors.utils.image_loader import ImageLoader from winterdrp.processors.utils.image_selector import ( ImageBatcher, ImageDebatcher, @@ -124,7 +122,7 @@ history_duration_days=500, db_name="wirc", db_user=os.environ.get("DB_USER"), - db_pwd=os.environ.get("DB_PWD"), + db_password=os.environ.get("DB_PWD"), db_table="candidates", db_output_columns=candidate_colnames, schema_path=wirc_candidate_schema_path, From 3e41fba3b51132193f4773238e9e9c12db234daa Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Wed, 7 Dec 2022 23:05:26 -0800 Subject: [PATCH 08/12] Lintify --- winterdrp/pipelines/summer/generator.py | 29 +++++---- .../processors/astromatic/scamp/scamp.py | 8 +-- .../processors/astromatic/swarp/swarp.py | 4 +- .../database/base_database_processor.py | 5 +- winterdrp/processors/photcal.py | 10 +-- winterdrp/processors/utils/image_saver.py | 4 +- winterdrp/utils/pipeline_visualisation.py | 65 +++++++++++-------- 7 files changed, 65 insertions(+), 60 deletions(-) diff --git a/winterdrp/pipelines/summer/generator.py b/winterdrp/pipelines/summer/generator.py index d03cbd996..4e6dba5b2 100644 --- a/winterdrp/pipelines/summer/generator.py +++ b/winterdrp/pipelines/summer/generator.py @@ -1,26 +1,29 @@ +""" +Module containing functions to generate astrometric/photometric calibration catalogs +for SUMMER +""" import logging -import astropy from astropy.io import fits -from winterdrp.catalog import PS1, Gaia2Mass, SkyMapper +from winterdrp.catalog import PS1, BaseCatalog, Gaia2Mass, SkyMapper 
from winterdrp.catalog.sdss import SDSS, NotInSDSSError, in_sdss -from winterdrp.paths import sextractor_header_key +from winterdrp.data.image_data import Image from winterdrp.pipelines.summer.config import ( psfex_config_path, sextractor_photometry_config, swarp_config_path, ) from winterdrp.processors.astromatic import PSFex, Sextractor, Swarp -from winterdrp.processors.astromatic.sextractor.sextractor import sextractor_header_key +from winterdrp.processors.astromatic.sextractor.sextractor import SEXTRACTOR_HEADER_KEY from winterdrp.references.ps1 import PS1Ref from winterdrp.references.sdss import SDSSRef logger = logging.getLogger(__name__) -def summer_astrometric_catalog_generator(header: astropy.io.fits.Header): - temp_cat_path = header[sextractor_header_key] +def summer_astrometric_catalog_generator(image: Image) -> BaseCatalog: + temp_cat_path = image[SEXTRACTOR_HEADER_KEY] cat = Gaia2Mass( min_mag=10, max_mag=20, @@ -32,11 +35,11 @@ def summer_astrometric_catalog_generator(header: astropy.io.fits.Header): return cat -def summer_photometric_catalog_generator(header: astropy.io.fits.Header): - filter_name = header["FILTERID"] - dec = header["DEC"] +def summer_photometric_catalog_generator(image: Image) -> BaseCatalog: + filter_name = image["FILTERID"] + dec = image["DEC"] if filter_name in ["u", "U"]: - if in_sdss(header["RA"], header["DEC"]): + if in_sdss(image["RA"], image["DEC"]): return SDSS( min_mag=10, max_mag=20, @@ -61,11 +64,11 @@ def summer_photometric_catalog_generator(header: astropy.io.fits.Header): def summer_reference_image_generator( - header: fits.header, + image: Image, ): - filter_name = header["FILTER"] + filter_name = image["FILTER"] logger.info(f"Filter is {filter_name}") - if filter_name == "u": + if filter_name in ["u", "U"]: logger.info(f"Will query reference image from SDSS") return SDSSRef(filter_name=filter_name) else: diff --git a/winterdrp/processors/astromatic/scamp/scamp.py b/winterdrp/processors/astromatic/scamp/scamp.py index 68ae714df..6c4fe9381 100644 --- a/winterdrp/processors/astromatic/scamp/scamp.py +++ b/winterdrp/processors/astromatic/scamp/scamp.py @@ -16,8 +16,8 @@ get_untemp_path, ) from winterdrp.processors.astromatic.sextractor.sextractor import ( + SEXTRACTOR_HEADER_KEY, Sextractor, - sextractor_header_key, ) from winterdrp.processors.base_processor import BaseImageProcessor, ProcessorWithCache from winterdrp.utils import execute @@ -53,10 +53,8 @@ def __init__( ref_catalog_generator: Callable[[astropy.io.fits.Header], BaseCatalog], scamp_config_path: str, temp_output_sub_dir: str = "scamp", - *args, - **kwargs, ): - super(Scamp, self).__init__(*args, **kwargs) + super().__init__() self.scamp_config = scamp_config_path self.ref_catalog_generator = ref_catalog_generator self.temp_output_sub_dir = temp_output_sub_dir @@ -100,7 +98,7 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: for i, image in enumerate(batch): temp_cat_path = copy_temp_file( - output_dir=scamp_output_dir, file_path=image[sextractor_header_key] + output_dir=scamp_output_dir, file_path=image[SEXTRACTOR_HEADER_KEY] ) temp_img_path = get_temp_path(scamp_output_dir, image[base_name_key]) diff --git a/winterdrp/processors/astromatic/swarp/swarp.py b/winterdrp/processors/astromatic/swarp/swarp.py index ccd6031de..d1fc449c0 100644 --- a/winterdrp/processors/astromatic/swarp/swarp.py +++ b/winterdrp/processors/astromatic/swarp/swarp.py @@ -119,10 +119,8 @@ def __init__( combine: bool = False, cache: bool = False, subtract_bkg: bool = False, - *args, - **kwargs, 
): - super(Swarp, self).__init__(*args, **kwargs) + super().__init__() self.swarp_config = swarp_config_path self.temp_output_sub_dir = temp_output_sub_dir self.pixscale = pixscale diff --git a/winterdrp/processors/database/base_database_processor.py b/winterdrp/processors/database/base_database_processor.py index 33934e1d9..93ea794fa 100644 --- a/winterdrp/processors/database/base_database_processor.py +++ b/winterdrp/processors/database/base_database_processor.py @@ -4,7 +4,6 @@ import numpy as np -from winterdrp.data import DataBatch from winterdrp.processors.base_processor import BaseProcessor from winterdrp.processors.database.postgres import ( DataBaseError, @@ -36,10 +35,8 @@ def __init__( schema_dir: str = None, duplicate_protocol: str = "fail", q3c: bool = False, - *args, - **kwargs, ): - super().__init__(*args, **kwargs) + super().__init__() self.db_name = db_name self.db_table = db_table self.db_user = db_user diff --git a/winterdrp/processors/photcal.py b/winterdrp/processors/photcal.py index 189bc9263..58e34520b 100644 --- a/winterdrp/processors/photcal.py +++ b/winterdrp/processors/photcal.py @@ -14,9 +14,9 @@ from winterdrp.errors import ProcessorError from winterdrp.paths import copy_temp_file, get_output_dir from winterdrp.processors.astromatic.sextractor.sextractor import ( + SEXTRACTOR_HEADER_KEY, Sextractor, - sextractor_checkimg_keys, - sextractor_header_key, + sextractor_checkimg_map, ) from winterdrp.processors.base_processor import BaseImageProcessor, PrerequisiteError from winterdrp.utils.ldac_tools import get_table_from_ldac @@ -212,7 +212,7 @@ def _apply_to_images( ref_catalog = self.ref_catalog_generator(image) ref_cat_path = ref_catalog.write_catalog(image, output_dir=phot_output_dir) temp_cat_path = copy_temp_file( - output_dir=phot_output_dir, file_path=image[sextractor_header_key] + output_dir=phot_output_dir, file_path=image[SEXTRACTOR_HEADER_KEY] ) ( @@ -243,9 +243,9 @@ def _apply_to_images( aperture_diameters.append(med_fwhm_pix) zp_values.append(image["ZP_AUTO"]) - if sextractor_checkimg_keys["BACKGROUND_RMS"] in image.keys(): + if sextractor_checkimg_map["BACKGROUND_RMS"] in image.keys(): limmags = self.get_maglim( - image[sextractor_checkimg_keys["BACKGROUND_RMS"]], + image[sextractor_checkimg_map["BACKGROUND_RMS"]], zp_values, np.array(aperture_diameters) / 2.0, ) diff --git a/winterdrp/processors/utils/image_saver.py b/winterdrp/processors/utils/image_saver.py index b117dbaf2..603894ae0 100644 --- a/winterdrp/processors/utils/image_saver.py +++ b/winterdrp/processors/utils/image_saver.py @@ -24,10 +24,8 @@ def __init__( output_dir_name: str, write_mask: bool = True, output_dir: str = base_output_dir, - *args, - **kwargs, ): - super().__init__(*args, **kwargs) + super().__init__() self.output_dir_name = output_dir_name self.write_mask = write_mask self.output_dir = output_dir diff --git a/winterdrp/utils/pipeline_visualisation.py b/winterdrp/utils/pipeline_visualisation.py index 7ea2d9664..de73bf103 100644 --- a/winterdrp/utils/pipeline_visualisation.py +++ b/winterdrp/utils/pipeline_visualisation.py @@ -1,4 +1,7 @@ -import argparse +""" +Module for generating visualisations of +:class:`~winterdrp.pipeline.base_pipeline.Pipeline` objects. 
+""" import logging from datetime import datetime from pathlib import Path @@ -18,13 +21,29 @@ def get_save_path(pipeline: str, configs: str) -> Path: + """ + Get output save path for a pipeline diagram + + :param pipeline: Pipeline ised + :param configs: Configs used + :return: path to save + """ return doc_dir.joinpath(f"flowcharts/{pipeline}/{configs}.png") def flowify(processor_list: list[BaseProcessor], output_path: Path): + """ + Function to generate a diagram summarising all + :class:`~wintedrp.processors.BaseProcessor` objects + in a given pipeline configuration + + :param processor_list: list of processors to visualise + :param output_path: Path to save diagram + :return: None + """ plt.figure(figsize=(12.0, 2.0 + 0.3 * len(processor_list)), dpi=300.0) - ax = plt.subplot(111) + plt.subplot(111) y_scale = 1.0 / float(len(processor_list)) @@ -97,22 +116,29 @@ def flowify(processor_list: list[BaseProcessor], output_path: Path): def iterate_flowify(config: str | list[str] = None, pipelines: str | list[str] = None): + """ + Function to iterate the visualisation of all configurations and pipelines + + :param config: config(s) to visualise (default of all) + :param pipelines: pipeline(s) to visualise + :return: None + """ if pipelines is None: pipelines = Pipeline.pipelines.keys() elif not isinstance(pipelines, list): - pipelines = [args.pipeline] + pipelines = [pipelines] if config is None: - gc = "default" + selected_config = "default" else: - gc = config + selected_config = config for pipeline in pipelines: pipe = get_pipeline( pipeline, - selected_configurations=gc, - night=str(datetime.now()).split(" ")[0].replace("-", ""), + selected_configurations=selected_config, + night=str(datetime.now()).split(" ", maxsplit=1)[0].replace("-", ""), ) if config is None: @@ -120,23 +146,8 @@ def iterate_flowify(config: str | list[str] = None, pipelines: str | list[str] = else: config_list = pipe.selected_configurations - for c in config_list: - flowify(pipe.set_configuration(c), get_save_path(pipeline, c)) - - -if __name__ == "__main__": - - logger.setLevel("INFO") - - parser = argparse.ArgumentParser( - description="winterdrp: An automated image reduction pipeline, developed for WINTER" - ) - - parser.add_argument("-p", "--pipeline", default=None, help="Pipeline to be used") - parser.add_argument( - "-c", "--config", default=None, help="Pipeline configuration to be used" - ) - - args = parser.parse_args() - - iterate_flowify(config=args.config, pipelines=args.pipeline) + for single_config in config_list: + flowify( + pipe.set_configuration(single_config), + get_save_path(pipeline, single_config), + ) From 4197449088e3cada2e86b700f1a94e7d4719b0dd Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Thu, 8 Dec 2022 01:25:20 -0800 Subject: [PATCH 09/12] Remove erroneous kwarg in swarp --- winterdrp/processors/reference.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/winterdrp/processors/reference.py b/winterdrp/processors/reference.py index bf3cc6d34..b3731a6c3 100644 --- a/winterdrp/processors/reference.py +++ b/winterdrp/processors/reference.py @@ -7,7 +7,6 @@ from astropy.wcs import WCS from winterdrp.data import Image, ImageBatch -from winterdrp.io import open_fits from winterdrp.paths import ( base_name_key, get_output_dir, @@ -35,10 +34,8 @@ def __init__( sextractor: Callable[..., Sextractor], ref_psfex: Callable[..., PSFex], temp_output_subtract_dir: str = "subtract", - *args, - **kwargs, ): - super(Reference, self).__init__(*args, **kwargs) + 
super().__init__() self.ref_image_generator = ref_image_generator self.swarp_resampler = swarp_resampler self.sextractor = sextractor @@ -87,7 +84,7 @@ def _apply_to_images( new_batch = ImageBatch() - for ind, image in enumerate(batch): + for image in batch: ref_writer = self.ref_image_generator(image) @@ -131,7 +128,6 @@ def _apply_to_images( center_dec=sci_dec_cent, propogate_headerlist=propogate_headerlist, temp_output_sub_dir=self.temp_output_subtract_dir, - night_sub_dir=self.night_sub_dir, include_scamp=False, combine=False, gain=ref_gain, @@ -147,8 +143,8 @@ def _apply_to_images( self.save_fits(resampled_ref_img, resampled_ref_path) ( - ref_resamp_x_cent, - ref_resamp_y_cent, + _, + _, ref_resamp_ra_cent, ref_resamp_dec_cent, ref_resamp_pixscale, @@ -166,7 +162,6 @@ def _apply_to_images( center_dec=ref_resamp_dec_cent, propogate_headerlist=propogate_headerlist, temp_output_sub_dir=self.temp_output_subtract_dir, - night_sub_dir=self.night_sub_dir, include_scamp=False, combine=False, gain=sci_gain, From d5f249d933b24b8f4940a48ee03f595f81b45c16 Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Thu, 8 Dec 2022 01:29:00 -0800 Subject: [PATCH 10/12] cache --- .github/workflows/continous_integration.yml | 9 +++++-- .github/workflows/pylint.yml | 29 ++++++++++++--------- 2 files changed, 23 insertions(+), 15 deletions(-) diff --git a/.github/workflows/continous_integration.yml b/.github/workflows/continous_integration.yml index 4b5a7b3ac..f015d751b 100644 --- a/.github/workflows/continous_integration.yml +++ b/.github/workflows/continous_integration.yml @@ -31,13 +31,18 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 + + - uses: actions/checkout@v3 + + - name: Install poetry + run: pipx install poetry # Set up the python versions - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} + cache: "poetry" # Runs a set of commands using the runners shell - name: set up ssh diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 3df6d9058..0fc3b7110 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -8,16 +8,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.10 - uses: actions/setup-python@v1 - with: - python-version: "3.10" - - name: Install dependencies - run: | - pip install --upgrade pip - python -m pip install --upgrade poetry - poetry install - - name: Analysing the code with pylint - run: | - poetry run pylint winterdrp + - uses: actions/checkout@v3 + - name: Install poetry + run: pipx install poetry + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "poetry" + - name: Install dependencies + run: | + pip install --upgrade pip + python -m pip install --upgrade poetry + poetry install + - name: Analysing the code with pylint + run: | + poetry run pylint winterdrp From 9c247a86659d5b9c64f5397cebbe59e5973c72b8 Mon Sep 17 00:00:00 2001 From: Robert Stein Date: Thu, 8 Dec 2022 03:07:01 -0800 Subject: [PATCH 11/12] Don't propagate non-existent args --- winterdrp/pipelines/summer/generator.py | 33 +++---- .../processors/astromatic/psfex/psfex.py | 14 +-- winterdrp/processors/photcal.py | 95 +++++++++++-------- 3 files changed, 77 insertions(+), 65 deletions(-) diff --git 
a/winterdrp/pipelines/summer/generator.py b/winterdrp/pipelines/summer/generator.py index 4e6dba5b2..24611681a 100644 --- a/winterdrp/pipelines/summer/generator.py +++ b/winterdrp/pipelines/summer/generator.py @@ -4,8 +4,6 @@ """ import logging -from astropy.io import fits - from winterdrp.catalog import PS1, BaseCatalog, Gaia2Mass, SkyMapper from winterdrp.catalog.sdss import SDSS, NotInSDSSError, in_sdss from winterdrp.data.image_data import Image @@ -38,7 +36,9 @@ def summer_astrometric_catalog_generator(image: Image) -> BaseCatalog: def summer_photometric_catalog_generator(image: Image) -> BaseCatalog: filter_name = image["FILTERID"] dec = image["DEC"] + if filter_name in ["u", "U"]: + if in_sdss(image["RA"], image["DEC"]): return SDSS( min_mag=10, @@ -46,21 +46,22 @@ def summer_photometric_catalog_generator(image: Image) -> BaseCatalog: search_radius_arcmin=7.5, filter_name=filter_name, ) - elif dec < 0.0: + + if dec < 0.0: return SkyMapper( min_mag=10, max_mag=20, search_radius_arcmin=7.5, filter_name=filter_name, ) - else: - err = "U band image is in a field with no reference image." - logger.error(err) - raise NotInSDSSError(err) - else: - return PS1( - min_mag=10, max_mag=20, search_radius_arcmin=7.5, filter_name=filter_name - ) + + err = "U band image is in a field with no reference image." + logger.error(err) + raise NotInSDSSError(err) + + return PS1( + min_mag=10, max_mag=20, search_radius_arcmin=7.5, filter_name=filter_name + ) def summer_reference_image_generator( @@ -68,12 +69,13 @@ def summer_reference_image_generator( ): filter_name = image["FILTER"] logger.info(f"Filter is {filter_name}") + if filter_name in ["u", "U"]: - logger.info(f"Will query reference image from SDSS") + logger.info("Will query reference image from SDSS") return SDSSRef(filter_name=filter_name) - else: - logger.info(f"Will query reference image from PS1") - return PS1Ref(filter_name=filter_name) + + logger.info("Will query reference image from PS1") + return PS1Ref(filter_name=filter_name) def summer_reference_image_resampler(**kwargs) -> Swarp: @@ -94,5 +96,4 @@ def summer_reference_psfex(output_sub_dir, norm_fits): config_path=psfex_config_path, output_sub_dir=output_sub_dir, norm_fits=norm_fits, - cache=True, ) diff --git a/winterdrp/processors/astromatic/psfex/psfex.py b/winterdrp/processors/astromatic/psfex/psfex.py index 04e4adba0..544c59bc8 100644 --- a/winterdrp/processors/astromatic/psfex/psfex.py +++ b/winterdrp/processors/astromatic/psfex/psfex.py @@ -1,7 +1,6 @@ import logging import os -import astropy.io.fits import numpy as np from astropy.io import fits @@ -34,8 +33,8 @@ def run_psfex( if norm_psf_output_name is not None: psf_path = sextractor_cat_path.replace(".cat", ".psf") - with fits.open(psf_path) as f: - psf_model_data = f[1].data[0][0][0] + with fits.open(psf_path) as data_file: + psf_model_data = data_file[1].data[0][0][0] psf_model_data = psf_model_data / np.sum(psf_model_data) psf_model_hdu = fits.PrimaryHDU(psf_model_data) psf_model_hdu.writeto(norm_psf_output_name, overwrite=True) @@ -49,18 +48,16 @@ def __init__( config_path: str = None, output_sub_dir: str = "psf", norm_fits: bool = True, - *args, - **kwargs, ): - super(PSFex, self).__init__(*args, **kwargs) + super().__init__() self.config_path = config_path self.output_sub_dir = output_sub_dir self.norm_fits = norm_fits def __str__(self) -> str: return ( - f"Processor to apply PSFEx to images, measuring the PSF of detected sources " - f"and saving these to the '{self.output_sub_dir}' directory." 
+ f"Processor to apply PSFEx to images, measuring the PSF of detected " + f"sources and saving these to the '{self.output_sub_dir}' directory." ) def get_psfex_output_dir(self): @@ -88,7 +85,6 @@ def _apply_to_images(self, batch: ImageBatch) -> ImageBatch: image[psfex_header_key] = psf_path image[norm_psfex_header_key] = norm_psf_path - # assert False return batch def check_prerequisites( diff --git a/winterdrp/processors/photcal.py b/winterdrp/processors/photcal.py index 58e34520b..43344a8fa 100644 --- a/winterdrp/processors/photcal.py +++ b/winterdrp/processors/photcal.py @@ -1,8 +1,11 @@ +""" +Module for running photometric calibration +""" import logging import os from collections.abc import Callable +from pathlib import Path -import astropy.io.fits import astropy.units as u import numpy as np from astropy.coordinates import SkyCoord @@ -10,7 +13,7 @@ from astropy.stats import sigma_clip, sigma_clipped_stats from winterdrp.catalog.base_catalog import BaseCatalog -from winterdrp.data import ImageBatch +from winterdrp.data import Image, ImageBatch from winterdrp.errors import ProcessorError from winterdrp.paths import copy_temp_file, get_output_dir from winterdrp.processors.astromatic.sextractor.sextractor import ( @@ -37,31 +40,35 @@ class PhotometryError(ProcessorError): - pass + """Base error for photometric calibration""" class PhotometryReferenceError(PhotometryError): - pass + """Error related to the photometic reference catalogue""" class PhotometrySourceError(PhotometryError): - pass + """Error related to the photometic source catalogue""" class PhotometryCrossMatchError(PhotometryError): - pass + """Error related to cross-matching photometic reference and source catalogues""" class PhotometryCalculationError(PhotometryError): - pass + """Error related to the photometic calibration""" class PhotCalibrator(BaseImageProcessor): + """ + Photometric calibrator processor + """ + base_key = "photcalibrator" def __init__( self, - ref_catalog_generator: Callable[[astropy.io.fits.Header], BaseCatalog], + ref_catalog_generator: Callable[[Image], BaseCatalog], temp_output_sub_dir: str = "phot", redo: bool = True, x_lower_limit: float = 100, @@ -70,10 +77,8 @@ def __init__( y_upper_limit: float = 2800, fwhm_threshold_arcsec: float = 4.0, num_matches_threshold: int = 5, - *args, - **kwargs, ): - super(PhotCalibrator, self).__init__(*args, **kwargs) + super().__init__() self.redo = redo # What is this for? self.ref_catalog_generator = ref_catalog_generator self.temp_output_sub_dir = temp_output_sub_dir @@ -81,15 +86,21 @@ def __init__( self.x_upper_limit = x_upper_limit self.y_lower_limit = y_lower_limit self.y_upper_limit = y_upper_limit - self.fwhm_threshold_arcsec = ( - fwhm_threshold_arcsec # Why is this here not in catalog? - ) + + # Why is this here not in catalog? + self.fwhm_threshold_arcsec = fwhm_threshold_arcsec + self.num_matches_threshold = num_matches_threshold def __str__(self) -> str: - return f"Processor to perform photometric calibration." + return "Processor to perform photometric calibration." 
     def get_phot_output_dir(self):
+        """
+        Return the output directory for photometric calibration
+
+        :return: Path to the output directory
+        """
         return get_output_dir(self.temp_output_sub_dir, self.night_sub_dir)

     def calculate_zeropoint(self, ref_cat_path: str, img_cat_path: str) -> list[dict]:
@@ -126,7 +137,7 @@ def calculate_zeropoint(self, ref_cat_path: str, img_cat_path: str) -> list[dict
             logger.error(err)
             raise PhotometrySourceError(err)

-        idx, d2d, d3d = ref_coords.match_to_catalog_sky(clean_img_coords)
+        idx, d2d, _ = ref_coords.match_to_catalog_sky(clean_img_coords)
         match_mask = d2d < 1.0 * u.arcsec
         matched_ref_cat = ref_cat[match_mask]
         matched_img_cat = clean_img_cat[idx[match_mask]]
@@ -135,7 +146,10 @@ def calculate_zeropoint(self, ref_cat_path: str, img_cat_path: str) -> list[dict
         )

         if len(matched_img_cat) < self.num_matches_threshold:
-            err = f"Not enough cross-matched sources found to calculate a reliable zeropoint."
+            err = (
+                "Not enough cross-matched sources "
+                "found to calculate a reliable zeropoint."
+            )
             logger.error(err)
             raise PhotometryCrossMatchError(err)

@@ -215,14 +229,7 @@ def _apply_to_images(
                 output_dir=phot_output_dir, file_path=image[SEXTRACTOR_HEADER_KEY]
             )

-            (
-                fwhm_med,
-                fwhm_mean,
-                fwhm_std,
-                med_fwhm_pix,
-                mean_fwhm_pix,
-                std_fwhm_pix,
-            ) = self.get_fwhm(temp_cat_path)
+            fwhm_med, _, fwhm_std, med_fwhm_pix, _, _ = self.get_fwhm(temp_cat_path)

             image["FWHM_MED"] = fwhm_med
             image["FWHM_STD"] = fwhm_std
@@ -231,9 +238,9 @@ def _apply_to_images(
             aperture_diameters = []
             zp_values = []
             for zpvals in zp_dicts:
-                image["ZP_%s" % (zpvals["diameter"])] = zpvals["zp_mean"]
-                image["ZP_%s_std" % (zpvals["diameter"])] = zpvals["zp_std"]
-                image["ZP_%s_nstars" % (zpvals["diameter"])] = zpvals["nstars"]
+                image[f"ZP_{zpvals['diameter']}"] = zpvals["zp_mean"]
+                image[f"ZP_{zpvals['diameter']}_std"] = zpvals["zp_std"]
+                image[f"ZP_{zpvals['diameter']}_nstars"] = zpvals["nstars"]
                 try:
                     aperture_diameters.append(float(zpvals["diameter"]))
                     zp_values.append(zpvals["zp_mean"])
@@ -243,7 +250,7 @@ def _apply_to_images(
             aperture_diameters.append(med_fwhm_pix)
             zp_values.append(image["ZP_AUTO"])

-            if sextractor_checkimg_map["BACKGROUND_RMS"] in image.keys():
+            if sextractor_checkimg_map["BACKGROUND_RMS"] in image:
                 limmags = self.get_maglim(
                     image[sextractor_checkimg_map["BACKGROUND_RMS"]],
                     zp_values,
@@ -277,18 +284,22 @@ def get_fwhm(img_cat_path):
         return med_fwhm, mean_fwhm, std_fwhm, med_fwhm_pix, mean_fwhm_pix, std_fwhm_pix

     @staticmethod
-    def get_maglim(bkg_rms_image_path, zp, aperture_radius_pixels):
-        if isinstance(zp, float):
-            zp = [zp]
+    def get_maglim(
+        bkg_rms_image_path: str | Path,
+        zeropoint: float | list[float],
+        aperture_radius_pixels: float | list[float],
+    ) -> float:
+        if isinstance(zeropoint, float):
+            zeropoint = [zeropoint]
         if isinstance(aperture_radius_pixels, float):
             aperture_radius_pixels = [aperture_radius_pixels]

-        zp = np.array(zp, dtype=float)
+        zeropoint = np.array(zeropoint, dtype=float)
         aperture_radius_pixels = np.array(aperture_radius_pixels, dtype=float)
         bkg_rms_image = fits.getdata(bkg_rms_image_path)
         bkg_rms_med = np.nanmedian(bkg_rms_image)
         noise = bkg_rms_med * np.sqrt(np.pi * aperture_radius_pixels)
-        maglim = -2.5 * np.log10(5 * noise) + zp
+        maglim = -2.5 * np.log10(5 * noise) + zeropoint
         return maglim

     def get_sextractor_module(self) -> Sextractor:
@@ -312,9 +323,9 @@ def check_prerequisites(

         logger.debug(f"Checking file {sextractor_param_path}")

-        with open(sextractor_param_path, "rb") as f:
+        with open(sextractor_param_path, "rb") as param_file:
             sextractor_params = [
-                x.strip().decode() for x in f.readlines() if len(x.strip()) > 0
+                x.strip().decode() for x in param_file.readlines() if len(x.strip()) > 0
             ]
             sextractor_params = [
                 x.split("(")[0] for x in sextractor_params if x[0] not in ["#"]
@@ -324,7 +335,8 @@ def check_prerequisites(
             if param not in sextractor_params:
                 err = (
                     f"Missing parameter: {self.__module__} requires {param} to run, "
-                    f"but this parameter was not found in sextractor config file '{sextractor_param_path}' . "
+                    f"but this parameter was not found in sextractor config file "
+                    f"'{sextractor_param_path}'. "
                     f"Please add the parameter to this list!"
                 )
                 logger.error(err)
@@ -333,15 +345,18 @@ def check_prerequisites(

     def get_sextractor_apetures(self) -> list[float]:
         sextractor_config_path = self.get_sextractor_module().config

-        with open(sextractor_config_path, "rb") as f:
+        with open(sextractor_config_path, "rb") as sextractor_config_file:
             apeture_lines = [
                 x.decode()
-                for x in f.readlines()
+                for x in sextractor_config_file.readlines()
                 if np.logical_and(b"PHOT_APERTURES" in x, x.decode()[0] != "#")
             ]

         if len(apeture_lines) > 1:
-            err = f"The config file {sextractor_config_path} has multiple entries for PHOT_APETURES."
+            err = (
+                f"The config file {sextractor_config_path} has "
+                f"multiple entries for PHOT_APERTURES."
+            )
             logger.error(err)
             raise ProcessorError(err)

From d933034a988b2ca60c8293cf731528cc2cdfccbf Mon Sep 17 00:00:00 2001
From: Robert Stein
Date: Thu, 8 Dec 2022 03:21:06 -0800
Subject: [PATCH 12/12] Remove unrecognised kwarg

---
 winterdrp/pipelines/wirc/generator.py | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/winterdrp/pipelines/wirc/generator.py b/winterdrp/pipelines/wirc/generator.py
index 23a89ff3c..8e0bff987 100644
--- a/winterdrp/pipelines/wirc/generator.py
+++ b/winterdrp/pipelines/wirc/generator.py
@@ -1,10 +1,8 @@
 import logging
 import os

-import astropy
-from astropy.io import fits
-
 from winterdrp.catalog import Gaia2Mass
+from winterdrp.data import Image
 from winterdrp.pipelines.wirc.wirc_files import (
     psfex_path,
     sextractor_reference_config,
@@ -16,23 +14,23 @@
 logger = logging.getLogger(__name__)


-def wirc_astrometric_catalog_generator(header: astropy.io.fits.Header) -> Gaia2Mass:
+def wirc_astrometric_catalog_generator(*args) -> Gaia2Mass:
     return Gaia2Mass(min_mag=10, max_mag=20, search_radius_arcmin=30)


-def wirc_photometric_catalog_generator(header: astropy.io.fits.Header) -> Gaia2Mass:
-    filter_name = header["FILTER"]
+def wirc_photometric_catalog_generator(image: Image) -> Gaia2Mass:
+    filter_name = image["FILTER"]
     return Gaia2Mass(
         min_mag=10, max_mag=20, search_radius_arcmin=30, filter_name=filter_name
     )


 def wirc_reference_image_generator(
-    header: fits.header,
+    image: Image,
     images_directory: str = os.environ.get("REF_IMG_DIR"),
 ) -> WIRCRef:
-    object_name = header["OBJECT"]
-    filter_name = header["FILTER"]
+    object_name = image["OBJECT"]
+    filter_name = image["FILTER"]
     return WIRCRef(
         object_name=object_name,
         filter_name=filter_name,
@@ -60,5 +58,4 @@ def wirc_reference_psfex(output_sub_dir: str, norm_fits: bool) -> PSFex:
         config_path=psfex_path,
         output_sub_dir=output_sub_dir,
         norm_fits=norm_fits,
-        cache=True,
     )