diff --git a/winterdrp/paths.py b/winterdrp/paths.py
index d4cc2f185..f992ba569 100644
--- a/winterdrp/paths.py
+++ b/winterdrp/paths.py
@@ -154,7 +154,7 @@ def parse_image_list(
     return object_dict
 
 
-raw_img_key = "RAWIMAGEPATH"
+raw_img_key = "RAWPATH"
 base_name_key = "BASENAME"
 proc_history_key = "CALSTEPS"
 latest_save_key = "SAVEPATH"
@@ -166,8 +166,9 @@ def parse_image_list(
 flat_frame_key = 'FLATNAME'
 bias_frame_key = 'BIASNAME'
 dark_frame_key = 'DARKNAME'
+coadd_key = "COADDS"
 
-core_fields = ["OBSCLASS", "TARGET", "UTCTIME", "COADDS", latest_save_key, "RAWPATH"]
+core_fields = ["OBSCLASS", "TARGET", "UTCTIME", coadd_key, proc_history_key]
 
 
 class ProcessingError(Exception):
diff --git a/winterdrp/pipelines/wirc/wirc_pipeline.py b/winterdrp/pipelines/wirc/wirc_pipeline.py
index aded285e3..72bb05949 100644
--- a/winterdrp/pipelines/wirc/wirc_pipeline.py
+++ b/winterdrp/pipelines/wirc/wirc_pipeline.py
@@ -18,6 +18,10 @@
 from winterdrp.downloader.caltech import download_via_ssh
 from winterdrp.processors.utils.image_loader import ImageLoader
 from winterdrp.processors.utils.image_selector import ImageSelector, ImageBatcher, ImageDebatcher
+from winterdrp.paths import coadd_key, proc_history_key
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 def wirc_astrometric_catalog_generator(
@@ -49,6 +53,13 @@ def load_raw_wirc_image(
         header["TARGET"] = header["OBJECT"].lower()
         header["UTCTIME"] = header["UTSHUT"]
         header["MJD-OBS"] = Time(header['UTSHUT']).mjd
+        if coadd_key not in header.keys():
+            logger.debug(f"No {coadd_key} entry. Setting coadds to 1.")
+            header[coadd_key] = 1
+        if proc_history_key not in header.keys():
+            header[proc_history_key] = ""
+
+
         data[data == 0] = np.nan
         return data, header
 
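For reference, a minimal usage sketch (not part of the diff) of the new defaulting behaviour in `load_raw_wirc_image`: missing `COADDS`/`CALSTEPS` entries are now filled in at load time, so downstream checks against `core_fields` should find them. The FITS path below is a placeholder.

```python
# Illustrative only: "raw_wirc_frame.fits" is a placeholder path, not a file in the repo.
from winterdrp.paths import coadd_key, proc_history_key
from winterdrp.pipelines.wirc.wirc_pipeline import load_raw_wirc_image

data, header = load_raw_wirc_image("raw_wirc_frame.fits")

# Both keys are present even if the raw header lacked COADDS or CALSTEPS.
print(header[coadd_key], repr(header[proc_history_key]))
```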
diff --git a/winterdrp/pipelines/wirc_imsub/wirc_imsub_pipeline.py b/winterdrp/pipelines/wirc_imsub/wirc_imsub_pipeline.py
index 0409c7e09..9c3397561 100644
--- a/winterdrp/pipelines/wirc_imsub/wirc_imsub_pipeline.py
+++ b/winterdrp/pipelines/wirc_imsub/wirc_imsub_pipeline.py
@@ -21,6 +21,7 @@
 from penquins import Kowalski
 from winterdrp.catalog.kowalski import TMASS, PS1
 from winterdrp.processors.xmatch import XMatch
+from winterdrp.pipelines.wirc.wirc_pipeline import load_raw_wirc_image
 
 logger = logging.getLogger(__name__)
@@ -110,33 +111,6 @@ def get_kowalski():
     return k
 
 
-def load_raw_wirc_image(
-        path: str
-) -> tuple[np.array, fits.Header]:
-    with fits.open(path) as img:
-        data = img[0].data
-        header = img[0].header
-        header["FILTER"] = header["AFT"].split("__")[0]
-        header["OBSCLASS"] = ["calibration", "science"][header["OBSTYPE"] == "object"]
-        header["CALSTEPS"] = ""
-        header["BASENAME"] = os.path.basename(path)
-        logger.info(header["BASENAME"])
-        header["TARGET"] = header["OBJECT"].lower()
-        header["UTCTIME"] = header["UTSHUT"]
-        header["MJD-OBS"] = Time(header['UTSHUT']).mjd
-        # header.append(('GAIN', self.gain, 'Gain in electrons / ADU'), end=True)
-        # header = self.set_saturation(header)
-        if not 'COADDS' in header.keys():
-            logger.debug('Setting COADDS to 1')
-            header['COADDS'] = 1
-        if not 'CALSTEPS' in header.keys():
-            logger.debug('Setting CALSTEPS to blank')
-            header['CALSTEPS'] = ''
-
-        data[data == 0] = np.nan
-        return data, header
-
-
 class WircImsubPipeline(Pipeline):
     name = "wirc_imsub"
diff --git a/winterdrp/processors/database/base_database_processor.py b/winterdrp/processors/database/base_database_processor.py
index 9db1d01e9..33cda1d2c 100644
--- a/winterdrp/processors/database/base_database_processor.py
+++ b/winterdrp/processors/database/base_database_processor.py
@@ -4,8 +4,7 @@
 import logging
 from abc import ABC
 
 from winterdrp.processors.database.postgres import check_if_db_exists, check_if_user_exists, check_if_table_exists,\
-    create_db, create_table, create_new_user, grant_privileges, create_tables_from_schema, DataBaseError, \
-    default_db_user
+    create_db, create_table, create_new_user, grant_privileges, create_tables_from_schema, DataBaseError
 
 logger = logging.getLogger(__name__)
@@ -18,7 +17,7 @@ def __init__(
             db_name: str,
             db_table: str,
             schema_path: str,
-            db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+            db_user: str = os.environ.get('PG_DEFAULT_USER'),
             db_password: str = os.environ.get('PG_DEFAULT_PWD'),
             full_setup: bool = False,
             schema_dir: str = None,
@@ -33,33 +32,7 @@
         self.full_setup = full_setup
         self.schema_path = schema_path
         self.schema_dir = schema_dir
-
-        # logger.error("Here...")
-
-        if np.logical_and(self.db_exists(), np.invert(self.user_exists())):
-            err = 'Database exists but user does not exist'
-            logger.error(err)
-            raise DataBaseError(err)
-
-        if not self.db_exists():
-            self.make_db()
-
-        if not self.user_exists():
-            self.make_user()
-
-        self.grant_privileges()
-
-        if self.full_setup:
-            if self.schema_dir is None:
-                self.schema_dir = os.path.dirname(self.schema_path)
-                logger.warning(f'Warning, full db setup requested, but no schema directory specified. \
-                Will search for schema files in {self.schema_dir} directory.')
-            logger.info(f'Looking in {self.schema_dir} directory to search for schema files')
-
-            create_tables_from_schema(self.schema_dir, self.db_name, self.db_user, self.db_password)
-
-        if not self.table_exists():
-            self.make_table(schema_path)
+        self.db_check = False
 
     def db_exists(self):
         return check_if_db_exists(
@@ -97,4 +70,39 @@ def make_table(self, schema_path: str):
             password=self.db_password
         )
 
+    def apply(self, batch):
+
+        if not self.db_check:
+            self.set_up_databases()
+            self.db_check = True
+
+        super(BaseDatabaseProcessor, self).apply(batch)
+
+    def set_up_databases(self):
+
+        if np.logical_and(self.db_exists(), np.invert(self.user_exists())):
+            err = 'Database exists but user does not exist'
+            logger.error(err)
+            raise DataBaseError(err)
+
+        if not self.db_exists():
+            self.make_db()
+
+        if not self.user_exists():
+            self.make_user()
+
+        self.grant_privileges()
+
+        if self.full_setup:
+            if self.schema_dir is None:
+                self.schema_dir = os.path.dirname(self.schema_path)
+                logger.warning(f'Warning, full db setup requested, but no schema directory specified. \
+                Will search for schema files in {self.schema_dir} directory.')
+            logger.info(f'Looking in {self.schema_dir} directory to search for schema files')
+
+            create_tables_from_schema(self.schema_dir, self.db_name, self.db_user, self.db_password)
+
+        if not self.table_exists():
+            self.make_table(self.schema_path)
+
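The base_database_processor.py change defers all database, user and table setup from `__init__` to the first `apply()` call, guarded by the new `db_check` flag, so constructing the processor no longer touches Postgres. A stripped-down sketch of the pattern (illustrative only, not the real class):

```python
class LazySetupSketch:
    """Toy stand-in for BaseDatabaseProcessor: no database work at construction time."""

    def __init__(self):
        self.db_check = False

    def set_up_databases(self):
        # In the real processor this creates the database, user and tables.
        print("running one-off database setup")

    def apply(self, batch):
        if not self.db_check:        # executed at most once per instance
            self.set_up_databases()
            self.db_check = True
        return batch


proc = LazySetupSketch()        # cheap: nothing is set up yet
proc.apply(["image_1.fits"])    # first call triggers the setup
proc.apply(["image_2.fits"])    # later calls skip it
```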
diff --git a/winterdrp/processors/database/postgres.py b/winterdrp/processors/database/postgres.py
index 1ad0d7738..cda02c4e5 100644
--- a/winterdrp/processors/database/postgres.py
+++ b/winterdrp/processors/database/postgres.py
@@ -1,20 +1,13 @@
 import astropy.io.fits
 import psycopg
-from astropy.io import fits
 import os
 from glob import glob
-from astropy.time import Time
 import numpy as np
-from astropy.coordinates import SkyCoord
-import astropy.units as u
-from psycopg.errors import Error
 import logging
 
 from winterdrp.errors import ProcessorError
 
 logger = logging.getLogger(__name__)
 
-default_db_user = os.path.basename(os.environ["HOME"])
-
 schema_dir = os.path.join(os.path.dirname(__file__), "schema")
@@ -41,7 +34,7 @@ def validate_credentials(
 
 def create_db(
         db_name: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ):
     validate_credentials(db_user=db_user, password=password)
@@ -56,7 +49,7 @@ def create_db(
 def create_table(
         schema_path: str,
         db_name: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ):
     validate_credentials(db_user, password)
@@ -99,7 +92,7 @@ def grant_privileges(
 
 def check_if_user_exists(
         user_name: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ) -> bool:
 
@@ -119,7 +112,7 @@ def check_if_user_exists(
 def check_if_db_exists(
         db_name: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ) -> bool:
 
@@ -142,7 +135,7 @@ def check_if_db_exists(
 def check_if_table_exists(
         db_name: str,
         db_table: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ) -> bool:
 
@@ -215,7 +208,7 @@ def get_ordered_schema_list(
 def create_tables_from_schema(
         schema_dir: str,
         db_name: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD')
 ):
     schema_files = glob(f'{schema_dir}/*.sql')
@@ -229,7 +222,7 @@ def export_to_db(
         value_dict: dict | astropy.io.fits.Header,
         db_name: str,
         db_table: str,
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD'),
 ) -> tuple[str, list]:
     with psycopg.connect(f"dbname={db_name} user={db_user} password={password}") as conn:
@@ -289,7 +282,7 @@ def import_from_db(
         db_accepted_values: str | int | float | list[str | float | int],
         db_output_columns: str | list[str],
         output_alias_map: str | list[str],
-        db_user: str = os.environ.get('PG_DEFAULT_USER', default_db_user),
+        db_user: str = os.environ.get('PG_DEFAULT_USER'),
         password: str = os.environ.get('PG_DEFAULT_PWD'),
 ) -> list[dict]:
     """Query an SQL database with constraints, and return a list of dictionaries.
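With `default_db_user` removed, every helper in postgres.py now defaults its credentials from the `PG_DEFAULT_USER` and `PG_DEFAULT_PWD` environment variables only, returning `None` when they are unset rather than falling back to the login name derived from `$HOME`; presumably `validate_credentials` then rejects the missing values. A minimal sketch of how the defaults resolve, using placeholder credentials:

```python
import os

# Placeholder credentials; in practice these are exported in the shell
# before the database processors run.
os.environ.setdefault("PG_DEFAULT_USER", "winter_admin")
os.environ.setdefault("PG_DEFAULT_PWD", "change_me")

db_user = os.environ.get("PG_DEFAULT_USER")   # would be None if the variable were unset
password = os.environ.get("PG_DEFAULT_PWD")
print(f"connecting as {db_user!r}")
```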