diff --git a/.DS_Store b/.DS_Store
index 9de2f19..cb1de3d 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.github/.DS_Store b/.github/.DS_Store
new file mode 100644
index 0000000..3d7e18d
Binary files /dev/null and b/.github/.DS_Store differ
diff --git a/.github/workflows/.DS_Store b/.github/workflows/.DS_Store
new file mode 100644
index 0000000..761d7dd
Binary files /dev/null and b/.github/workflows/.DS_Store differ
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..b46190f
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,35 @@
+name: CI
+
+on: [push, pull_request]
+
+jobs:
+  run-unittests:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+
+      - name: Create and activate virtual environment
+        run: |
+          python -m venv venv # Create a virtual environment
+          source venv/bin/activate # Activate the virtual environment
+
+      - name: Install dependencies
+        run: |
+          source venv/bin/activate # Ensure the virtual environment is active
+          pip install --upgrade pip # Upgrade pip within the virtual environment
+          pip install -r requirements.txt # Install dependencies from requirements.txt
+
+      - name: Set up environment
+        run: |
+          source venv/bin/activate # Ensure the virtual environment is active
+          source ./add_root_to_path.sh # Run the script to modify PYTHONPATH
+
+      - name: Run tests
+        run: |
+          source venv/bin/activate # Ensure the virtual environment is active
+          source ./add_root_to_path.sh # Re-add the root to PYTHONPATH: env changes do not persist across steps
+          pytest # Run tests directly with pytest
diff --git a/.gitignore b/.gitignore
index b1e4f80..e53deea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,12 +4,13 @@
 *.tfvars
 
 # Logs and Logging-Related
-logs/*
-data/*
+tmp/logs/*
+tmp/data/*
 *.parquet
 *.feather
 *.log
 
+
 # Python Module Data
 #(e.g __pychache__ directories)
 __pycache__/
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..6ab83f9
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 J J Marden
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/pipeline/extract_carbon.py b/pipeline/extract_carbon.py
index 9846165..4a70cb0 100644
--- a/pipeline/extract_carbon.py
+++ b/pipeline/extract_carbon.py
@@ -52,7 +52,7 @@ def fetch_data(self) -> Optional[Dict[str, Any]]:
             response.raise_for_status()
             return response.json()
         except requests.exceptions.RequestException as e:
-            self.logger.error(f"An error occurred: {e}")
+            self.logger.error("An error occurred: %s", e)
             return None
 
 
@@ -135,19 +135,18 @@ def execute(self) -> Optional[pd.DataFrame]:
 
         if data:
            self.logger.info("Data fetched successfully from API.")
-            self.logger.debug(f"Fetched Data: {data}")
+            self.logger.debug("Fetched Data: %s", data)
 
             self.logger.info("Processing the fetched data.")
             df = self.data_processor.process_data(data)
 
             if df is not None:
                 self.logger.info("Data processed successfully into DataFrame.")
-                self.logger.debug("DataFrame of Carbon Forecast Data:")
-                self.logger.debug(df.to_string())
+                self.logger.debug("DataFrame of Carbon Forecast Data:\n%s", df.to_string())
 
                 self.logger.info("Saving the processed data locally.")
                 self.data_processor.save_data_locally(df)
-                self.logger.info(f"Data saved locally at `{self.data_processor.save_location}`.")
+                self.logger.info("Data saved locally at `%s`.", self.data_processor.save_location)
 
                 self.logger.info("Attempting to get S3 client.")
                 s3_client = self.data_processor.get_s3_client()
@@ -156,7 +155,8 @@ def execute(self) -> Optional[pd.DataFrame]:
                     self.logger.info("S3 client retrieved successfully.")
                     self.logger.info("Uploading the data to S3.")
                     self.data_processor.save_data_to_s3()
-                    self.logger.info(f"Data successfully uploaded to S3 bucket `{self.s3_bucket}` as `{self.s3_file_name}`.")
+                    self.logger.info("Data successfully uploaded to S3 bucket `%s` as `%s`.",
+                                     self.s3_bucket, self.s3_file_name)
                 else:
                     self.logger.error("Failed to get S3 client. Data was not uploaded to S3.")
             return df
diff --git a/pipeline/extract_demand.py b/pipeline/extract_demand.py
index f3d29ea..f6b0996 100644
--- a/pipeline/extract_demand.py
+++ b/pipeline/extract_demand.py
@@ -64,7 +64,7 @@ def fetch_data(self) -> Optional[Dict[str, Any]]:
             response.raise_for_status()
             return response.json()
         except requests.exceptions.RequestException as e:
-            self.logger.error(f"An error occurred: {e}")
+            self.logger.error("An error occurred: %s", e)
             return None
 
 
@@ -100,7 +100,6 @@ def process_data(self, data: Dict[str, Any],
             a pd.DataFrame, the second element is a dictionary containing
             the time window over which the data was fetched.
""" - logger = logger or self.logger if not data or "data" not in data: logger.warning("No data found in response.") @@ -159,16 +158,16 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: df, time_period = result self.logger.debug("DataFrame of Demand Data:") - self.logger.debug(df.to_string()) # Log the entire DataFrame as a string + self.logger.debug("%s", df.to_string()) # Log the entire DataFrame as a string self.logger.info("Head of the DataFrame:") - self.logger.info("\n" + df.head().to_string()) + self.logger.info("\n%s", df.head().to_string()) self.logger.info("Time Period of Data:") - self.logger.info(time_period) + self.logger.info("%s", time_period) # Saving data locally self.logger.info("Saving data locally.") local_save_path = self.data_processor.save_data_locally(df) - self.logger.info(f"Data successfully saved locally at {local_save_path}.") + self.logger.info("Data successfully saved locally at %s.", local_save_path) # Uploading data to S3 self.logger.info("Preparing to upload data to S3.") @@ -177,7 +176,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: if s3_client: self.logger.info("S3 client initialized successfully.") self.data_processor.save_data_to_s3() - self.logger.info(f"Data successfully uploaded to S3 at `{self.s3_file_name}`.") + self.logger.info("Data successfully uploaded to S3 at `%s`.", self.s3_file_name) else: self.logger.error("Failed to initialize S3 client.") @@ -187,7 +186,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: else: self.logger.error("Failed to retrieve data from API.") except Exception as e: - self.logger.error(f"An error occurred during the execution: {e}") + self.logger.error("An error occurred during the execution: %s", e) self.logger.info("Execution of the workflow completed.") return None diff --git a/pipeline/extract_generation.py b/pipeline/extract_generation.py index 112ad26..c1b5022 100644 --- a/pipeline/extract_generation.py +++ b/pipeline/extract_generation.py @@ -67,7 +67,7 @@ def fetch_data(self) -> Optional[Dict[str, Any]]: response.raise_for_status() return response.json() except requests.exceptions.RequestException as e: - self.logger.error(f"An error occurred: {e}") + self.logger.error("An error occurred: %s", e) return None @@ -103,7 +103,7 @@ def process_data(self, data: Dict[str, Any]) -> Optional[Tuple[pd.DataFrame, Dic """ if not data or "data" not in data: - logger.warning("No data found in response.") + self.logger.warning("No data found in response.") return None df = pd.DataFrame(data["data"]) @@ -159,19 +159,14 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: if result is not None: df, time_period = result - self.logger.debug("DataFrame of Demand Data:") - # Log the entire DataFrame as a string - self.logger.debug(df.to_string()) - self.logger.info("Head of the DataFrame:") - self.logger.info("\n" + df.head().to_string()) - self.logger.info("Time Period of Data:") - self.logger.info(time_period) + self.logger.debug("DataFrame of Demand Data:\n%s", df.to_string()) + self.logger.info("Head of the DataFrame:\n%s", df.head().to_string()) + self.logger.info("Time Period of Data: %s", time_period) # Saving data locally self.logger.info("Saving data locally.") local_save_path = self.data_processor.save_data_locally(df) - self.logger.info(f"Data successfully saved locally at { - local_save_path}.") + self.logger.info("Data successfully saved locally at %s.", local_save_path) # Uploading data to S3 
self.logger.info("Preparing to upload data to S3.") @@ -180,8 +175,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: if s3_client: self.logger.info("S3 client initialized successfully.") self.data_processor.save_data_to_s3() - self.logger.info(f"Data successfully uploaded to S3 at `{ - self.s3_file_name}`.") + self.logger.info("Data successfully uploaded to S3 at `%s`.", self.s3_file_name) else: self.logger.error("Failed to initialize S3 client.") @@ -191,7 +185,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, Dict[str, datetime]]]: else: self.logger.error("Failed to retrieve data from API.") except Exception as e: - self.logger.error(f"An error occurred during the execution: {e}") + self.logger.error("An error occurred during the execution: %s", e) self.logger.info("Execution of the workflow completed.") return None diff --git a/pipeline/extract_price.py b/pipeline/extract_price.py index a2f81f0..f828ded 100644 --- a/pipeline/extract_price.py +++ b/pipeline/extract_price.py @@ -52,7 +52,7 @@ def get_settlement_periods(self, df = pd.read_feather(path_to_reference_data) periods = df.groupby('settlementDate')['settlementPeriod'].unique().to_dict() periods = {k: list(v) for k, v in periods.items()} - logger.info(f"Getting price data for {periods}") + logger.info("Getting price data for %s", periods) return periods def construct_default_params(self, date: str, period: int) -> str: @@ -76,8 +76,8 @@ def fetch_data(self, periods: Dict[str, List[int]]) -> List[Dict[str, Any]]: response.raise_for_status() response_list.append(response.json()) except requests.exceptions.RequestException as e: - self.logger.warning(f"An error occurred when requesting fuel data for {date}, period {period}!") - self.logger.warning(f"{e}") + self.logger.warning("An error occurred when requesting fuel data for %s, period %d!", date, period) + self.logger.warning("%s", e) return response_list @@ -110,7 +110,6 @@ def process_data(self, response_list: List[Dict[str, Any]], """ Takes a list of responses, merges them into a DataFrame, and returns it. 
""" - logger = logger or self.logger if not response_list: logger.warning("No data found in response.") @@ -153,7 +152,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, int]]: try: periods = self.api_client.get_settlement_periods(self.reference_data_path) - self.logger.info(f"Retrieved settlement periods: {periods}") + self.logger.info("Retrieved settlement periods: %s", periods) response_list = self.api_client.fetch_data(periods) if response_list: @@ -163,17 +162,14 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, int]]: if result is not None: df, number_of_settlement_periods = result - self.logger.debug("DataFrame of Price Data:") - self.logger.debug(df.to_string()) # Log the entire DataFrame as a string - self.logger.info("Head of the DataFrame:") - self.logger.info("\n" + df.head().to_string()) - self.logger.info("Number of Settlement Periods:") - self.logger.info(number_of_settlement_periods) + self.logger.debug("DataFrame of Price Data:\n%s", df.to_string()) + self.logger.info("Head of the DataFrame:\n%s", df.head().to_string()) + self.logger.info("Number of Settlement Periods: %d", number_of_settlement_periods) # Saving data locally self.logger.info("Saving data locally.") local_save_path = self.data_processor.save_data_locally(df) - self.logger.info(f"Data successfully saved locally at {local_save_path}.") + self.logger.info("Data successfully saved locally at %s.", local_save_path) # Uploading data to S3 self.logger.info("Preparing to upload data to S3.") @@ -182,7 +178,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, int]]: if s3_client: self.logger.info("S3 client initialized successfully.") self.data_processor.save_data_to_s3() - self.logger.info(f"Data successfully uploaded to S3 at `{self.data_processor.s3_file_name}`.") + self.logger.info("Data successfully uploaded to S3 at `%s`.", self.data_processor.s3_file_name) else: self.logger.error("Failed to initialize S3 client.") @@ -192,7 +188,7 @@ def execute(self) -> Optional[Tuple[pd.DataFrame, int]]: else: self.logger.error("Failed to retrieve data from the API.") except Exception as e: - self.logger.error(f"An error occurred during the execution: {e}") + self.logger.error("An error occurred during the execution: %s", e) self.logger.info("Execution of the workflow completed.") return None @@ -227,6 +223,5 @@ def main() -> None: logger.info("---> Data inserted and process completed for %s.", script_name) - if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/pipeline/extract_to_s3.py b/pipeline/extract_to_s3.py index 6f0493a..caa543a 100644 --- a/pipeline/extract_to_s3.py +++ b/pipeline/extract_to_s3.py @@ -5,9 +5,10 @@ from extract_demand import main as extract_demand from extract_price import main as extract_price - +from constants import Constants as ct import config as cg -save_directory = cg.DATA + +save_directory = ct.DATA if not os.path.exists(save_directory): os.makedirs(save_directory) diff --git a/pipeline/logs/__pycache__/pandas.cpython-312.pyc b/pipeline/logs/__pycache__/pandas.cpython-312.pyc deleted file mode 100644 index 5755394..0000000 Binary files a/pipeline/logs/__pycache__/pandas.cpython-312.pyc and /dev/null differ diff --git a/pipeline/logs/extract_carbon.log b/pipeline/logs/extract_carbon.log deleted file mode 100644 index 918fcdd..0000000 --- a/pipeline/logs/extract_carbon.log +++ /dev/null @@ -1,100 +0,0 @@ -2024-08-13 15:17:32,859 - extract_carbon - INFO - DataFrame of Carbon Forecast Data: -2024-08-13 15:17:32,863 - extract_carbon - DEBUG - from to 
forecast carbon level -0 2024-08-12T23:00Z 2024-08-12T23:30Z 85 low -1 2024-08-12T23:30Z 2024-08-13T00:00Z 83 low -2 2024-08-13T00:00Z 2024-08-13T00:30Z 73 low -3 2024-08-13T00:30Z 2024-08-13T01:00Z 68 low -4 2024-08-13T01:00Z 2024-08-13T01:30Z 72 low -5 2024-08-13T01:30Z 2024-08-13T02:00Z 79 low -6 2024-08-13T02:00Z 2024-08-13T02:30Z 86 low -7 2024-08-13T02:30Z 2024-08-13T03:00Z 79 low -8 2024-08-13T03:00Z 2024-08-13T03:30Z 75 low -9 2024-08-13T03:30Z 2024-08-13T04:00Z 70 low -10 2024-08-13T04:00Z 2024-08-13T04:30Z 70 low -11 2024-08-13T04:30Z 2024-08-13T05:00Z 77 low -12 2024-08-13T05:00Z 2024-08-13T05:30Z 73 low -13 2024-08-13T05:30Z 2024-08-13T06:00Z 79 low -14 2024-08-13T06:00Z 2024-08-13T06:30Z 76 low -15 2024-08-13T06:30Z 2024-08-13T07:00Z 80 low -16 2024-08-13T07:00Z 2024-08-13T07:30Z 80 low -17 2024-08-13T07:30Z 2024-08-13T08:00Z 74 low -18 2024-08-13T08:00Z 2024-08-13T08:30Z 67 low -19 2024-08-13T08:30Z 2024-08-13T09:00Z 67 low -20 2024-08-13T09:00Z 2024-08-13T09:30Z 63 low -21 2024-08-13T09:30Z 2024-08-13T10:00Z 55 low -22 2024-08-13T10:00Z 2024-08-13T10:30Z 51 low -23 2024-08-13T10:30Z 2024-08-13T11:00Z 42 low -24 2024-08-13T11:00Z 2024-08-13T11:30Z 41 low -25 2024-08-13T11:30Z 2024-08-13T12:00Z 41 low -26 2024-08-13T12:00Z 2024-08-13T12:30Z 40 low -27 2024-08-13T12:30Z 2024-08-13T13:00Z 42 low -28 2024-08-13T13:00Z 2024-08-13T13:30Z 43 low -29 2024-08-13T13:30Z 2024-08-13T14:00Z 45 low -30 2024-08-13T14:00Z 2024-08-13T14:30Z 48 low -31 2024-08-13T14:30Z 2024-08-13T15:00Z 51 low -32 2024-08-13T15:00Z 2024-08-13T15:30Z 57 low -33 2024-08-13T15:30Z 2024-08-13T16:00Z 62 low -34 2024-08-13T16:00Z 2024-08-13T16:30Z 64 low -35 2024-08-13T16:30Z 2024-08-13T17:00Z 64 low -36 2024-08-13T17:00Z 2024-08-13T17:30Z 66 low -37 2024-08-13T17:30Z 2024-08-13T18:00Z 70 low -38 2024-08-13T18:00Z 2024-08-13T18:30Z 74 low -39 2024-08-13T18:30Z 2024-08-13T19:00Z 74 low -40 2024-08-13T19:00Z 2024-08-13T19:30Z 77 low -41 2024-08-13T19:30Z 2024-08-13T20:00Z 80 low -42 2024-08-13T20:00Z 2024-08-13T20:30Z 84 low -43 2024-08-13T20:30Z 2024-08-13T21:00Z 84 low -44 2024-08-13T21:00Z 2024-08-13T21:30Z 84 low -45 2024-08-13T21:30Z 2024-08-13T22:00Z 84 low -46 2024-08-13T22:00Z 2024-08-13T22:30Z 85 low -47 2024-08-13T22:30Z 2024-08-13T23:00Z 87 low -2024-08-13 15:17:55,018 - extract_carbon - INFO - DataFrame of Carbon Forecast Data: -2024-08-13 15:17:55,021 - extract_carbon - DEBUG - from to forecast carbon level -0 2024-08-12T23:00Z 2024-08-12T23:30Z 85 low -1 2024-08-12T23:30Z 2024-08-13T00:00Z 83 low -2 2024-08-13T00:00Z 2024-08-13T00:30Z 73 low -3 2024-08-13T00:30Z 2024-08-13T01:00Z 68 low -4 2024-08-13T01:00Z 2024-08-13T01:30Z 72 low -5 2024-08-13T01:30Z 2024-08-13T02:00Z 79 low -6 2024-08-13T02:00Z 2024-08-13T02:30Z 86 low -7 2024-08-13T02:30Z 2024-08-13T03:00Z 79 low -8 2024-08-13T03:00Z 2024-08-13T03:30Z 75 low -9 2024-08-13T03:30Z 2024-08-13T04:00Z 70 low -10 2024-08-13T04:00Z 2024-08-13T04:30Z 70 low -11 2024-08-13T04:30Z 2024-08-13T05:00Z 77 low -12 2024-08-13T05:00Z 2024-08-13T05:30Z 73 low -13 2024-08-13T05:30Z 2024-08-13T06:00Z 79 low -14 2024-08-13T06:00Z 2024-08-13T06:30Z 76 low -15 2024-08-13T06:30Z 2024-08-13T07:00Z 80 low -16 2024-08-13T07:00Z 2024-08-13T07:30Z 80 low -17 2024-08-13T07:30Z 2024-08-13T08:00Z 74 low -18 2024-08-13T08:00Z 2024-08-13T08:30Z 67 low -19 2024-08-13T08:30Z 2024-08-13T09:00Z 67 low -20 2024-08-13T09:00Z 2024-08-13T09:30Z 63 low -21 2024-08-13T09:30Z 2024-08-13T10:00Z 55 low -22 2024-08-13T10:00Z 2024-08-13T10:30Z 51 low -23 2024-08-13T10:30Z 2024-08-13T11:00Z 42 low 
-24 2024-08-13T11:00Z 2024-08-13T11:30Z 41 low -25 2024-08-13T11:30Z 2024-08-13T12:00Z 41 low -26 2024-08-13T12:00Z 2024-08-13T12:30Z 40 low -27 2024-08-13T12:30Z 2024-08-13T13:00Z 42 low -28 2024-08-13T13:00Z 2024-08-13T13:30Z 43 low -29 2024-08-13T13:30Z 2024-08-13T14:00Z 45 low -30 2024-08-13T14:00Z 2024-08-13T14:30Z 48 low -31 2024-08-13T14:30Z 2024-08-13T15:00Z 51 low -32 2024-08-13T15:00Z 2024-08-13T15:30Z 57 low -33 2024-08-13T15:30Z 2024-08-13T16:00Z 62 low -34 2024-08-13T16:00Z 2024-08-13T16:30Z 64 low -35 2024-08-13T16:30Z 2024-08-13T17:00Z 64 low -36 2024-08-13T17:00Z 2024-08-13T17:30Z 66 low -37 2024-08-13T17:30Z 2024-08-13T18:00Z 70 low -38 2024-08-13T18:00Z 2024-08-13T18:30Z 74 low -39 2024-08-13T18:30Z 2024-08-13T19:00Z 74 low -40 2024-08-13T19:00Z 2024-08-13T19:30Z 77 low -41 2024-08-13T19:30Z 2024-08-13T20:00Z 80 low -42 2024-08-13T20:00Z 2024-08-13T20:30Z 84 low -43 2024-08-13T20:30Z 2024-08-13T21:00Z 84 low -44 2024-08-13T21:00Z 2024-08-13T21:30Z 84 low -45 2024-08-13T21:30Z 2024-08-13T22:00Z 84 low -46 2024-08-13T22:00Z 2024-08-13T22:30Z 85 low -47 2024-08-13T22:30Z 2024-08-13T23:00Z 87 low diff --git a/pipeline/logs/extract_carbon_issues.log b/pipeline/logs/extract_carbon_issues.log deleted file mode 100644 index e69de29..0000000 diff --git a/requirements.txt b/requirements.txt index b66ac6a..54aa499 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,152 @@ -requests -pandas -pyarrow -snakeviz -pylint -boto3 -psycopg2 -python-dotenv -datetime +anyio==4.4.0 +appnope==0.1.4 +argon2-cffi==23.1.0 +argon2-cffi-bindings==21.2.0 +arrow==1.3.0 +astroid==3.2.4 +asttokens==2.4.1 +async-lru==2.0.4 +attrs==24.2.0 +babel==2.16.0 +backcall==0.2.0 +beautifulsoup4==4.12.3 +bleach==6.1.0 +boto3==1.34.159 +botocore==1.34.159 +build==1.2.1 +CacheControl==0.14.0 +certifi==2024.7.4 +cffi==1.17.0 +charset-normalizer==3.3.2 +cleo==2.1.0 +comm==0.2.2 +coverage==7.6.1 +crashtest==0.4.1 +debugpy==1.8.5 +decorator==5.1.1 +defusedxml==0.7.1 +dill==0.3.8 +distlib==0.3.8 +docopt==0.6.2 +dulwich==0.21.7 +executing==2.0.1 +fastjsonschema==2.20.0 +filelock==3.15.4 +fqdn==1.5.1 +git-filter-repo==2.45.0 +h11==0.14.0 +httpcore==1.0.5 +httplib2==0.22.0 +httpx==0.27.0 +idna==3.7 +iniconfig==2.0.0 +installer==0.7.0 +ipykernel==6.29.5 +ipython==8.12.3 +ipywidgets==8.1.3 +isoduration==20.11.0 +isort==5.13.2 +jaraco.classes==3.4.0 +jedi==0.19.1 +Jinja2==3.1.4 +jmespath==1.0.1 +json5==0.9.25 +jsonpointer==3.0.0 +jsonschema==4.23.0 +jsonschema-specifications==2023.12.1 +jupyter==1.0.0 +jupyter-console==6.6.3 +jupyter-events==0.10.0 +jupyter-lsp==2.2.5 +jupyter_client==8.6.2 +jupyter_core==5.7.2 +jupyter_server==2.14.2 +jupyter_server_terminals==0.5.3 +jupyterlab==4.2.4 +jupyterlab_pygments==0.3.0 +jupyterlab_server==2.27.3 +jupyterlab_widgets==3.0.11 +keyring==24.3.1 +magicmock==0.3 +MarkupSafe==2.1.5 +matplotlib-inline==0.1.7 +mccabe==0.7.0 +mistune==3.0.2 +more-itertools==10.4.0 +msgpack==1.0.8 +nbclient==0.10.0 +nbconvert==7.16.4 +nbformat==5.10.4 +nest-asyncio==1.6.0 +notebook==7.2.1 +notebook_shim==0.2.4 +numpy==2.0.1 +overrides==7.7.0 +packaging==24.1 +pandas==2.2.2 +pandocfilters==1.5.1 +parso==0.8.4 +pexpect==4.9.0 +pickleshare==0.7.5 +pkginfo==1.11.1 +platformdirs==4.2.2 +pluggy==1.5.0 +poetry==1.8.3 +poetry-core==1.9.0 +poetry-plugin-export==1.8.0 +prometheus_client==0.20.0 +prompt_toolkit==3.0.47 +psutil==6.0.0 +psycopg2==2.9.9 +ptyprocess==0.7.0 +pure_eval==0.2.3 +pyarrow==17.0.0 +pycparser==2.22 +Pygments==2.18.0 +pylint==3.2.6 +pyparsing==3.1.2 +pyproject_hooks==1.1.0 +pytest==8.3.2 
+pytest-cov==5.0.0
+python-dateutil==2.9.0.post0
+python-dotenv==1.0.1
+python-json-logger==2.0.7
+pytz==2024.1
+PyYAML==6.0.2
+pyzmq==26.1.0
+qtconsole==5.5.2
+QtPy==2.4.1
+rapidfuzz==3.9.6
+referencing==0.35.1
+requests==2.32.3
+requests-toolbelt==1.0.0
+rfc3339-validator==0.1.4
+rfc3986-validator==0.1.1
+rpds-py==0.20.0
+s3transfer==0.10.2
+Send2Trash==1.8.3
+setuptools==72.1.0
+shellingham==1.5.4
+six==1.16.0
+snakeviz==2.2.0
+sniffio==1.3.1
+soupsieve==2.5
+stack-data==0.6.3
+terminado==0.18.1
+tinycss2==1.3.0
+tomlkit==0.13.2
+tornado==6.4.1
+traitlets==5.14.3
+trove-classifiers==2024.7.2
+types-python-dateutil==2.9.0.20240316
+tzdata==2024.1
+uri-template==1.3.0
+urllib3==2.2.2
+virtualenv==20.26.3
+wcwidth==0.2.13
+webcolors==24.8.0
+webencodings==0.5.1
+websocket-client==1.8.0
+widgetsnbextension==4.0.11
+xattr==1.1.0
+yarg==0.1.9
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..1128c83
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,19 @@
+# Exists to run workflows
+
+
+from setuptools import setup, find_packages
+
+setup(
+    name='energy_monitor',
+    version='0.1.0',
+    description='tracks the key energy metrics and presents them in a dashboard',
+    author='Joshua',
+    packages=find_packages(),
+    install_requires=[],
+    entry_points={
+        'console_scripts': [
+
+        ],
+    },
+    python_requires='>=3.12',
+)
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index d7c6c54..f319e60 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,9 +1,9 @@
 """
 Fixtures for pytesting
 """
-import pytest
-from unittest.mock import MagicMock
 import logging
+from unittest.mock import MagicMock
+import pytest
 
 from pipeline.common import DataProcessor
 from pipeline.extract_generation import APIClient as APIClientGeneration
@@ -11,7 +11,7 @@
 from pipeline.extract_price import APIClient as APIClientPrice
 from pipeline.extract_carbon import APIClient as APIClientCarbon
 
-from tests.mock_data.mock_dataframes import get_simple_mock_dataframe
+from mock_data.mock_dataframes import get_simple_mock_dataframe
 
 @pytest.fixture
 def mock_logger():
diff --git a/tests/test_extract_carbon.py b/tests/test_extract_carbon.py
index efbe18b..2929424 100644
--- a/tests/test_extract_carbon.py
+++ b/tests/test_extract_carbon.py
@@ -5,7 +5,6 @@
 from pipeline.extract_carbon import CustomDataProcessor
 from unittest.mock import patch, MagicMock
 from requests.exceptions import RequestException
-from tests.mock_data.mock_dataframes import get_dated_mock_dataframe
 
 
 @patch('pipeline.extract_carbon.requests.get')
@@ -24,7 +23,7 @@ def test_fetch_data_success(mock_get, api_client_carbon):
     mock_get.assert_called_once_with(api_client_carbon.base_url, headers={'Accept': 'application/json'})
     assert result == {'data': 'Battle of Orléans'}
 
-@patch('pipeline.extract_generation.requests.get')
+@patch('pipeline.extract_carbon.requests.get')
 def test_fetch_data_failure(mock_get, api_client_carbon):
     """
     Test the fetch_data method when the API request fails.
@@ -34,9 +33,9 @@ def test_fetch_data_failure(mock_get, api_client_carbon):
     result = api_client_carbon.fetch_data()
 
     assert result is None
-    api_client_carbon.logger.error.assert_called_once_with("An error occurred: API request failed")
-from tests.mock_data.mock_dataframes import get_dated_mock_dataframe
+    api_client_carbon.logger.error.assert_called_once_with("An error occurred: %s", mock_get.side_effect)
+
 
 
 def test_process_data_with_valid_data():
     """
diff --git a/tests/test_extract_demand.py b/tests/test_extract_demand.py
index d19e71a..be1122a 100644
--- a/tests/test_extract_demand.py
+++ b/tests/test_extract_demand.py
@@ -6,7 +6,7 @@
 from unittest.mock import patch, MagicMock
 from datetime import datetime, timezone, timedelta
 from requests.exceptions import RequestException
-from tests.mock_data.mock_dataframes import get_dated_mock_dataframe
+from mock_data.mock_dataframes import get_dated_mock_dataframe
 
 
 def test_construct_default_params_manual(api_client_demand):
@@ -88,7 +88,7 @@ def test_fetch_data_failure(mock_get, api_client_demand):
 
     assert result is None
 
-    api_client_demand.logger.error.assert_called_once_with("An error occurred: API request failed")
+    api_client_demand.logger.error.assert_called_once_with("An error occurred: %s", mock_get.side_effect)
 
 def test_process_data_with_valid_data():
     """
diff --git a/tests/test_extract_generation.py b/tests/test_extract_generation.py
index 974eccd..57fb3fa 100644
--- a/tests/test_extract_generation.py
+++ b/tests/test_extract_generation.py
@@ -6,7 +6,7 @@
 from unittest.mock import patch, MagicMock
 from datetime import datetime, timezone, timedelta
 from requests.exceptions import RequestException
-from tests.mock_data.mock_dataframes import get_dated_mock_dataframe
+from mock_data.mock_dataframes import get_dated_mock_dataframe
 
 
 def test_construct_default_params_manual(api_client_generation):
@@ -50,7 +50,7 @@ def test_construct_default_params_dynamic(api_client_generation):
 
     assert params == expected_params
 
-    
+
 @patch('pipeline.extract_generation.APIClient.construct_default_params')
 @patch('pipeline.extract_generation.requests.get')
 def test_fetch_data_success(mock_get, mock_construct_default_params, api_client_generation):
@@ -88,7 +88,7 @@ def test_fetch_data_failure(mock_get, api_client_generation):
 
     assert result is None
 
-    api_client_generation.logger.error.assert_called_once_with("An error occurred: API request failed")
+    api_client_generation.logger.error.assert_called_once_with("An error occurred: %s", mock_get.side_effect)
 
 def test_process_data_with_valid_data():
     """
diff --git a/tests/test_extract_price.py b/tests/test_extract_price.py
index 072bd27..73b0957 100644
--- a/tests/test_extract_price.py
+++ b/tests/test_extract_price.py
@@ -4,7 +4,7 @@
 import pandas as pd
 import numpy as np
 from unittest.mock import patch, MagicMock, call
-from tests.mock_data.mock_dataframes import get_dated_mock_dataframe
+from mock_data.mock_dataframes import get_dated_mock_dataframe
 
 from pipeline.extract_demand import CustomDataProcessor