Issue663 update python dependancies #690

Open · wants to merge 15 commits into base: master
82 changes: 52 additions & 30 deletions data/data_manager.py
@@ -9,11 +9,10 @@

'''

import matplotlib.pyplot as plt

import pandas as pd
import numpy as np
import zipfile
from scipy import interpolate
import warnings
import os
import json
@@ -264,7 +263,7 @@ def save_data_and_jsons(self, fmu_path):
self.z_fmu.close()

def get_data(self, horizon=24*3600, interval=None, index=None,
variables=None, category=None, plot=False):
variables=None, category=None):
'''Retrieve test case data from the fmu. The data
is stored within the csv files that are
located in the resources folder of the test case fmu.
@@ -293,9 +292,7 @@ def get_data(self, horizon=24*3600, interval=None, index=None,
The possible options are specified at categories.json.
This argument cannot be used together with the `variables`
argument.
plot : Boolean, default is False
True if desired to plot the retrieved data


Returns
-------
data: dict
@@ -372,16 +369,6 @@ def get_data(self, horizon=24*3600, interval=None, index=None,
# Add starting year back to index desired by user
data_slice_reindexed.index = data_slice_reindexed.index + year_start

if plot:
if category is None:
to_plot = data_slice_reindexed.keys()
else:
to_plot = self.categories[category]
for var in to_plot:
data_slice_reindexed[var].plot()
plt.legend()
plt.show()

# Reset the index to keep the 'time' column in the data
# Transform data frame to dictionary
return data_slice_reindexed.reset_index().to_dict('list')
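
Note: with the ``plot`` option removed, plotting moves to the caller. A minimal client-side sketch of the old ``plot=True`` behaviour, assuming matplotlib and pandas are installed locally (they are no longer in the worker container); the ``case`` handle and argument values are illustrative:

import matplotlib.pyplot as plt
import pandas as pd

# 'case' is a hypothetical test case object exposing a DataManager instance
data = case.data_manager.get_data(horizon=24*3600, interval=3600,
                                  category='weather')
df = pd.DataFrame(data).set_index('time')   # returned dict includes a 'time' key
df.plot()
plt.legend()
plt.show()
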
Expand Down Expand Up @@ -465,17 +452,16 @@ def load_data_and_jsons(self):
for category in self.categories:
# Use linear interpolation for continuous variables
if any(col.startswith(key) for key in self.categories['weather']):
g = interpolate.interp1d(df['time'],df[col],
kind='linear')
self.case.data.loc[:,col] = \
g(self.case.data.index)

self.case.data.loc[:,col] = np.interp(self.case.data.index,\
df['time'],df[col])
# Use forward fill for discrete variables
elif any(col.startswith(key) for key in self.categories[category]):
g = interpolate.interp1d(df['time'],df[col],
kind='zero')
self.case.data.loc[:,col] = \
g(self.case.data.index)
else:

self.case.data.loc[:,col] = self.interp0(self.case.data.index,\
df['time'].values,df[col].values)

else:
warnings.warn('The following file does not have '\
'time column and therefore no data is going to '\
'be used from this file as test case data.', Warning)
@@ -549,14 +535,50 @@ def interpolate_data(self,df,index):
for key in df.keys():
# Use linear interpolation for continuous variables
if key in self.categories['weather']:
f = interpolate.interp1d(self.case.data.index,
self.case.data[key], kind='linear')
df.loc[:,key] = np.interp(index,self.case.data.index,
self.case.data[key])
# Use forward fill for discrete variables
else:
f = interpolate.interp1d(self.case.data.index,
self.case.data[key], kind='zero')
df.loc[:,key] = f(index)
df.loc[:,key] = self.interp0(index,self.case.data.index.values,
self.case.data[key].values)
return df

def interp0(self,x, xp, yp):
""" Zeroth order hold interpolation w/ same
(base) signature as numpy.interp.
Parameters
----------
x : np.array
The x-coordinates at which to evaluate the interpolated values.

xp : np.array
The x-coordinates of the data points, must be increasing.

yp : np.array
The y-coordinates of the data points, same length as xp.

Returns
-------
y : np.array
The interpolated values, same length as x.
"""

def func(x0,k):
if x0 <= xp[0]:
return yp[0], k
if x0 >= xp[-1]:
return yp[-1], k

while x0 >= xp[k]:
k += 1
return yp[k-1], k
k = 0
y = list()
for x0 in x:
y0,k = func(x0,k)
y.append(y0)
return np.array(y)


if __name__ == "__main__":
import sys
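For reference, a small standalone sketch (made-up sample values) contrasting the two replacements introduced in this file: ``numpy.interp`` for the former ``interp1d(kind='linear')``, and a zero-order hold equivalent to ``interp0`` above for the former ``interp1d(kind='zero')``:

import numpy as np

xp = np.array([0., 10., 20.])   # breakpoints, increasing
yp = np.array([1., 2., 3.])     # samples at the breakpoints
x = np.array([5., 10., 15.])    # query points

np.interp(x, xp, yp)            # linear: [1.5, 2.0, 2.5]

# Zero-order hold / forward fill; for increasing x this matches interp0 above
idx = np.clip(np.searchsorted(xp, x, side='right') - 1, 0, len(yp) - 1)
yp[idx]                         # [1.0, 2.0, 2.0]
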
7 changes: 2 additions & 5 deletions forecast/forecaster.py
@@ -31,7 +31,7 @@ def __init__(self, testcase):
self.case = testcase

def get_forecast(self,point_names, horizon=24*3600, interval=3600,
category=None, plot=False):
category=None):
'''Returns forecast of the test case data

Parameters
@@ -50,8 +50,6 @@ def get_forecast(self,point_names, horizon=24*3600, interval=3600,
data without filtering it by any category.
Possible options are 'weather', 'prices',
'emissions', 'occupancy', internalGains, 'setpoints'
plot : boolean, default is False
True if desired to plot the forecast

Returns
-------
@@ -68,7 +66,6 @@ def get_forecast(self,point_names, horizon=24*3600, interval=3600,
forecast = self.case.data_manager.get_data(variables=point_names,
horizon=horizon,
interval=interval,
category=category,
plot=plot)
category=category)

return forecast
13 changes: 6 additions & 7 deletions kpis/kpi_calculator.py
@@ -11,7 +11,6 @@

import numpy as np
import pandas as pd
from scipy.integrate import trapz
from collections import OrderedDict

class KPI_Calculator(object):
@@ -252,9 +251,9 @@ def get_thermal_discomfort(self):
dT_upper = data - UpperSetp
dT_upper[dT_upper<0]=0
self.tdis_dict[signal[:-1]+'dTlower_y'] += \
trapz(dT_lower,self._get_data_from_last_index('time',self.i_last_tdis))/3600.
np.trapezoid(dT_lower,self._get_data_from_last_index('time',self.i_last_tdis))/3600.
self.tdis_dict[signal[:-1]+'dTupper_y'] += \
trapz(dT_upper,self._get_data_from_last_index('time',self.i_last_tdis))/3600.
np.trapezoid(dT_upper,self._get_data_from_last_index('time',self.i_last_tdis))/3600.
self.tdis_tot = self.tdis_tot + \
self.tdis_dict[signal[:-1]+'dTlower_y']/len(self.sources_tdis) + \
self.tdis_dict[signal[:-1]+'dTupper_y']/len(self.sources_tdis) # Normalize total by number of sources
@@ -299,7 +298,7 @@ def get_iaq_discomfort(self):
dI_upper = data - UpperSetp
dI_upper[dI_upper<0]=0
self.idis_dict[signal[:-1]+'dIupper_y'] += \
trapz(dI_upper, self._get_data_from_last_index('time',self.i_last_idis))/3600.
np.trapezoid(dI_upper, self._get_data_from_last_index('time',self.i_last_idis))/3600.
self.idis_tot = self.idis_tot + \
self.idis_dict[signal[:-1]+'dIupper_y']/len(self.sources_idis) # Normalize total by number of sources

@@ -334,7 +333,7 @@ def get_energy(self):
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_ener))
self.ener_dict[signal] += \
trapz(pow_data,
np.trapezoid(pow_data,
self._get_data_from_last_index('time',self.i_last_ener))*2.77778e-7 # Convert to kWh
self.ener_dict_by_source[source+'_'+signal] += \
self.ener_dict[signal]
@@ -542,7 +541,7 @@ def get_cost(self, scenario='Constant'):
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_cost))
self.cost_dict[signal] += \
trapz(np.multiply(source_price_data,pow_data),
np.trapezoid(np.multiply(source_price_data,pow_data),
self._get_data_from_last_index('time',self.i_last_cost))*factor
self.cost_dict_by_source[source+'_'+signal] += \
self.cost_dict[signal]
@@ -586,7 +585,7 @@ def get_emissions(self):
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_emis))
self.emis_dict[signal] += \
trapz(np.multiply(source_emissions_data,pow_data),
np.trapezoid(np.multiply(source_emissions_data,pow_data),
self._get_data_from_last_index('time',self.i_last_emis))*2.77778e-7 # Convert to kWh
self.emis_dict_by_source[source+'_'+signal] += \
self.emis_dict[signal]
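As a quick illustration of the drop-in substitution above (``numpy.trapezoid`` is available from numpy 2.0 onward), with made-up power samples and the same W·s-to-kWh factor used in ``get_energy``:

import numpy as np

time = np.array([0., 1800., 3600.])       # seconds
power = np.array([1000., 1500., 1200.])   # W
energy_kwh = np.trapezoid(power, time) * 2.77778e-7   # J -> kWh
# (1000+1500)/2*1800 + (1500+1200)/2*1800 = 4.68e6 J, i.e. about 1.3 kWh
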
5 changes: 5 additions & 0 deletions releasenotes.md
@@ -8,6 +8,11 @@ Released on xx/xx/xxxx.

- Add note to ``README.md`` about using environment variable ``BOPTEST_TIMEOUT`` to edit the timeout period for idle workers. This is for [#715](https://github.com/ibpsa/project1-boptest/issues/715).
- Add note to ``README.md`` about a Julia interface implemented by [BOPTestAPI.jl](https://terion-io.github.io/BOPTestAPI.jl/stable/). This is for [#707](https://github.com/ibpsa/project1-boptest/issues/707).
- Remove the scipy and matplotlib dependencies from the worker container. ``scipy.integrate.trapz`` was replaced with ``numpy.trapezoid`` in ``kpis/kpi_calculator.py``,
``scipy.interpolate.interp1d`` with ``kind='linear'`` was replaced with ``numpy.interp``, and ``interp1d`` with ``kind='zero'`` was replaced with a custom zero-order hold interpolation in ``data/data_manager.py``.
Update pyfmi from 2.12 to 2.14, numpy from 1.26.4 to 2.2.1, and pandas from 1.5.3 to 2.2.3.
Update the worker Python from 3.10 to 3.11, and the miniconda installer from py310_24.3.0-0-Linux-x86_64 to py311_24.7.1-0-Linux-x86_64.
This is for [#663](https://github.com/ibpsa/project1-boptest/issues/663).


## BOPTEST v0.7.0
6 changes: 3 additions & 3 deletions service/worker/Dockerfile
@@ -18,13 +18,13 @@ RUN wget https://spawn.s3.amazonaws.com/custom/Spawn-$SPAWN_VERSION-Linux.tar.gz
COPY service/worker/requirements/production.txt /boptest/

# Download and install miniconda, pyfmi, and remaining python requirements
RUN wget https://repo.anaconda.com/miniconda/Miniconda3-py310_24.3.0-0-Linux-x86_64.sh -O /miniconda.sh \
RUN wget https://repo.anaconda.com/miniconda/Miniconda3-py311_24.7.1-0-Linux-x86_64.sh -O /miniconda.sh \
&& /bin/bash /miniconda.sh -b -p /miniconda \
&& . miniconda/bin/activate \
&& conda update -n base -c defaults conda \
&& conda create --name pyfmi3 python=3.10 -y \
&& conda create --name pyfmi3 python=3.11 -y \
&& conda activate pyfmi3 \
&& conda install -c conda-forge pyfmi=2.12 -y \
&& conda install -c conda-forge pyfmi=2.14 -y \
&& pip install -U pip setuptools \
&& python -m pip install -r /boptest/production.txt

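A hypothetical post-build smoke test for the rebuilt ``pyfmi3`` environment (not part of this PR), run with that environment's Python interpreter:

# Confirm the bumped packages import and report the expected major versions.
import numpy, pandas, pyfmi   # importing pyfmi alone confirms the conda package resolved
print(numpy.__version__)      # expected 2.x after this PR
print(pandas.__version__)     # expected 2.2.x
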
7 changes: 3 additions & 4 deletions service/worker/requirements/production.txt
@@ -4,7 +4,6 @@ redis==3.3.11
requests==2.22.0
msgpack==1.0.4
kubernetes==18.20.0
matplotlib==3.7.1
scipy==1.13
pandas==1.5.3
numpy==1.26.4
numpy==2.1
pandas==2.2.3