(draft) update model

dwreeves committed May 12, 2024
1 parent 4c13192 commit 0f9509e
Showing 11 changed files with 573 additions and 57 deletions.
150 changes: 150 additions & 0 deletions .dockerignore
@@ -0,0 +1,150 @@
docs/site/
.vscode/
*.code-workspace
.idea/
.DS_Store
Pipfile
Pipfile.lock
pgadmin4/
mkdocs_env/
latest.dump


# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/
7 changes: 5 additions & 2 deletions Dockerfile
@@ -3,8 +3,12 @@ FROM python:3.8
 MAINTAINER Daniel Reeves "xdanielreeves@gmail.com"
 
 WORKDIR /
+
+ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
+RUN /install.sh && rm /install.sh
+
 COPY requirements.txt app/requirements.txt
-RUN pip install --no-cache-dir -r app/requirements.txt
+RUN /root/.cargo/bin/uv pip install --system --no-cache -r app/requirements.txt
 
 COPY ./ /home/
 WORKDIR /home/
@@ -13,6 +17,5 @@ ENV PYTHONPATH=/home
 EXPOSE 80
 
 CMD ["gunicorn", \
-     "-k", "egg:meinheld#gunicorn_worker", \
      "-c", "gunicorn_conf.py", \
      "app.main:create_app()"]
70 changes: 58 additions & 12 deletions app/admin/views/data.py
@@ -16,10 +16,12 @@
 from app.data.celery import celery_app
 from app.data.celery import combine_data_v1_task
 from app.data.celery import combine_data_v2_task
+from app.data.celery import combine_data_v3_task
 from app.data.celery import live_hobolink_data_task
 from app.data.celery import live_usgs_data_task
 from app.data.celery import predict_v1_task
 from app.data.celery import predict_v2_task
+from app.data.celery import predict_v3_task
 from app.data.celery import update_db_task
 from app.data.database import execute_sql
 from app.data.database import get_current_time
@@ -151,6 +153,17 @@ def source_usgs(self):
             data_source='usgs'
         ))
 
+    @expose('/csv/src/processed_data_v1_source')
+    def source_combine_data_v1(self):
+        async_result = combine_data_v1_task.s(
+            export_name='code_for_boston_export_90d',
+            days_ago=90).delay()
+        return redirect(url_for(
+            'admin_downloadview.csv_wait',
+            task_id=async_result.id,
+            data_source='combined'
+        ))
+
     @expose('/csv/src/processed_data_v2_source')
     def source_combine_data_v2(self):
         async_result = combine_data_v2_task.s(
@@ -162,9 +175,9 @@ def source_combine_data_v2(self):
             data_source='combined'
         ))
 
-    @expose('/csv/src/processed_data_v1_source')
-    def source_combine_data_v1(self):
-        async_result = combine_data_v1_task.s(
+    @expose('/csv/src/processed_data_v3_source')
+    def source_combine_data_v3(self):
+        async_result = combine_data_v3_task.s(
             export_name='code_for_boston_export_90d',
             days_ago=90).delay()
         return redirect(url_for(
@@ -173,6 +186,17 @@ def source_combine_data_v1(self):
             data_source='combined'
         ))
 
+    @expose('/csv/src/prediction_v1_source')
+    def source_prediction_v1(self):
+        async_result = predict_v1_task.s(
+            export_name='code_for_boston_export_90d',
+            days_ago=90).delay()
+        return redirect(url_for(
+            'admin_downloadview.csv_wait',
+            task_id=async_result.id,
+            data_source='prediction'
+        ))
+
     @expose('/csv/src/prediction_v2_source')
     def source_prediction_v2(self):
         async_result = predict_v2_task.s(
@@ -184,9 +208,9 @@ def source_prediction_v2(self):
             data_source='prediction'
         ))
 
-    @expose('/csv/src/prediction_v1_source')
-    def source_prediction_v1(self):
-        async_result = predict_v1_task.s(
+    @expose('/csv/src/prediction_v3_source')
+    def source_prediction_v3(self):
+        async_result = predict_v3_task.s(
             export_name='code_for_boston_export_90d',
             days_ago=90).delay()
         return redirect(url_for(
@@ -251,6 +275,17 @@ def sync_source_usgs(self):
             filename='usgs_source.csv'
         )
 
+    @expose('/csv/src_sync/processed_data_v1_source')
+    def sync_source_combine_data_v1(self):
+        df = combine_data_v1_task.run(
+            days_ago=90,
+            export_name='code_for_boston_export_90d'
+        )
+        return send_csv_attachment_of_dataframe(
+            df=pd.DataFrame(df),
+            filename='model_processed_data.csv'
+        )
+
     @expose('/csv/src_sync/processed_data_v2_source')
     def sync_source_combine_data_v2(self):
         df = combine_data_v2_task.run(
@@ -262,9 +297,9 @@ def sync_source_combine_data_v2(self):
             filename='model_processed_data.csv'
        )
 
-    @expose('/csv/src_sync/processed_data_v1_source')
-    def sync_source_combine_data_v1(self):
-        df = combine_data_v1_task.run(
+    @expose('/csv/src_sync/processed_data_v3_source')
+    def sync_source_combine_data_v3(self):
+        df = combine_data_v3_task.run(
             days_ago=90,
             export_name='code_for_boston_export_90d'
         )
@@ -273,6 +308,17 @@ def sync_source_combine_data_v1(self):
             filename='model_processed_data.csv'
         )
 
+    @expose('/csv/src_sync/prediction_v1_source')
+    def sync_source_prediction_v1(self):
+        df = predict_v1_task.run(
+            days_ago=90,
+            export_name='code_for_boston_export_90d'
+        )
+        return send_csv_attachment_of_dataframe(
+            df=pd.DataFrame(df),
+            filename='prediction_source.csv'
+        )
+
     @expose('/csv/src_sync/prediction_v2_source')
     def sync_source_prediction_v2(self):
         df = predict_v2_task.run(
@@ -284,9 +330,9 @@ def sync_source_prediction_v2(self):
             filename='prediction_source.csv'
         )
 
-    @expose('/csv/src_sync/prediction_v1_source')
-    def sync_source_prediction_v1(self):
-        df = predict_v1_task.run(
+    @expose('/csv/src_sync/prediction_v3_source')
+    def sync_source_prediction_v3(self):
+        df = predict_v3_task.run(
             days_ago=90,
             export_name='code_for_boston_export_90d'
         )
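Each model version gets a symmetric pair of admin routes: the /csv/src/... handlers enqueue a Celery task with .s(...).delay() and redirect to the csv_wait polling view, while the /csv/src_sync/... handlers call Task.run() in-process and return the CSV attachment directly. A minimal standalone sketch of those two invocation styles (the broker URL and task body below are placeholders for illustration, not the app's real config):

# Sketch of the async vs. sync task patterns used by the handlers above.
# Broker URL and task body are assumed placeholders.
from celery import Celery

celery_app = Celery('worker', broker='redis://localhost:6379/0')

@celery_app.task
def predict_v3_task(export_name: str, days_ago: int) -> list:
    return []  # placeholder: the real task builds and returns model records

# /csv/src/... style: enqueue, then hand the task id to a polling view.
async_result = predict_v3_task.s(
    export_name='code_for_boston_export_90d', days_ago=90).delay()
print(async_result.id)  # the task_id passed to admin_downloadview.csv_wait

# /csv/src_sync/... style: run the task body synchronously in-process.
records = predict_v3_task.run(
    days_ago=90, export_name='code_for_boston_export_90d')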
4 changes: 2 additions & 2 deletions app/blueprints/api_v1.py
@@ -20,7 +20,7 @@
 from app.data.globals import cache
 from app.data.globals import reaches
 from app.data.globals import website_options
-from app.data.processing.predictive_models.v2 import MODEL_VERSION
+from app.data.processing.predictive_models.v3 import MODEL_YEAR
 
 
 bp = Blueprint('api', __name__, url_prefix='/api')
@@ -40,7 +40,7 @@ def predictive_model_api():
     selected_hours = max(selected_hours, 1)
 
     return jsonify({
-        'model_version': MODEL_VERSION,
+        'model_version': MODEL_YEAR,
         'time_returned': get_current_time(),
         'is_boating_season': website_options.boating_season,
         'model_outputs': [
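The import swap keeps the public contract intact: the JSON key is still 'model_version' even though the constant behind it is now MODEL_YEAR from the v3 model package. A hypothetical payload shape, where only the keys come from the code above and every value is invented for illustration:

# Hypothetical /api response shape; keys match the jsonify() call above,
# values are invented placeholders.
example_response = {
    'model_version': 2024,  # assumed: MODEL_YEAR holds a year, per the new name
    'time_returned': '2024-05-12T12:00:00-04:00',
    'is_boating_season': True,
    'model_outputs': [],  # per-reach predictions; structure not shown in this diff
}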
26 changes: 22 additions & 4 deletions app/data/celery.py
@@ -75,8 +75,8 @@ def live_usgs_data_task(*args, **kwargs) -> RecordsType:
 
 @celery_app.task
 def combine_data_v1_task(*args, **kwargs) -> RecordsType:
-    from app.data.processing.core import combine_v2_job
-    df = combine_v2_job(*args, **kwargs)
+    from app.data.processing.core import combine_v1_job
+    df = combine_v1_job(*args, **kwargs)
     return df.to_dict(orient='records')
 
 
@@ -87,6 +87,13 @@ def combine_data_v2_task(*args, **kwargs) -> RecordsType:
     return df.to_dict(orient='records')
 
 
+@celery_app.task
+def combine_data_v3_task(*args, **kwargs) -> RecordsType:
+    from app.data.processing.core import combine_v3_job
+    df = combine_v3_job(*args, **kwargs)
+    return df.to_dict(orient='records')
+
+
 @celery_app.task
 def predict_v1_task(*args, **kwargs) -> RecordsType:
     from app.data.processing.core import predict_v1_job
@@ -101,6 +108,13 @@ def predict_v2_task(*args, **kwargs) -> RecordsType:
     return df.to_dict(orient='records')
 
 
+@celery_app.task
+def predict_v3_task(*args, **kwargs) -> RecordsType:
+    from app.data.processing.core import predict_v3_job
+    df = predict_v3_job(*args, **kwargs)
+    return df.to_dict(orient='records')
+
+
 @celery_app.task
 def update_db_task() -> None:
     from app.data.processing.core import update_db
@@ -127,9 +141,13 @@ def send_database_exports_task() -> None:
 # Down here, we define the types for the tasks to help the IDE.
 live_hobolink_data_task: WithAppContextTask
 live_usgs_data_task: WithAppContextTask
-combine_data_task: WithAppContextTask
+combine_data_v1_task: WithAppContextTask
+combine_data_v2_task: WithAppContextTask
+combine_data_v3_task: WithAppContextTask
 clear_cache_task: WithAppContextTask
-prediction_task: WithAppContextTask
+predict_v1_task: WithAppContextTask
+predict_v2_task: WithAppContextTask
+predict_v3_task: WithAppContextTask
 update_db_task: WithAppContextTask
 update_website_task: WithAppContextTask
 send_database_exports_task: WithAppContextTask
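Two details are worth noting in this file. First, the v1 hunk appears to be a bug fix: combine_data_v1_task had been delegating to combine_v2_job. Second, the annotations at the bottom exist because @celery_app.task wraps each function in a Task subclass that static analyzers cannot see through; re-declaring each name as WithAppContextTask restores completion for .s/.delay/.run. A rough sketch of that pattern, with the body of WithAppContextTask assumed here rather than taken from the app:

# Sketch: why the module-level annotations help IDEs. Celery's decorator
# returns a Task instance, but type checkers still see a plain function.
from celery.app.task import Task

class WithAppContextTask(Task):
    # Assumed shape: a Task subclass that wraps execution in a Flask app
    # context, e.g. `with app.app_context(): ...` inside __call__.
    pass

predict_v3_task: WithAppContextTask  # IDE now resolves predict_v3_task.delay(...)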