Skip to content

Commit

Permalink
Adherence to always report withdrawal and completed after (#4363)
Browse files Browse the repository at this point in the history
Adherence report changes:
- always include a row for withdrawal, and use 'completed date' column
for date of withdrawal
- include completed visits even if they are completed after date of
withdrawal

qb_timeline changes:
- include rows after withdrawal for implementation of the above

migration changes:
- now that the timeline and adherence data must be rebuilt for every
withdrawn user, the migration takes far too long to run and would render
the service unavailable until it completes. Therefore, it only removes
the timeline and adherence data for that set of users; they will be
rebuilt when first requested.

NB: this PR merges into another branch, so that all changes can land as
a single update on the develop branch.

---------

Co-authored-by: Justin McReynolds <mcjustin@uw.edu>
Co-authored-by: Ivan Cvitkovic <ivanc@uw.edu>
  • Loading branch information
3 people authored Mar 5, 2024
1 parent bef7625 commit a6af532
Show file tree
Hide file tree
Showing 18 changed files with 337 additions and 271 deletions.
88 changes: 0 additions & 88 deletions manage.py
Original file line number Diff line number Diff line change
Expand Up @@ -615,94 +615,6 @@ def update_qnr(qnr_id, link_id, actor, noop, replacement):
click.echo(message)


@click.option('--subject_id', type=int, multiple=True, help="Subject user ID", required=True)
@click.option(
'--actor',
default="__system__",
required=False,
help='email address of user taking this action, for audit trail'
)
@app.cli.command()
def remove_post_withdrawn_qnrs(subject_id, actor):
"""Remove QNRs posted beyond subject's withdrawal date"""
from sqlalchemy.types import DateTime
from portal.cache import cache
from portal.models.questionnaire_bank import trigger_date

rs_id = 0 # only base study till need arises
acting_user = get_actor(actor, require_admin=True)

for subject_id in subject_id:
# Confirm user has withdrawn
subject = get_target(id=subject_id)
study_id = subject.external_study_id

# Make sure we're not working w/ stale timeline data
QuestionnaireResponse.purge_qb_relationship(
subject_id=subject_id,
research_study_id=rs_id,
acting_user_id=acting_user.id)
cache.delete_memoized(trigger_date)
update_users_QBT(
subject_id,
research_study_id=rs_id,
invalidate_existing=True)

deceased_date = None if not subject.deceased else subject.deceased.timestamp
withdrawn_visit = QBT.withdrawn_qbd(subject_id, rs_id)
if not withdrawn_visit:
raise ValueError("Only applicable to withdrawn users")

# Obtain all QNRs submitted beyond withdrawal date
query = QuestionnaireResponse.query.filter(
QuestionnaireResponse.document["authored"].astext.cast(DateTime) >
withdrawn_visit.relative_start
).filter(
QuestionnaireResponse.subject_id == subject_id).with_entities(
QuestionnaireResponse.id,
QuestionnaireResponse.questionnaire_bank_id,
QuestionnaireResponse.qb_iteration,
QuestionnaireResponse.document["questionnaire"]["reference"].
label("instrument"),
QuestionnaireResponse.document["authored"].
label("authored")
).order_by(QuestionnaireResponse.document["authored"])

for qnr in query:
# match format in bug report for easy diff
sub_padding = " "*(11 - len(str(subject_id)))
stdy_padding = " "*(12 - len(study_id))
out = (
f"{sub_padding}{subject_id} | "
f"{study_id}{stdy_padding}| "
f"{withdrawn_visit.relative_start.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]} | "
f"{qnr.authored} | ")

# do not include any belonging to the last active visit, unless
# they came in after deceased date
if (
qnr.questionnaire_bank_id == withdrawn_visit.qb_id and
qnr.qb_iteration == withdrawn_visit.iteration and
(not deceased_date or FHIR_datetime.parse(
qnr.authored) < deceased_date)):
print(f"{out}keep")
continue
if "irondemog" in qnr.instrument:
print(f"{out}keep (indefinite)")
continue
print(f"{out}delete")
db.session.delete(QuestionnaireResponse.query.get(qnr.id))
auditable_event(
message=(
"deleted questionnaire response submitted beyond "
"withdrawal visit as per request by PCCTC"),
context="assessment",
user_id=acting_user.id,
subject_id=subject_id)
db.session.commit()
return


@click.option('--src_id', type=int, help="Source Patient ID (WILL BE DELETED!)")
@click.option('--tgt_id', type=int, help="Target Patient ID")
@click.option(
Expand Down
5 changes: 2 additions & 3 deletions portal/config/config.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
"""Configuration"""
import os

import redis

from portal.factories.redis import create_redis
from portal.models.role import ROLE

SITE_CFG = 'site.cfg'
Expand Down Expand Up @@ -152,7 +151,7 @@ class BaseConfig(object):
REDIS_URL
)

SESSION_REDIS = redis.from_url(SESSION_REDIS_URL)
SESSION_REDIS = create_redis(SESSION_REDIS_URL)

UPDATE_PATIENT_TASK_BATCH_SIZE = int(
os.environ.get('UPDATE_PATIENT_TASK_BATCH_SIZE', 16)
Expand Down
36 changes: 36 additions & 0 deletions portal/config/eproms/Questionnaire.json
Original file line number Diff line number Diff line change
Expand Up @@ -5851,6 +5851,42 @@
"display": "Other",
"code": "irondemog_v3.26.8"
}
},
{
"valueCoding": {
"display": "African",
"code": "irondemog_v3.26.9"
}
},
{
"valueCoding": {
"display": "Black",
"code": "irondemog_v3.26.10"
}
},
{
"valueCoding": {
"display": "Coloured",
"code": "irondemog_v3.26.11"
}
},
{
"valueCoding": {
"display": "Indian",
"code": "irondemog_v3.26.12"
}
},
{
"valueCoding": {
"display": "White / Caucasian",
"code": "irondemog_v3.26.13"
}
},
{
"valueCoding": {
"display": "Other",
"code": "irondemog_v3.26.14"
}
}
]
},
Expand Down
4 changes: 4 additions & 0 deletions portal/factories/redis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
import redis

def create_redis(url):
return redis.Redis.from_url(url)
120 changes: 72 additions & 48 deletions portal/migrations/versions/3c871e710277_.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,12 @@
"""
from alembic import op
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.functions import count
from sqlalchemy.sql.functions import func

from portal.cache import cache
from portal.models.adherence_data import AdherenceData
from portal.models.research_study import BASE_RS_ID, EMPRO_RS_ID
from portal.models.qb_timeline import update_users_QBT
from portal.models.qb_timeline import QBT, update_users_QBT
from portal.models.questionnaire_bank import trigger_date
from portal.models.questionnaire_response import (
QuestionnaireResponse,
Expand Down Expand Up @@ -129,21 +130,25 @@ def upgrade():
session = Session(bind=bind)

for study_id in (BASE_RS_ID, EMPRO_RS_ID):
subquery = session.query(UserConsent.user_id).distinct().filter(
# due to changes in adherence report for withdrawn users
# this query is now simply any withdrawn patient who isn't
# deleted from the system.
subquery = session.query(User.id).filter(
User.deleted_id.is_(None)).subquery()
query = session.query(UserConsent.user_id.distinct()).filter(
UserConsent.research_study_id == study_id).filter(
UserConsent.status == 'suspended').subquery()
query = session.query(
count(UserConsent.user_id), UserConsent.user_id).filter(
UserConsent.research_study_id == study_id).filter(
UserConsent.user_id.in_(subquery)).group_by(
UserConsent.user_id).having(count(UserConsent.user_id) > 2)
for num, patient_id in query:
UserConsent.status == "suspended").filter(
UserConsent.user_id.in_(subquery))

delay_timeline_updates_till_after_migration = True
slow_report_details = False
delete_adh_ids = []
for row in query:
patient_id = row[0]
if patient_id in (719, 1186, 1305):
# special cases best left alone
continue
user = User.query.get(patient_id)
if user.deleted:
continue
consent_date, withdrawal_date = consent_withdrawal_dates(
user, study_id)
if withdrawal_date is None:
Expand All @@ -152,49 +157,68 @@ def upgrade():
# no change needed in this situation
continue

# report if dates don't match spreadsheet in IRONN-210
cd_str = '{dt.day}-{dt:%b}-{dt:%y}'.format(dt=consent_date)
wd_str = '{dt.day}-{dt:%b}-{dt:%y}'.format(dt=withdrawal_date)
try:
match = verified_user_consent_dates[study_id][patient_id]
if (cd_str, wd_str) != match:
print(f"user_id {patient_id} \t {cd_str} \t {wd_str}")
print(" vs expected:")
print(f"\t\t {match[0]} \t {match[1]}")
except KeyError:
# user found to not see timeline change
pass

# fake an adherence cache run to avoid unnecessary and more
# important, to prevent from locking out a subsequent update
# needed after recognizing a real change below
adherence_cache_moderation = CacheModeration(key=ADHERENCE_DATA_KEY.format(
patient_id=patient_id,
research_study_id=study_id))
adherence_cache_moderation.run_now()

b4_state = capture_patient_state(patient_id)
update_users_QBT(
patient_id,
research_study_id=study_id,
invalidate_existing=True)
_, _, _, any_changes = present_before_after_state(
patient_id, study_id, b4_state)
if not any_changes:
continue
if slow_report_details:
# report if dates don't match spreadsheet in IRONN-210
cd_str = '{dt.day}-{dt:%b}-{dt:%y}'.format(dt=consent_date)
wd_str = '{dt.day}-{dt:%b}-{dt:%y}'.format(dt=withdrawal_date)
try:
match = verified_user_consent_dates[study_id][patient_id]
if (cd_str, wd_str) != match:
print(f"user_id {patient_id} \t {cd_str} \t {wd_str}")
print(" vs expected:")
print(f"\t\t {match[0]} \t {match[1]}")
except KeyError:
# user found to not see timeline change
pass

# fake an adherence cache run to avoid unnecessary and more
# important, to prevent from locking out a subsequent update
# needed after recognizing a real change below
adherence_cache_moderation = CacheModeration(key=ADHERENCE_DATA_KEY.format(
patient_id=patient_id,
research_study_id=study_id))
adherence_cache_moderation.run_now()

b4_state = capture_patient_state(patient_id)
update_users_QBT(
patient_id,
research_study_id=study_id,
invalidate_existing=True)
_, _, _, any_changes = present_before_after_state(
patient_id, study_id, b4_state)
if not any_changes:
continue

print(f"{patient_id} changed, purge old adherence data and relationships")
adherence_cache_moderation.reset()

print(f"{patient_id} changed, purge old adherence data and relationships")
adherence_cache_moderation.reset()
QuestionnaireResponse.purge_qb_relationship(
subject_id=patient_id,
research_study_id=study_id,
acting_user_id=patient_id)
cache.delete_memoized(trigger_date)
update_users_QBT(
patient_id,
research_study_id=study_id,
invalidate_existing=True)

if delay_timeline_updates_till_after_migration:
session.query(QBT).filter(QBT.user_id == patient_id).filter(
QBT.research_study_id == study_id).delete()
adh_ids = session.query(AdherenceData.id).filter(
AdherenceData.patient_id == patient_id).filter(
AdherenceData.rs_id_visit.like(f"{study_id}:%")
)
for ad_id in adh_ids:
delete_adh_ids.append(ad_id)
else:
update_users_QBT(
patient_id,
research_study_id=study_id,
invalidate_existing=True)

# SQL alchemy can't combine `like` expression with delete op.
for ad_id in delete_adh_ids:
# yes this should be possible in a single stmt,
# not a loop, but no dice
session.query(AdherenceData).filter(
AdherenceData.id == ad_id).delete()

def downgrade():
"""no downgrade available"""
Expand Down
12 changes: 11 additions & 1 deletion portal/migrations/versions/66368e673005_.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@
"""
from alembic import op
from datetime import datetime
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker

from portal.models.user import User
from portal.models.user_consent import consent_withdrawal_dates


# revision identifiers, used by Alembic.
revision = '66368e673005'
Expand Down Expand Up @@ -49,6 +51,14 @@ def upgrade():
if status and status[0] != "Not Yet Available":
continue

# if the patient is withdrawn, skip over, will get picked
# up in migration 3c871e710277, going out in same release
patient = User.query.get(patient_id)
_, withdrawal_date = consent_withdrawal_dates(
patient, 1)
if withdrawal_date:
continue

# purge the user's EMPRO adherence rows to force refresh
session.execute(
"DELETE FROM adherence_data WHERE"
Expand Down
Loading

0 comments on commit a6af532

Please sign in to comment.