Commit

[PP-2055] Ensure that local analytics events are committed to the database by running the event collection within a transaction. (#2250)

Also use one transaction per event collected to minimize database lock durations.
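
For context, both changed files adopt the same pattern, roughly sketched below (illustrative names only, assuming a plain SQLAlchemy Session with no transaction already open; this is not the project's actual code): each pending event is written inside its own short-lived transaction, so a slow analytics backend never holds row locks for the whole batch.

from sqlalchemy.orm import Session


def flush_pending_events(session: Session, pending: list[dict], collect_event) -> None:
    # Iterate over a copy so items can be removed from the original list.
    for event in list(pending):
        # Session.begin() opens a transaction that commits when the block
        # exits cleanly, so each event is durable before the next one starts.
        with session.begin():
            collect_event(**event)
        pending.remove(event)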
dbernstein authored Jan 15, 2025
1 parent f417b02 commit c362024
Showing 2 changed files with 10 additions and 4 deletions.
8 changes: 6 additions & 2 deletions src/palace/manager/api/monitor.py
@@ -64,9 +64,13 @@ def delete(self, row) -> None:
     def after_commit(self) -> None:
         super().after_commit()
         copy_of_list = list(self._events_to_be_logged)
+
         for event in copy_of_list:
-            self.services.analytics.collect_event(**event)
-            self._events_to_be_logged.remove(event)
+            # start a separate transaction for each event in order to
+            # minimize database lock durations
+            with self._db.begin() as transaction:
+                self.services.analytics.collect_event(**event)
+                self._events_to_be_logged.remove(event)
 
 
 class LoanReaper(LoanlikeReaperMonitor):
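A property of the context-manager form worth noting (assuming self._db is a SQLAlchemy Session, as the begin() call suggests): the transaction commits only if the block exits without an exception and rolls back otherwise, so an event whose collection fails is neither partially written nor dropped from _events_to_be_logged. A rough illustration with hypothetical names, not the repository's code:

from sqlalchemy.orm import Session


def try_log_event(db: Session, pending: list[dict], collect_event) -> bool:
    event = pending[0]
    try:
        with db.begin():           # BEGIN ... COMMIT around the block
            collect_event(**event)
            pending.remove(event)  # only reached if collection succeeded
    except Exception:
        # the transaction was rolled back; the event stays queued for a retry
        return False
    return True
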
6 changes: 4 additions & 2 deletions src/palace/manager/celery/tasks/opds_odl.py
@@ -216,8 +216,10 @@ def collect_events(
     We perform this operation outside after completed the transaction to ensure that any row locks
     are held for the shortest possible duration in case writing to the s3 analytics provider is slow.
     """
-    with task.session() as session:
-        for e in events:
+
+    for e in events:
+        with task.transaction() as session:
+            # one transaction per event to minimize possible database lock durations
             library = session.merge(e.library)
             license_pool = session.merge(e.license_pool)
             patron = session.merge(e.patron)
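One detail the per-event transaction makes more visible (a general SQLAlchemy point rather than anything specific to task.transaction(), whose exact semantics are not shown here): objects such as e.library and e.patron were loaded under an earlier session, so each fresh transaction re-attaches them with Session.merge() before use. A minimal sketch with hypothetical names:

from sqlalchemy.orm import Session


def attach_event_objects(session: Session, event) -> tuple:
    # merge() locates or loads a persistent copy bound to *this* session and
    # returns it; the detached originals passed in are left untouched.
    library = session.merge(event.library)
    license_pool = session.merge(event.license_pool)
    patron = session.merge(event.patron)
    return library, license_pool, patron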
