From c52f25df27a9ceceb41dd8add8de5b9d4c4683c5 Mon Sep 17 00:00:00 2001
From: kena vyas
Date: Tue, 21 Jan 2025 15:54:46 +0000
Subject: [PATCH] fix query and set to_sql to 'replace'

---
 digital_land/expectations/operation.py           | 2 +-
 tests/integration/expectations/test_operation.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/digital_land/expectations/operation.py b/digital_land/expectations/operation.py
index 737979c0..b8905756 100644
--- a/digital_land/expectations/operation.py
+++ b/digital_land/expectations/operation.py
@@ -136,7 +136,7 @@ def count_deleted_entities(
     # get dataset specific active resource list
     params = urllib.parse.urlencode(
         {
-            "sql": f"""select *,o.entity from reporting_historic_endpoints rhe join organisation o on rhe.organisation=o.organisation
+            "sql": f"""select * from reporting_historic_endpoints rhe join organisation o on rhe.organisation=o.organisation
             where pipeline == '{db_name}' and o.entity='{organisation_entity}' and resource_end_date == "" group by endpoint""",
             "_size": "max",
         }
diff --git a/tests/integration/expectations/test_operation.py b/tests/integration/expectations/test_operation.py
index 7ea3dd46..aabb020d 100644
--- a/tests/integration/expectations/test_operation.py
+++ b/tests/integration/expectations/test_operation.py
@@ -152,11 +152,11 @@ def test_count_deleted_entities(dataset_path, mocker):
 
     with spatialite.connect(dataset_path) as conn:
         # load data into required tables
-        test_entity_data.to_sql("entity", conn, if_exists="append", index=False)
+        test_entity_data.to_sql("entity", conn, if_exists="replace", index=False)
         test_fact_resource_data.to_sql(
-            "fact_resource", conn, if_exists="append", index=False
+            "fact_resource", conn, if_exists="replace", index=False
         )
-        test_fact_data.to_sql("fact", conn, if_exists="append", index=False)
+        test_fact_data.to_sql("fact", conn, if_exists="replace", index=False)
 
         # run expectation
         passed, message, details = count_deleted_entities(