Skip to content

Commit 1be07dc

Browse files
authored
Merge pull request #189 from OpenUpSA/cleanup-resubmit
Enhance isolate deletion process to remove entries from Elasticsearch…
2 parents 5271f81 + daba4d1 commit 1be07dc

File tree

1 file changed

+17
-1
lines changed

1 file changed

+17
-1
lines changed

app.py

Lines changed: 17 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2390,14 +2390,30 @@ def post(self, project_id, submission_id):
23902390

23912391
# Delete all existing isolates for this submission first (clean slate)
23922392
with get_db_cursor() as cursor:
2393+
# First, get all isolate IDs to delete from Elasticsearch
2394+
cursor.execute("""
2395+
SELECT id FROM isolates
2396+
WHERE submission_id = %s
2397+
""", (submission_id,))
2398+
2399+
isolates_to_delete = cursor.fetchall()
2400+
2401+
# Delete from Elasticsearch
2402+
for isolate in isolates_to_delete:
2403+
try:
2404+
delete_from_elastic(isolate['id'])
2405+
except Exception as es_error:
2406+
logger.warning(f"Failed to delete isolate {isolate['id']} from Elasticsearch: {str(es_error)}")
2407+
2408+
# Now delete from database
23932409
cursor.execute("""
23942410
DELETE FROM isolates
23952411
WHERE submission_id = %s
23962412
""", (submission_id,))
23972413

23982414
deleted_count = cursor.rowcount
23992415
if deleted_count > 0:
2400-
print(f"Deleted {deleted_count} existing isolates for submission {submission_id}")
2416+
print(f"Deleted {deleted_count} existing isolates for submission {submission_id} from database and Elasticsearch")
24012417

24022418
# Insert all rows fresh from the TSV, checking for duplicate isolate_ids
24032419
for row_index, row in enumerate(tsv_json):

0 commit comments

Comments (0)