From 641d168370d1a3ee23998c16cef2bd5af66c4204 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Tue, 1 Oct 2024 12:58:31 -0700
Subject: [PATCH] fix properly

---
 app/aws/s3.py                     | 13 +++++++++++--
 app/delivery/send_to_providers.py | 28 ++--------------------------
 2 files changed, 13 insertions(+), 28 deletions(-)

diff --git a/app/aws/s3.py b/app/aws/s3.py
index 3f2115e5b..a6b9ba01f 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -366,7 +366,9 @@ def extract_phones(job):
 
 
 def extract_personalisation(job):
-    job = job[0].split("\r\n")
+    if isinstance(job, dict):
+        job = job[0]
+    job = job.split("\r\n")
     first_row = job[0]
     job.pop(0)
     first_row = first_row.split(",")
@@ -416,7 +418,14 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):
     # At the same time we don't want to store it in redis or the db
     # So this is a little recycling mechanism to reduce the number of downloads.
     job = job_cache.get(job_id)
-
+    if job is None:
+        current_app.logger.info(f"job {job_id} was not in the cache")
+        job = get_job_from_s3(service_id, job_id)
+        # Even if it is None, put it here to avoid KeyErrors
+        set_job_cache(job_cache, job_id, job)
+    else:
+        # skip expiration date from cache, we don't need it here
+        job = job[0]
     # If the job is None after our attempt to retrieve it from s3, it
     # probably means the job is old and has been deleted from s3, in
     # which case there is nothing we can do. It's unlikely to run into
diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py
index c0670cee9..745b46cab 100644
--- a/app/delivery/send_to_providers.py
+++ b/app/delivery/send_to_providers.py
@@ -13,11 +13,7 @@ from app import (
     notification_provider_clients,
     redis_store,
 )
-from app.aws.s3 import (
-    get_job_from_s3,
-    get_personalisation_from_s3,
-    get_phone_number_from_s3,
-)
+from app.aws.s3 import get_personalisation_from_s3, get_phone_number_from_s3
 from app.celery.test_key_tasks import send_email_response, send_sms_response
 from app.dao.email_branding_dao import dao_get_email_branding_by_id
 from app.dao.notifications_dao import dao_update_notification
@@ -47,27 +43,7 @@ def send_sms_to_provider(notification):
         notification.job_id,
         notification.job_row_number,
     )
-
-    # For one-off sends, they may not get into the cache
-    # by the time we get here, so do this slow direct-from-s3
-    # approach. It is okay to be slow, since one-offs have
-    # to be typed in by hand.
-    if personalisation == {}:
-        job = get_job_from_s3(notification.service_id, notification.job_id)
-        job = job.split("\r\n")
-        first_row = job[0]
-        job.pop(0)
-        first_row = first_row.split(",")
-        personalisation = {}
-        job_row = 0
-        for row in job:
-            row = row.split(",")
-            temp = dict(zip(first_row, row))
-            personalisation[job_row] = temp
-            job_row = job_row + 1
-        notification.personalisation = personalisation[notification.job_row_number]
-    else:
-        notification.personalisation = personalisation
+    notification.personalisation = personalisation
 
     service = SerialisedService.from_id(notification.service_id)
     message_id = None