From e5ac50b694b698a1384907eb8f8a6153a76aba35 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 26 Sep 2024 07:17:12 -0700
Subject: [PATCH] add test

---
 .ds.baseline             |  4 ++--
 app/aws/s3.py            | 13 +++++++++----
 app/config.py            |  2 +-
 tests/app/aws/test_s3.py | 16 ++++++++++++++++
 4 files changed, 28 insertions(+), 7 deletions(-)

diff --git a/.ds.baseline b/.ds.baseline
index 6ef3c9108..26b862646 100644
--- a/.ds.baseline
+++ b/.ds.baseline
@@ -209,7 +209,7 @@
         "filename": "tests/app/aws/test_s3.py",
         "hashed_secret": "67a74306b06d0c01624fe0d0249a570f4d093747",
         "is_verified": false,
-        "line_number": 27,
+        "line_number": 28,
         "is_secret": false
       }
     ],
@@ -384,5 +384,5 @@
       }
     ]
   },
-  "generated_at": "2024-09-10T18:12:39Z"
+  "generated_at": "2024-09-26T14:17:05Z"
 }
diff --git a/app/aws/s3.py b/app/aws/s3.py
index 3f2af6183..bbb06e602 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -116,6 +116,13 @@ def cleanup_old_s3_objects():
         )


+def get_job_id_from_s3_object_key(key):
+    object_arr = key.split("/")
+    job_id = object_arr[1]  # get the job_id
+    job_id = job_id.replace(".csv", "")  # we just want the job_id
+    return job_id
+
+
 def read_s3_file(bucket_name, object_key, s3res):
     """
     This method runs during the 'regenerate job cache' task.
@@ -132,10 +139,7 @@ def read_s3_file(bucket_name, object_key, s3res):
     in wait time, to this back end process.
     """
     try:
-
-        object_arr = object_key.split("/")
-        job_id = object_arr[1]  # get the job_id
-        job_id = job_id.replace(".csv", "")  # we just want the job_id
+        job_id = get_job_id_from_s3_object_key(object_key)
         if JOBS.get(job_id) is None:
             object = (
                 s3res.Object(bucket_name, object_key)
@@ -147,6 +151,7 @@ def read_s3_file(bucket_name, object_key, s3res):
             JOBS[job_id] = object
             JOBS[f"{job_id}_phones"] = extract_phones(object)
             JOBS[f"{job_id}_personalisation"] = extract_personalisation(object)
+
     except LookupError:
         # perhaps our key is not formatted as we expected. If so skip it.
         current_app.logger.exception("LookupError #notify-admin-1200")
diff --git a/app/config.py b/app/config.py
index 71fa4ed23..9a4412615 100644
--- a/app/config.py
+++ b/app/config.py
@@ -256,7 +256,7 @@ class Config(object):
         },
         "regenerate-job-cache": {
             "task": "regenerate-job-cache",
-            "schedule": crontab(minute="*/30"),
+            "schedule": crontab(minute="*/3"),
             "options": {"queue": QueueNames.PERIODIC},
         },
         "regenerate-job-cache-on-startup": {
diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py
index dcc1cbe44..17222d2f0 100644
--- a/tests/app/aws/test_s3.py
+++ b/tests/app/aws/test_s3.py
@@ -8,6 +8,7 @@ from app.aws.s3 import (
     cleanup_old_s3_objects,
     file_exists,
     get_job_from_s3,
+    get_job_id_from_s3_object_key,
     get_personalisation_from_s3,
     get_phone_number_from_s3,
     get_s3_file,
@@ -102,6 +103,21 @@ def test_get_phone_number_from_s3(
     assert phone_number == expected_phone_number


+@pytest.mark.parametrize(
+    "key, expected_job_id",
+    [
+        ("service-blahblahblah-notify/abcde.csv", "abcde"),
+        (
+            "service-x-notify/4c99f361-4ed7-49b1-bd6f-02fe0c807c53.csv",
+            "4c99f361-4ed7-49b1-bd6f-02fe0c807c53",
+        ),
+    ],
+)
+def test_get_job_id_from_s3_object_key(key, expected_job_id):
+    actual_job_id = get_job_id_from_s3_object_key(key)
+    assert actual_job_id == expected_job_id
+
+
 def mock_s3_get_object_slowdown(*args, **kwargs):
     error_response = {
         "Error": {