Merge pull request #1275 from GSA/main

8/16/2024 Production Deploy
This commit is contained in:
Carlo Costino
2024-08-16 10:34:21 -04:00
committed by GitHub
2 changed files with 15 additions and 6 deletions

View File

@@ -173,7 +173,7 @@ def get_job_and_metadata_from_s3(service_id, job_id):

 def get_job_from_s3(service_id, job_id):
     retries = 0
-    max_retries = 5
+    max_retries = 3
     backoff_factor = 1
     while retries < max_retries:
@@ -190,11 +190,20 @@ def get_job_from_s3(service_id, job_id):
                 sleep_time = backoff_factor * (2**retries)  # Exponential backoff
                 time.sleep(sleep_time)
                 continue
-        except Exception as e:
-            current_app.logger.error(f"Failed to get object from bucket {e}")
-            raise
+            else:
+                current_app.logger.error(
+                    f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket",
+                    exc_info=True,
+                )
+                return None
+        except Exception:
+            current_app.logger.error(
+                f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket",
+                exc_info=True,
+            )
+            return None
-    raise Exception("Failed to get object after 5 attempts")
+    raise Exception("Failed to get object after 3 attempts")


 def incr_jobs_cache_misses():

View File

@@ -102,7 +102,7 @@ def test_get_job_from_s3_exponential_backoff(mocker):
     mocker.patch("app.aws.s3.get_s3_object", side_effect=mock_s3_get_object_slowdown)
     with pytest.raises(Exception) as exc_info:
         get_job_from_s3("service_id", "job_id")
-    assert "Failed to get object after 5 attempts" in str(exc_info)
+    assert "Failed to get object after 3 attempts" in str(exc_info)


 @pytest.mark.parametrize(