Merge pull request #1901 from GSA/report_performance

fix race condition
This commit is contained in:
ccostino
2025-08-07 17:14:45 -04:00
committed by GitHub

View File

@@ -39,7 +39,7 @@ def set_job_cache(key, value):
def get_job_cache(key):
    """Return the cached entry for *key* from the in-memory job cache.

    The key is coerced to ``str`` so callers may pass UUIDs or other
    key-like objects; returns ``None`` on a cache miss.
    """
    return job_cache.get(str(key))
@@ -53,14 +53,15 @@ def len_job_cache():
def clean_cache():
    """Evict expired entries from the in-memory job cache.

    Collects every key whose stored ``expiry_time`` is in the past and
    deletes it from ``job_cache``. The whole scan-and-delete pass runs
    under ``job_cache_lock`` so a concurrent writer cannot mutate the
    dict mid-iteration (unlocked iteration over a dict being mutated by
    another thread raises ``RuntimeError``). The rendered block contained
    the scan twice — once outside the lock and once inside — which would
    double-collect matching keys and then ``KeyError`` on the second
    ``del``; this version performs a single locked pass.
    """
    current_time = time.time()
    keys_to_delete = []
    with job_cache_lock:
        for key, (_, expiry_time) in job_cache.items():
            if expiry_time < current_time:
                keys_to_delete.append(key)
        current_app.logger.debug(
            f"Deleting the following keys from the job_cache: {keys_to_delete}"
        )
        for key in keys_to_delete:
            del job_cache[key]
@@ -524,6 +525,7 @@ def extract_personalisation(job):
def get_phone_number_from_s3(service_id, job_id, job_row_number):
job = get_job_cache(job_id)
if job is None:
job = get_job_from_s3(service_id, job_id)
@@ -566,6 +568,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):
# At the same time we don't want to store it in redis or the db
# So this is a little recycling mechanism to reduce the number of downloads.
job = get_job_cache(job_id)
if job is None:
job = get_job_from_s3(service_id, job_id)
# Even if it is None, put it here to avoid KeyErrors