Fix report download buttons showing as disabled (#2840)

* Fix lazy loading when checking whether a file exists

* Fixed linting errors

* Fixed for static scan
This commit is contained in:
Alex Janousek
2025-08-19 10:06:50 -04:00
committed by GitHub
parent 808aeabbe1
commit 74c1648c8d
4 changed files with 100 additions and 19 deletions

View File

@@ -5,8 +5,6 @@ from app import current_service, job_api_client
from app.enums import NotificationStatus, ServicePermission
from app.formatters import get_time_left
from app.main import main
from app.s3_client import check_s3_file_exists
from app.s3_client.s3_csv_client import get_csv_upload
from app.utils.pagination import (
generate_next_dict,
generate_pagination_pages,
@@ -16,37 +14,75 @@ from app.utils.pagination import (
from app.utils.user import user_has_permissions
def get_report_info(service_id, report_name, s3_config):
    """Return availability and human-readable size for one S3 report.

    Args:
        service_id: Service whose report key prefix is checked.
        report_name: Report file stem, e.g. "1-day-report".
        s3_config: Dict with "bucket", "access_key_id",
            "secret_access_key" and "region" keys — passed in explicitly
            so this function can run inside a gevent greenlet without
            needing the Flask application context.

    Returns:
        Dict of the form {"available": bool, "size": str | None}; "size"
        is formatted as B / KB / MB when the object exists.
    """
    try:
        # Imported lazily so the greenlet resolves the current module
        # attributes at call time (this also lets tests patch
        # app.s3_client.get_s3_object / check_s3_file_exists).
        from app.s3_client import check_s3_file_exists, get_s3_object
        from app.s3_client.s3_csv_client import NEW_FILE_LOCATION_STRUCTURE

        key = NEW_FILE_LOCATION_STRUCTURE.format(service_id, report_name)
        obj = get_s3_object(
            s3_config["bucket"],
            key,
            s3_config["access_key_id"],
            s3_config["secret_access_key"],
            s3_config["region"],
        )
        if check_s3_file_exists(obj):
            # check_s3_file_exists already called obj.load(), so the
            # object's metadata (content_length) is populated.
            size_bytes = obj.content_length
            if size_bytes < 1024:
                size_str = f"{size_bytes} B"
            elif size_bytes < 1024 * 1024:
                size_str = f"{size_bytes / 1024:.1f} KB"
            else:
                size_str = f"{size_bytes / (1024 * 1024):.1f} MB"
            return {"available": True, "size": size_str}
    except Exception:  # nosec B110 - best-effort check; a missing report is expected
        pass
    return {"available": False, "size": None}
def get_download_availability(service_id):
    """Check all four activity reports for *service_id* concurrently.

    Reads the CSV-upload S3 credentials from the Flask config up front,
    because the greenlets spawned below run outside the application
    context and must not touch ``current_app`` themselves.

    Args:
        service_id: Service whose reports are checked.

    Returns:
        Dict keyed "report_1_day" .. "report_7_day"; each value is
        {"available": bool, "size": str | None}.
    """
    from flask import current_app

    # Resolve S3 config before spawning greenlets (no app context there).
    bucket_cfg = current_app.config["CSV_UPLOAD_BUCKET"]
    s3_config = {
        "bucket": bucket_cfg["bucket"],
        "access_key_id": bucket_cfg["access_key_id"],
        "secret_access_key": bucket_cfg["secret_access_key"],
        "region": bucket_cfg["region"],
    }

    report_names = ["1-day-report", "3-day-report", "5-day-report", "7-day-report"]
    greenlets = [
        gevent.spawn(get_report_info, service_id, name, s3_config)
        for name in report_names
    ]
    gevent.joinall(greenlets)

    # A greenlet that died leaves value=None; normalize to "not available".
    unavailable = {"available": False, "size": None}
    results = [g.value if g.value else unavailable for g in greenlets]
    return {
        "report_1_day": results[0],
        "report_3_day": results[1],
        "report_5_day": results[2],
        "report_7_day": results[3],
    }

View File

def check_s3_file_exists(obj):
    """Return True iff the S3 object exists (HEAD request succeeds).

    ``obj.load()`` issues a HEAD request and, as a side effect, populates
    the object's metadata (e.g. ``content_length``) — callers rely on
    that after a True result.

    Args:
        obj: A boto3 S3 Object resource.

    Returns:
        bool: True when the object exists; False on 404/NoSuchKey or any
        unexpected error (fail closed).
    """
    try:
        obj.load()
        return True
    except botocore.exceptions.ClientError as client_error:
        error_code = client_error.response["Error"]["Code"]
        if error_code in ("404", "NoSuchKey"):
            # A missing object is an expected outcome, not an error.
            return False
        # Lazy %-formatting: the message is only built if the record is emitted.
        current_app.logger.error(
            "Error checking S3 file %s/%s: %s",
            obj.bucket_name,
            obj.key,
            client_error,
        )
        return False
    except Exception:
        # Fail closed: treat any unexpected failure as "not available".
        return False

View File

@@ -16,12 +16,16 @@ NEW_FILE_LOCATION_STRUCTURE = "{}-service-notify/{}.csv"
def get_csv_location(service_id, upload_id):
    """Return the S3 location and credentials for a CSV upload.

    Args:
        service_id: Service the upload belongs to.
        upload_id: Upload identifier used to build the object key.

    Returns:
        Tuple of (bucket, key, access_key_id, secret_access_key, region)
        read from the CSV_UPLOAD_BUCKET Flask config.
    """
    # Single config lookup instead of repeated current_app.config access.
    bucket_cfg = current_app.config["CSV_UPLOAD_BUCKET"]
    key = NEW_FILE_LOCATION_STRUCTURE.format(service_id, upload_id)
    return (
        bucket_cfg["bucket"],
        key,
        bucket_cfg["access_key_id"],
        bucket_cfg["secret_access_key"],
        bucket_cfg["region"],
    )

View File

@@ -0,0 +1,41 @@
from unittest.mock import Mock, patch
from app.main.views.activity import get_report_info
def test_report_metadata_loading_fix():
    """Size metadata is read correctly after check_s3_file_exists loads it."""
    mock_s3_obj = Mock()
    # After check_s3_file_exists calls load(), content_length is populated.
    mock_s3_obj.content_length = 86528  # 86528 / 1024 == 84.5 -> "84.5 KB"

    # get_report_info imports these names lazily at call time, so patching
    # the app.s3_client module attributes intercepts them.
    with patch("app.s3_client.get_s3_object", return_value=mock_s3_obj), patch(
        "app.s3_client.check_s3_file_exists", return_value=True
    ):
        s3_config = {
            "bucket": "test-bucket",
            "access_key_id": "test-key",
            "secret_access_key": "test-secret",  # pragma: allowlist secret
            "region": "us-east-1",
        }
        result = get_report_info("service-123", "1-day-report", s3_config)

    assert result == {"available": True, "size": "84.5 KB"}
def test_report_not_found():
    """A missing S3 object yields the 'not available' report info."""
    mock_s3_obj = Mock()

    # Patch the module attributes that get_report_info imports lazily.
    with patch("app.s3_client.get_s3_object", return_value=mock_s3_obj), patch(
        "app.s3_client.check_s3_file_exists", return_value=False
    ):
        s3_config = {
            "bucket": "test-bucket",
            "access_key_id": "test-key",
            "secret_access_key": "test-secret",  # pragma: allowlist secret
            "region": "us-east-1",
        }
        result = get_report_info("service-123", "1-day-report", s3_config)

    assert result == {"available": False, "size": None}