Mirror of https://github.com/GSA/notifications-admin.git (synced 2026-05-09 02:19:31 -04:00).
Commit: add methods for asynchronous report generation.
This commit is contained in:
@@ -10,10 +10,15 @@ from flask import (
|
||||
stream_with_context,
|
||||
url_for,
|
||||
)
|
||||
from flask_login import current_user
|
||||
|
||||
from app import current_service, job_api_client, notification_api_client
|
||||
from app.main import main
|
||||
from app.notify_client.api_key_api_client import KEY_TYPE_TEST
|
||||
from app.s3_client.s3_csv_client import (
|
||||
delete_report,
|
||||
report_upload,
|
||||
)
|
||||
from app.utils import (
|
||||
DELIVERED_STATUSES,
|
||||
FAILURE_STATUSES,
|
||||
@@ -144,19 +149,38 @@ def download_notifications_csv(service_id):
|
||||
file_time = datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
file_time = f"{file_time} {get_user_preferred_timezone()}"
|
||||
|
||||
csv = generate_notifications_csv(
|
||||
service_id=service_id,
|
||||
job_id=None,
|
||||
status=filter_args.get("status"),
|
||||
page=request.args.get("page", 1),
|
||||
page_size=10000,
|
||||
format_for_csv=True,
|
||||
template_type=filter_args.get("message_type"),
|
||||
limit_days=service_data_retention_days,
|
||||
)
|
||||
|
||||
# START asynchronous reporting block
|
||||
csv_file = "".join(csv)
|
||||
file_location = f"reports/{service_id}/{current_user.id}/{service_data_retention_days}/report.csv"
|
||||
|
||||
# TODO these are some capabilities we will probably need when
|
||||
# report generation becomes asynchronous.
|
||||
|
||||
# old_content = report_download(file_location)
|
||||
# current_app.logger.info(f"OLD CONTENT IS {old_content}")
|
||||
# reports = get_downloadable_reports(current_user.id, service_id)
|
||||
|
||||
# TODO these are to support asynchronous report generation.
|
||||
# Leaving them commented in so they get exercised.
|
||||
delete_report(file_location)
|
||||
report_upload(file_location, csv_file)
|
||||
# END asynchronous reporting block
|
||||
|
||||
# TODO eventually we want to remove this, when reports become fully asynchronous
|
||||
# The UI should be retrieving the report elsewhere via the download_report() method call
|
||||
return Response(
|
||||
stream_with_context(
|
||||
generate_notifications_csv(
|
||||
service_id=service_id,
|
||||
job_id=None,
|
||||
status=filter_args.get("status"),
|
||||
page=request.args.get("page", 1),
|
||||
page_size=10000,
|
||||
format_for_csv=True,
|
||||
template_type=filter_args.get("message_type"),
|
||||
limit_days=service_data_retention_days,
|
||||
)
|
||||
),
|
||||
stream_with_context(csv),
|
||||
mimetype="text/csv",
|
||||
headers={
|
||||
"Content-Disposition": 'inline; filename="{} - {} - {} report.csv"'.format(
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import os
|
||||
import uuid
|
||||
|
||||
import boto3
|
||||
from flask import current_app
|
||||
|
||||
from app.s3_client import (
|
||||
AWS_CLIENT_CONFIG,
|
||||
get_s3_contents,
|
||||
get_s3_metadata,
|
||||
get_s3_object,
|
||||
@@ -63,3 +65,70 @@ def set_metadata_on_csv_upload(service_id, upload_id, **kwargs):
|
||||
|
||||
def get_csv_metadata(service_id, upload_id):
|
||||
return get_s3_metadata(get_csv_upload(service_id, upload_id))
|
||||
|
||||
|
||||
def report_upload(file_location, report_content):
    """Upload a generated report to the CSV upload S3 bucket.

    Args:
        file_location: S3 key under which the report is stored
            (e.g. reports/<service_id>/<user_id>/.../report.csv).
        report_content: Full report contents (CSV text) to upload.
    """
    # Read the bucket config once instead of four separate lookups.
    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]

    utils_s3upload(
        filedata=report_content,
        region=bucket_config["region"],
        bucket_name=bucket_config["bucket"],
        file_location=file_location,
        access_key=bucket_config["access_key_id"],
        secret_key=bucket_config["secret_access_key"],
    )
    # Fixed typo in the original log message ("Succcessfully").
    current_app.logger.info(f"Successfully uploaded report to {file_location}")
|
||||
def report_download(file_location):
    """Fetch a previously generated report from the CSV upload S3 bucket.

    Args:
        file_location: S3 key of the report to download.

    Returns:
        The report contents as returned by get_s3_contents.
    """
    current_app.logger.info(f"Downloading report from {file_location}")

    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]
    s3_object = get_s3_object(
        bucket_config["bucket"],
        file_location,
        bucket_config["access_key_id"],
        bucket_config["secret_access_key"],
        bucket_config["region"],
    )
    return get_s3_contents(s3_object)
||||
|
||||
def delete_report(file_location):
    """Delete a previously generated report from the CSV upload S3 bucket.

    Args:
        file_location: S3 key of the report to delete.

    Returns:
        The result of the S3 delete call, or None when the object
        could not be retrieved.
    """
    current_app.logger.info(f"Deleting report from {file_location}")

    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]
    s3_object = get_s3_object(
        bucket_config["bucket"],
        file_location,
        bucket_config["access_key_id"],
        bucket_config["secret_access_key"],
        bucket_config["region"],
    )
    if s3_object is None:
        return None
    return s3_object.delete()
||||
|
||||
def get_downloadable_reports(user_id, service_id):
    """List the S3 keys of all reports generated for a user on a service.

    Args:
        user_id: ID of the user the reports were generated for.
        service_id: ID of the service the reports belong to.

    Returns:
        A list (possibly empty) of S3 object keys under the
        reports/<service_id>/<user_id>/ prefix.
    """
    prefix = f"reports/{service_id}/{user_id}/"
    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]

    session = boto3.Session(
        aws_access_key_id=bucket_config["access_key_id"],
        aws_secret_access_key=bucket_config["secret_access_key"],
        region_name=bucket_config["region"],
    )
    s3 = session.client(
        "s3",
        config=AWS_CLIENT_CONFIG,
    )

    # list_objects_v2 returns at most 1000 keys per call; the original
    # single call silently truncated larger listings. Paginate so every
    # report is included.
    paginator = s3.get_paginator("list_objects_v2")
    object_keys = []
    for page in paginator.paginate(Bucket=bucket_config["bucket"], Prefix=prefix):
        object_keys.extend(obj["Key"] for obj in page.get("Contents", []))
    return object_keys
|
||||
|
||||
Reference in New Issue
Block a user