From 5ce98a6e8e694c211dd2c36f60260b904cc99e24 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 6 Aug 2025 11:41:14 -0700
Subject: [PATCH] try again

---
 app/aws/s3.py             | 42 +++++++++++++++++++++++++++
 app/celery/tasks.py       |  4 +--
 app/config.py             |  2 +-
 notifications_utils/s3.py | 60 ++++++---------------------------------
 4 files changed, 52 insertions(+), 56 deletions(-)

diff --git a/app/aws/s3.py b/app/aws/s3.py
index 8ca10676e..99882dd45 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -2,6 +2,7 @@ import csv
 import datetime
 import re
 import time
+import urllib
 from io import StringIO
 
 import botocore
@@ -613,3 +614,44 @@ def remove_csv_object(object_key):
         current_app.config["CSV_UPLOAD_BUCKET"]["region"],
     )
     return obj.delete()
+
+
+def s3upload(
+    filedata,
+    region,
+    bucket_name,
+    file_location,
+    content_type="binary/octet-stream",
+    tags=None,
+    metadata=None,
+):
+    _s3 = get_s3_resource()
+
+    key = _s3.Object(bucket_name, file_location)
+
+    put_args = {
+        "Body": filedata,
+        "ServerSideEncryption": "AES256",
+        "ContentType": content_type,
+    }
+
+    if tags:
+        tags = urllib.parse.urlencode(tags)
+        put_args["Tagging"] = tags
+
+    if metadata:
+        put_args["Metadata"] = metadata
+
+    try:
+        current_app.logger.info(hilite(f"Going to try to upload this {key}"))
+        key.put(**put_args)
+    except botocore.exceptions.NoCredentialsError as e:
+        current_app.logger.exception(
+            f"Unable to upload {key} to S3 bucket because of {e}"
+        )
+        raise e
+    except botocore.exceptions.ClientError as e:
+        current_app.logger.exception(
+            f"Unable to upload {key} to S3 bucket because of {e}"
+        )
+        raise e
diff --git a/app/celery/tasks.py b/app/celery/tasks.py
index 4cc5fd923..9412d80ab 100644
--- a/app/celery/tasks.py
+++ b/app/celery/tasks.py
@@ -36,7 +36,6 @@ from app.serialised_models import SerialisedService, SerialisedTemplate
 from app.service.utils import service_allowed_to_send_to
 from app.utils import DATETIME_FORMAT, hilite, utc_now
 from notifications_utils.recipients import RecipientCSV
-from notifications_utils.s3 import s3upload
 
 
 @notify_celery.task(name="process-job")
@@ -640,7 +639,7 @@ def _generate_notifications_report(service_id, report_id, limit_days):
 
     # Delete yesterday's version of this report
     s3.delete_s3_object(file_location)
-    s3upload(
+    s3.s3upload(
         filedata=csv_bytes,
         region=region,
         bucket_name=bucket_name,
@@ -654,7 +653,6 @@ def generate_notification_reports_task():
     services = dao_fetch_all_services(only_active=True)
 
     for service in services:
-        current_app.logger.debug(hilite("INVOKE APPLY_ASYNC"))
         limit_days = [1, 3, 5, 7]
 
         for limit_day in limit_days:
diff --git a/app/config.py b/app/config.py
index 16267c63e..6e3907fd0 100644
--- a/app/config.py
+++ b/app/config.py
@@ -289,7 +289,7 @@ class Config(object):
         },
         "generate-notifications-reports": {
             "task": "generate-notifications-reports",
-            "schedule": crontab(hour=1, minute=0),
+            "schedule": crontab(minute="*/2"),
             "options": {"queue": QueueNames.PERIODIC},
         },
         "regenerate-job-cache-on-startup": {
diff --git a/notifications_utils/s3.py b/notifications_utils/s3.py
index c39d3e3ea..eafd3857e 100644
--- a/notifications_utils/s3.py
+++ b/notifications_utils/s3.py
@@ -1,12 +1,8 @@
-import urllib
-
 import botocore
 from boto3 import Session
 from botocore.config import Config
 from flask import current_app
 
-from app.config import _s3_credentials_from_env
-
 AWS_CLIENT_CONFIG = Config(
     # This config is required to enable S3 to connect to FIPS-enabled
     # endpoints. See https://aws.amazon.com/compliance/fips/ for more
@@ -21,56 +17,16 @@ default_regions = "us-gov-west-1"
 
 
 def get_s3_resource():
-
-    credentials = _s3_credentials_from_env("CSV")
+    access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
+    secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
+    region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
     session = Session(
-        aws_access_key_id=credentials["access_key_id"],
-        aws_secret_access_key=credentials["secret_access_key"],
-        region_name=credentials["region"],
+        aws_access_key_id=access_key,
+        aws_secret_access_key=secret_key,
+        region_name=region,
     )
-    noti_s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
-    return noti_s3_resource
-
-
-def s3upload(
-    filedata,
-    region,
-    bucket_name,
-    file_location,
-    content_type="binary/octet-stream",
-    tags=None,
-    metadata=None,
-):
-    _s3 = get_s3_resource()
-
-    key = _s3.Object(bucket_name, file_location)
-
-    put_args = {
-        "Body": filedata,
-        "ServerSideEncryption": "AES256",
-        "ContentType": content_type,
-    }
-
-    if tags:
-        tags = urllib.parse.urlencode(tags)
-        put_args["Tagging"] = tags
-
-    if metadata:
-        metadata = put_args["Metadata"] = metadata
-
-    try:
-        current_app.logger.info(f"Going to try to upload this {key}")
-        key.put(**put_args)
-    except botocore.exceptions.NoCredentialsError as e:
-        current_app.logger.exception(
-            f"Unable to upload {key} to S3 bucket because of {e}"
-        )
-        raise e
-    except botocore.exceptions.ClientError as e:
-        current_app.logger.exception(
-            f"Unable to upload {key}to S3 bucket because of {e}"
-        )
-        raise e
 
 
 class S3ObjectNotFound(botocore.exceptions.ClientError):