From eb2c878eddf6e53d39262911100b61abba87060f Mon Sep 17 00:00:00 2001
From: Rebecca Law
Date: Mon, 13 Aug 2018 11:33:19 +0100
Subject: [PATCH] Fix bug with deleting the S3 file.

Removed the duplicate method.
---
 app/aws/s3.py                                      | 11 -----------
 app/dao/notifications_dao.py                       |  6 +++---
 .../test_notification_dao_delete_notifications.py  | 10 +++++-----
 3 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/app/aws/s3.py b/app/aws/s3.py
index 299d7de76..a440745da 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -66,17 +66,6 @@ def get_s3_bucket_objects(bucket_name, subfolder='', older_than=7, limit_days=2)
     return all_objects_in_bucket
 
 
-def get_s3_object_by_prefix(bucket_name, prefix):
-    boto_client = client('s3', current_app.config['AWS_REGION'])
-    paginator = boto_client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(
-        Bucket=bucket_name,
-        Prefix=prefix
-    )
-
-    return page_iterator
-
-
 def filter_s3_bucket_objects_within_date_range(bucket_objects, older_than=7, limit_days=2):
     """
     S3 returns the Object['LastModified'] as an 'offset-aware' timestamp so the
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 7ecd86352..22fdb4257 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -22,7 +22,7 @@ from sqlalchemy.sql import functions
 from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES
 
 from app import db, create_uuid
-from app.aws.s3 import get_s3_object_by_prefix
+from app.aws.s3 import remove_s3_object, get_s3_bucket_objects
 from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE
 from app.utils import midnight_n_days_ago, escape_special_characters
 from app.errors import InvalidRequest
@@ -352,9 +352,9 @@ def _delete_letters_from_s3(query):
             crown="C" if letter.service.crown else "N",
             date=''
         ).upper()[:-5]
-        s3_objects = get_s3_object_by_prefix(bucket_name=bucket_name, prefix=prefix)
+        s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix)
         for s3_object in s3_objects:
-            s3_object.delete()
+            remove_s3_object(bucket_name, s3_object['Key'])
 
 
 @statsd(namespace="dao")
diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py
index 7d56ea35c..8284947ca 100644
--- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py
+++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py
@@ -34,7 +34,7 @@ def test_should_delete_notifications_by_type_after_seven_days(
     expected_email_count,
     expected_letter_count
 ):
-    mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     assert len(Notification.query.all()) == 0
     email_template, letter_template, sms_template = _create_templates(sample_service)
     # create one notification a day between 1st and 10th from 11:00 to 19:00 of each type
@@ -69,7 +69,7 @@ def test_should_delete_notifications_by_type_after_seven_days(
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 @freeze_time("2016-01-10 12:00:00.000000")
 def test_should_not_delete_notification_history(sample_service, notification_type, mocker):
-    mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     with freeze_time('2016-01-01 12:00'):
         email_template, letter_template, sms_template = _create_templates(sample_service)
         create_notification(template=email_template, status='permanent-failure')
@@ -84,7 +84,7 @@ def test_should_not_delete_notification_history(sample_service, notification_typ
 
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 def test_delete_notifications_for_days_of_retention(sample_service, notification_type, mocker):
-    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     service_with_default_data_retention = create_service(service_name='default data retention')
     email_template, letter_template, sms_template = _create_templates(sample_service)
     default_email_template, default_letter_template, default_sms_template = _create_templates(
@@ -114,7 +114,7 @@ def test_delete_notifications_for_days_of_retention(sample_service, notification
     assert len(Notification.query.filter_by(notification_type=notification_type).all()) == 1
     if notification_type == 'letter':
         mock_get_s3.assert_called_with(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
                                        subfolder="{}NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date()))
                                        )
         assert mock_get_s3.call_count == 2
     else:
@@ -123,7 +123,7 @@ def test_delete_notifications_for_days_of_retention(sample_service, notification
 
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 def test_delete_notifications_keep_data_for_days_of_retention_is_longer(sample_service, notification_type, mocker):
-    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     create_service_data_retention(service_id=sample_service.id, notification_type=notification_type,
                                   days_of_retention=15)
     email_template, letter_template, sms_template = _create_templates(sample_service)
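
Not part of the patch: a minimal sketch of the deletion flow the fix relies on, assuming get_s3_bucket_objects(bucket_name, subfolder=...) returns list_objects_v2-style dicts that each carry a 'Key', and that remove_s3_object(bucket_name, key) deletes a single object. The delete_letter_pdfs helper name is hypothetical and only illustrates the pattern now used by _delete_letters_from_s3.

    from flask import current_app

    from app.aws.s3 import get_s3_bucket_objects, remove_s3_object


    def delete_letter_pdfs(prefix):
        # Hypothetical helper: list every object under the letter's date/reference
        # prefix, then delete each key individually (the listing is assumed to yield
        # plain dicts, which have no .delete() method of their own).
        bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME']
        for s3_object in get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix):
            remove_s3_object(bucket_name, s3_object['Key'])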