Mirror of https://github.com/GSA/notifications-api.git, synced 2025-12-21 07:51:13 -05:00
Fix bug with deleting the S3 file.
Removed the duplicate method.
@@ -66,17 +66,6 @@ def get_s3_bucket_objects(bucket_name, subfolder='', older_than=7, limit_days=2)
     return all_objects_in_bucket
 
 
-def get_s3_object_by_prefix(bucket_name, prefix):
-    boto_client = client('s3', current_app.config['AWS_REGION'])
-    paginator = boto_client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(
-        Bucket=bucket_name,
-        Prefix=prefix
-    )
-
-    return page_iterator
-
-
 def filter_s3_bucket_objects_within_date_range(bucket_objects, older_than=7, limit_days=2):
     """
     S3 returns the Object['LastModified'] as an 'offset-aware' timestamp so the
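For context on the removal above: the surviving helper in app/aws/s3.py already lists objects under a prefix, which is what made get_s3_object_by_prefix a duplicate. A rough sketch of what it presumably does, inferred from the signature and return variable visible in the hunk (the older_than/limit_days date filtering is left out, and the exact body is an assumption, not the repo's code):

# Sketch only -- inferred, not copied from app/aws/s3.py. It flattens every
# list_objects_v2 page into a single list of object dicts, so callers can read
# s3_object['Key'] and s3_object['LastModified'] directly.
from boto3 import client
from flask import current_app


def get_s3_bucket_objects(bucket_name, subfolder='', older_than=7, limit_days=2):
    boto_client = client('s3', current_app.config['AWS_REGION'])
    paginator = boto_client.get_paginator('list_objects_v2')

    all_objects_in_bucket = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=subfolder):
        # Each page is a plain dict; its objects sit under the 'Contents' key.
        all_objects_in_bucket.extend(page.get('Contents', []))

    return all_objects_in_bucket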
@@ -22,7 +22,7 @@ from sqlalchemy.sql import functions
 from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES
 
 from app import db, create_uuid
-from app.aws.s3 import get_s3_object_by_prefix
+from app.aws.s3 import remove_s3_object, get_s3_bucket_objects
 from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE
 from app.utils import midnight_n_days_ago, escape_special_characters
 from app.errors import InvalidRequest
@@ -352,9 +352,9 @@ def _delete_letters_from_s3(query):
                 crown="C" if letter.service.crown else "N",
                 date=''
             ).upper()[:-5]
-            s3_objects = get_s3_object_by_prefix(bucket_name=bucket_name, prefix=prefix)
+            s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix)
             for s3_object in s3_objects:
-                s3_object.delete()
+                remove_s3_object(bucket_name, s3_object['Key'])
 
 
 @statsd(namespace="dao")
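Note on the hunk above (in _delete_letters_from_s3): the removed helper returned the raw paginator, so the old loop iterated list_objects_v2 response pages, which are plain dicts with no .delete() method, rather than S3 objects; that appears to be the deletion bug the commit message refers to. The replacement iterates the flattened object dicts and deletes each one by key. remove_s3_object itself is not shown in this diff; a hypothetical sketch of the thin wrapper it presumably is:

# Hypothetical sketch -- the real remove_s3_object lives in app/aws/s3.py and
# is not part of this diff. It deletes a single object identified by bucket
# name and key, matching how the fixed loop calls it.
from boto3 import resource


def remove_s3_object(bucket_name, object_key):
    obj = resource('s3').Object(bucket_name, object_key)
    return obj.delete()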
@@ -34,7 +34,7 @@ def test_should_delete_notifications_by_type_after_seven_days(
         expected_email_count,
         expected_letter_count
 ):
-    mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     assert len(Notification.query.all()) == 0
     email_template, letter_template, sms_template = _create_templates(sample_service)
     # create one notification a day between 1st and 10th from 11:00 to 19:00 of each type
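A note on the patch targets renamed in this and the following test hunks: mocker.patch must target the name where it is looked up, and the DAO imports the helper with `from app.aws.s3 import ...`, so the reference lives in the app.dao.notifications_dao namespace; renaming the import therefore renames every patch target as well. A minimal sketch of the pattern (illustrative, not taken from the repo, and it assumes the application package is importable):

# Illustrative only: patch the reference the DAO module imported, not the
# defining module, so the code under test sees the mock.
from unittest.mock import patch


def demo_patch_target():
    with patch("app.dao.notifications_dao.get_s3_bucket_objects", return_value=[]) as mock_get_s3:
        # ... call the DAO code that deletes letters here ...
        assert mock_get_s3.call_count == 0  # no call yet in this bare sketch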
@@ -69,7 +69,7 @@ def test_should_delete_notifications_by_type_after_seven_days(
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 @freeze_time("2016-01-10 12:00:00.000000")
 def test_should_not_delete_notification_history(sample_service, notification_type, mocker):
-    mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     with freeze_time('2016-01-01 12:00'):
         email_template, letter_template, sms_template = _create_templates(sample_service)
         create_notification(template=email_template, status='permanent-failure')
@@ -84,7 +84,7 @@ def test_should_not_delete_notification_history(sample_service, notification_typ
 
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 def test_delete_notifications_for_days_of_retention(sample_service, notification_type, mocker):
-    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     service_with_default_data_retention = create_service(service_name='default data retention')
     email_template, letter_template, sms_template = _create_templates(sample_service)
     default_email_template, default_letter_template, default_sms_template = _create_templates(
@@ -114,7 +114,7 @@ def test_delete_notifications_for_days_of_retention(sample_service, notification
     assert len(Notification.query.filter_by(notification_type=notification_type).all()) == 1
     if notification_type == 'letter':
         mock_get_s3.assert_called_with(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
-                                       prefix="{}NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date()))
+                                       subfolder="{}NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date()))
                                        )
         assert mock_get_s3.call_count == 2
     else:
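The keyword change above follows directly from the call-site change: MagicMock.assert_called_with compares keyword arguments by name, so once _delete_letters_from_s3 calls the helper with subfolder= instead of prefix=, the assertion must use the same keyword or it fails even with identical values. A small self-contained illustration (hypothetical values, not from the repo):

# Illustrative only: assert_called_with is keyword-name sensitive.
from unittest.mock import MagicMock

mock_get_s3 = MagicMock()
mock_get_s3(bucket_name="letters-pdf", subfolder="2016-01-10/NOTIFY.REF")

# Passes: keyword names and values match the recorded call exactly.
mock_get_s3.assert_called_with(bucket_name="letters-pdf", subfolder="2016-01-10/NOTIFY.REF")

# Would raise AssertionError: same values, but 'prefix' is the wrong keyword name.
# mock_get_s3.assert_called_with(bucket_name="letters-pdf", prefix="2016-01-10/NOTIFY.REF")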
@@ -123,7 +123,7 @@ def test_delete_notifications_for_days_of_retention(sample_service, notification
 
 @pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
 def test_delete_notifications_keep_data_for_days_of_retention_is_longer(sample_service, notification_type, mocker):
-    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_object_by_prefix")
+    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     create_service_data_retention(service_id=sample_service.id, notification_type=notification_type,
                                   days_of_retention=15)
     email_template, letter_template, sms_template = _create_templates(sample_service)