Mirror of https://github.com/GSA/notifications-api.git (synced 2025-12-15 17:52:26 -05:00)
Delete delete_dvla_response_files_older_than_seven_days task
This was not being used.
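The claim that nothing runs this task can be sanity-checked against the Celery beat schedule before deleting it. A minimal sketch of such a check, assuming the application's Celery instance (notify_celery) has been configured; beat_schedule is standard Celery configuration, and none of this snippet is part of the commit itself:

# Hypothetical pre-deletion check, not part of this commit.
from app import notify_celery


def task_is_scheduled(task_name):
    # conf.beat_schedule maps schedule entry names to dicts like
    # {'task': <registered task name>, 'schedule': ...}
    return any(
        entry.get('task') == task_name
        for entry in notify_celery.conf.beat_schedule.values()
    )


# "delete_dvla_response_files" is the name the removed decorator registered.
assert not task_is_scheduled('delete_dvla_response_files')
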
app/aws/s3.py

@@ -1,8 +1,5 @@
-from datetime import datetime, timedelta
-
 from flask import current_app

-import pytz
 from boto3 import client, resource
 import botocore

@@ -89,26 +86,6 @@ def get_s3_bucket_objects(bucket_name, subfolder=''):
     return all_objects_in_bucket


-def filter_s3_bucket_objects_within_date_range(bucket_objects, older_than=7, limit_days=2):
-    """
-    S3 returns the Object['LastModified'] as an 'offset-aware' timestamp so the
-    date range filter must take this into account.
-
-    Additionally an additional Object is returned by S3 corresponding to the
-    container directory. This is redundant and should be removed.
-
-    """
-    end_date = datetime.now(tz=pytz.utc) - timedelta(days=older_than)
-    start_date = end_date - timedelta(days=limit_days)
-    filtered_items = [item for item in bucket_objects if all([
-        not item['Key'].endswith('/'),
-        item['LastModified'] > start_date,
-        item['LastModified'] < end_date
-    ])]
-
-    return filtered_items
-
-
 def remove_s3_object(bucket_name, object_key):
     obj = get_s3_object(bucket_name, object_key)
     return obj.delete()
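The removed docstring's note about 'offset-aware' timestamps is the reason the deleted helper built its bounds with datetime.now(tz=pytz.utc) rather than datetime.utcnow(). A standalone illustration of the difference (not repository code; the values are made up):

from datetime import datetime, timedelta, timezone

# S3 returns LastModified with a timezone attached (offset-aware).
last_modified = datetime(2016, 1, 1, 11, 0, tzinfo=timezone.utc)

naive_cutoff = datetime.utcnow() - timedelta(days=7)              # no tzinfo
aware_cutoff = datetime.now(tz=timezone.utc) - timedelta(days=7)  # offset-aware

try:
    last_modified < naive_cutoff
except TypeError:
    # raises: can't compare offset-naive and offset-aware datetimes
    pass

print(last_modified < aware_cutoff)  # aware-to-aware comparison works
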
app/celery/nightly_tasks.py

@@ -214,33 +214,6 @@ def delete_inbound_sms():
         raise


-# TODO: remove me, i'm not being run by anything
-@notify_celery.task(name="delete_dvla_response_files")
-@statsd(namespace="tasks")
-def delete_dvla_response_files_older_than_seven_days():
-    try:
-        start = datetime.utcnow()
-        bucket_objects = s3.get_s3_bucket_objects(
-            current_app.config['DVLA_RESPONSE_BUCKET_NAME'],
-            'root/dispatch'
-        )
-        older_than_seven_days = s3.filter_s3_bucket_objects_within_date_range(bucket_objects)
-
-        for f in older_than_seven_days:
-            s3.remove_s3_object(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], f['Key'])
-
-        current_app.logger.info(
-            "Delete dvla response files started {} finished {} deleted {} files".format(
-                start,
-                datetime.utcnow(),
-                len(older_than_seven_days)
-            )
-        )
-    except SQLAlchemyError:
-        current_app.logger.exception("Failed to delete dvla response files")
-        raise
-
-
 @notify_celery.task(name="raise-alert-if-letter-notifications-still-sending")
 @cronitor("raise-alert-if-letter-notifications-still-sending")
 @statsd(namespace="tasks")
tests/app/aws/test_s3.py

@@ -8,7 +8,6 @@ from freezegun import freeze_time
 from app.aws.s3 import (
     get_s3_bucket_objects,
     get_s3_file,
-    filter_s3_bucket_objects_within_date_range,
     get_list_of_files_by_suffix,
 )
 from tests.app.conftest import datetime_in_past
@@ -65,70 +64,6 @@ def test_get_s3_bucket_objects_builds_objects_list_from_paginator(notify_api, mo
     assert set(bucket_objects[0].keys()) == set(['ETag', 'Key', 'LastModified'])


-@freeze_time("2016-01-01 11:00:00")
-def test_get_s3_bucket_objects_removes_redundant_root_object(notify_api, mocker):
-    AFTER_SEVEN_DAYS = datetime_in_past(days=8)
-    s3_objects_stub = [
-        single_s3_object_stub('bar/', AFTER_SEVEN_DAYS),
-        single_s3_object_stub('bar/foo.txt', AFTER_SEVEN_DAYS),
-    ]
-
-    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)
-
-    assert len(filtered_items) == 1
-
-    assert filtered_items[0]["Key"] == 'bar/foo.txt'
-    assert filtered_items[0]["LastModified"] == datetime_in_past(days=8)
-
-
-@freeze_time("2016-01-01 11:00:00")
-def test_filter_s3_bucket_objects_within_date_range_filters_by_date_range(notify_api, mocker):
-    START_DATE = datetime_in_past(days=9)
-    JUST_BEFORE_START_DATE = START_DATE - timedelta(seconds=1)
-    JUST_AFTER_START_DATE = START_DATE + timedelta(seconds=1)
-    END_DATE = datetime_in_past(days=7)
-    JUST_BEFORE_END_DATE = END_DATE - timedelta(seconds=1)
-    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)
-
-    s3_objects_stub = [
-        single_s3_object_stub('bar/', JUST_BEFORE_START_DATE),
-        single_s3_object_stub('bar/foo.txt', START_DATE),
-        single_s3_object_stub('bar/foo2.txt', JUST_AFTER_START_DATE),
-        single_s3_object_stub('bar/foo3.txt', JUST_BEFORE_END_DATE),
-        single_s3_object_stub('bar/foo4.txt', END_DATE),
-        single_s3_object_stub('bar/foo5.txt', JUST_AFTER_END_DATE),
-    ]
-
-    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)
-
-    assert len(filtered_items) == 2
-
-    assert filtered_items[0]["Key"] == 'bar/foo2.txt'
-    assert filtered_items[0]["LastModified"] == JUST_AFTER_START_DATE
-
-    assert filtered_items[1]["Key"] == 'bar/foo3.txt'
-    assert filtered_items[1]["LastModified"] == JUST_BEFORE_END_DATE
-
-
-@freeze_time("2016-01-01 11:00:00")
-def test_get_s3_bucket_objects_does_not_return_outside_of_date_range(notify_api, mocker):
-    START_DATE = datetime_in_past(days=9)
-    JUST_BEFORE_START_DATE = START_DATE - timedelta(seconds=1)
-    END_DATE = datetime_in_past(days=7)
-    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)
-
-    s3_objects_stub = [
-        single_s3_object_stub('bar/', JUST_BEFORE_START_DATE),
-        single_s3_object_stub('bar/foo1.txt', START_DATE),
-        single_s3_object_stub('bar/foo2.txt', END_DATE),
-        single_s3_object_stub('bar/foo3.txt', JUST_AFTER_END_DATE)
-    ]
-
-    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)
-
-    assert len(filtered_items) == 0
-
-
 @freeze_time("2018-01-11 00:00:00")
 @pytest.mark.parametrize('suffix_str, days_before, returned_no', [
     ('.ACK.txt', None, 1),
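The boundary values in the deleted tests follow from the removed helper's defaults (older_than=7, limit_days=2) and its strict comparisons: with time frozen at 2016-01-01 11:00, the window is the two days ending seven days earlier, exclusive at both ends. A standalone restatement of that arithmetic (it mirrors the removed logic rather than importing it):

from datetime import datetime, timedelta, timezone

now = datetime(2016, 1, 1, 11, 0, tzinfo=timezone.utc)  # the freeze_time instant
end_date = now - timedelta(days=7)                      # 2015-12-25 11:00 UTC
start_date = end_date - timedelta(days=2)               # 2015-12-23 11:00 UTC


def in_window(last_modified):
    # Strict inequalities: objects exactly on either boundary are excluded,
    # which is why only the "just after start" and "just before end" stubs survived.
    return start_date < last_modified < end_date


print(in_window(now - timedelta(days=8)))  # True: eight days old, inside the window
print(in_window(start_date))               # False: exactly on the start boundary
print(in_window(end_date))                 # False: exactly on the end boundary
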
tests/app/celery/test_nightly_tasks.py

@@ -9,7 +9,6 @@ from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient

 from app.celery import nightly_tasks
 from app.celery.nightly_tasks import (
-    delete_dvla_response_files_older_than_seven_days,
     delete_email_notifications_older_than_retention,
     delete_inbound_sms,
     delete_letter_notifications_older_than_retention,
@@ -33,7 +32,6 @@ from app.models import (
     SMS_TYPE,
     EMAIL_TYPE
 )
-from tests.app.aws.test_s3 import single_s3_object_stub
 from tests.app.db import (
     create_notification,
     create_service,
@@ -44,8 +42,6 @@ from tests.app.db import (
     create_ft_notification_status
 )

-from tests.app.conftest import datetime_in_past
-

 def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None):
     if subfolder == '2018-01-11/zips_sent':
@@ -290,52 +286,6 @@ def test_should_call_delete_inbound_sms(notify_api, mocker):
     assert nightly_tasks.delete_inbound_sms_older_than_retention.call_count == 1


-@freeze_time("2016-01-01 11:00:00")
-def test_delete_dvla_response_files_older_than_seven_days_removes_old_files(notify_api, mocker):
-    AFTER_SEVEN_DAYS = datetime_in_past(days=8)
-    single_page_s3_objects = [{
-        "Contents": [
-            single_s3_object_stub('bar/foo1.txt', AFTER_SEVEN_DAYS),
-            single_s3_object_stub('bar/foo2.txt', AFTER_SEVEN_DAYS),
-        ]
-    }]
-    mocker.patch(
-        'app.celery.nightly_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"]
-    )
-    remove_s3_mock = mocker.patch('app.celery.nightly_tasks.s3.remove_s3_object')
-
-    delete_dvla_response_files_older_than_seven_days()
-
-    remove_s3_mock.assert_has_calls([
-        call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][0]["Key"]),
-        call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][1]["Key"])
-    ])
-
-
-@freeze_time("2016-01-01 11:00:00")
-def test_delete_dvla_response_files_older_than_seven_days_does_not_remove_files(notify_api, mocker):
-    START_DATE = datetime_in_past(days=9)
-    JUST_BEFORE_START_DATE = datetime_in_past(days=9, seconds=1)
-    END_DATE = datetime_in_past(days=7)
-    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)
-
-    single_page_s3_objects = [{
-        "Contents": [
-            single_s3_object_stub('bar/foo1.txt', JUST_BEFORE_START_DATE),
-            single_s3_object_stub('bar/foo2.txt', START_DATE),
-            single_s3_object_stub('bar/foo3.txt', END_DATE),
-            single_s3_object_stub('bar/foo4.txt', JUST_AFTER_END_DATE),
-        ]
-    }]
-    mocker.patch(
-        'app.celery.nightly_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"]
-    )
-    remove_s3_mock = mocker.patch('app.celery.nightly_tasks.s3.remove_s3_object')
-    delete_dvla_response_files_older_than_seven_days()
-
-    remove_s3_mock.assert_not_called()
-
-
 def test_create_ticket_if_letter_notifications_still_sending(mocker):
     mock_get_letters = mocker.patch(
         "app.celery.nightly_tasks.get_letter_notifications_still_sending_when_they_shouldnt_be"
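The deleted tests drove the task entirely through mocks: get_s3_bucket_objects was patched to return a canned page of objects, and the assertions then checked whether remove_s3_object was called once per key or not called at all. A small self-contained refresher on those two mock assertions (the mock below merely stands in for s3.remove_s3_object; the bucket and key values are made up):

from unittest.mock import MagicMock, call

remove_s3_object = MagicMock()

# Simulate the removed task's loop over the filtered objects.
for key in ['bar/foo1.txt', 'bar/foo2.txt']:
    remove_s3_object('some-bucket', key)

remove_s3_object.assert_has_calls([
    call('some-bucket', 'bar/foo1.txt'),
    call('some-bucket', 'bar/foo2.txt'),
])

never_used = MagicMock()
never_used.assert_not_called()  # passes: the mock was never invoked

print('assertions passed')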