Mirror of https://github.com/GSA/notifications-api.git
Merge pull request #1032 from alphagov/imdad-feat-remove-dvla-response-files
Remove DVLA response files older than 7 days
@@ -1,8 +1,25 @@
from unittest.mock import call
from datetime import datetime, timedelta

from flask import current_app

from app.aws.s3 import get_s3_file, remove_transformed_dvla_file
from freezegun import freeze_time

from app.aws.s3 import (
    get_s3_bucket_objects,
    get_s3_file,
    filter_s3_bucket_objects_within_date_range,
    remove_transformed_dvla_file
)
from tests.app.conftest import datetime_in_past


def single_s3_object_stub(key='foo', last_modified=datetime.utcnow()):
    return {
        'ETag': '"d41d8cd98f00b204e9800998ecf8427e"',
        'Key': key,
        'LastModified': last_modified
    }


def test_get_s3_file_makes_correct_call(notify_api, mocker):
@@ -25,3 +42,100 @@ def test_remove_transformed_dvla_file_makes_correct_call(notify_api, mocker):
        call(current_app.config['DVLA_UPLOAD_BUCKET_NAME'], '{}-dvla-job.text'.format(fake_uuid)),
        call().delete()
    ])


def test_get_s3_bucket_objects_make_correct_pagination_call(notify_api, mocker):
    paginator_mock = mocker.patch('app.aws.s3.client')

    get_s3_bucket_objects('foo-bucket', subfolder='bar')

    paginator_mock.assert_has_calls([
        call().get_paginator().paginate(Bucket='foo-bucket', Prefix='bar')
    ])


def test_get_s3_bucket_objects_builds_objects_list_from_paginator(notify_api, mocker):
    AFTER_SEVEN_DAYS = datetime_in_past(days=8)
    paginator_mock = mocker.patch('app.aws.s3.client')
    multiple_pages_s3_object = [
        {
            "Contents": [
                single_s3_object_stub('bar/foo.txt', AFTER_SEVEN_DAYS),
            ]
        },
        {
            "Contents": [
                single_s3_object_stub('bar/foo1.txt', AFTER_SEVEN_DAYS),
            ]
        }
    ]
    paginator_mock.return_value.get_paginator.return_value.paginate.return_value = multiple_pages_s3_object

    bucket_objects = get_s3_bucket_objects('foo-bucket', subfolder='bar')

    assert len(bucket_objects) == 2
    assert set(bucket_objects[0].keys()) == set(['ETag', 'Key', 'LastModified'])


@freeze_time("2016-01-01 11:00:00")
def test_get_s3_bucket_objects_removes_redundant_root_object(notify_api, mocker):
    AFTER_SEVEN_DAYS = datetime_in_past(days=8)
    s3_objects_stub = [
        single_s3_object_stub('bar/', AFTER_SEVEN_DAYS),
        single_s3_object_stub('bar/foo.txt', AFTER_SEVEN_DAYS),
    ]

    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)

    assert len(filtered_items) == 1

    assert filtered_items[0]["Key"] == 'bar/foo.txt'
    assert filtered_items[0]["LastModified"] == datetime_in_past(days=8)


@freeze_time("2016-01-01 11:00:00")
def test_filter_s3_bucket_objects_within_date_range_filters_by_date_range(notify_api, mocker):
    START_DATE = datetime_in_past(days=9)
    JUST_BEFORE_START_DATE = START_DATE - timedelta(seconds=1)
    JUST_AFTER_START_DATE = START_DATE + timedelta(seconds=1)
    END_DATE = datetime_in_past(days=7)
    JUST_BEFORE_END_DATE = END_DATE - timedelta(seconds=1)
    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)

    s3_objects_stub = [
        single_s3_object_stub('bar/', JUST_BEFORE_START_DATE),
        single_s3_object_stub('bar/foo.txt', START_DATE),
        single_s3_object_stub('bar/foo2.txt', JUST_AFTER_START_DATE),
        single_s3_object_stub('bar/foo3.txt', JUST_BEFORE_END_DATE),
        single_s3_object_stub('bar/foo4.txt', END_DATE),
        single_s3_object_stub('bar/foo5.txt', JUST_AFTER_END_DATE),
    ]

    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)

    assert len(filtered_items) == 2

    assert filtered_items[0]["Key"] == 'bar/foo2.txt'
    assert filtered_items[0]["LastModified"] == JUST_AFTER_START_DATE

    assert filtered_items[1]["Key"] == 'bar/foo3.txt'
    assert filtered_items[1]["LastModified"] == JUST_BEFORE_END_DATE


@freeze_time("2016-01-01 11:00:00")
def test_get_s3_bucket_objects_does_not_return_outside_of_date_range(notify_api, mocker):
    START_DATE = datetime_in_past(days=9)
    JUST_BEFORE_START_DATE = START_DATE - timedelta(seconds=1)
    END_DATE = datetime_in_past(days=7)
    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)

    s3_objects_stub = [
        single_s3_object_stub('bar/', JUST_BEFORE_START_DATE),
        single_s3_object_stub('bar/foo1.txt', START_DATE),
        single_s3_object_stub('bar/foo2.txt', END_DATE),
        single_s3_object_stub('bar/foo3.txt', JUST_AFTER_END_DATE)
    ]

    filtered_items = filter_s3_bucket_objects_within_date_range(s3_objects_stub)

    assert len(filtered_items) == 0
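Taken together, the tests above pin down the behaviour of the two new helpers in app.aws.s3: get_s3_bucket_objects pages through a bucket prefix and flattens every page's "Contents" into one list, while filter_s3_bucket_objects_within_date_range keeps only objects last modified strictly between nine and seven days ago and drops the folder placeholder key (e.g. 'bar/'). The sketch below is an illustration inferred from those tests, not the merged implementation; the boto3 client setup, the 'list_objects_v2' paginator name, and the key-based folder check are assumptions.

# Illustrative sketch only -- inferred from the tests above, not necessarily the merged code.
from datetime import datetime, timedelta

import pytz
from boto3 import client  # assumed to be what the tests patch as 'app.aws.s3.client'


def get_s3_bucket_objects(bucket_name, subfolder=''):
    # Page through the bucket prefix and flatten every page's 'Contents' into one list.
    paginator = client('s3').get_paginator('list_objects_v2')
    bucket_objects = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=subfolder):
        bucket_objects.extend(page.get('Contents', []))
    return bucket_objects


def filter_s3_bucket_objects_within_date_range(bucket_objects):
    # Keep objects last modified strictly inside the (nine days ago, seven days ago) window,
    # and drop the folder placeholder object itself (key ending in '/').
    now = datetime.now(tz=pytz.utc)
    start_date = now - timedelta(days=9)
    end_date = now - timedelta(days=7)

    return [
        obj for obj in bucket_objects
        if not obj['Key'].endswith('/')
        and start_date < obj['LastModified'] < end_date
    ]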
@@ -9,6 +9,7 @@ from freezegun import freeze_time

from app.celery import scheduled_tasks
from app.celery.scheduled_tasks import (
    delete_dvla_response_files_older_than_seven_days,
    delete_email_notifications_older_than_seven_days,
    delete_inbound_sms_older_than_seven_days,
    delete_invitations,

@@ -42,7 +43,10 @@ from tests.app.db import create_notification, create_service, create_template, c
from tests.app.conftest import (
    sample_job as create_sample_job,
    sample_notification_history as create_notification_history,
    create_custom_template)
    create_custom_template,
    datetime_in_past
)
from tests.app.aws.test_s3 import single_s3_object_stub
from tests.conftest import set_config_values


@@ -71,14 +75,13 @@ def _create_slow_delivery_notification(provider='mmg'):
    )


@pytest.fixture(scope='function')
def prepare_current_provider(restore_provider_details):
    initial_provider = get_current_provider('sms')
    initial_provider.updated_at = datetime.utcnow() - timedelta(minutes=30)
    dao_update_provider_details(initial_provider)


@pytest.mark.skip(reason="This doesn't actually test the celery task wraps the function")
def test_should_have_decorated_tasks_functions():
    """
    TODO: This test needs to be reviewed as this doesn't actually
    test that the celery task is wrapping the function. We're also
    running similar tests elsewhere which also need review.
    """
    assert delete_verify_codes.__wrapped__.__name__ == 'delete_verify_codes'
    assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == \
        'delete_notifications_created_more_than_a_week_ago_by_type'

@@ -93,6 +96,15 @@ def test_should_have_decorated_tasks_functions():
        'delete_inbound_sms_older_than_seven_days'
    assert remove_transformed_dvla_files.__wrapped__.__name__ == \
        'remove_transformed_dvla_files'
    assert delete_dvla_response_files_older_than_seven_days.__wrapped__.__name__ == \
        'delete_dvla_response_files_older_than_seven_days'


@pytest.fixture(scope='function')
def prepare_current_provider(restore_provider_details):
    initial_provider = get_current_provider('sms')
    initial_provider.updated_at = datetime.utcnow() - timedelta(minutes=30)
    dao_update_provider_details(initial_provider)


def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker):

@@ -548,3 +560,49 @@ def test_remove_dvla_transformed_files_does_not_remove_files(mocker, sample_serv
    remove_transformed_dvla_files()

    s3.remove_transformed_dvla_file.assert_has_calls([])


@freeze_time("2016-01-01 11:00:00")
def test_delete_dvla_response_files_older_than_seven_days_removes_old_files(notify_api, mocker):
    AFTER_SEVEN_DAYS = datetime_in_past(days=8)
    single_page_s3_objects = [{
        "Contents": [
            single_s3_object_stub('bar/foo1.txt', AFTER_SEVEN_DAYS),
            single_s3_object_stub('bar/foo2.txt', AFTER_SEVEN_DAYS),
        ]
    }]
    mocker.patch(
        'app.celery.scheduled_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"]
    )
    remove_s3_mock = mocker.patch('app.celery.scheduled_tasks.s3.remove_s3_object')

    delete_dvla_response_files_older_than_seven_days()

    remove_s3_mock.assert_has_calls([
        call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][0]["Key"]),
        call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][1]["Key"])
    ])


@freeze_time("2016-01-01 11:00:00")
def test_delete_dvla_response_files_older_than_seven_days_does_not_remove_files(notify_api, mocker):
    START_DATE = datetime_in_past(days=9)
    JUST_BEFORE_START_DATE = datetime_in_past(days=9, seconds=1)
    END_DATE = datetime_in_past(days=7)
    JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1)

    single_page_s3_objects = [{
        "Contents": [
            single_s3_object_stub('bar/foo1.txt', JUST_BEFORE_START_DATE),
            single_s3_object_stub('bar/foo2.txt', START_DATE),
            single_s3_object_stub('bar/foo3.txt', END_DATE),
            single_s3_object_stub('bar/foo4.txt', JUST_AFTER_END_DATE),
        ]
    }]
    mocker.patch(
        'app.celery.scheduled_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"]
    )
    remove_s3_mock = mocker.patch('app.celery.scheduled_tasks.s3.remove_s3_object')
    delete_dvla_response_files_older_than_seven_days()

    remove_s3_mock.assert_not_called()
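The last two tests describe the new scheduled task end to end: it asks app.aws.s3 for the objects in the DVLA response bucket, runs them through the date filter, and calls remove_s3_object for each key inside the window, so nothing is deleted when every object falls outside it. A minimal sketch of a task with that shape follows; the celery task decorator (omitted here), the import path for the s3 module, and the arguments passed to get_s3_bucket_objects are assumptions, since only the mocked calls are visible in this diff.

# Minimal sketch, assuming 'from app.aws import s3'; the celery task decorator is omitted.
from flask import current_app

from app.aws import s3


def delete_dvla_response_files_older_than_seven_days():
    bucket_name = current_app.config['DVLA_RESPONSE_BUCKET_NAME']

    # List the response bucket, then keep only objects last modified in the tested window.
    bucket_objects = s3.get_s3_bucket_objects(bucket_name)
    older_than_seven_days = s3.filter_s3_bucket_objects_within_date_range(bucket_objects)

    for s3_object in older_than_seven_days:
        s3.remove_s3_object(bucket_name, s3_object['Key'])

The nine-day lower bound on the filter suggests the task is expected to run at least daily, sweeping only the band of files that have recently crossed the seven-day threshold rather than the whole bucket each time.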
@@ -1,12 +1,14 @@
from datetime import (datetime, date, timedelta)
import json
import uuid
from datetime import (datetime, date, timedelta)

import requests_mock
from flask import current_app, url_for

import pytest
import pytz
import requests_mock
from sqlalchemy import asc
from sqlalchemy.orm.session import make_transient
from flask import current_app, url_for

from app import db
from app.models import (

@@ -35,7 +37,6 @@ from app.dao.notifications_dao import dao_create_notification
from app.dao.invited_user_dao import save_invited_user
from app.dao.provider_rates_dao import create_provider_rates
from app.clients.sms.firetext import FiretextClient

from tests import create_authorization_header
from tests.app.db import create_user, create_template, create_notification

@@ -1018,3 +1019,7 @@ def admin_request(client):
        return json_resp

    return AdminRequest


def datetime_in_past(days=0, seconds=0):
    return datetime.now(tz=pytz.utc) - timedelta(days=days, seconds=seconds)
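The new datetime_in_past helper gives the tests a timezone-aware moment in the past relative to "now", which is why the S3 tests pair it with freeze_time: with the clock pinned, repeated calls yield the same instant, so exact LastModified comparisons are safe. A small usage illustration (the printed value is simply what the helper returns under that frozen clock):

# Usage illustration: under freeze_time, datetime_in_past is fully deterministic.
from freezegun import freeze_time

from tests.app.conftest import datetime_in_past

with freeze_time("2016-01-01 11:00:00"):
    eight_days_ago = datetime_in_past(days=8)
    print(eight_days_ago)  # 2015-12-24 11:00:00+00:00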