import uuid
from collections import namedtuple
from datetime import datetime, timedelta
from unittest.mock import call

import boto3
import pytest
from botocore.exceptions import ClientError
from celery.exceptions import MaxRetriesExceededError
from flask import current_app
from freezegun import freeze_time
from moto import mock_s3
from sqlalchemy.orm.exc import NoResultFound

from app import encryption
from app.celery.letters_pdf_tasks import (
    _move_invalid_letter_and_update_status,
    collate_letter_pdfs_to_be_sent,
    get_key_and_size_of_letters_to_be_sent_to_print,
    get_pdf_for_templated_letter,
    group_letters,
    process_sanitised_letter,
    process_virus_scan_error,
    process_virus_scan_failed,
    replay_letters_in_error,
    sanitise_letter,
    send_letters_volume_email_to_dvla,
    update_billable_units_for_letter,
)
from app.config import QueueNames, TaskNames
from app.dao.notifications_dao import get_notifications
from app.errors import VirusScanError
from app.exceptions import NotificationTechnicalFailureException
from app.letters.utils import ScanErrorType
from app.models import (
    INTERNATIONAL_LETTERS,
    KEY_TYPE_NORMAL,
    KEY_TYPE_TEST,
    LETTER_TYPE,
    NOTIFICATION_CREATED,
    NOTIFICATION_DELIVERED,
    NOTIFICATION_PENDING_VIRUS_CHECK,
    NOTIFICATION_TECHNICAL_FAILURE,
    NOTIFICATION_VALIDATION_FAILED,
    NOTIFICATION_VIRUS_SCAN_FAILED,
    Notification,
)
from tests.app.db import (
    create_letter_branding,
    create_notification,
    create_organisation,
    create_service,
    create_template,
)
from tests.conftest import set_config_values


def test_should_have_decorated_tasks_functions():
    assert get_pdf_for_templated_letter.__wrapped__.__name__ == 'get_pdf_for_templated_letter'
    assert collate_letter_pdfs_to_be_sent.__wrapped__.__name__ == 'collate_letter_pdfs_to_be_sent'
    assert process_virus_scan_failed.__wrapped__.__name__ == 'process_virus_scan_failed'
    assert process_virus_scan_error.__wrapped__.__name__ == 'process_virus_scan_error'
    assert sanitise_letter.__wrapped__.__name__ == 'sanitise_letter'
    assert process_sanitised_letter.__wrapped__.__name__ == 'process_sanitised_letter'


@pytest.mark.parametrize('branding_name,logo_filename', [(None, None), ('Test Brand', 'test-brand')])
def test_get_pdf_for_templated_letter_happy_path(mocker, sample_letter_notification, branding_name, logo_filename):
    if branding_name:
        letter_branding = create_letter_branding(name=branding_name, filename=logo_filename)
        sample_letter_notification.service.letter_branding = letter_branding
    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
    mock_generate_letter_pdf_filename = mocker.patch(
        'app.celery.letters_pdf_tasks.generate_letter_pdf_filename',
        return_value='LETTER.PDF'
    )
    get_pdf_for_templated_letter(sample_letter_notification.id)

    letter_data = {
        'letter_contact_block': sample_letter_notification.reply_to_text,
        'template': {
            "subject": sample_letter_notification.template.subject,
            "content": sample_letter_notification.template.content,
            "template_type": sample_letter_notification.template.template_type
        },
        'values': sample_letter_notification.personalisation,
        'logo_filename': logo_filename,
        'letter_filename': 'LETTER.PDF',
        "notification_id": str(sample_letter_notification.id),
        'key_type': sample_letter_notification.key_type
    }

    encrypted_data = encryption.encrypt(letter_data)

    mock_celery.assert_called_once_with(
        name=TaskNames.CREATE_PDF_FOR_TEMPLATED_LETTER,
        args=(encrypted_data,),
        queue=QueueNames.SANITISE_LETTERS
    )

    mock_generate_letter_pdf_filename.assert_called_once_with(
        reference=sample_letter_notification.reference,
        created_at=sample_letter_notification.created_at,
        ignore_folder=False,
        postage='second'
    )


def test_get_pdf_for_templated_letter_non_existent_notification(notify_db_session, mocker, fake_uuid):
    with pytest.raises(expected_exception=NoResultFound):
        get_pdf_for_templated_letter(fake_uuid)


def test_get_pdf_for_templated_letter_retries_upon_error(mocker, sample_letter_notification):
    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task', side_effect=Exception())
    mocker.patch('app.celery.letters_pdf_tasks.generate_letter_pdf_filename', return_value='LETTER.PDF')
    mock_retry = mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.retry')
    mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception')

    get_pdf_for_templated_letter(sample_letter_notification.id)

    assert mock_celery.called
    assert mock_retry.called
    mock_logger.assert_called_once_with(
        f"RETRY: calling create-letter-pdf task for notification {sample_letter_notification.id} failed"
    )


def test_get_pdf_for_templated_letter_sets_technical_failure_max_retries(mocker, sample_letter_notification):
    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task', side_effect=Exception())
    mocker.patch('app.celery.letters_pdf_tasks.generate_letter_pdf_filename', return_value='LETTER.PDF')
    mock_retry = mocker.patch(
        'app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.retry', side_effect=MaxRetriesExceededError)
    mock_update_noti = mocker.patch('app.celery.letters_pdf_tasks.update_notification_status_by_id')

    with pytest.raises(NotificationTechnicalFailureException) as e:
        get_pdf_for_templated_letter(sample_letter_notification.id)

    assert e.value.args[0] == f"RETRY FAILED: Max retries reached. " \
        f"The task create-letter-pdf failed for notification id {sample_letter_notification.id}. " \
        f"Notification has been updated to technical-failure"
    assert mock_celery.called
    assert mock_retry.called
    mock_update_noti.assert_called_once_with(sample_letter_notification.id, 'technical-failure')


@pytest.mark.parametrize('number_of_pages, expected_billable_units', [(2, 1), (3, 2), (10, 5)])
def test_update_billable_units_for_letter(mocker, sample_letter_notification, number_of_pages, expected_billable_units):
    sample_letter_notification.billable_units = 0
    mock_logger = mocker.patch('app.celery.tasks.current_app.logger.info')

    update_billable_units_for_letter(sample_letter_notification.id, number_of_pages)

    notification = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one()
    assert notification.billable_units == expected_billable_units
    mock_logger.assert_called_once_with(
        f"Letter notification id: {sample_letter_notification.id} reference {sample_letter_notification.reference}:"
        f" billable units set to {expected_billable_units}"
    )


def test_update_billable_units_for_letter_doesnt_update_if_sent_with_test_key(mocker, sample_letter_notification):
    sample_letter_notification.billable_units = 0
    sample_letter_notification.key_type = KEY_TYPE_TEST
    mock_logger = mocker.patch('app.celery.tasks.current_app.logger.info')

    update_billable_units_for_letter(sample_letter_notification.id, 2)

    notification = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one()
    assert notification.billable_units == 0
    mock_logger.assert_not_called()


@mock_s3
@freeze_time('2020-02-17 18:00:00')
def test_get_key_and_size_of_letters_to_be_sent_to_print(
    notify_api,
    mocker,
    sample_letter_template,
    sample_organisation,
):
    pdf_bucket = current_app.config['LETTERS_PDF_BUCKET_NAME']
    s3 = boto3.client('s3', region_name='eu-west-1')
    s3.create_bucket(Bucket=pdf_bucket, CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})
    s3.put_object(Bucket=pdf_bucket, Key='2020-02-17/NOTIFY.REF0.D.2.C.20200217160000.PDF', Body=b'1')
    s3.put_object(Bucket=pdf_bucket, Key='2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF', Body=b'22')
    s3.put_object(Bucket=pdf_bucket, Key='2020-02-16/NOTIFY.REF2.D.2.C.20200215180000.PDF', Body=b'333')
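    # The seeded keys mirror the letter PDF naming scheme used across these tests: a date-prefixed folder,
    # then NOTIFY.<uppercased reference> followed by segments that (judging by the fixtures here and in the
    # collation test further down) encode the postage class (1/2/E/N), crown status (C or N) and a timestamp.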

    sample_letter_template.service.organisation = sample_organisation

    # second class
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref0',
        created_at=(datetime.now() - timedelta(hours=2))
    )
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref1',
        created_at=(datetime.now() - timedelta(hours=3))
    )
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref2',
        created_at=(datetime.now() - timedelta(days=2))
    )

    # notifications we don't expect to get sent to print as they are in the wrong status
    for status in ['delivered', 'validation-failed', 'cancelled', 'sending']:
        create_notification(
            template=sample_letter_template,
            status=status,
            reference='ref3',
            created_at=(datetime.now() - timedelta(days=2))
        )

    # notification we don't expect to get sent because it will instead make it into this evening's print run
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref4',
        created_at=(datetime.now() - timedelta(minutes=1))
    )

    # test notification we don't expect to get sent
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref4',
        created_at=(datetime.now() - timedelta(days=1)),
        key_type=KEY_TYPE_TEST
    )

    results = list(
        get_key_and_size_of_letters_to_be_sent_to_print(datetime.now() - timedelta(minutes=30), postage='second')
    )

    assert len(results) == 3

    assert results == [
        {
            'Key': '2020-02-16/NOTIFY.REF2.D.2.C.20200215180000.PDF',
            'Size': 3,
            'ServiceId': str(sample_letter_template.service_id),
            'OrganisationId': str(sample_organisation.id)
        },
        {
            'Key': '2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF',
            'Size': 2,
            'ServiceId': str(sample_letter_template.service_id),
            'OrganisationId': str(sample_organisation.id)
        },
        {
            'Key': '2020-02-17/NOTIFY.REF0.D.2.C.20200217160000.PDF',
            'Size': 1,
            'ServiceId': str(sample_letter_template.service_id),
            'OrganisationId': str(sample_organisation.id)
        },
    ]


@mock_s3
@freeze_time('2020-02-17 18:00:00')
def test_get_key_and_size_of_letters_to_be_sent_to_print_handles_file_not_found(
    notify_api, mocker, sample_letter_template, sample_organisation
):
    pdf_bucket = current_app.config['LETTERS_PDF_BUCKET_NAME']
    s3 = boto3.client('s3', region_name='eu-west-1')
    s3.create_bucket(Bucket=pdf_bucket, CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})
    s3.put_object(Bucket=pdf_bucket, Key='2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF', Body=b'12')
    # no object for ref0, so that letter has no file to be collected

    sample_letter_template.service.organisation = sample_organisation

    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref0',
        created_at=(datetime.now() - timedelta(hours=2))
    )
    create_notification(
        template=sample_letter_template,
        status='created',
        reference='ref1',
        created_at=(datetime.now() - timedelta(hours=3))
    )

    results = list(
        get_key_and_size_of_letters_to_be_sent_to_print(datetime.now() - timedelta(minutes=30), postage='second')
    )

    assert results == [{
        'Key': '2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF',
        'Size': 2,
        'ServiceId': str(sample_letter_template.service_id),
        'OrganisationId': str(sample_organisation.id)}
    ]


@mock_s3
@pytest.mark.parametrize('time_to_run_task', [
    "2020-02-17 18:00:00",  # after 5:30pm
    "2020-02-18 02:00:00",  # the next day, after midnight but before 5:30pm, we expect the same results
])
def test_collate_letter_pdfs_to_be_sent(
    notify_api, mocker, time_to_run_task, sample_organisation
):
    with freeze_time("2020-02-17 18:00:00"):
        service_1 = create_service(service_name="service 1", service_id='f2fe37b0-1301-11eb-aba9-4c3275916899')
        service_1.organisation = sample_organisation
        letter_template_1 = create_template(service_1, template_type=LETTER_TYPE)
        # second class
        create_notification(
            template=letter_template_1,
            status='created',
            reference='ref0',
            created_at=(datetime.now() - timedelta(hours=2))
        )
        create_notification(
            template=letter_template_1,
            status='created',
            reference='ref1',
            created_at=(datetime.now() - timedelta(hours=3))
        )
        create_notification(
            template=letter_template_1,
            status='created',
            reference='ref2',
            created_at=(datetime.now() - timedelta(days=2))
        )

        # first class
        create_notification(
            template=letter_template_1,
            status='created',
            reference='first_class',
            created_at=(datetime.now() - timedelta(hours=4)),
            postage="first"
        )

        # international
        create_notification(
            template=letter_template_1,
            status='created',
            reference='international',
            created_at=(datetime.now() - timedelta(days=3)),
            postage="europe"
        )
        create_notification(
            template=letter_template_1,
            status='created',
            reference='international',
            created_at=(datetime.now() - timedelta(days=4)),
            postage="rest-of-world"
        )

        # different service second class, belonging to a different organisation
        org_2_id = uuid.uuid4()
        organisation_two = create_organisation('Org 2', organisation_id=org_2_id)
        service_2 = create_service(service_name="service 2",
                                   service_id='3a5cea08-29fd-4bb9-b582-8dedd928b149',
                                   organisation=organisation_two)
        letter_template_2 = create_template(service_2, template_type=LETTER_TYPE)
        create_notification(
            template=letter_template_2,
            status='created',
            reference='another_service',
            created_at=(datetime.now() - timedelta(hours=2))
        )

        bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME']
        s3 = boto3.client('s3', region_name='eu-west-1')
        s3.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
        )

        filenames = [
            '2020-02-17/NOTIFY.FIRST_CLASS.D.1.C.20200217140000.PDF',
            '2020-02-16/NOTIFY.REF2.D.2.C.20200215180000.PDF',
            '2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF',
            '2020-02-17/NOTIFY.REF0.D.2.C.20200217160000.PDF',
            '2020-02-15/NOTIFY.INTERNATIONAL.D.E.C.20200214180000.PDF',
            '2020-02-14/NOTIFY.INTERNATIONAL.D.N.C.20200213180000.PDF',
            '2020-02-17/NOTIFY.ANOTHER_SERVICE.D.2.C.20200217160000.PDF'
        ]

        for filename in filenames:
            s3.put_object(Bucket=bucket_name, Key=filename, Body=b'f')

    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
    mock_send_email_to_dvla = mocker.patch(
        'app.celery.letters_pdf_tasks.send_letters_volume_email_to_dvla'
    )

    with set_config_values(notify_api, {'MAX_LETTER_PDF_COUNT_PER_ZIP': 2}):
        with freeze_time(time_to_run_task):
            collate_letter_pdfs_to_be_sent()

    mock_send_email_to_dvla.assert_called_once_with([
        (1, 1, 'europe'), (1, 1, 'first'), (1, 1, 'rest-of-world'), (4, 4, 'second')
    ], datetime(2020, 2, 17).date())
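
    # Each batch is handed to the 'zip-and-send-letter-pdfs' task with an upload filename that, per the
    # assertions below, follows the pattern
    # NOTIFY.<collation date>.<postage class>.<batch sequence>.<token>.<service id>.<organisation id>.ZIP;
    # the token segments (e.g. 'sO6RKzPyNrkxrR8OLonl') are deterministic in this test setup, which is why
    # exact strings can be asserted.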
    assert len(mock_celery.call_args_list) == 6
    assert mock_celery.call_args_list[0] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': [
                '2020-02-17/NOTIFY.FIRST_CLASS.D.1.C.20200217140000.PDF'
            ],
            'upload_filename':
                f'NOTIFY.2020-02-17.1.001.sO6RKzPyNrkxrR8OLonl.{letter_template_1.service_id}.{sample_organisation.id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )
    assert mock_celery.call_args_list[1] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': ['2020-02-17/NOTIFY.ANOTHER_SERVICE.D.2.C.20200217160000.PDF'],
            'upload_filename':
                f'NOTIFY.2020-02-17.2.001.bGS-FKKV0QHcOUZgacEu.{service_2.id}.{org_2_id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )
    assert mock_celery.call_args_list[2] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': [
                '2020-02-16/NOTIFY.REF2.D.2.C.20200215180000.PDF',
                '2020-02-17/NOTIFY.REF1.D.2.C.20200217150000.PDF'
            ],
            'upload_filename':
                f'NOTIFY.2020-02-17.2.002.AmmswUYqPToXwlSZiFyK.{letter_template_1.service_id}.{sample_organisation.id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )
    assert mock_celery.call_args_list[3] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': [
                '2020-02-17/NOTIFY.REF0.D.2.C.20200217160000.PDF'
            ],
            'upload_filename':
                f'NOTIFY.2020-02-17.2.003.36PwhyI9lFKjzbPiWxwv.{letter_template_1.service_id}.{sample_organisation.id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )
    assert mock_celery.call_args_list[4] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': [
                '2020-02-15/NOTIFY.INTERNATIONAL.D.E.C.20200214180000.PDF'
            ],
            'upload_filename':
                f'NOTIFY.2020-02-17.E.001.lDBwqhnG__URJeGz3tH1.{letter_template_1.service_id}.{sample_organisation.id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )
    assert mock_celery.call_args_list[5] == call(
        name='zip-and-send-letter-pdfs',
        kwargs={
            'filenames_to_zip': [
                '2020-02-14/NOTIFY.INTERNATIONAL.D.N.C.20200213180000.PDF',
            ],
            'upload_filename':
                f'NOTIFY.2020-02-17.N.001.ZE7k_jm7Bg5sYwLswkr4.{letter_template_1.service_id}.{sample_organisation.id}.ZIP'
        },
        queue='process-ftp-tasks',
        compression='zlib'
    )


def test_send_letters_volume_email_to_dvla(notify_api, notify_db_session, mocker, letter_volumes_email_template):
    MockVolume = namedtuple('LettersVolume', ['postage', 'letters_count', 'sheets_count'])
    letters_volumes = [
        MockVolume('first', 5, 7),
        MockVolume('second', 4, 12),
        MockVolume('europe', 1, 3),
        MockVolume('rest-of-world', 1, 2),
    ]
    send_mock = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')

    send_letters_volume_email_to_dvla(letters_volumes, datetime(2020, 2, 17).date())

    emails_to_dvla = get_notifications().all()
    assert len(emails_to_dvla) == 2
    assert send_mock.call_count == 2
    send_mock.assert_any_call([str(emails_to_dvla[0].id)], queue=QueueNames.NOTIFY)
    send_mock.assert_any_call([str(emails_to_dvla[1].id)], queue=QueueNames.NOTIFY)
    for email in emails_to_dvla:
        assert str(email.template_id) == current_app.config['LETTERS_VOLUME_EMAIL_TEMPLATE_ID']
        assert email.to in current_app.config['DVLA_EMAIL_ADDRESSES']
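        # Expected figures are aggregates of the MockVolume rows above: 5 + 4 + 1 + 1 = 11 letters and
        # 7 + 12 + 3 + 2 = 24 sheets in total, with the europe and rest-of-world rows combined into the
        # international totals (2 letters, 5 sheets).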
        assert email.personalisation == {
            'total_volume': 11,
            'first_class_volume': 5,
            'second_class_volume': 4,
            'international_volume': 2,
            'total_sheets': 24,
            'first_class_sheets': 7,
            "second_class_sheets": 12,
            'international_sheets': 5,
            'date': '17 February 2020'
        }


def test_group_letters_splits_on_file_size(notify_api):
    letters = [
        # ends under max but next one is too big
        {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'}, {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'},
        # ends on exactly max
        {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'},
        {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'},
        # exactly max goes in next file
        {'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'},
        # if it's bigger than the max, still gets included
        {'Key': 'G.pdf', 'Size': 6, 'ServiceId': '123'},
        # whatever's left goes in last list
        {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'}, {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'},
    ]

    with set_config_values(notify_api, {'MAX_LETTER_PDF_ZIP_FILESIZE': 5}):
        x = group_letters(letters)

        assert next(x) == [
            {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'}
        ]
        assert next(x) == [
            {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'},
            {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'}
        ]
        assert next(x) == [{'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'}]
        assert next(x) == [{'Key': 'G.pdf', 'Size': 6, 'ServiceId': '123'}]
        assert next(x) == [
            {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'}
        ]
        # make sure iterator is exhausted
        assert next(x, None) is None


def test_group_letters_splits_on_file_count(notify_api):
    letters = [
        {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'},
        {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'},
        {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'},
        {'Key': 'G.pdf', 'Size': 6, 'ServiceId': '123'},
        {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'},
    ]

    with set_config_values(notify_api, {'MAX_LETTER_PDF_COUNT_PER_ZIP': 3}):
        x = group_letters(letters)

        assert next(x) == [
            {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'},
            {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'}
        ]
        assert next(x) == [
            {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'}
        ]
        assert next(x) == [
            {'Key': 'G.pdf', 'Size': 6, 'ServiceId': '123'},
            {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'}
        ]
        # make sure iterator is exhausted
        assert next(x, None) is None


def test_group_letters_splits_on_file_size_and_file_count(notify_api):
    letters = [
        # ends under max file size but next file is too big
        {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'},
        # ends on exactly max number of files and file size
        {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'},
        {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'},
        # exactly max file size goes in next file
        {'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'},
        # file size is within max but number of files reaches limit
        {'Key': 'G.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'},
        {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'},
        # whatever's left goes in last list
        {'Key': 'J.pdf', 'Size': 1, 'ServiceId': '123'},
    ]

    with set_config_values(notify_api, {
        'MAX_LETTER_PDF_ZIP_FILESIZE': 5,
        'MAX_LETTER_PDF_COUNT_PER_ZIP': 3
    }):
        x = group_letters(letters)

        assert next(x) == [
            {'Key': 'A.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'B.pdf', 'Size': 2, 'ServiceId': '123'}
        ]
        assert next(x) == [
            {'Key': 'C.pdf', 'Size': 3, 'ServiceId': '123'},
            {'Key': 'D.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'E.pdf', 'Size': 1, 'ServiceId': '123'}
        ]
        assert next(x) == [{'Key': 'F.pdf', 'Size': 5, 'ServiceId': '123'}]
        assert next(x) == [
            {'Key': 'G.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'H.pdf', 'Size': 1, 'ServiceId': '123'},
            {'Key': 'I.pdf', 'Size': 1, 'ServiceId': '123'}
        ]
        assert next(x) == [{'Key': 'J.pdf', 'Size': 1, 'ServiceId': '123'}]
        # make sure iterator is exhausted
        assert next(x, None) is None


@pytest.mark.parametrize('key', ["A.ZIP", "B.zip"])
|
|
|
|
|
def test_group_letters_ignores_non_pdfs(key):
|
|
|
|
|
letters = [{'Key': key, 'Size': 1}]
|
2017-12-22 15:38:49 +00:00
|
|
|
assert list(group_letters(letters)) == []
|
|
|
|
|
|
|
|
|
|
|
2020-02-19 13:21:30 +00:00
|
|
|
@pytest.mark.parametrize('key', ["A.PDF", "B.pdf", "C.PdF"])
|
|
|
|
|
def test_group_letters_includes_pdf_files(key):
|
2020-10-20 17:51:35 +01:00
|
|
|
letters = [{'Key': key, 'Size': 1, 'ServiceId': '123'}]
|
|
|
|
|
assert list(group_letters(letters)) == [[{'Key': key, 'Size': 1, 'ServiceId': '123'}]]
|
2020-02-19 13:21:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_group_letters_with_no_letters():
    assert list(group_letters([])) == []


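# If moving an invalid letter to the invalid-PDF bucket fails with an S3 ClientError,
# the notification is expected to end up in technical failure and the error logged.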
def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error(
    mocker,
    sample_letter_notification,
):
    error_response = {
        'Error': {
            'Code': 'InvalidParameterValue',
            'Message': 'some error message from amazon',
            'Type': 'Sender'
        }
    }
    mocker.patch('app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket',
                 side_effect=ClientError(error_response, 'operation_name'))
    mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception')

    with pytest.raises(NotificationTechnicalFailureException):
        _move_invalid_letter_and_update_status(
            notification=sample_letter_notification,
            filename='filename',
            scan_pdf_object=mocker.Mock()
        )

    assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE
    mock_logger.assert_called_once_with(
        'Error when moving letter with id {} to invalid PDF bucket'.format(sample_letter_notification.id)
    )


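# sanitise_letter hands the file off to the template preview app via Celery; whether
# international addresses are allowed follows from the service's INTERNATIONAL_LETTERS
# permission.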
@pytest.mark.parametrize('permissions, expected_international_letters_allowed', (
    ([LETTER_TYPE], False),
    ([LETTER_TYPE, INTERNATIONAL_LETTERS], True),
))
def test_sanitise_letter_calls_template_preview_sanitise_task(
    mocker,
    sample_letter_notification,
    permissions,
    expected_international_letters_allowed,
):
    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
    sample_letter_notification.service = create_service(
        service_permissions=permissions
    )
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK

    sanitise_letter(filename)

    mock_celery.assert_called_once_with(
        name=TaskNames.SANITISE_LETTER,
        kwargs={
            'notification_id': str(sample_letter_notification.id),
            'filename': filename,
            'allow_international_letters': expected_international_letters_allowed,
        },
        queue=QueueNames.SANITISE_LETTERS,
    )


def test_sanitise_letter_does_not_call_template_preview_sanitise_task_if_notification_in_wrong_state(
    mocker,
    sample_letter_notification,
):
    mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)

    sanitise_letter(filename)

    assert not mock_celery.called


def test_sanitise_letter_does_not_call_template_preview_sanitise_task_if_there_is_an_exception(
    mocker,
    sample_letter_notification,
):
    mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task', side_effect=Exception())
    mock_celery_retry = mocker.patch('app.celery.letters_pdf_tasks.sanitise_letter.retry')

    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK

    sanitise_letter(filename)

    mock_celery_retry.assert_called_once_with(queue='retry-tasks')


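# Once Celery retries are exhausted, the letter is expected to be put into technical
# failure rather than retried again.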
def test_sanitise_letter_puts_letter_into_technical_failure_if_max_retries_exceeded(sample_letter_notification, mocker):
    mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task', side_effect=Exception())
    mocker.patch('app.celery.letters_pdf_tasks.sanitise_letter.retry', side_effect=MaxRetriesExceededError())

    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK

    with pytest.raises(NotificationTechnicalFailureException):
        sanitise_letter(filename)

    assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE


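# The expected destination filenames encode the postage in their fourth segment
# (1 = first class, 2 = second class, E = europe, N = rest-of-world). Letters sent with
# a normal API key go into the dated folder of the live letters bucket, while test-key
# letters go to the test letters bucket and are marked delivered.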
@mock_s3
@pytest.mark.parametrize('key_type, destination_bucket, expected_status, postage, destination_filename', [
    (
        KEY_TYPE_NORMAL,
        'LETTERS_PDF_BUCKET_NAME',
        NOTIFICATION_CREATED,
        'first',
        '2018-07-01/NOTIFY.FOO.D.1.C.20180701120000.PDF'
    ),
    (
        KEY_TYPE_NORMAL,
        'LETTERS_PDF_BUCKET_NAME',
        NOTIFICATION_CREATED,
        'second',
        '2018-07-01/NOTIFY.FOO.D.2.C.20180701120000.PDF'
    ),
    (
        KEY_TYPE_NORMAL,
        'LETTERS_PDF_BUCKET_NAME',
        NOTIFICATION_CREATED,
        'europe',
        '2018-07-01/NOTIFY.FOO.D.E.C.20180701120000.PDF'
    ),
    (
        KEY_TYPE_NORMAL,
        'LETTERS_PDF_BUCKET_NAME',
        NOTIFICATION_CREATED,
        'rest-of-world',
        '2018-07-01/NOTIFY.FOO.D.N.C.20180701120000.PDF'
    ),
    (
        KEY_TYPE_TEST,
        'TEST_LETTERS_BUCKET_NAME',
        NOTIFICATION_DELIVERED,
        'second',
        'NOTIFY.FOO.D.2.C.20180701120000.PDF',
    ),
    (
        KEY_TYPE_TEST,
        'TEST_LETTERS_BUCKET_NAME',
        NOTIFICATION_DELIVERED,
        'first',
        'NOTIFY.FOO.D.1.C.20180701120000.PDF',
    ),
])
def test_process_sanitised_letter_with_valid_letter(
    sample_letter_notification,
    key_type,
    destination_bucket,
    expected_status,
    postage,
    destination_filename,
):
    # We save the letter as if it's 2nd class initially, and the task changes the filename to have the correct postage
    filename = 'NOTIFY.FOO.D.2.C.20180701120000.PDF'

    scan_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
    template_preview_bucket_name = current_app.config['LETTER_SANITISE_BUCKET_NAME']
    destination_bucket_name = current_app.config[destination_bucket]
    conn = boto3.resource('s3', region_name='eu-west-1')

    scan_bucket = conn.create_bucket(
        Bucket=scan_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    template_preview_bucket = conn.create_bucket(
        Bucket=template_preview_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    destination_bucket = conn.create_bucket(
        Bucket=destination_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )

    s3 = boto3.client('s3', region_name='eu-west-1')
    s3.put_object(Bucket=scan_bucket_name, Key=filename, Body=b'original_pdf_content')
    s3.put_object(Bucket=template_preview_bucket_name, Key=filename, Body=b'sanitised_pdf_content')

    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    sample_letter_notification.key_type = key_type
    sample_letter_notification.billable_units = 1
    sample_letter_notification.created_at = datetime(2018, 7, 1, 12)
    sample_letter_notification.postage = postage

    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': filename,
        'notification_id': str(sample_letter_notification.id),
        'address': 'A. User\nThe house on the corner'
    })
    process_sanitised_letter(encrypted_data)

    assert sample_letter_notification.status == expected_status
    assert sample_letter_notification.billable_units == 1
    assert sample_letter_notification.to == 'A. User\nThe house on the corner'

    assert not [x for x in scan_bucket.objects.all()]
    assert not [x for x in template_preview_bucket.objects.all()]
    assert len([x for x in destination_bucket.objects.all()]) == 1

    file_contents = conn.Object(destination_bucket_name, destination_filename).get()['Body'].read().decode('utf-8')
    assert file_contents == 'sanitised_pdf_content'


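# Postage and the international flag appear to be derived from the country line of the
# sanitised address: a UK postcode stays second class, while overseas addresses switch
# to 'europe' or 'rest-of-world'.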
@mock_s3
@pytest.mark.parametrize('address, expected_postage, expected_international',
                         [('Lady Lou, 123 Main Street, SW1 1AA', 'second', False),
                          ('Lady Lou, 123 Main Street, France', 'europe', True),
                          ('Lady Lou, 123 Main Street, New Zealand', 'rest-of-world', True),
                          ])
def test_process_sanitised_letter_sets_postage_international(
    sample_letter_notification,
    expected_postage,
    expected_international,
    address
):
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)

    scan_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
    template_preview_bucket_name = current_app.config['LETTER_SANITISE_BUCKET_NAME']
    destination_bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME']
    conn = boto3.resource('s3', region_name='eu-west-1')
    conn.create_bucket(
        Bucket=scan_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    conn.create_bucket(
        Bucket=template_preview_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    conn.create_bucket(
        Bucket=destination_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )

    s3 = boto3.client('s3', region_name='eu-west-1')
    s3.put_object(Bucket=scan_bucket_name, Key=filename, Body=b'original_pdf_content')
    s3.put_object(Bucket=template_preview_bucket_name, Key=filename, Body=b'sanitised_pdf_content')

    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    sample_letter_notification.billable_units = 1
    sample_letter_notification.created_at = datetime(2018, 7, 1, 12)

    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': filename,
        'notification_id': str(sample_letter_notification.id),
        'address': address
    })
    process_sanitised_letter(encrypted_data)

    assert sample_letter_notification.status == 'created'
    assert sample_letter_notification.billable_units == 1
    assert sample_letter_notification.to == address
    assert sample_letter_notification.postage == expected_postage
    assert sample_letter_notification.international == expected_international


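# A letter that fails validation keeps billable_units at 0 and has its original
# (unsanitised) PDF moved to the invalid-letters bucket.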
@mock_s3
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEST])
def test_process_sanitised_letter_with_invalid_letter(sample_letter_notification, key_type):
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)

    scan_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
    template_preview_bucket_name = current_app.config['LETTER_SANITISE_BUCKET_NAME']
    invalid_letter_bucket_name = current_app.config['INVALID_PDF_BUCKET_NAME']
    conn = boto3.resource('s3', region_name='eu-west-1')

    scan_bucket = conn.create_bucket(
        Bucket=scan_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    template_preview_bucket = conn.create_bucket(
        Bucket=template_preview_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )
    invalid_letter_bucket = conn.create_bucket(
        Bucket=invalid_letter_bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}
    )

    s3 = boto3.client('s3', region_name='eu-west-1')
    s3.put_object(Bucket=scan_bucket_name, Key=filename, Body=b'original_pdf_content')

    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    sample_letter_notification.key_type = key_type
    sample_letter_notification.billable_units = 1
    sample_letter_notification.created_at = datetime(2018, 7, 1, 12)

    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': 'content-outside-printable-area',
        'invalid_pages': [1],
        'validation_status': 'failed',
        'filename': filename,
        'notification_id': str(sample_letter_notification.id),
        'address': None,
    })
    process_sanitised_letter(encrypted_data)

    assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED
    assert sample_letter_notification.billable_units == 0

    assert not [x for x in scan_bucket.objects.all()]
    assert not [x for x in template_preview_bucket.objects.all()]
    assert len([x for x in invalid_letter_bucket.objects.all()]) == 1

    file_contents = conn.Object(invalid_letter_bucket_name, filename).get()['Body'].read().decode('utf-8')
    assert file_contents == 'original_pdf_content'


def test_process_sanitised_letter_when_letter_status_is_not_pending_virus_scan(
    sample_letter_notification,
    mocker,
):
    mock_s3 = mocker.patch('app.celery.letters_pdf_tasks.s3')
    sample_letter_notification.status = NOTIFICATION_CREATED

    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': 'NOTIFY.{}'.format(sample_letter_notification.reference),
        'notification_id': str(sample_letter_notification.id),
        'address': None
    })
    process_sanitised_letter(encrypted_data)

    assert not mock_s3.called


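# A boto/S3 failure while fetching the sanitised PDF should raise
# NotificationTechnicalFailureException and mark the letter as a technical failure.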
def test_process_sanitised_letter_puts_letter_into_tech_failure_for_boto_errors(
    sample_letter_notification,
    mocker,
):
    mocker.patch('app.celery.letters_pdf_tasks.s3.get_s3_object', side_effect=ClientError({}, 'operation_name'))
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK

    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': 'NOTIFY.{}'.format(sample_letter_notification.reference),
        'notification_id': str(sample_letter_notification.id),
        'address': None
    })

    with pytest.raises(NotificationTechnicalFailureException):
        process_sanitised_letter(encrypted_data)

    assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE


def test_process_sanitised_letter_retries_if_there_is_an_exception(
    mocker,
    sample_letter_notification,
):
    mocker.patch('app.celery.letters_pdf_tasks.update_letter_pdf_status', side_effect=Exception())
    mock_celery_retry = mocker.patch('app.celery.letters_pdf_tasks.process_sanitised_letter.retry')

    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': 'NOTIFY.{}'.format(sample_letter_notification.reference),
        'notification_id': str(sample_letter_notification.id),
        'address': None
    })

    process_sanitised_letter(encrypted_data)

    mock_celery_retry.assert_called_once_with(queue='retry-tasks')


def test_process_sanitised_letter_puts_letter_into_technical_failure_if_max_retries_exceeded(
    mocker,
    sample_letter_notification,
):
    mocker.patch('app.celery.letters_pdf_tasks.update_letter_pdf_status', side_effect=Exception())
    mocker.patch('app.celery.letters_pdf_tasks.process_sanitised_letter.retry', side_effect=MaxRetriesExceededError())

    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    encrypted_data = encryption.encrypt({
        'page_count': 2,
        'message': None,
        'invalid_pages': None,
        'validation_status': 'passed',
        'filename': 'NOTIFY.{}'.format(sample_letter_notification.reference),
        'notification_id': str(sample_letter_notification.id),
        'address': None
    })

    with pytest.raises(NotificationTechnicalFailureException):
        process_sanitised_letter(encrypted_data)

    assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE


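# A failed virus scan marks the letter as virus-scan-failed and files the PDF under
# ScanErrorType.FAILURE; a scan *error* (next test) files it under ScanErrorType.ERROR
# and puts the notification into technical failure instead.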
def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker):
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    mock_move_failed_pdf = mocker.patch('app.celery.letters_pdf_tasks.move_failed_pdf')

    with pytest.raises(VirusScanError) as e:
        process_virus_scan_failed(filename)

    assert "Virus scan failed:" in str(e.value)
    mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.FAILURE)
    assert sample_letter_notification.status == NOTIFICATION_VIRUS_SCAN_FAILED


def test_process_letter_task_check_virus_scan_error(sample_letter_notification, mocker):
    filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
    sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
    mock_move_failed_pdf = mocker.patch('app.celery.letters_pdf_tasks.move_failed_pdf')

    with pytest.raises(VirusScanError) as e:
        process_virus_scan_error(filename)

    assert "Virus scan error:" in str(e.value)
    mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.ERROR)
    assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE


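# replay_letters_in_error moves PDFs from the error bucket back into the scan bucket
# (dropping the 'ERROR/' prefix) and re-queues the antivirus scan task for each file.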
def test_replay_letters_in_error_for_all_letters_in_error_bucket(notify_api, mocker):
    mockObject = boto3.resource('s3').Object('ERROR', 'ERROR/file_name')
    mocker.patch("app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", return_value=[mockObject])
    mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket")
    mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task")
    replay_letters_in_error()
    mock_move.assert_called_once_with('file_name')
    mock_celery.assert_called_once_with(name='scan-file', kwargs={'filename': 'file_name'}, queue='antivirus-tasks')


def test_replay_letters_in_error_for_one_file(notify_api, mocker):
    mockObject = boto3.resource('s3').Object('ERROR', 'ERROR/file_name')
    mocker.patch("app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", return_value=[mockObject])
    mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket")
    mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task")
    replay_letters_in_error("file_name")
    mock_move.assert_called_once_with('file_name')
    mock_celery.assert_called_once_with(name='scan-file', kwargs={'filename': 'file_name'}, queue='antivirus-tasks')