Create a new task to build the DVLA file.

This will transform each notification in a job to a row in a file.
The file is then uploaded to S3.
The files will later be aggregated by the notifications-ftp app and sent to the DVLA.

The method to upload the file to S3 should be pulled into notifications-utils package.
It is the same method used in notifications-admin.
This commit is contained in:
Rebecca Law
2017-03-15 15:26:58 +00:00
parent ea4214c7d5
commit 140179b4b6
6 changed files with 169 additions and 20 deletions

View File

@@ -1,8 +1,9 @@
import uuid
from datetime import datetime
from unittest.mock import Mock, ANY, call
from unittest.mock import Mock
import pytest
from flask import current_app
from freezegun import freeze_time
from sqlalchemy.exc import SQLAlchemyError
from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
@@ -11,7 +12,7 @@ from celery.exceptions import Retry
from app import (encryption, DATETIME_FORMAT)
from app.celery import provider_tasks
from app.celery import tasks
from app.celery.tasks import s3
from app.celery.tasks import s3, build_dvla_file
from app.celery.tasks import (
process_job,
process_row,
@@ -31,7 +32,7 @@ from tests.app.conftest import (
sample_email_template,
sample_notification
)
from tests.app.db import create_user
from tests.app.db import create_user, create_notification, create_job
class AnyStringWith(str):
@@ -73,7 +74,9 @@ def test_should_process_sms_job(sample_job, mocker):
mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
mocker.patch('app.celery.tasks.send_sms.apply_async')
mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
mocker.patch('app.celery.tasks.build_dvla_file')
mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(sample_job.id)
s3.get_job_from_s3.assert_called_once_with(
@@ -94,6 +97,7 @@ def test_should_process_sms_job(sample_job, mocker):
)
job = jobs_dao.dao_get_job_by_id(sample_job.id)
assert job.job_status == 'finished'
tasks.build_dvla_file.assert_not_called()
@freeze_time("2016-01-01 11:09:00.061258")
@@ -105,6 +109,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db,
mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms'))
mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(job.id)
@@ -112,6 +117,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db,
assert job.job_status == 'sending limits exceeded'
assert s3.get_job_from_s3.called is False
assert tasks.process_row.called is False
tasks.build_dvla_file.assert_not_called()
def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify_db,
@@ -124,6 +130,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify
mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(job.id)
@@ -131,6 +138,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify
assert job.job_status == 'sending limits exceeded'
assert s3.get_job_from_s3.called is False
assert tasks.process_row.called is False
tasks.build_dvla_file.assert_not_called()
def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
@@ -142,6 +150,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(noti
mocker.patch('app.celery.tasks.s3.get_job_from_s3')
mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(job.id)
@@ -149,6 +158,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(noti
assert job.job_status == 'sending limits exceeded'
assert s3.get_job_from_s3.called is False
assert tasks.process_row.called is False
tasks.build_dvla_file.assert_not_called()
@freeze_time("2016-01-01 11:09:00.061258")
@@ -159,6 +169,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits(notify_db, not
mocker.patch('app.celery.tasks.s3.get_job_from_s3')
mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(job.id)
@@ -166,6 +177,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits(notify_db, not
assert job.job_status == 'sending limits exceeded'
assert s3.get_job_from_s3.called is False
assert tasks.process_row.called is False
tasks.build_dvla_file.assert_not_called()
def test_should_not_process_job_if_already_pending(notify_db, notify_db_session, mocker):
@@ -173,11 +185,13 @@ def test_should_not_process_job_if_already_pending(notify_db, notify_db_session,
mocker.patch('app.celery.tasks.s3.get_job_from_s3')
mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(job.id)
assert s3.get_job_from_s3.called is False
assert tasks.process_row.called is False
tasks.build_dvla_file.assert_not_called()
@freeze_time("2016-01-01 11:09:00.061258")
@@ -269,6 +283,7 @@ def test_should_process_letter_job(sample_letter_job, mocker):
mocker.patch('app.celery.tasks.send_email.apply_async')
process_row_mock = mocker.patch('app.celery.tasks.process_row')
mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
mocker.patch('app.celery.tasks.build_dvla_file')
process_job(sample_letter_job.id)
@@ -294,6 +309,7 @@ def test_should_process_letter_job(sample_letter_job, mocker):
assert process_row_mock.call_count == 1
assert sample_letter_job.job_status == 'finished'
tasks.build_dvla_file.apply_async.assert_called_once_with([str(sample_letter_job.id)], queue="process-job")
def test_should_process_all_sms_job(sample_job,
@@ -930,6 +946,7 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
mocker.patch('app.celery.tasks.s3.get_job_from_s3')
mocker.patch('app.celery.tasks.process_row')
mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file')
process_job(sample_job.id)
@@ -937,6 +954,7 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
assert job.job_status == 'cancelled'
s3.get_job_from_s3.assert_not_called()
tasks.process_row.assert_not_called()
mock_dvla_file_task.assert_not_called()
@pytest.mark.parametrize('template_type, expected_class', [
@@ -946,3 +964,35 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
])
def test_get_template_class(template_type, expected_class):
    """Each template type should resolve to its matching template class."""
    resolved = get_template_class(template_type)
    assert resolved == expected_class
def test_build_dvla_file(sample_letter_template, mocker):
    """build_dvla_file renders one DVLA row per notification in the job and
    uploads the concatenated rows as a single file to the DVLA S3 bucket.

    Fix: dropped the redundant `assert mocked.called` — it was immediately
    superseded by the stricter `assert_called_once_with` below.
    """
    job = create_job(template=sample_letter_template, notification_count=2)
    create_notification(template=job.template, job=job)
    create_notification(template=job.template, job=job)
    mocked = mocker.patch("app.celery.tasks.s3upload")
    # Pin the rendered row so this test exercises file assembly, not templating.
    mocker.patch("app.celery.tasks.LetterDVLATemplate.__str__", return_value="dvla|string")

    build_dvla_file(job.id)

    # Two notifications -> two newline-terminated rows.
    expected_file = "dvla|string\ndvla|string\n"
    mocked.assert_called_once_with(filedata=expected_file,
                                   region=current_app.config['AWS_REGION'],
                                   bucket_name=current_app.config['DVLA_UPLOAD_BUCKET_NAME'],
                                   file_location="{}-dvla-job.text".format(job.id))
def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_letter_template, mocker):
    """When the job's notifications are not all persisted yet, build_dvla_file
    must retry rather than upload a partial file."""
    job = create_job(template=sample_letter_template, notification_count=2)
    # Only one of the two expected notifications exists at this point.
    create_notification(template=job.template, job=job)
    upload_mock = mocker.patch("app.celery.tasks.s3upload")
    mocker.patch('app.celery.tasks.build_dvla_file.retry', side_effect=Retry)

    with pytest.raises(Retry):
        build_dvla_file(job.id)

    upload_mock.assert_not_called()
    tasks.build_dvla_file.retry.assert_called_with(
        queue='retry',
        exc="All notifications for job {} are not persisted".format(job.id)
    )

View File

@@ -13,7 +13,8 @@ from app.dao.jobs_dao import (
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job,
dao_get_jobs_older_than,
are_all_notifications_created_for_job)
all_notifications_are_created_for_job,
dao_get_all_notifications_for_job)
from app.models import Job
from tests.app.conftest import sample_notification as create_notification
@@ -316,18 +317,28 @@ def test_get_jobs_for_service_doesnt_return_test_messages(notify_db, notify_db_s
assert jobs == [sample_job]
def test_are_all_notifications_created_for_job_returns_true(notify_db, notify_db_session, sample_job):
create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
job_is_complete = are_all_notifications_created_for_job(sample_job.id)
def test_all_notifications_are_created_for_job_returns_true(notify_db, notify_db_session):
    """A job is complete once the persisted notifications match its count."""
    job = create_job(notify_db=notify_db, notify_db_session=notify_db_session, notification_count=2)
    for _ in range(2):
        create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job)
    assert all_notifications_are_created_for_job(job.id)
def test_are_all_notifications_created_for_job_returns_false(notify_db, notify_db_session):
def test_all_notifications_are_created_for_job_returns_false(notify_db, notify_db_session):
job = create_job(notify_db=notify_db, notify_db_session=notify_db_session, notification_count=2)
job_is_complete = are_all_notifications_created_for_job(job.id)
job_is_complete = all_notifications_are_created_for_job(job.id)
assert not job_is_complete
def test_are_all_notifications_created_for_job_returns_false_when_job_does_not_exist(notify_db, notify_db_session):
job_is_complete = are_all_notifications_created_for_job(uuid.uuid4())
def test_are_all_notifications_created_for_job_returns_false_when_job_does_not_exist():
    """An unknown job id has no persisted notifications, so it is not complete."""
    missing_job_id = uuid.uuid4()
    assert not all_notifications_are_created_for_job(missing_job_id)
def test_dao_get_all_notifications_for_job(notify_db, notify_db_session, sample_job):
    """Every notification attached to the job is returned."""
    for _ in range(3):
        create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
    notifications = dao_get_all_notifications_for_job(sample_job.id)
    assert len(notifications) == 3

View File

@@ -1,7 +1,8 @@
from datetime import datetime
import uuid
from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL
from app.dao.jobs_dao import dao_create_job
from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL, Job
from app.dao.users_dao import save_model_user
from app.dao.notifications_dao import dao_create_notification
from app.dao.templates_dao import dao_create_template
@@ -105,3 +106,30 @@ def create_notification(
notification = Notification(**data)
dao_create_notification(notification)
return notification
def create_job(template,
               notification_count=1,
               created_at=None,
               job_status='pending',
               scheduled_for=None,
               processing_started=None,
               original_file_name='some.csv'):
    """Persist and return a Job derived from *template*.

    Service, creator, and template version are all taken from the template;
    created_at defaults to now when not supplied.
    """
    job = Job(
        id=uuid.uuid4(),
        service_id=template.service_id,
        service=template.service,
        template_id=template.id,
        template_version=template.version,
        original_file_name=original_file_name,
        notification_count=notification_count,
        created_at=created_at or datetime.utcnow(),
        created_by=template.created_by,
        job_status=job_status,
        scheduled_for=scheduled_for,
        processing_started=processing_started,
    )
    dao_create_job(job)
    return job