Create new task to build DVLA file.

This task transforms each notification in a job into a row in a file, which is then uploaded to S3. The per-job files will later be aggregated by the notifications-ftp app and sent to the DVLA. The method that uploads the file to S3 should be pulled into the notifications-utils package; it is the same method used in notifications-admin.
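For reference, a minimal sketch of what that shared upload helper could look like once pulled into notifications-utils, assuming the filedata['data'] dict interface flagged in the s3upload TODO below. The module path, signature, and return value are illustrative assumptions, not the final API:

# Hypothetical notifications_utils/s3.py -- a sketch only. It mirrors the
# s3upload task code added below, but takes the filedata['data'] dict shape
# used by notifications-admin (see the TODO in s3upload); the real
# extraction may differ.
import uuid

import botocore
from boto3 import resource


def s3upload(filedata, region, bucket_name, file_location):
    contents = filedata['data']  # admin-style payload: contents under the 'data' key
    _s3 = resource('s3')

    # Create the bucket on first use: a 404 from head_bucket means it is missing.
    try:
        _s3.meta.client.head_bucket(Bucket=bucket_name)
    except botocore.exceptions.ClientError as e:
        if int(e.response['Error']['Code']) == 404:
            _s3.create_bucket(Bucket=bucket_name,
                              CreateBucketConfiguration={'LocationConstraint': region})
        else:
            raise

    # Store the object with server-side encryption, as the task version does.
    key = _s3.Object(bucket_name, file_location)
    key.put(Body=contents, ServerSideEncryption='AES256')
    return str(uuid.uuid4())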
--- a/app/celery/tasks.py
+++ b/app/celery/tasks.py
@@ -1,10 +1,12 @@
+import botocore
+from boto3 import resource
 from datetime import (datetime)
 
 from flask import current_app
 from notifications_utils.recipients import (
     RecipientCSV
 )
-from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
+from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate, LetterDVLATemplate
 from sqlalchemy.exc import SQLAlchemyError
 from app import (
     create_uuid,
@@ -16,8 +18,9 @@ from app.aws import s3
 from app.celery import provider_tasks
 from app.dao.jobs_dao import (
     dao_update_job,
-    dao_get_job_by_id
-)
+    dao_get_job_by_id,
+    all_notifications_are_created_for_job,
+    dao_get_all_notifications_for_job)
 from app.dao.notifications_dao import get_notification_by_id
 from app.dao.services_dao import dao_fetch_service_by_id, fetch_todays_total_message_count
 from app.dao.templates_dao import dao_get_template_by_id
@@ -76,6 +79,8 @@ def process_job(job_id):
     current_app.logger.info(
         "Job {} created at {} started at {} finished at {}".format(job_id, job.created_at, start, finished)
     )
+    if template.template_type == LETTER_TYPE:
+        build_dvla_file.apply_async([str(job.id)], queue='process-job')
 
 
 def process_row(row_number, recipient, personalisation, template, job, service):
@@ -248,13 +253,61 @@ def persist_letter(
             notification_id=notification_id
         )
 
-        # TODO: deliver letters
-
         current_app.logger.info("Letter {} created at {}".format(saved_notification.id, created_at))
     except SQLAlchemyError as e:
         handle_exception(self, notification, notification_id, e)
 
 
+@notify_celery.task(bind=True, name="build-dvla-file", max_retries=5, default_retry_delay=300)
+@statsd(namespace="tasks")
+def build_dvla_file(self, job_id):
+    if all_notifications_are_created_for_job(job_id):
+        notifications = dao_get_all_notifications_for_job(job_id)
+        file = ""
+        for n in notifications:
+            t = {"content": n.template.content, "subject": n.template.subject}
+            template = LetterDVLATemplate(t, n.personalisation, 1)
+            # print(str(template))
+            file = file + str(template) + "\n"
+        s3upload(filedata=file,
+                 region=current_app.config['AWS_REGION'],
+                 bucket_name=current_app.config['DVLA_UPLOAD_BUCKET_NAME'],
+                 file_location="{}-dvla-job.text".format(job_id))
+    else:
+        self.retry(queue="retry", exc="All notifications for job {} are not persisted".format(job_id))
+
+
+def s3upload(filedata, region, bucket_name, file_location):
+    # TODO: move this method to utils. Will need to change the filedata from here to send contents in filedata['data']
+    _s3 = resource('s3')
+    # contents = filedata['data']
+    contents = filedata
+
+    exists = True
+    try:
+        _s3.meta.client.head_bucket(
+            Bucket=bucket_name)
+    except botocore.exceptions.ClientError as e:
+        error_code = int(e.response['Error']['Code'])
+        if error_code == 404:
+            exists = False
+        else:
+            current_app.logger.error(
+                "Unable to create s3 bucket {}".format(bucket_name))
+            raise e
+
+    if not exists:
+        _s3.create_bucket(Bucket=bucket_name,
+                          CreateBucketConfiguration={'LocationConstraint': region})
+
+    upload_id = create_uuid()
+    upload_file_name = file_location
+    key = _s3.Object(bucket_name, upload_file_name)
+    key.put(Body=contents, ServerSideEncryption='AES256')
+
+    return upload_id
+
+
 def handle_exception(task, notification, notification_id, exc):
     if not get_notification_by_id(notification_id):
         retry_msg = '{task} notification for job {job} row number {row} and notification id {noti}'.format(
--- a/app/config.py
+++ b/app/config.py
@@ -175,6 +175,8 @@ class Config(object):
     FUNCTIONAL_TEST_PROVIDER_SERVICE_ID = None
     FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID = None
 
+    DVLA_UPLOAD_BUCKET_NAME = "{}-dvla-file-per-job".format(os.getenv('NOTIFY_ENVIRONMENT'))
+
 
 ######################
 # Config overrides ###
--- a/app/dao/jobs_dao.py
+++ b/app/dao/jobs_dao.py
@@ -29,12 +29,12 @@ def dao_get_notification_outcomes_for_job(service_id, job_id):
 
 
 @statsd(namespace="dao")
-def are_all_notifications_created_for_job(job_id):
-    query = db.session.query(func.count(Notification.id))\
+def all_notifications_are_created_for_job(job_id):
+    query = db.session.query(func.count(Notification.id), Job.id)\
         .join(Job)\
         .filter(Job.id == job_id)\
         .group_by(Job.id)\
-        .having(func.count(Notification.id) == Job.notification_count).first()
+        .having(func.count(Notification.id) == Job.notification_count).all()
 
     if query:
         return True
@@ -42,6 +42,11 @@ def are_all_notifications_created_for_job(job_id):
     return False
 
 
+@statsd(namespace="dao")
+def dao_get_all_notifications_for_job(job_id):
+    return db.session.query(Notification).filter(Notification.job_id == job_id).all()
+
+
 def dao_get_job_by_service_id_and_job_id(service_id, job_id):
     return Job.query.filter_by(service_id=service_id, id=job_id).one()
 
--- a/tests/app/celery/test_tasks.py
+++ b/tests/app/celery/test_tasks.py
@@ -1,8 +1,9 @@
 import uuid
 from datetime import datetime
-from unittest.mock import Mock, ANY, call
+from unittest.mock import Mock
 
 import pytest
+from flask import current_app
 from freezegun import freeze_time
 from sqlalchemy.exc import SQLAlchemyError
 from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
@@ -11,7 +12,7 @@ from celery.exceptions import Retry
 from app import (encryption, DATETIME_FORMAT)
 from app.celery import provider_tasks
 from app.celery import tasks
-from app.celery.tasks import s3
+from app.celery.tasks import s3, build_dvla_file
 from app.celery.tasks import (
     process_job,
     process_row,
@@ -31,7 +32,7 @@ from tests.app.conftest import (
     sample_email_template,
     sample_notification
 )
-from tests.app.db import create_user
+from tests.app.db import create_user, create_notification, create_job
 
 
 class AnyStringWith(str):
@@ -73,7 +74,9 @@ def test_should_process_sms_job(sample_job, mocker):
     mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
     mocker.patch('app.celery.tasks.send_sms.apply_async')
     mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
+    mocker.patch('app.celery.tasks.build_dvla_file')
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(sample_job.id)
     s3.get_job_from_s3.assert_called_once_with(
@@ -94,6 +97,7 @@ def test_should_process_sms_job(sample_job, mocker):
     )
     job = jobs_dao.dao_get_job_by_id(sample_job.id)
     assert job.job_status == 'finished'
+    tasks.build_dvla_file.assert_not_called()
 
 
 @freeze_time("2016-01-01 11:09:00.061258")
@@ -105,6 +109,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db,
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms'))
     mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(job.id)
 
@@ -112,6 +117,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db,
     assert job.job_status == 'sending limits exceeded'
     assert s3.get_job_from_s3.called is False
     assert tasks.process_row.called is False
+    tasks.build_dvla_file.assert_not_called()
 
 
 def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify_db,
@@ -124,6 +130,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
     mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(job.id)
 
@@ -131,6 +138,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify
     assert job.job_status == 'sending limits exceeded'
     assert s3.get_job_from_s3.called is False
     assert tasks.process_row.called is False
+    tasks.build_dvla_file.assert_not_called()
 
 
 def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
@@ -142,6 +150,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(noti
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3')
     mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(job.id)
 
@@ -149,6 +158,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(noti
     assert job.job_status == 'sending limits exceeded'
     assert s3.get_job_from_s3.called is False
     assert tasks.process_row.called is False
+    tasks.build_dvla_file.assert_not_called()
 
 
 @freeze_time("2016-01-01 11:09:00.061258")
@@ -159,6 +169,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits(notify_db, not
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3')
     mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(job.id)
 
@@ -166,6 +177,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits(notify_db, not
     assert job.job_status == 'sending limits exceeded'
     assert s3.get_job_from_s3.called is False
     assert tasks.process_row.called is False
+    tasks.build_dvla_file.assert_not_called()
 
 
 def test_should_not_process_job_if_already_pending(notify_db, notify_db_session, mocker):
@@ -173,11 +185,13 @@ def test_should_not_process_job_if_already_pending(notify_db, notify_db_session,
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3')
     mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(job.id)
 
     assert s3.get_job_from_s3.called is False
     assert tasks.process_row.called is False
+    tasks.build_dvla_file.assert_not_called()
 
 
 @freeze_time("2016-01-01 11:09:00.061258")
@@ -269,6 +283,7 @@ def test_should_process_letter_job(sample_letter_job, mocker):
     mocker.patch('app.celery.tasks.send_email.apply_async')
     process_row_mock = mocker.patch('app.celery.tasks.process_row')
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
+    mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(sample_letter_job.id)
 
@@ -294,6 +309,7 @@ def test_should_process_letter_job(sample_letter_job, mocker):
     assert process_row_mock.call_count == 1
 
     assert sample_letter_job.job_status == 'finished'
+    tasks.build_dvla_file.apply_async.assert_called_once_with([str(sample_letter_job.id)], queue="process-job")
 
 
 def test_should_process_all_sms_job(sample_job,
@@ -930,6 +946,7 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
 
     mocker.patch('app.celery.tasks.s3.get_job_from_s3')
     mocker.patch('app.celery.tasks.process_row')
+    mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file')
 
     process_job(sample_job.id)
 
@@ -937,6 +954,7 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
     assert job.job_status == 'cancelled'
     s3.get_job_from_s3.assert_not_called()
     tasks.process_row.assert_not_called()
+    mock_dvla_file_task.assert_not_called()
 
 
 @pytest.mark.parametrize('template_type, expected_class', [
@@ -946,3 +964,35 @@ def test_should_cancel_job_if_service_is_inactive(sample_service,
 ])
 def test_get_template_class(template_type, expected_class):
     assert get_template_class(template_type) == expected_class
+
+
+def test_build_dvla_file(sample_letter_template, mocker):
+    job = create_job(template=sample_letter_template, notification_count=2)
+    create_notification(template=job.template, job=job)
+    create_notification(template=job.template, job=job)
+
+    mocked = mocker.patch("app.celery.tasks.s3upload")
+    mocker.patch("app.celery.tasks.LetterDVLATemplate.__str__", return_value="dvla|string")
+    build_dvla_file(job.id)
+
+    file = "dvla|string\ndvla|string\n"
+
+    assert mocked.called
+    mocked.assert_called_once_with(filedata=file,
+                                   region=current_app.config['AWS_REGION'],
+                                   bucket_name=current_app.config['DVLA_UPLOAD_BUCKET_NAME'],
+                                   file_location="{}-dvla-job.text".format(job.id))
+
+
+def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_letter_template, mocker):
+    job = create_job(template=sample_letter_template, notification_count=2)
+    create_notification(template=job.template, job=job)
+
+    mocked = mocker.patch("app.celery.tasks.s3upload")
+    mocker.patch('app.celery.tasks.build_dvla_file.retry', side_effect=Retry)
+    with pytest.raises(Retry):
+        build_dvla_file(job.id)
+    mocked.assert_not_called()
+
+    tasks.build_dvla_file.retry.assert_called_with(queue='retry',
+                                                   exc="All notifications for job {} are not persisted".format(job.id))
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -13,7 +13,8 @@ from app.dao.jobs_dao import (
     dao_get_future_scheduled_job_by_id_and_service_id,
     dao_get_notification_outcomes_for_job,
     dao_get_jobs_older_than,
-    are_all_notifications_created_for_job)
+    all_notifications_are_created_for_job,
+    dao_get_all_notifications_for_job)
 from app.models import Job
 
 from tests.app.conftest import sample_notification as create_notification
@@ -316,18 +317,28 @@ def test_get_jobs_for_service_doesnt_return_test_messages(notify_db, notify_db_s
     assert jobs == [sample_job]
 
 
-def test_are_all_notifications_created_for_job_returns_true(notify_db, notify_db_session, sample_job):
-    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
-    job_is_complete = are_all_notifications_created_for_job(sample_job.id)
+def test_all_notifications_are_created_for_job_returns_true(notify_db, notify_db_session):
+    job = create_job(notify_db=notify_db, notify_db_session=notify_db_session, notification_count=2)
+    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job)
+    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job)
+    job_is_complete = all_notifications_are_created_for_job(job.id)
     assert job_is_complete
 
 
-def test_are_all_notifications_created_for_job_returns_false(notify_db, notify_db_session):
+def test_all_notifications_are_created_for_job_returns_false(notify_db, notify_db_session):
     job = create_job(notify_db=notify_db, notify_db_session=notify_db_session, notification_count=2)
-    job_is_complete = are_all_notifications_created_for_job(job.id)
+    job_is_complete = all_notifications_are_created_for_job(job.id)
     assert not job_is_complete
 
 
-def test_are_all_notifications_created_for_job_returns_false_when_job_does_not_exist(notify_db, notify_db_session):
-    job_is_complete = are_all_notifications_created_for_job(uuid.uuid4())
+def test_are_all_notifications_created_for_job_returns_false_when_job_does_not_exist():
+    job_is_complete = all_notifications_are_created_for_job(uuid.uuid4())
     assert not job_is_complete
+
+
+def test_dao_get_all_notifications_for_job(notify_db, notify_db_session, sample_job):
+    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
+    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
+    create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
+
+    assert len(dao_get_all_notifications_for_job(sample_job.id)) == 3
--- a/tests/app/db.py
+++ b/tests/app/db.py
@@ -1,7 +1,8 @@
 from datetime import datetime
 import uuid
 
-from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL
+from app.dao.jobs_dao import dao_create_job
+from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL, Job
 from app.dao.users_dao import save_model_user
 from app.dao.notifications_dao import dao_create_notification
 from app.dao.templates_dao import dao_create_template
@@ -105,3 +106,30 @@ def create_notification(
     notification = Notification(**data)
     dao_create_notification(notification)
     return notification
+
+
+def create_job(template,
+               notification_count=1,
+               created_at=None,
+               job_status='pending',
+               scheduled_for=None,
+               processing_started=None,
+               original_file_name='some.csv'):
+
+    data = {
+        'id': uuid.uuid4(),
+        'service_id': template.service_id,
+        'service': template.service,
+        'template_id': template.id,
+        'template_version': template.version,
+        'original_file_name': original_file_name,
+        'notification_count': notification_count,
+        'created_at': created_at or datetime.utcnow(),
+        'created_by': template.created_by,
+        'job_status': job_status,
+        'scheduled_for': scheduled_for,
+        'processing_started': processing_started
+    }
+    job = Job(**data)
+    dao_create_job(job)
+    return job