2018-11-15 17:24:37 +00:00
|
|
|
import uuid
|
2016-09-21 14:35:23 +01:00
|
|
|
from datetime import datetime, timedelta
|
2016-10-08 11:44:55 +01:00
|
|
|
from functools import partial
|
2016-09-21 14:35:23 +01:00
|
|
|
|
2018-11-15 17:24:37 +00:00
|
|
|
import pytest
|
2016-09-07 15:36:07 +01:00
|
|
|
from freezegun import freeze_time
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
from app.dao.jobs_dao import (
|
2016-02-24 17:12:30 +00:00
|
|
|
dao_get_job_by_service_id_and_job_id,
|
|
|
|
|
dao_create_job,
|
|
|
|
|
dao_update_job,
|
2016-08-24 16:24:30 +01:00
|
|
|
dao_get_jobs_by_service_id,
|
2016-10-07 12:55:48 +01:00
|
|
|
dao_set_scheduled_jobs_to_pending,
|
2016-09-01 14:31:01 +01:00
|
|
|
dao_get_future_scheduled_job_by_id_and_service_id,
|
2016-09-07 15:36:07 +01:00
|
|
|
dao_get_notification_outcomes_for_job,
|
2018-11-19 17:09:27 +00:00
|
|
|
dao_get_jobs_older_than_data_retention,
|
2019-06-10 17:40:28 +01:00
|
|
|
dao_cancel_letter_job
|
2018-11-15 17:24:37 +00:00
|
|
|
)
|
2017-06-06 16:01:27 +01:00
|
|
|
from app.models import (
|
2018-03-05 17:16:17 +00:00
|
|
|
Job,
|
2018-11-15 17:24:37 +00:00
|
|
|
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
|
2017-06-06 16:01:27 +01:00
|
|
|
)
|
2018-12-12 12:57:33 +00:00
|
|
|
from tests.app.db import create_job, create_service, create_template, create_notification
|
2016-08-23 16:46:58 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_have_decorated_notifications_dao_functions():
    """The outcomes DAO function is wrapped by a decorator that uses
    functools.wraps, so the original function is reachable via __wrapped__."""
    wrapped = dao_get_notification_outcomes_for_job.__wrapped__
    assert wrapped.__name__ == 'dao_get_notification_outcomes_for_job'  # noqa
|
|
|
|
|
|
|
|
|
|
|
2018-12-12 12:57:33 +00:00
|
|
|
def test_should_count_of_statuses_for_notifications_associated_with_job(sample_template, sample_job):
    """dao_get_notification_outcomes_for_job groups the job's notifications
    by status and returns a count per status."""
    for status in ('created', 'created', 'created', 'sending', 'delivered'):
        create_notification(sample_template, job=sample_job, status=status)

    results = dao_get_notification_outcomes_for_job(sample_template.service_id, sample_job.id)

    counts_by_status = {row.status: row.count for row in results}
    assert counts_by_status == {
        'created': 3,
        'sending': 1,
        'delivered': 1,
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_zero_length_array_if_no_notifications_for_job(sample_service, sample_job):
    """A job with no notifications yields an empty outcomes list.

    The DAO signature is (service_id, job_id) — every other call site in this
    file passes them in that order.  The original test passed them swapped
    (job id first), which still returned [] but did not exercise the intended
    lookup; fixed to use the correct order.
    """
    assert len(dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id)) == 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_notifications_only_for_this_job(sample_template):
    """Outcome counts include only notifications belonging to the queried job."""
    first_job = create_job(sample_template)
    second_job = create_job(sample_template)

    create_notification(sample_template, job=first_job, status='created')
    create_notification(sample_template, job=second_job, status='sent')

    outcomes = dao_get_notification_outcomes_for_job(sample_template.service_id, first_job.id)

    # Only the first job's single 'created' notification is counted.
    assert {row.status: row.count for row in outcomes} == {'created': 1}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_notifications_only_for_this_service(sample_notification_with_job):
    """Outcomes are scoped to the (service_id, job_id) pair: a service cannot
    see another service's job, in either direction."""
    other_service = create_service(service_name='one')
    other_template = create_template(service=other_service)
    other_job = create_job(other_template)

    create_notification(other_template, job=other_job)

    # This service's id paired with the other service's job -> no rows.
    assert len(dao_get_notification_outcomes_for_job(sample_notification_with_job.service_id, other_job.id)) == 0
    # The other service's id paired with this service's job -> no rows.
    # (The original passed the notification's own id where a job id is
    # expected, making the check vacuous; use the notification's job_id.)
    assert len(dao_get_notification_outcomes_for_job(other_service.id, sample_notification_with_job.job_id)) == 0
|
|
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_create_sample_job(sample_template):
    """dao_create_job persists a new Job; the delivered/failed counters of a
    freshly created job read back as zero."""
    assert Job.query.count() == 0

    job_id = uuid.uuid4()
    job = Job(
        id=job_id,
        service_id=sample_template.service.id,
        template_id=sample_template.id,
        template_version=sample_template.version,
        original_file_name='some.csv',
        notification_count=1,
        created_by=sample_template.created_by,
    )

    dao_create_job(job)

    assert Job.query.count() == 1
    persisted = Job.query.get(job_id)
    assert persisted == job
    assert persisted.notifications_delivered == 0
    assert persisted.notifications_failed == 0
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
def test_get_job_by_id(sample_job):
    """Fetching by (service_id, job_id) returns the matching job."""
    fetched = dao_get_job_by_service_id_and_job_id(sample_job.service.id, sample_job.id)
    assert fetched == sample_job
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_get_jobs_for_service(sample_template):
    """dao_get_jobs_by_service_id returns only jobs belonging to that service."""
    one_job = create_job(sample_template)

    other_service = create_service(service_name="other service")
    other_template = create_template(service=other_service)
    other_job = create_job(other_template)

    one_service_jobs = dao_get_jobs_by_service_id(one_job.service_id).items
    other_service_jobs = dao_get_jobs_by_service_id(other_job.service_id).items

    # Each service sees exactly its own single job.
    assert len(one_service_jobs) == 1
    assert one_service_jobs[0] == one_job

    assert len(other_service_jobs) == 1
    assert other_service_jobs[0] == other_job

    assert one_service_jobs != other_service_jobs
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_get_jobs_for_service_with_limit_days_param(sample_template):
    """Without limit_days every job is returned; limit_days=7 excludes a job
    created eight days ago."""
    recent_job = create_job(sample_template)
    old_job = create_job(sample_template, created_at=datetime.now() - timedelta(days=8))

    all_jobs = dao_get_jobs_by_service_id(recent_job.service_id).items

    assert len(all_jobs) == 2
    assert recent_job in all_jobs
    assert old_job in all_jobs

    windowed_jobs = dao_get_jobs_by_service_id(recent_job.service_id, limit_days=7).items

    assert len(windowed_jobs) == 1
    assert recent_job in windowed_jobs
    assert old_job not in windowed_jobs
|
|
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
@freeze_time('2017-06-10')
def test_get_jobs_for_service_with_limit_days_edge_case(sample_template):
    """limit_days cuts off at a midnight boundary.

    NOTE(review): the boundary appears to be local (BST) midnight, i.e. 23:00
    UTC — a job at 23:00:01 UTC on the 2nd counts as the 3rd and stays inside
    the 7-day window, while 22:59 falls outside.  Confirm against the DAO.
    """
    current_job = create_job(sample_template)
    just_after_midnight_job = create_job(sample_template, created_at=datetime(2017, 6, 2, 23, 0, 1))
    just_before_midnight_job = create_job(sample_template, created_at=datetime(2017, 6, 2, 22, 59, 0))

    windowed_jobs = dao_get_jobs_by_service_id(current_job.service_id, limit_days=7).items

    assert len(windowed_jobs) == 2
    assert current_job in windowed_jobs
    assert just_after_midnight_job in windowed_jobs
    assert just_before_midnight_job not in windowed_jobs
|
2016-05-25 11:13:49 +01:00
|
|
|
|
|
|
|
|
|
2016-10-08 11:44:55 +01:00
|
|
|
def test_get_jobs_for_service_in_processed_at_then_created_at_order(notify_db, notify_db_session, sample_template):
    """Jobs come back in the order listed below: processing_started, falling
    back to created_at, newest first (as the created_jobs ordering shows)."""
    from_hour = partial(datetime, 2001, 1, 1)

    # Listed in the order the DAO is expected to return them.
    expected_jobs = [
        create_job(sample_template, created_at=from_hour(2), processing_started=None),
        create_job(sample_template, created_at=from_hour(1), processing_started=None),
        create_job(sample_template, created_at=from_hour(1), processing_started=from_hour(4)),
        create_job(sample_template, created_at=from_hour(2), processing_started=from_hour(3)),
    ]

    actual_jobs = dao_get_jobs_by_service_id(sample_template.service.id).items

    assert len(actual_jobs) == len(expected_jobs)
    for actual, expected in zip(actual_jobs, expected_jobs):
        assert actual.id == expected.id
|
2016-03-14 16:15:39 +00:00
|
|
|
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
def test_update_job(sample_job):
    """dao_update_job persists changes made to a job instance."""
    assert sample_job.job_status == 'pending'

    sample_job.job_status = 'in progress'
    dao_update_job(sample_job)

    persisted = Job.query.get(sample_job.id)
    assert persisted.job_status == 'in progress'
|
2016-08-24 16:24:30 +01:00
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_all_jobs_in_scheduled_state_before_now(sample_template):
    """All past-due scheduled jobs are picked up, earliest schedule first."""
    now = datetime.utcnow()
    job_new = create_job(sample_template, scheduled_for=now - timedelta(minutes=1), job_status='scheduled')
    job_old = create_job(sample_template, scheduled_for=now - timedelta(minutes=60), job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert len(jobs) == 2
    # Ordered by scheduled_for: the hour-old job comes before the minute-old one.
    assert [job.id for job in jobs] == [job_old.id, job_new.id]
|
|
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_not_scheduled(sample_template, sample_job):
    """Only jobs in the 'scheduled' state are picked up; the (non-scheduled)
    sample_job fixture must not appear in the result."""
    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
    scheduled_job = create_job(sample_template, scheduled_for=one_minute_ago, job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert [job.id for job in jobs] == [scheduled_job.id]
|
|
|
|
|
|
|
|
|
|
|
2016-10-07 12:55:48 +01:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_scheduled_in_the_future(sample_scheduled_job):
    """A job scheduled for the future must not be moved to pending yet."""
    pending = dao_set_scheduled_jobs_to_pending()
    assert len(pending) == 0
|
2016-09-01 14:31:01 +01:00
|
|
|
|
|
|
|
|
|
2018-12-11 17:40:11 +00:00
|
|
|
def test_set_scheduled_jobs_to_pending_updates_rows(sample_template):
    """Every job returned has had its status flipped to 'pending'."""
    now = datetime.utcnow()
    create_job(sample_template, scheduled_for=now - timedelta(minutes=1), job_status='scheduled')
    create_job(sample_template, scheduled_for=now - timedelta(minutes=60), job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert len(jobs) == 2
    assert all(job.job_status == 'pending' for job in jobs)
|
|
|
|
|
|
|
|
|
|
|
2016-09-02 23:18:55 +01:00
|
|
|
def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job):
    """A future-scheduled job can be looked up by its id and service id."""
    found = dao_get_future_scheduled_job_by_id_and_service_id(
        sample_scheduled_job.id, sample_scheduled_job.service_id
    )
    assert found.id == sample_scheduled_job.id
|
2016-09-07 15:36:07 +01:00
|
|
|
|
|
|
|
|
|
2017-06-06 16:01:27 +01:00
|
|
|
@freeze_time('2016-10-31 10:00:00')
def test_should_get_jobs_seven_days_old(sample_template):
    """
    Jobs older than seven days are deleted, but only two days' worth (a
    two-day eligibility window).  With "today" frozen to the 31st: jobs from
    the 24th-30th are too recent, jobs from the 22nd-23rd are eligible, and
    anything earlier is left alone.  The window keeps each nightly run small
    while tolerating one missed night.
    """
    seven_days_ago = datetime.utcnow() - timedelta(days=7)
    within_seven_days = seven_days_ago + timedelta(seconds=1)
    eight_days_ago = seven_days_ago - timedelta(days=1)
    # NOTE(review): the original called this "nine_days_ago", but it is eight
    # days plus two, i.e. ten days before now; renamed to match its value.
    # The ten-day-old jobs are both archived and outside the two-day window,
    # so they are excluded either way.
    ten_days_ago = eight_days_ago - timedelta(days=2)
    ten_days_one_second_ago = ten_days_ago - timedelta(seconds=1)

    create_job(sample_template, created_at=seven_days_ago)
    create_job(sample_template, created_at=within_seven_days)
    job_to_delete = create_job(sample_template, created_at=eight_days_ago)
    create_job(sample_template, created_at=ten_days_ago, archived=True)
    create_job(sample_template, created_at=ten_days_one_second_ago, archived=True)

    jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type])

    assert len(jobs) == 1
    assert jobs[0].id == job_to_delete.id
|
2016-09-21 14:35:23 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_service, sample_template):
    """Job listings are paginated newest-first with the requested page size."""
    with freeze_time('2015-01-01T00:00:00') as frozen:
        for _ in range(10):
            frozen.tick(timedelta(hours=1))
            create_job(sample_template)

    page_one = dao_get_jobs_by_service_id(sample_service.id, page=1, page_size=2)

    assert page_one.per_page == 2
    assert page_one.total == 10
    assert len(page_one.items) == 2
    # Newest first: the last two jobs created (hours 10 and 9).
    assert [job.created_at for job in page_one.items] == [
        datetime(2015, 1, 1, 10),
        datetime(2015, 1, 1, 9),
    ]

    page_two = dao_get_jobs_by_service_id(sample_service.id, page=2, page_size=2)

    assert len(page_two.items) == 2
    assert [job.created_at for job in page_two.items] == [
        datetime(2015, 1, 1, 8),
        datetime(2015, 1, 1, 7),
    ]
|
2016-10-11 14:30:40 +01:00
|
|
|
|
|
|
|
|
|
2017-06-01 09:35:13 +01:00
|
|
|
@pytest.mark.parametrize('file_name', [
    'Test message',
    'Report',
])
def test_get_jobs_for_service_doesnt_return_test_messages(
    sample_template,
    sample_job,
    file_name,
):
    """Jobs with the reserved file names above are excluded from service
    listings; only the regular sample_job comes back."""
    create_job(sample_template, original_file_name=file_name)

    jobs = dao_get_jobs_by_service_id(sample_job.service_id).items

    assert jobs == [sample_job]
|
2017-03-14 10:50:09 +00:00
|
|
|
|
|
|
|
|
|
2017-06-06 16:01:27 +01:00
|
|
|
@freeze_time('2016-10-31 10:00:00')
def test_should_get_jobs_seven_days_old_filters_type(sample_service):
    """Only jobs whose template type appears in notification_types are
    returned; the letter job is left out when filtering for email and SMS."""
    eight_days_ago = datetime.utcnow() - timedelta(days=8)
    templates_by_type = {
        template_type: create_template(sample_service, template_type=template_type)
        for template_type in (LETTER_TYPE, SMS_TYPE, EMAIL_TYPE)
    }

    job_to_remain = create_job(templates_by_type[LETTER_TYPE], created_at=eight_days_ago)
    create_job(templates_by_type[SMS_TYPE], created_at=eight_days_ago)
    create_job(templates_by_type[EMAIL_TYPE], created_at=eight_days_ago)

    jobs = dao_get_jobs_older_than_data_retention(
        notification_types=[EMAIL_TYPE, SMS_TYPE]
    )

    assert len(jobs) == 2
    assert job_to_remain.id not in {job.id for job in jobs}
|
2017-06-12 14:25:17 +01:00
|
|
|
|
|
|
|
|
|
2017-06-13 10:56:03 +01:00
|
|
|
def assert_job_stat(job, result, sent, delivered, failed):
    """Assert that a job-stats row mirrors the job's own fields and carries
    the expected sent/delivered/failed counts."""
    assert result.job_id == job.id
    # Fields copied verbatim from the job onto the stats row.
    mirrored_fields = (
        'original_file_name',
        'created_at',
        'scheduled_for',
        'template_id',
        'template_version',
        'job_status',
        'service_id',
        'notification_count',
    )
    for field in mirrored_fields:
        assert getattr(result, field) == getattr(job, field)
    assert (result.sent, result.delivered, result.failed) == (sent, delivered, failed)
|
2019-06-10 17:40:28 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_dao_cancel_letter_job_does_not_allow_cancel_if_notification_in_sending(sample_job):
    """A job cannot be cancelled once any of its notifications is already sending."""
    for status in ('sending', 'created'):
        create_notification(template=sample_job.template, job=sample_job, status=status)

    assert not dao_cancel_letter_job(service_id=sample_job.service_id, job_id=sample_job.id)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_dao_cancel_letter_job_updates_notifications_and_job_to_cancelled(sample_job):
    """Cancelling marks the job and its notifications as cancelled and returns
    the number of notifications affected."""
    notification = create_notification(template=sample_job.template, job=sample_job, status='created')

    cancelled_count = dao_cancel_letter_job(service_id=sample_job.service_id, job_id=sample_job.id)

    assert cancelled_count == 1
    assert notification.status == 'cancelled'
    assert sample_job.job_status == 'cancelled'
|
|
|
|
|
|