2018-11-15 17:24:37 +00:00
|
|
|
import uuid
|
2016-09-21 14:35:23 +01:00
|
|
|
from datetime import datetime, timedelta
|
2016-10-08 11:44:55 +01:00
|
|
|
from functools import partial
|
2016-09-21 14:35:23 +01:00
|
|
|
|
2018-11-15 17:24:37 +00:00
|
|
|
import pytest
|
2016-09-07 15:36:07 +01:00
|
|
|
from freezegun import freeze_time
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
from app.dao.jobs_dao import (
|
2016-02-24 17:12:30 +00:00
|
|
|
dao_get_job_by_service_id_and_job_id,
|
|
|
|
|
dao_create_job,
|
|
|
|
|
dao_update_job,
|
2016-08-24 16:24:30 +01:00
|
|
|
dao_get_jobs_by_service_id,
|
2016-10-07 12:55:48 +01:00
|
|
|
dao_set_scheduled_jobs_to_pending,
|
2016-09-01 14:31:01 +01:00
|
|
|
dao_get_future_scheduled_job_by_id_and_service_id,
|
2016-09-07 15:36:07 +01:00
|
|
|
dao_get_notification_outcomes_for_job,
|
2018-11-19 17:09:27 +00:00
|
|
|
dao_get_jobs_older_than_data_retention,
|
2018-11-15 17:24:37 +00:00
|
|
|
)
|
2017-06-06 16:01:27 +01:00
|
|
|
from app.models import (
|
2018-03-05 17:16:17 +00:00
|
|
|
Job,
|
2018-11-15 17:24:37 +00:00
|
|
|
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
|
2017-06-06 16:01:27 +01:00
|
|
|
)
|
2016-09-21 15:45:26 +01:00
|
|
|
from tests.app.conftest import sample_job as create_job
|
2018-11-15 17:24:37 +00:00
|
|
|
from tests.app.conftest import sample_notification as create_notification
|
2016-09-21 15:45:26 +01:00
|
|
|
from tests.app.conftest import sample_service as create_service
|
|
|
|
|
from tests.app.conftest import sample_template as create_template
|
2017-08-22 09:55:47 +01:00
|
|
|
from tests.app.db import (
|
2018-11-15 17:24:37 +00:00
|
|
|
create_user
|
2017-08-22 09:55:47 +01:00
|
|
|
)
|
2016-08-23 16:46:58 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_have_decorated_notifications_dao_functions():
    # __wrapped__ only exists when a decorator used functools.wraps, so this
    # proves the dao function is decorated while keeping its original name.
    undecorated = dao_get_notification_outcomes_for_job.__wrapped__
    assert undecorated.__name__ == 'dao_get_notification_outcomes_for_job'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_get_all_statuses_for_notifications_associated_with_job(
        notify_db,
        notify_db_session,
        sample_service,
        sample_job
):
    """One notification per status: the outcome query reports each status once."""
    statuses = [
        'created', 'sending', 'delivered', 'pending', 'failed',
        'technical-failure', 'temporary-failure', 'permanent-failure', 'sent',
    ]
    for status in statuses:
        create_notification(notify_db, notify_db_session, service=sample_service, job=sample_job, status=status)

    results = dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id)

    assert {(row.count, row.status) for row in results} == {(1, status) for status in statuses}
|
2016-08-23 16:46:58 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_count_of_statuses_for_notifications_associated_with_job(
        notify_db,
        notify_db_session,
        sample_service,
        sample_job
):
    """Several notifications per status: the outcome query groups and counts them."""
    expected_counts = {'created': 2, 'sending': 4, 'delivered': 2}
    for status, count in expected_counts.items():
        for _ in range(count):
            create_notification(notify_db, notify_db_session, service=sample_service, job=sample_job, status=status)

    results = dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id)

    assert {(row.count, row.status) for row in results} == {
        (count, status) for status, count in expected_counts.items()
    }
|
2016-08-23 16:46:58 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_zero_length_array_if_no_notifications_for_job(sample_service, sample_job):
    """No notifications exist for the job, so the outcome query returns nothing.

    Bug fix: the arguments were passed as (job_id, service_id), but the dao takes
    (service_id, job_id) — see the other calls in this module.  The test only
    passed before because neither id matched any notification.
    """
    assert len(dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id)) == 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_notifications_only_for_this_job(notify_db, notify_db_session, sample_service):
    """Outcomes are scoped to the requested job, not the whole service."""
    first_job = create_job(notify_db, notify_db_session, service=sample_service)
    second_job = create_job(notify_db, notify_db_session, service=sample_service)

    # One 'created' notification on each job; only the first job's row should come back.
    for job in (first_job, second_job):
        create_notification(notify_db, notify_db_session, service=sample_service, job=job, status='created')

    results = dao_get_notification_outcomes_for_job(sample_service.id, first_job.id)

    assert [(row.count, row.status) for row in results] == [(1, 'created')]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_should_return_notifications_only_for_this_service(notify_db, notify_db_session):
    """Asking for one service's outcomes with another service's job id yields nothing."""
    first_service = create_service(notify_db, notify_db_session, service_name="one", email_from="one")
    second_service = create_service(notify_db, notify_db_session, service_name="two", email_from="two")

    first_job = create_job(notify_db, notify_db_session, service=first_service)
    second_job = create_job(notify_db, notify_db_session, service=second_service)

    create_notification(notify_db, notify_db_session, service=first_service, job=first_job, status='created')
    create_notification(notify_db, notify_db_session, service=second_service, job=second_job, status='created')

    # Mismatched service/job pair: nothing should be returned.
    assert len(dao_get_notification_outcomes_for_job(first_service.id, second_job.id)) == 0
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
def test_create_job(sample_template):
    """dao_create_job persists a Job and its delivered/failed counters start at zero."""
    assert Job.query.count() == 0

    job_id = uuid.uuid4()
    job = Job(
        id=job_id,
        service_id=sample_template.service.id,
        template_id=sample_template.id,
        template_version=sample_template.version,
        original_file_name='some.csv',
        notification_count=1,
        created_by=sample_template.created_by,
    )

    dao_create_job(job)

    assert Job.query.count() == 1
    persisted = Job.query.get(job_id)
    assert persisted == job
    assert persisted.notifications_delivered == 0
    assert persisted.notifications_failed == 0
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
def test_get_job_by_id(sample_job):
    """A job is retrievable by its (service id, job id) pair."""
    retrieved = dao_get_job_by_service_id_and_job_id(sample_job.service.id, sample_job.id)
    assert retrieved == sample_job
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2016-01-18 09:57:04 +00:00
|
|
|
def test_get_jobs_for_service(notify_db, notify_db_session, sample_template):
    """Each service only sees its own jobs in the paginated listing."""
    first_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template)

    # A second, unrelated service with its own job.
    other_user = create_user(email="test@digital.cabinet-office.gov.uk")
    other_service = create_service(notify_db, notify_db_session, user=other_user, service_name="other service",
                                   email_from='other.service')
    other_template = create_template(notify_db, notify_db_session, service=other_service)
    other_job = create_job(notify_db, notify_db_session, service=other_service, template=other_template)

    first_service_jobs = dao_get_jobs_by_service_id(first_job.service_id).items
    other_service_jobs = dao_get_jobs_by_service_id(other_job.service_id).items

    assert first_service_jobs == [first_job]
    assert other_service_jobs == [other_job]
    assert first_service_jobs != other_service_jobs
|
2016-01-15 11:12:05 +00:00
|
|
|
|
|
|
|
|
|
2016-05-25 11:13:49 +01:00
|
|
|
def test_get_jobs_for_service_with_limit_days_param(notify_db, notify_db_session, sample_template):
    """Without limit_days all jobs come back; with limit_days=7 older jobs are excluded."""
    recent_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template)
    stale_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template,
                           created_at=datetime.now() - timedelta(days=8))

    all_jobs = dao_get_jobs_by_service_id(recent_job.service_id).items

    assert len(all_jobs) == 2
    assert recent_job in all_jobs
    assert stale_job in all_jobs

    limited_jobs = dao_get_jobs_by_service_id(recent_job.service_id, limit_days=7).items

    assert len(limited_jobs) == 1
    assert recent_job in limited_jobs
    assert stale_job not in limited_jobs
|
|
|
|
|
|
|
|
|
|
|
2016-05-25 11:13:49 +01:00
|
|
|
def test_get_jobs_for_service_with_limit_days_edge_case(notify_db, notify_db_session, sample_template):
    """limit_days=7 includes jobs created at (or just after) midnight seven days ago."""
    make_job = partial(create_job, notify_db, notify_db_session, sample_template.service, sample_template)

    seven_days_ago_date = (datetime.now() - timedelta(days=7)).date()
    one_second_after_midnight = datetime.combine(seven_days_ago_date,
                                                 datetime.strptime("000001", "%H%M%S").time())

    fresh_job = make_job()
    midnight_job = make_job(created_at=seven_days_ago_date)
    just_after_midnight_job = make_job(created_at=one_second_after_midnight)
    eight_day_old_job = make_job(created_at=datetime.now() - timedelta(days=8))

    limited_jobs = dao_get_jobs_by_service_id(fresh_job.service_id, limit_days=7).items

    assert len(limited_jobs) == 3
    for job in (fresh_job, midnight_job, just_after_midnight_job):
        assert job in limited_jobs
    assert eight_day_old_job not in limited_jobs
|
|
|
|
|
|
|
|
|
|
|
2016-10-08 11:44:55 +01:00
|
|
|
def test_get_jobs_for_service_in_processed_at_then_created_at_order(notify_db, notify_db_session, sample_template):
    """Listing order: the expected sequence below mixes unprocessed jobs (ordered by
    created_at) ahead of processed ones (ordered by processing_started)."""
    make_job = partial(create_job, notify_db, notify_db_session, sample_template.service, sample_template)
    at_hour = partial(datetime, 2001, 1, 1)

    # Created in the exact order the dao is expected to return them.
    expected_order = [
        make_job(created_at=at_hour(2), processing_started=None),
        make_job(created_at=at_hour(1), processing_started=None),
        make_job(created_at=at_hour(1), processing_started=at_hour(4)),
        make_job(created_at=at_hour(2), processing_started=at_hour(3)),
    ]

    jobs = dao_get_jobs_by_service_id(sample_template.service.id).items

    assert len(jobs) == len(expected_order)
    for actual, expected in zip(jobs, expected_order):
        assert actual.id == expected.id
|
2016-03-14 16:15:39 +00:00
|
|
|
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
def test_update_job(sample_job):
    """dao_update_job persists in-memory changes made to a job."""
    assert sample_job.job_status == 'pending'

    sample_job.job_status = 'in progress'
    dao_update_job(sample_job)

    persisted = Job.query.get(sample_job.id)
    assert persisted.job_status == 'in progress'
|
2016-08-24 16:24:30 +01:00
|
|
|
|
|
|
|
|
|
2016-10-07 12:55:48 +01:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_all_jobs_in_scheduled_state_before_now(notify_db, notify_db_session):
    """All past-due scheduled jobs are returned, earliest schedule first."""
    newer_job = create_job(notify_db, notify_db_session,
                           scheduled_for=datetime.utcnow() - timedelta(minutes=1),
                           job_status='scheduled')
    older_job = create_job(notify_db, notify_db_session,
                           scheduled_for=datetime.utcnow() - timedelta(minutes=60),
                           job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert [job.id for job in jobs] == [older_job.id, newer_job.id]
|
|
|
|
|
|
|
|
|
|
|
2016-10-07 12:55:48 +01:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_not_scheduled(notify_db, notify_db_session):
    """Jobs without the 'scheduled' status are left alone."""
    create_job(notify_db, notify_db_session)  # default status — must be ignored
    scheduled_job = create_job(notify_db, notify_db_session,
                               scheduled_for=datetime.utcnow() - timedelta(minutes=1),
                               job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert [job.id for job in jobs] == [scheduled_job.id]
|
|
|
|
|
|
|
|
|
|
|
2016-10-07 12:55:48 +01:00
|
|
|
def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_scheduled_in_the_future(sample_scheduled_job):
    """A job scheduled for the future is not flipped to pending."""
    assert len(dao_set_scheduled_jobs_to_pending()) == 0
|
2016-09-01 14:31:01 +01:00
|
|
|
|
|
|
|
|
|
2016-10-07 12:55:48 +01:00
|
|
|
def test_set_scheduled_jobs_to_pending_updates_rows(notify_db, notify_db_session):
    """Every returned job has had its status moved to 'pending'."""
    for minutes_ago in (1, 60):
        create_job(notify_db, notify_db_session,
                   scheduled_for=datetime.utcnow() - timedelta(minutes=minutes_ago),
                   job_status='scheduled')

    jobs = dao_set_scheduled_jobs_to_pending()

    assert len(jobs) == 2
    assert all(job.job_status == 'pending' for job in jobs)
|
|
|
|
|
|
|
|
|
|
|
2016-09-02 23:18:55 +01:00
|
|
|
def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job):
    """A future-scheduled job can be fetched by its id and service id."""
    found = dao_get_future_scheduled_job_by_id_and_service_id(sample_scheduled_job.id, sample_scheduled_job.service_id)
    assert found.id == sample_scheduled_job.id
|
2016-09-07 15:36:07 +01:00
|
|
|
|
|
|
|
|
|
2017-06-06 16:01:27 +01:00
|
|
|
@freeze_time('2016-10-31 10:00:00')
def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_template):
    """
    Jobs older than seven days are deleted, but only two day's worth (two-day window)

    Bug fix: the original body had commit-message text corrupted into it,
    which made the module unparseable; this is the reconstructed clean code.
    """
    seven_days_ago = datetime.utcnow() - timedelta(days=7)
    within_seven_days = seven_days_ago + timedelta(seconds=1)
    eight_days_ago = seven_days_ago - timedelta(days=1)
    # NOTE(review): despite the name this is 10 days ago (8 + 2); both jobs
    # created with it are archived, so they are excluded either way — confirm
    # whether the name or the arithmetic was intended.
    nine_days_ago = eight_days_ago - timedelta(days=2)
    nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1)

    job = partial(create_job, notify_db, notify_db_session)
    job(created_at=seven_days_ago)            # inside retention — kept
    job(created_at=within_seven_days)         # inside retention — kept
    job_to_delete = job(created_at=eight_days_ago)  # in the eligible window
    job(created_at=nine_days_ago, archived=True)            # already archived — skipped
    job(created_at=nine_days_one_second_ago, archived=True)  # already archived — skipped

    jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type])

    assert len(jobs) == 1
    assert jobs[0].id == job_to_delete.id
|
2016-09-21 14:35:23 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_service, sample_template):
    """Jobs are paginated newest-first with the requested page size."""
    with freeze_time('2015-01-01T00:00:00') as the_time:
        # Ten jobs created one hour apart, 01:00 through 10:00.
        for _ in range(10):
            the_time.tick(timedelta(hours=1))
            create_job(notify_db, notify_db_session, sample_service, sample_template)

    page_one = dao_get_jobs_by_service_id(sample_service.id, page=1, page_size=2)

    assert page_one.per_page == 2
    assert page_one.total == 10
    assert len(page_one.items) == 2
    assert [job.created_at for job in page_one.items] == [
        datetime(2015, 1, 1, 10),
        datetime(2015, 1, 1, 9),
    ]

    page_two = dao_get_jobs_by_service_id(sample_service.id, page=2, page_size=2)

    assert len(page_two.items) == 2
    assert [job.created_at for job in page_two.items] == [
        datetime(2015, 1, 1, 8),
        datetime(2015, 1, 1, 7),
    ]
|
2016-10-11 14:30:40 +01:00
|
|
|
|
|
|
|
|
|
2017-06-01 09:35:13 +01:00
|
|
|
@pytest.mark.parametrize('file_name', [
    'Test message',
    'Report',
])
def test_get_jobs_for_service_doesnt_return_test_messages(
        notify_db,
        notify_db_session,
        sample_template,
        sample_job,
        file_name,
):
    """Jobs with the parametrized file names do not appear in the service listing."""
    create_job(notify_db, notify_db_session, sample_template.service, sample_template,
               original_file_name=file_name)

    listed_jobs = dao_get_jobs_by_service_id(sample_job.service_id).items

    # Only the fixture job is listed; the one created above is filtered out.
    assert listed_jobs == [sample_job]
|
2017-03-14 10:50:09 +00:00
|
|
|
|
|
|
|
|
|
2017-06-06 16:01:27 +01:00
|
|
|
@freeze_time('2016-10-31 10:00:00')
def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_session):
    """Only jobs for the requested notification types are returned."""
    eight_days_ago = datetime.utcnow() - timedelta(days=8)
    templates = {
        template_type: create_template(notify_db, notify_db_session, template_type=template_type)
        for template_type in (LETTER_TYPE, SMS_TYPE, EMAIL_TYPE)
    }

    make_old_job = partial(create_job, notify_db, notify_db_session, created_at=eight_days_ago)
    letter_job = make_old_job(template=templates[LETTER_TYPE])
    make_old_job(template=templates[SMS_TYPE])
    make_old_job(template=templates[EMAIL_TYPE])

    jobs = dao_get_jobs_older_than_data_retention(
        notification_types=[EMAIL_TYPE, SMS_TYPE]
    )

    assert len(jobs) == 2
    assert letter_job.id not in [job.id for job in jobs]
|
2017-06-12 14:25:17 +01:00
|
|
|
|
|
|
|
|
|
2017-06-13 10:56:03 +01:00
|
|
|
def assert_job_stat(job, result, sent, delivered, failed):
    """Check a job-stats row mirrors *job*'s fields plus the given outcome counts."""
    # (result attribute, job attribute) pairs copied straight from the job row.
    mirrored_fields = [
        ('job_id', 'id'),
        ('original_file_name', 'original_file_name'),
        ('created_at', 'created_at'),
        ('scheduled_for', 'scheduled_for'),
        ('template_id', 'template_id'),
        ('template_version', 'template_version'),
        ('job_status', 'job_status'),
        ('service_id', 'service_id'),
        ('notification_count', 'notification_count'),
    ]
    for result_attr, job_attr in mirrored_fields:
        assert getattr(result, result_attr) == getattr(job, job_attr)

    # Aggregated notification outcomes supplied by the caller.
    assert result.sent == sent
    assert result.delivered == delivered
    assert result.failed == failed
|