from datetime import date, datetime, timedelta
from unittest.mock import ANY, call

import pytest
from freezegun import freeze_time

from app.celery import nightly_tasks
from app.celery.nightly_tasks import (
    _delete_notifications_older_than_retention_by_type,
    cleanup_unfinished_jobs,
    delete_email_notifications_older_than_retention,
    delete_inbound_sms,
    delete_sms_notifications_older_than_retention,
    remove_sms_email_csv_files,
    s3,
    save_daily_notification_processing_time,
    timeout_notifications,
)
from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime, Job
from tests.app.db import (
    create_job,
    create_notification,
    create_service,
    create_service_data_retention,
    create_template,
)
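

# Fake S3 listing helpers: the *_match variant returns ZIP and ACK file listings that pair up
# one-to-one, while the *_diff variant returns listings that do not. Both mirror the signature
# of an S3 "list files" call (bucket_name, subfolder, suffix, last_modified).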
def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None):
    if subfolder == '2018-01-11/zips_sent':
        return ['NOTIFY.2018-01-11175007.ZIP.TXT', 'NOTIFY.2018-01-11175008.ZIP.TXT']
    if subfolder == 'root/dispatch':
        return ['root/dispatch/NOTIFY.2018-01-11175007.ACK.txt', 'root/dispatch/NOTIFY.2018-01-11175008.ACK.txt']


def mock_s3_get_list_diff(bucket_name, subfolder='', suffix='', last_modified=None):
    if subfolder == '2018-01-11/zips_sent':
        return ['NOTIFY.2018-01-11175007p.ZIP.TXT', 'NOTIFY.2018-01-11175008.ZIP.TXT',
                'NOTIFY.2018-01-11175009.ZIP.TXT', 'NOTIFY.2018-01-11175010.ZIP.TXT']
    if subfolder == 'root/dispatch':
        return ['root/dispatch/NOTIFY.2018-01-11175007p.ACK.TXT', 'root/dispatch/NOTIFY.2018-01-11175008.ACK.TXT']


@freeze_time('2016-10-18T10:00:00')
def test_will_remove_csv_files_for_jobs_older_than_seven_days(
    notify_db_session, mocker, sample_template
):
    """
    Jobs older than seven days are deleted, but only two days' worth (two-day window)
    """
    mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3')

    seven_days_ago = datetime.utcnow() - timedelta(days=7)
    just_under_seven_days = seven_days_ago + timedelta(seconds=1)
    eight_days_ago = seven_days_ago - timedelta(days=1)
    nine_days_ago = eight_days_ago - timedelta(days=1)
    just_under_nine_days = nine_days_ago + timedelta(seconds=1)
    nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1)

    create_job(sample_template, created_at=nine_days_one_second_ago, archived=True)
    job1_to_delete = create_job(sample_template, created_at=eight_days_ago)
    job2_to_delete = create_job(sample_template, created_at=just_under_nine_days)
    dont_delete_me_1 = create_job(sample_template, created_at=seven_days_ago)
    create_job(sample_template, created_at=just_under_seven_days)

    remove_sms_email_csv_files()

    assert s3.remove_job_from_s3.call_args_list == [
        call(job1_to_delete.service_id, job1_to_delete.id),
        call(job2_to_delete.service_id, job2_to_delete.id),
    ]
    assert job1_to_delete.archived is True
    assert dont_delete_me_1.archived is False
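

# Service 1 keeps SMS for 3 days and service 2 keeps email for 30 days; any type without an
# explicit retention row falls back to the default seven days, so each job's CSV is removed
# relative to its own service/notification-type cut-off.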
@freeze_time('2016-10-18T10:00:00')
def test_will_remove_csv_files_for_jobs_older_than_retention_period(
    notify_db_session, mocker
):
    """
    Jobs older than the retention period are deleted, but only two days' worth (two-day window)
    """
    mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3')

    service_1 = create_service(service_name='service 1')
    service_2 = create_service(service_name='service 2')
    create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3)
    create_service_data_retention(service=service_2, notification_type=EMAIL_TYPE, days_of_retention=30)

    sms_template_service_1 = create_template(service=service_1)
    email_template_service_1 = create_template(service=service_1, template_type='email')

    sms_template_service_2 = create_template(service=service_2)
    email_template_service_2 = create_template(service=service_2, template_type='email')

    four_days_ago = datetime.utcnow() - timedelta(days=4)
    eight_days_ago = datetime.utcnow() - timedelta(days=8)
    thirty_one_days_ago = datetime.utcnow() - timedelta(days=31)

    job1_to_delete = create_job(sms_template_service_1, created_at=four_days_ago)
    job2_to_delete = create_job(email_template_service_1, created_at=eight_days_ago)
    create_job(email_template_service_1, created_at=four_days_ago)

    create_job(email_template_service_2, created_at=eight_days_ago)
    job3_to_delete = create_job(email_template_service_2, created_at=thirty_one_days_ago)
    job4_to_delete = create_job(sms_template_service_2, created_at=eight_days_ago)

    remove_sms_email_csv_files()

    s3.remove_job_from_s3.assert_has_calls([
        call(job1_to_delete.service_id, job1_to_delete.id),
        call(job2_to_delete.service_id, job2_to_delete.id),
        call(job3_to_delete.service_id, job3_to_delete.id),
        call(job4_to_delete.service_id, job4_to_delete.id),
    ], any_order=True)
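

# The two "calls child task" tests below only check that the public nightly tasks delegate to
# _delete_notifications_older_than_retention_by_type with the matching notification type.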
def test_delete_sms_notifications_older_than_retention_calls_child_task(notify_api, mocker):
    mocked = mocker.patch('app.celery.nightly_tasks._delete_notifications_older_than_retention_by_type')
    delete_sms_notifications_older_than_retention()
    mocked.assert_called_once_with('sms')


def test_delete_email_notifications_older_than_retentions_calls_child_task(notify_api, mocker):
    mocked_notifications = mocker.patch(
        'app.celery.nightly_tasks._delete_notifications_older_than_retention_by_type')
    delete_email_notifications_older_than_retention()
    mocked_notifications.assert_called_once_with('email')
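

# timeout_notifications is expected to keep asking the DAO for another batch of timed-out
# notifications until it gets an empty list back, queuing a callback for each notification
# returned; with time frozen at 2021-12-13T10:00, the cut-off passed to the DAO is three days
# earlier (2021-12-10T10:00).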
@freeze_time("2021-12-13T10:00")
def test_timeout_notifications(mocker, sample_notification):
    mock_update = mocker.patch('app.celery.nightly_tasks.check_and_queue_callback_task')
    mock_dao = mocker.patch('app.celery.nightly_tasks.dao_timeout_notifications')

    mock_dao.side_effect = [
        [sample_notification],  # first batch to time out
        [sample_notification],  # second batch
        [],  # nothing left to time out
    ]

    timeout_notifications()

    mock_dao.assert_called_with(datetime.fromisoformat('2021-12-10T10:00'))
    assert mock_update.mock_calls == [call(sample_notification), call(sample_notification)]


def test_delete_inbound_sms_calls_child_task(notify_api, mocker):
    mocker.patch('app.celery.nightly_tasks.delete_inbound_sms_older_than_retention')

    delete_inbound_sms()

    assert nightly_tasks.delete_inbound_sms_older_than_retention.call_count == 1
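

# save_daily_notification_processing_time records, for a single local date, how many
# notifications were created that day and how many of those were sent within ten seconds.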
@freeze_time('2021-01-18T02:00')
@pytest.mark.parametrize('date_provided', [None, '2021-1-17'])
def test_save_daily_notification_processing_time(mocker, sample_template, date_provided):
    # notification created too early to be counted
    create_notification(
        sample_template,
        created_at=datetime(2021, 1, 16, 23, 59),
        sent_at=datetime(2021, 1, 16, 23, 59) + timedelta(seconds=5)
    )

    # notification counted and sent within 10 seconds
    create_notification(
        sample_template,
        created_at=datetime(2021, 1, 17, 00, 00),
        sent_at=datetime(2021, 1, 17, 00, 00) + timedelta(seconds=5)
    )

    # notification counted but not sent within 10 seconds
    create_notification(
        sample_template,
        created_at=datetime(2021, 1, 17, 23, 59),
        sent_at=datetime(2021, 1, 17, 23, 59) + timedelta(seconds=15)
    )

    # notification created too late to be counted
    create_notification(
        sample_template,
        created_at=datetime(2021, 1, 18, 00, 00),
        sent_at=datetime(2021, 1, 18, 00, 00) + timedelta(seconds=5)
    )

    save_daily_notification_processing_time(date_provided)

    persisted_to_db = FactProcessingTime.query.all()

    assert len(persisted_to_db) == 1
    assert persisted_to_db[0].local_date == date(2021, 1, 17)
    assert persisted_to_db[0].messages_total == 2
    assert persisted_to_db[0].messages_within_10_secs == 1
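

# Same check, but around the local-time day boundary: assuming US Eastern as the local timezone
# (EDT in April, UTC-4), midnight local on 2021-04-17 is 04:00 UTC, so a notification created at
# 04:00 UTC on the 17th still counts towards the 17th while late-UTC times on the 18th do not.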
@freeze_time('2021-04-18T02:00')
@pytest.mark.parametrize('date_provided', [None, '2021-4-17'])
def test_save_daily_notification_processing_time_when_in_est(mocker, sample_template, date_provided):
    # notification created too early to be counted
    create_notification(
        sample_template,
        created_at=datetime(2021, 4, 16, 22, 59),
        sent_at=datetime(2021, 4, 16, 22, 59) + timedelta(seconds=15)
    )

    # notification counted and sent within 10 seconds
    create_notification(
        sample_template,
        created_at=datetime(2021, 4, 17, 4, 00),
        sent_at=datetime(2021, 4, 17, 4, 00) + timedelta(seconds=5)
    )

    # notification counted and sent within 10 seconds
    create_notification(
        sample_template,
        created_at=datetime(2021, 4, 17, 22, 59),
        sent_at=datetime(2021, 4, 17, 22, 59) + timedelta(seconds=5)
    )

    # notification created too late to be counted
    create_notification(
        sample_template,
        created_at=datetime(2021, 4, 18, 23, 00),
        sent_at=datetime(2021, 4, 18, 23, 00) + timedelta(seconds=15)
    )

    save_daily_notification_processing_time(date_provided)

    persisted_to_db = FactProcessingTime.query.all()

    assert len(persisted_to_db) == 1
    assert persisted_to_db[0].local_date == date(2021, 4, 17)
    assert persisted_to_db[0].messages_total == 2
    assert persisted_to_db[0].messages_within_10_secs == 2
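

# A retention row only triggers deletion for its own notification type: of the three services
# below, only the one with an SMS retention row gets a delete subtask when the task runs for 'sms'.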
@freeze_time('2021-06-05 08:00')
def test_delete_notifications_task_calls_task_for_services_with_data_retention_of_same_type(notify_db_session, mocker):
    sms_service = create_service(service_name='a')
    email_service = create_service(service_name='b')
    letter_service = create_service(service_name='c')

    create_service_data_retention(sms_service, notification_type='sms')
    create_service_data_retention(email_service, notification_type='email')
    create_service_data_retention(letter_service, notification_type='letter')

    mock_subtask = mocker.patch('app.celery.nightly_tasks.delete_notifications_for_service_and_type')

    _delete_notifications_older_than_retention_by_type('sms')

    mock_subtask.apply_async.assert_called_once_with(queue='reporting-tasks', kwargs={
        'service_id': sms_service.id,
        'notification_type': 'sms',
        # three days of retention, and it's the morning of the 5th, so we want to keep all messages from the 4th, 3rd and 2nd
        'datetime_to_delete_before': date(2021, 6, 2),
    })
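

# The cut-off is the current date minus each service's retention: frozen at 2021-04-04,
# fourteen days of retention gives 2021-03-21 and three days gives 2021-04-01.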
@freeze_time('2021-04-04 23:00')
def test_delete_notifications_task_calls_task_for_services_with_data_retention_by_looking_at_retention(
    notify_db_session,
    mocker
):
    service_14_days = create_service(service_name='a')
    service_3_days = create_service(service_name='b')
    create_service_data_retention(service_14_days, days_of_retention=14)
    create_service_data_retention(service_3_days, days_of_retention=3)

    mock_subtask = mocker.patch('app.celery.nightly_tasks.delete_notifications_for_service_and_type')

    _delete_notifications_older_than_retention_by_type('sms')

    assert mock_subtask.apply_async.call_count == 2
    mock_subtask.apply_async.assert_has_calls(any_order=True, calls=[
        call(queue=ANY, kwargs={
            'service_id': service_14_days.id,
            'notification_type': 'sms',
            'datetime_to_delete_before': date(2021, 3, 21)
        }),
        call(queue=ANY, kwargs={
            'service_id': service_3_days.id,
            'notification_type': 'sms',
            'datetime_to_delete_before': date(2021, 4, 1)
        }),
    ])
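

# Services with no retention row fall back to the default seven days, and a subtask is only
# queued for services that actually have notifications of the requested type older than that
# cut-off (frozen at 2021-04-02, the cut-off is 2021-03-26).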
@freeze_time('2021-04-02 23:00')
def test_delete_notifications_task_calls_task_for_services_that_have_sent_notifications_recently(
    notify_db_session,
    mocker
):
    service_will_delete_1 = create_service(service_name='a')
    service_will_delete_2 = create_service(service_name='b')
    service_nothing_to_delete = create_service(service_name='c')

    create_template(service_will_delete_1)
    create_template(service_will_delete_2)
    nothing_to_delete_sms_template = create_template(service_nothing_to_delete, template_type='sms')
    nothing_to_delete_email_template = create_template(service_nothing_to_delete, template_type='email')

    # will be deleted as service has no custom retention, but past our default 7 days
    create_notification(service_will_delete_1.templates[0], created_at=datetime.utcnow() - timedelta(days=8))
    create_notification(service_will_delete_2.templates[0], created_at=datetime.utcnow() - timedelta(days=8))

    # will be kept as it's recent, and we won't run delete_notifications_for_service_and_type
    create_notification(nothing_to_delete_sms_template, created_at=datetime.utcnow() - timedelta(days=2))
    # this is an old notification, but for email not sms, so we won't run delete_notifications_for_service_and_type
    create_notification(nothing_to_delete_email_template, created_at=datetime.utcnow() - timedelta(days=8))

    mock_subtask = mocker.patch('app.celery.nightly_tasks.delete_notifications_for_service_and_type')

    _delete_notifications_older_than_retention_by_type('sms')

    assert mock_subtask.apply_async.call_count == 2
    mock_subtask.apply_async.assert_has_calls(any_order=True, calls=[
        call(queue=ANY, kwargs={
            'service_id': service_will_delete_1.id,
            'notification_type': 'sms',
            'datetime_to_delete_before': date(2021, 3, 26)
        }),
        call(queue=ANY, kwargs={
            'service_id': service_will_delete_2.id,
            'notification_type': 'sms',
            'datetime_to_delete_before': date(2021, 3, 26)
        }),
    ])
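

# cleanup_unfinished_jobs is expected to archive every job the DAO reports as unfinished and to
# remove its CSV object from S3, keyed by the job's original file name.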
def test_cleanup_unfinished_jobs(mocker):
    mock_s3 = mocker.patch('app.celery.nightly_tasks.remove_csv_object')
    mock_dao_archive = mocker.patch('app.celery.nightly_tasks.dao_archive_job')
    mock_dao = mocker.patch('app.celery.nightly_tasks.dao_get_unfinished_jobs')

    mock_job_unfinished = Job()
    mock_job_unfinished.processing_started = datetime(2023, 1, 1, 0, 0, 0)
    mock_job_unfinished.original_file_name = "blah"

    mock_dao.return_value = [mock_job_unfinished]

    cleanup_unfinished_jobs()

    mock_s3.assert_called_once_with('blah')
    mock_dao_archive.assert_called_once_with(mock_job_unfinished)