diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py
index 6e3d2f25b..5eaf71457 100644
--- a/app/dao/jobs_dao.py
+++ b/app/dao/jobs_dao.py
@@ -130,24 +130,22 @@ def dao_get_jobs_older_than_data_retention(notification_types):
     today = datetime.utcnow().date()
     for f in flexible_data_retention:
         end_date = today - timedelta(days=f.days_of_retention)
-        start_date = end_date - timedelta(days=2)
         jobs.extend(Job.query.join(Template).filter(
             Job.created_at < end_date,
-            Job.created_at >= start_date,
+            Job.archived == False,  # noqa
             Template.template_type == f.notification_type,
             Job.service_id == f.service_id
         ).order_by(desc(Job.created_at)).all())
 
     end_date = today - timedelta(days=7)
-    start_date = end_date - timedelta(days=2)
     for notification_type in notification_types:
         services_with_data_retention = [
             x.service_id for x in flexible_data_retention if x.notification_type == notification_type
         ]
         jobs.extend(Job.query.join(Template).filter(
             Job.created_at < end_date,
-            Job.created_at >= start_date,
+            Job.archived == False,  # noqa
             Template.template_type == notification_type,
             Job.service_id.notin_(services_with_data_retention)
         ).order_by(desc(Job.created_at)).all())
 
diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py
index 02007195f..f62a98a60 100644
--- a/tests/app/celery/test_scheduled_tasks.py
+++ b/tests/app/celery/test_scheduled_tasks.py
@@ -292,7 +292,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(
     just_under_nine_days = nine_days_ago + timedelta(seconds=1)
     nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1)
 
-    job3_to_delete = create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago)
+    create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago, archived=True)
     job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago)
     job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days)
     dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago)
@@ -303,7 +303,6 @@
     assert s3.remove_job_from_s3.call_args_list == [
         call(job1_to_delete.service_id, job1_to_delete.id),
         call(job2_to_delete.service_id, job2_to_delete.id),
-        call(job3_to_delete.service_id, job3_to_delete.id)
     ]
     assert job1_to_delete.archived is True
     assert dont_delete_me_1.archived is False
diff --git a/tests/app/conftest.py b/tests/app/conftest.py
index 04d77a60e..ef7317acb 100644
--- a/tests/app/conftest.py
+++ b/tests/app/conftest.py
@@ -371,7 +371,8 @@ def sample_job(
     job_status='pending',
     scheduled_for=None,
     processing_started=None,
-    original_file_name='some.csv'
+    original_file_name='some.csv',
+    archived=False
 ):
     if service is None:
         service = sample_service(notify_db, notify_db_session)
@@ -390,7 +391,8 @@
         'created_by': service.created_by,
         'job_status': job_status,
         'scheduled_for': scheduled_for,
-        'processing_started': processing_started
+        'processing_started': processing_started,
+        'archived': archived
     }
     job = Job(**data)
     dao_create_job(job)
diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py
index bd8752ead..7d1d88346 100644
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -293,8 +293,8 @@ def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_tem
     job(created_at=seven_days_ago)
     job(created_at=within_seven_days)
     job_to_delete = job(created_at=eight_days_ago)
-    job(created_at=nine_days_ago)
-    job(created_at=nine_days_one_second_ago)
+    job(created_at=nine_days_ago, archived=True)
+    job(created_at=nine_days_one_second_ago, archived=True)
 
     jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type])
 
diff --git a/tests/app/db.py b/tests/app/db.py
index 3990a24cf..36e29953e 100644
--- a/tests/app/db.py
+++ b/tests/app/db.py
@@ -261,7 +261,8 @@ def create_job(
     job_status='pending',
     scheduled_for=None,
     processing_started=None,
-    original_file_name='some.csv'
+    original_file_name='some.csv',
+    archived=False
 ):
     data = {
         'id': uuid.uuid4(),
@@ -275,7 +276,8 @@
         'created_by': template.created_by,
         'job_status': job_status,
         'scheduled_for': scheduled_for,
-        'processing_started': processing_started
+        'processing_started': processing_started,
+        'archived': archived
     }
     job = Job(**data)
     dao_create_job(job)
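Note for reviewers, separate from the patch itself: the behavioural change is easiest to see outside SQLAlchemy. Before this change, dao_get_jobs_older_than_data_retention only looked at a two-day window (end_date - 2 days <= created_at < end_date), so any job that slipped past that window was never picked up again; afterwards it selects every unarchived job older than the cut-off, and the archived flag, which the cleanup task sets once a job's CSV has been removed from S3 (per the assertions in test_will_remove_csv_files_for_jobs_older_than_seven_days above), is what stops jobs being processed twice. A minimal sketch of the new rule in plain Python follows; FakeJob and jobs_older_than_retention are hypothetical stand-ins for illustration, not code from this repository.

from datetime import datetime, timedelta


class FakeJob:
    # Hypothetical stand-in for a Job row; only the two fields the new filter reads.
    def __init__(self, created_at, archived=False):
        self.created_at = created_at
        self.archived = archived


def jobs_older_than_retention(jobs, days_of_retention):
    # New rule: any unarchived job created before the cut-off is returned,
    # however old it is. The removed start_date bound would additionally
    # have required created_at >= end_date - timedelta(days=2).
    end_date = datetime.utcnow().date() - timedelta(days=days_of_retention)
    return [j for j in jobs if j.created_at.date() < end_date and not j.archived]


now = datetime.utcnow()
sample = [
    FakeJob(created_at=now - timedelta(days=30)),                 # missed by the old two-day window, selected now
    FakeJob(created_at=now - timedelta(days=30), archived=True),  # already processed, skipped
    FakeJob(created_at=now - timedelta(days=3)),                  # inside retention, skipped
]
assert len(jobs_older_than_retention(sample, days_of_retention=7)) == 1

The archived=False defaults added to the sample_job and create_job fixtures in conftest.py and db.py exist so the tests can construct the "already processed" case directly, as the updated test_should_get_jobs_seven_days_old does.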