Change job selection dao to take flexible retention into account

Also test deleting jobs with flexible data retention

Also update the tests that cover the default data retention logic.

Change: dao_get_jobs_older_than_data_retention now counts "today" from
the start of the day, not from the time the function runs, and the
tests have been updated to reflect that.
Pea Tyczynska
2018-11-19 17:09:27 +00:00
committed by Alexey Bezhan
parent 744389f557
commit be6f37069b
4 changed files with 108 additions and 29 deletions
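The behavioural point in the commit message is that the retention window is now anchored to the start of the current UTC day rather than to the moment the task runs. A minimal sketch of that window calculation, using the default 7-day retention and the two-day collection window from the DAO code below (illustration only, not part of the diff):

from datetime import datetime, timedelta

# Anchoring to the start of today (UTC) means repeated runs on the same
# day select the same set of jobs, whatever time they run at.
today = datetime.utcnow().date()
end_date = today - timedelta(days=7)         # default retention: jobs older than 7 days
start_date = end_date - timedelta(days=2)    # two-day collection window
# A job is selected when start_date <= job.created_at < end_date.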


@@ -23,7 +23,7 @@ from app.dao.invited_org_user_dao import delete_org_invitations_created_more_tha
 from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
 from app.dao.jobs_dao import (
     dao_set_scheduled_jobs_to_pending,
-    dao_get_jobs_older_than_limited_by
+    dao_get_jobs_older_than_data_retention
 )
 from app.dao.jobs_dao import dao_update_job
 from app.dao.notifications_dao import (
@@ -64,7 +64,7 @@ from app.v2.errors import JobIncompleteError
 @notify_celery.task(name="remove_csv_files")
 @statsd(namespace="tasks")
 def remove_csv_files(job_types):
-    jobs = dao_get_jobs_older_than_limited_by(job_types=job_types)
+    jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types)
     for job in jobs:
         s3.remove_job_from_s3(job.service_id, job.id)
         current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
@@ -299,7 +299,7 @@ def delete_inbound_sms_older_than_seven_days():
 @notify_celery.task(name="remove_transformed_dvla_files")
 @statsd(namespace="tasks")
 def remove_transformed_dvla_files():
-    jobs = dao_get_jobs_older_than_limited_by(job_types=[LETTER_TYPE])
+    jobs = dao_get_jobs_older_than_data_retention(notification_types=[LETTER_TYPE])
     for job in jobs:
         s3.remove_transformed_dvla_file(job.id)
         current_app.logger.info("Transformed dvla file for job {} has been removed from s3.".format(job.id))
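Both tasks now delegate retention to dao_get_jobs_older_than_data_retention, forwarding their job_types argument as notification_types. A brief illustrative call, assuming it sits in the same module as the tasks above (the module name is not shown in this diff) and that the tasks are invoked directly rather than through the Celery worker:

from app.models import LETTER_TYPE

# Illustration only: in production these run as scheduled Celery tasks.
remove_transformed_dvla_files()              # letter jobs only; the type is fixed inside the task
remove_csv_files(job_types=[LETTER_TYPE])    # job_types is forwarded as notification_types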


@@ -18,6 +18,7 @@ from app.models import (
     LETTER_TYPE,
     NotificationHistory,
     Template,
+    ServiceDataRetention
 )
 from app.variables import LETTER_TEST_API_FILENAME
@@ -115,15 +116,37 @@ def dao_update_job(job):
     db.session.commit()


-def dao_get_jobs_older_than_limited_by(job_types, older_than=7, limit_days=2):
-    end_date = datetime.utcnow() - timedelta(days=older_than)
-    start_date = end_date - timedelta(days=limit_days)
+def dao_get_jobs_older_than_data_retention(notification_types):
+    flexible_data_retention = ServiceDataRetention.query.filter(
+        ServiceDataRetention.notification_type.in_(notification_types)
+    ).all()
+    jobs = []
+    today = datetime.utcnow().date()
+    for f in flexible_data_retention:
+        end_date = today - timedelta(days=f.days_of_retention)
+        start_date = end_date - timedelta(days=2)
-
-    return Job.query.join(Template).filter(
-        Job.created_at < end_date,
-        Job.created_at >= start_date,
-        Template.template_type.in_(job_types)
-    ).order_by(desc(Job.created_at)).all()
+
+        jobs.extend(Job.query.join(Template).filter(
+            Job.created_at < end_date,
+            Job.created_at >= start_date,
+            Template.template_type == f.notification_type,
+            Job.service_id == f.service_id
+        ).order_by(desc(Job.created_at)).all())
+
+    end_date = today - timedelta(days=7)
+    start_date = end_date - timedelta(days=2)
+    for notification_type in notification_types:
+        services_with_data_retention = [
+            x.service_id for x in flexible_data_retention if x.notification_type == notification_type
+        ]
+        jobs.extend(Job.query.join(Template).filter(
+            Job.created_at < end_date,
+            Job.created_at >= start_date,
+            Template.template_type == notification_type,
+            Job.service_id.notin_(services_with_data_retention)
+        ).order_by(desc(Job.created_at)).all())
+
+    return jobs


 def dao_get_all_letter_jobs():
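To make the two selection windows concrete, an illustrative calculation assuming the commit date (2018-11-19) and a hypothetical service with a 3-day ServiceDataRetention row; every other service falls back to the 7-day default:

from datetime import date, timedelta

today = date(2018, 11, 19)                          # what datetime.utcnow().date() returned on the commit date

# Service with a flexible 3-day retention row (hypothetical value):
flexible_end = today - timedelta(days=3)            # 2018-11-16
flexible_start = flexible_end - timedelta(days=2)   # 2018-11-14
# -> its jobs created on 14 or 15 November are returned.

# Every other service uses the 7-day default:
default_end = today - timedelta(days=7)             # 2018-11-12
default_start = default_end - timedelta(days=2)     # 2018-11-10
# -> their jobs created on 10 or 11 November are returned.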