Find only jobs and letters within the data retention period (working and tested)

Pea Tyczynska
2020-03-10 10:33:30 +00:00
parent 851435701f
commit 8b60e69157
2 changed files with 65 additions and 4 deletions
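
The change outer-joins ServiceDataRetention (matched per service and notification type) and falls back to a 7-day default where no retention row exists, so uploads older than the retention window stop appearing. A minimal sketch of the cut-off date arithmetic this implies, using a hypothetical helper that is not part of the commit:

from datetime import date, timedelta
from typing import Optional


def retention_cutoff(today: date, days_of_retention: Optional[int]) -> date:
    # Mirrors func.coalesce(ServiceDataRetention.days_of_retention, 7) in the diff:
    # fall back to the 7-day default when a service has no retention row for this type.
    days = days_of_retention if days_of_retention is not None else 7
    return today - timedelta(days=days)


assert retention_cutoff(date(2020, 3, 10), None) == date(2020, 3, 3)
assert retention_cutoff(date(2020, 3, 10), 30) == date(2020, 2, 9)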


@@ -1,10 +1,11 @@
 from datetime import datetime
 from flask import current_app
-from sqlalchemy import desc, literal
+from sqlalchemy import and_, desc, func, literal, String
 from app import db
 from app.models import (
     Job, Notification, Template, LETTER_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED,
-    NOTIFICATION_CANCELLED
+    NOTIFICATION_CANCELLED, ServiceDataRetention
 )
 from app.utils import midnight_n_days_ago
@@ -12,11 +13,15 @@ from app.utils import midnight_n_days_ago
 def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
     # Hardcoded filter to exclude cancelled or scheduled jobs
     # for the moment, but we may want to change this method take 'statuses' as a argument in the future
+    today = datetime.utcnow().date()
     jobs_query_filter = [
         Job.service_id == service_id,
         Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
         Job.original_file_name != current_app.config['ONE_OFF_MESSAGE_FILENAME'],
-        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED])
+        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED]),
+        func.coalesce(
+            Job.processing_started, Job.created_at
+        ) >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
     ]
     if limit_days is not None:
         jobs_query_filter.append(Job.created_at >= midnight_n_days_ago(limit_days))
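
The new jobs predicate uses processing_started when it is set and falls back to created_at otherwise, then compares that date against today minus the retention days. A rough, self-contained sketch of how such an expression is built, using throwaway column() objects rather than the real Job / ServiceDataRetention columns (assumes SQLAlchemy; not code from this commit):

from datetime import date

from sqlalchemy import Integer, column, func

# Stand-in columns for illustration only.
processing_started = column("processing_started")
created_at = column("created_at")
days_of_retention = column("days_of_retention", Integer)

cutoff = date(2020, 3, 10) - func.coalesce(days_of_retention, 7)
predicate = func.coalesce(processing_started, created_at) >= cutoff
print(predicate)  # prints the compiled expression with bind-parameter placeholders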
@@ -26,6 +31,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Job.original_file_name,
         Job.notification_count,
         Template.template_type,
+        func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
         Job.created_at.label("created_at"),
         Job.scheduled_for.label("scheduled_for"),
         Job.processing_started.label('processing_started'),
@@ -34,6 +40,11 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         literal(None).label('recipient'),
     ).join(
         Template, Job.template_id == Template.id
+    ).outerjoin(
+        ServiceDataRetention, and_(
+            Template.service_id == ServiceDataRetention.service_id,
+            func.cast(Template.template_type, String) == func.cast(ServiceDataRetention.notification_type, String)
+        )
     ).filter(
         *jobs_query_filter
     )
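
The join to ServiceDataRetention is an outer join so that jobs belonging to services with no retention row are kept, with coalesce supplying the 7-day default. A toy sketch of the outer-join-plus-default pattern on stand-in models (assumes SQLAlchemy 1.4+; not the notify-api models):

from sqlalchemy import Column, Integer, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class JobStub(Base):  # stand-in for Job
    __tablename__ = "job_stub"
    id = Column(Integer, primary_key=True)
    service_id = Column(Integer)


class RetentionStub(Base):  # stand-in for ServiceDataRetention
    __tablename__ = "retention_stub"
    id = Column(Integer, primary_key=True)
    service_id = Column(Integer)
    days_of_retention = Column(Integer)


stmt = select(
    JobStub.id,
    func.coalesce(RetentionStub.days_of_retention, 7).label("days_of_retention"),
).outerjoin(RetentionStub, JobStub.service_id == RetentionStub.service_id)

print(stmt)  # LEFT OUTER JOIN; jobs without a retention row get the default of 7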
@@ -44,6 +55,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.api_key_id == None, # noqa
         Notification.status != NOTIFICATION_CANCELLED,
         Template.hidden == True,
+        Notification.created_at >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
     ]
     if limit_days is not None:
@@ -54,6 +66,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.client_reference.label('original_file_name'),
         literal('1').label('notification_count'),
         literal(None).label('template_type'),
+        func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
         Notification.created_at.label("created_at"),
         literal(None).label('scheduled_for'),
         # letters don't have a processing_started date but we want created_at to be used for sorting
@@ -63,6 +76,11 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.to.label('recipient'),
     ).join(
         Template, Notification.template_id == Template.id
+    ).outerjoin(
+        ServiceDataRetention, and_(
+            Template.service_id == ServiceDataRetention.service_id,
+            func.cast(Template.template_type, String) == func.cast(ServiceDataRetention.notification_type, String)
+        )
     ).filter(
         *letters_query_filter
     )
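
The same outer join and retention cut-off are applied to the letters branch. The func.cast(..., String) on both sides of the join condition suggests that template_type and notification_type are distinct enum types, so both are normalised to text before comparison (an assumption; the diff does not say so). A minimal illustration of that casting pattern with throwaway columns:

from sqlalchemy import String, column, func

# Stand-in columns, not the real model attributes.
template_type = column("template_type")
notification_type = column("notification_type")

on_clause = func.cast(template_type, String) == func.cast(notification_type, String)
print(on_clause)  # roughly: CAST(template_type AS VARCHAR) = CAST(notification_type AS VARCHAR)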