Finding only jobs and letters within the data retention period works and is tested
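
The change joins each service's ServiceDataRetention settings into the uploads query so that jobs and letters older than the configured retention are no longer returned, falling back to seven days when no retention row exists for the notification type. The cut-off is effectively today - COALESCE(days_of_retention, 7). Below is a minimal sketch of that arithmetic; it is not code from the commit, and the helper name retention_cutoff is illustrative only.

# Sketch of the retention cut-off used by the query: today - COALESCE(days_of_retention, 7).
from datetime import date, timedelta
from typing import Optional


def retention_cutoff(days_of_retention: Optional[int], today: Optional[date] = None) -> date:
    """Earliest date that still falls inside the retention window (7-day default)."""
    today = today or date.today()
    return today - timedelta(days=days_of_retention if days_of_retention is not None else 7)


# A job whose processing started 5 days ago falls outside a 3-day sms retention window...
assert date.today() - timedelta(days=5) < retention_cutoff(3)
# ...but inside the 7-day default used when no ServiceDataRetention row exists.
assert date.today() - timedelta(days=5) >= retention_cutoff(None)
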
--- a/app/dao/uploads_dao.py
+++ b/app/dao/uploads_dao.py
@@ -1,10 +1,11 @@
+from datetime import datetime
 from flask import current_app
-from sqlalchemy import desc, literal
+from sqlalchemy import and_, desc, func, literal, String
 
 from app import db
 from app.models import (
     Job, Notification, Template, LETTER_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED,
-    NOTIFICATION_CANCELLED
+    NOTIFICATION_CANCELLED, ServiceDataRetention
 )
 from app.utils import midnight_n_days_ago
 
@@ -12,11 +13,15 @@ from app.utils import midnight_n_days_ago
 def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
     # Hardcoded filter to exclude cancelled or scheduled jobs
     # for the moment, but we may want to change this method take 'statuses' as a argument in the future
+    today = datetime.utcnow().date()
     jobs_query_filter = [
         Job.service_id == service_id,
         Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
         Job.original_file_name != current_app.config['ONE_OFF_MESSAGE_FILENAME'],
-        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED])
+        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED]),
+        func.coalesce(
+            Job.processing_started, Job.created_at
+        ) >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
     ]
     if limit_days is not None:
         jobs_query_filter.append(Job.created_at >= midnight_n_days_ago(limit_days))
@@ -26,6 +31,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Job.original_file_name,
         Job.notification_count,
         Template.template_type,
+        func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
         Job.created_at.label("created_at"),
         Job.scheduled_for.label("scheduled_for"),
         Job.processing_started.label('processing_started'),
@@ -34,6 +40,11 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         literal(None).label('recipient'),
     ).join(
         Template, Job.template_id == Template.id
+    ).outerjoin(
+        ServiceDataRetention, and_(
+            Template.service_id == ServiceDataRetention.service_id,
+            func.cast(Template.template_type, String) == func.cast(ServiceDataRetention.notification_type, String)
+        )
     ).filter(
         *jobs_query_filter
     )
@@ -44,6 +55,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.api_key_id == None,  # noqa
         Notification.status != NOTIFICATION_CANCELLED,
         Template.hidden == True,
+        Notification.created_at >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
 
     ]
     if limit_days is not None:
@@ -54,6 +66,7 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.client_reference.label('original_file_name'),
         literal('1').label('notification_count'),
         literal(None).label('template_type'),
+        func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
         Notification.created_at.label("created_at"),
         literal(None).label('scheduled_for'),
         # letters don't have a processing_started date but we want created_at to be used for sorting
@@ -63,6 +76,11 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
         Notification.to.label('recipient'),
     ).join(
         Template, Notification.template_id == Template.id
+    ).outerjoin(
+        ServiceDataRetention, and_(
+            Template.service_id == ServiceDataRetention.service_id,
+            func.cast(Template.template_type, String) == func.cast(ServiceDataRetention.notification_type, String)
+        )
     ).filter(
         *letters_query_filter
     )
--- a/tests/app/dao/test_uploads_dao.py
+++ b/tests/app/dao/test_uploads_dao.py
@@ -2,7 +2,7 @@ from datetime import datetime, timedelta
 
 from app.dao.uploads_dao import dao_get_uploads_by_service_id
 from app.models import LETTER_TYPE, JOB_STATUS_IN_PROGRESS
-from tests.app.db import create_job, create_service, create_template, create_notification
+from tests.app.db import create_job, create_service, create_service_data_retention, create_template, create_notification
 
 
 def create_uploaded_letter(letter_template, service, status='created', created_at=None):
@@ -31,6 +31,7 @@ def create_uploaded_template(service):
 
 
 def test_get_uploads_for_service(sample_template):
+    create_service_data_retention(sample_template.service, 'sms', days_of_retention=9)
     job = create_job(sample_template, processing_started=datetime.utcnow())
     letter_template = create_uploaded_template(sample_template.service)
     letter = create_uploaded_letter(letter_template, sample_template.service)
@@ -51,6 +52,7 @@ def test_get_uploads_for_service(sample_template):
         letter.client_reference,
         1,
         None,
+        7,
         letter.created_at,
         None,
         letter.created_at,
@@ -63,6 +65,7 @@ def test_get_uploads_for_service(sample_template):
         job.original_file_name,
         job.notification_count,
         'sms',
+        9,
         job.created_at,
         job.scheduled_for,
         job.processing_started,
@@ -76,6 +79,7 @@ def test_get_uploads_for_service(sample_template):
         other_letter.client_reference,
         1,
         None,
+        7,
         other_letter.created_at,
         None,
         other_letter.created_at,
@@ -86,6 +90,7 @@ def test_get_uploads_for_service(sample_template):
         other_job.original_file_name,
         other_job.notification_count,
         other_job.template.template_type,
+        7,
         other_job.created_at,
         other_job.scheduled_for,
         other_job.processing_started,
@@ -162,6 +167,44 @@ def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_tem
     assert results[3].id == upload_4.id
 
 
+def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template):
+    letter_template = create_uploaded_template(sample_template.service)
+    create_service_data_retention(sample_template.service, 'sms', days_of_retention=3)
+
+    days_ago = datetime.utcnow() - timedelta(days=4)
+    upload_1 = create_uploaded_letter(letter_template, service=letter_template.service)
+    upload_2 = create_job(
+        sample_template, processing_started=datetime.utcnow() - timedelta(days=1), created_at=days_ago,
+        job_status=JOB_STATUS_IN_PROGRESS
+    )
+    # older than custom retention for sms:
+    create_job(
+        sample_template, processing_started=datetime.utcnow() - timedelta(days=5), created_at=days_ago,
+        job_status=JOB_STATUS_IN_PROGRESS
+    )
+    upload_3 = create_uploaded_letter(
+        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=3)
+    )
+
+    # older than retention for sms but within letter retention:
+    upload_4 = create_uploaded_letter(
+        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=6)
+    )
+
+    # older than default retention for letters:
+    create_uploaded_letter(
+        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=8)
+    )
+
+    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items
+
+    assert len(results) == 4
+    assert results[0].id == upload_1.id
+    assert results[1].id == upload_2.id
+    assert results[2].id == upload_3.id
+    assert results[3].id == upload_4.id
+
+
 def test_get_uploads_is_paginated(sample_template):
     letter_template = create_uploaded_template(sample_template.service)
 
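
For readers checking the new test's expectations, the following plain-Python model mirrors the filter it exercises, assuming the per-type retention lookup with a 7-day default described above. It is a sketch only: the names retention and within_retention are illustrative, not helpers from the repository.

# Plain-Python model of the retention filter applied to the test fixtures above.
from datetime import date, timedelta

DEFAULT_RETENTION_DAYS = 7
# Single service in the test, so retention is keyed by notification type only;
# 'sms' is overridden to 3 days, letters fall back to the 7-day default.
retention = {'sms': 3}


def within_retention(notification_type: str, effective_date: date, today: date) -> bool:
    # Mirrors COALESCE(ServiceDataRetention.days_of_retention, 7) in the query.
    days = retention.get(notification_type, DEFAULT_RETENTION_DAYS)
    return effective_date >= today - timedelta(days=days)


today = date.today()
uploads = [
    ('letter', today),                      # upload_1
    ('sms', today - timedelta(days=1)),     # upload_2 (processing_started)
    ('sms', today - timedelta(days=5)),     # excluded: older than 3-day sms retention
    ('letter', today - timedelta(days=3)),  # upload_3
    ('letter', today - timedelta(days=6)),  # upload_4
    ('letter', today - timedelta(days=8)),  # excluded: older than 7-day default
]
kept = [u for u in uploads if within_retention(*u, today=today)]
assert len(kept) == 4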