Merge pull request #2839 from alphagov/group-letter-uploads

Group letter uploads by printing day
This commit is contained in:
Chris Hill-Scott
2020-05-19 11:05:14 +01:00
committed by GitHub
4 changed files with 367 additions and 101 deletions

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from flask import current_app
from sqlalchemy import and_, desc, func, literal, String
from sqlalchemy import and_, desc, func, literal, text, String
from app import db
from app.models import (
@@ -10,6 +10,28 @@ from app.models import (
from app.utils import midnight_n_days_ago
def _get_printing_day(created_at):
    """Return a SQL expression for the day a letter's print run falls on.

    Letters created after the 5:30pm deadline (local time) roll over into
    the next day's print run.
    """
    # Re-express the stored naive timestamp as Europe/London wall-clock time
    # (presumably created_at is stored as naive UTC — the inner timezone('UTC', ...)
    # attaches that zone before converting).
    local_created_at = func.timezone('Europe/London', func.timezone('UTC', created_at))
    # We add 6 hours 30 minutes to the local created_at time so that
    # any letters created after 5:30pm get shifted into the next day
    deadline_shift = text("interval '6 hours 30 minutes'")
    return func.date_trunc('day', local_created_at + deadline_shift)
def _get_printing_datetime(created_at):
    """Return a SQL expression for the moment printing starts on a letter's print day."""
    # Letters are printed from 5:30pm each day, so offset the (midnight)
    # printing day by that amount.
    print_run_start = text("interval '17 hours 30 minutes'")
    return _get_printing_day(created_at) + print_run_start
def _naive_gmt_to_utc(column):
    """Return a SQL expression converting a naive Europe/London timestamp to naive UTC."""
    # The inner timezone('Europe/London', ...) attaches the local zone to the
    # naive value; the outer timezone('UTC', ...) then strips it back to naive UTC.
    as_local = func.timezone('Europe/London', column)
    return func.timezone('UTC', as_local)
def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
# Hardcoded filter to exclude cancelled or scheduled jobs
# for the moment, but we may want to change this method to take 'statuses' as an argument in the future
@@ -56,24 +78,13 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
Notification.status != NOTIFICATION_CANCELLED,
Template.hidden == True,
Notification.created_at >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
]
if limit_days is not None:
letters_query_filter.append(Notification.created_at >= midnight_n_days_ago(limit_days))
letters_query = db.session.query(
Notification.id,
Notification.client_reference.label('original_file_name'),
literal('1').label('notification_count'),
literal(None).label('template_type'),
func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
Notification.created_at.label("created_at"),
literal(None).label('scheduled_for'),
# letters don't have a processing_started date but we want created_at to be used for sorting
Notification.created_at.label('processing_started'),
Notification.status,
literal('letter').label('upload_type'),
Notification.to.label('recipient'),
letters_subquery = db.session.query(
func.count().label('notification_count'),
_naive_gmt_to_utc(_get_printing_datetime(Notification.created_at)).label('printing_at'),
).join(
Template, Notification.template_id == Template.id
).outerjoin(
@@ -83,6 +94,25 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
)
).filter(
*letters_query_filter
).group_by(
'printing_at'
).subquery()
letters_query = db.session.query(
literal(None).label('id'),
literal('Uploaded letters').label('original_file_name'),
letters_subquery.c.notification_count.label('notification_count'),
literal('letter').label('template_type'),
literal(None).label('days_of_retention'),
letters_subquery.c.printing_at.label('created_at'),
literal(None).label('scheduled_for'),
letters_subquery.c.printing_at.label('processing_started'),
literal(None).label('status'),
literal('letter_day').label('upload_type'),
literal(None).label('recipient'),
).group_by(
letters_subquery.c.notification_count,
letters_subquery.c.printing_at,
)
return jobs_query.union_all(
@@ -90,3 +120,23 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
).order_by(
desc("processing_started"), desc("created_at")
).paginate(page=page, per_page=page_size)
def dao_get_uploaded_letters_by_print_date(service_id, letter_print_date, page=1, page_size=50):
    """Fetch a page of a service's uploaded letters for one printing day.

    Includes only non-cancelled letters with no API key (presumably those
    uploaded via the dashboard — confirm against callers) whose template is
    hidden, ordered newest first.
    """
    criteria = (
        Notification.service_id == service_id,
        Notification.notification_type == LETTER_TYPE,
        Notification.api_key_id.is_(None),
        Notification.status != NOTIFICATION_CANCELLED,
        Template.hidden.is_(True),
        # Match on the computed print day, not the raw created_at date,
        # so post-5:30pm letters land on the following day's page.
        _get_printing_day(Notification.created_at) == letter_print_date.date(),
    )
    letters = db.session.query(
        Notification,
    ).join(
        Template, Notification.template_id == Template.id
    ).filter(
        *criteria
    ).order_by(
        desc(Notification.created_at)
    )
    return letters.paginate(
        page=page,
        per_page=page_size,
    )

View File

@@ -1,5 +1,7 @@
from datetime import datetime
from flask import (
Blueprint,
abort,
jsonify,
request,
current_app
@@ -9,10 +11,14 @@ from app.dao.fact_notification_status_dao import fetch_notification_statuses_for
from app.dao.jobs_dao import (
dao_get_notification_outcomes_for_job
)
from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.dao.uploads_dao import (
dao_get_uploaded_letters_by_print_date,
dao_get_uploads_by_service_id,
)
from app.errors import (
register_errors,
)
from app.schemas import notification_with_template_schema
from app.utils import pagination_links, midnight_n_days_ago
upload_blueprint = Blueprint('upload', __name__, url_prefix='/service/<uuid:service_id>/upload')
@@ -61,7 +67,7 @@ def get_paginated_uploads(service_id, limit_days, page):
upload_dict['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in
statistics]
else:
upload_dict['statistics'] = [{'status': upload.status, 'count': 1}]
upload_dict['statistics'] = []
data.append(upload_dict)
return {
@@ -74,3 +80,31 @@ def get_paginated_uploads(service_id, limit_days, page):
service_id=service_id
)
}
@upload_blueprint.route('/uploaded-letters/<letter_print_date>', methods=['GET'])
def get_uploaded_letter_by_service_and_print_day(service_id, letter_print_date):
    """Return a paginated JSON listing of letters uploaded on a given print day.

    ``letter_print_date`` must be an ISO ``YYYY-MM-DD`` date string; any other
    format is rejected with a 400.
    """
    try:
        letter_print_datetime = datetime.strptime(letter_print_date, '%Y-%m-%d')
    except ValueError:
        abort(400)
    pagination = dao_get_uploaded_letters_by_print_date(
        service_id,
        letter_print_date=letter_print_datetime,
        # Default to page 1 when the 'page' query parameter is missing or
        # malformed; previously None was passed through, overriding the DAO's
        # own page=1 default and relying on paginate() to resolve it.
        page=request.args.get('page', 1, type=int),
        page_size=current_app.config['PAGE_SIZE']
    )
    return jsonify({
        'notifications': notification_with_template_schema.dump(
            pagination.items,
            many=True,
        ).data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_uploaded_letter_by_service_and_print_day',
            service_id=service_id,
            letter_print_date=letter_print_date,
        ),
    })