Merge pull request #2258 from alphagov/dashboard-jobs-speedup

stop dashboard reading notification_history
This commit is contained in:
Leo Hemsted
2018-12-17 14:52:06 +00:00
committed by GitHub
8 changed files with 274 additions and 435 deletions

View File

@@ -186,3 +186,14 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date):
else:
query = stats
return query.all()
def fetch_notification_statuses_for_job(job_id):
    """Return per-status notification counts for one job.

    Aggregates rows from the FactNotificationStatus table (the
    ft_notification_status fact table) instead of notification_history,
    yielding (status, count) rows — one per distinct notification status.
    """
    status_column = FactNotificationStatus.notification_status
    total = func.sum(FactNotificationStatus.notification_count)

    query = db.session.query(
        status_column.label('status'),
        total.label('count'),
    ).filter(
        FactNotificationStatus.job_id == job_id,
    )
    return query.group_by(status_column).all()

View File

@@ -16,7 +16,7 @@ from app.models import (
JOB_STATUS_PENDING,
JOB_STATUS_SCHEDULED,
LETTER_TYPE,
-    NotificationHistory,
+    Notification,
Template,
ServiceDataRetention
)
@@ -25,19 +25,14 @@ from app.variables import LETTER_TEST_API_FILENAME
@statsd(namespace="dao")
def dao_get_notification_outcomes_for_job(service_id, job_id):
-    query = db.session.query(
-        func.count(NotificationHistory.status).label('count'),
-        NotificationHistory.status
-    )
-    return query.filter(
-        NotificationHistory.service_id == service_id
+    return db.session.query(
+        func.count(Notification.status).label('count'),
+        Notification.status
     ).filter(
-        NotificationHistory.job_id == job_id
+        Notification.service_id == service_id,
+        Notification.job_id == job_id
     ).group_by(
-        NotificationHistory.status
-    ).order_by(
-        asc(NotificationHistory.status)
+        Notification.status
).all()

View File

@@ -1,3 +1,4 @@
+import dateutil
from flask import (
Blueprint,
jsonify,
@@ -13,6 +14,7 @@ from app.dao.jobs_dao import (
dao_get_jobs_by_service_id,
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job)
+from app.dao.fact_notification_status_dao import fetch_notification_statuses_for_job
from app.dao.services_dao import dao_fetch_service_by_id
from app.dao.templates_dao import dao_get_template_by_id
from app.dao.notifications_dao import get_notifications_for_job
@@ -24,7 +26,7 @@ from app.schemas import (
)
from app.celery.tasks import process_job
from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED, LETTER_TYPE
-from app.utils import pagination_links
+from app.utils import pagination_links, midnight_n_days_ago
from app.config import QueueNames
from app.errors import (
register_errors,
@@ -171,8 +173,14 @@ def get_paginated_jobs(service_id, limit_days, statuses, page):
)
data = job_schema.dump(pagination.items, many=True).data
for job_data in data:
-        statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
-        job_data['statistics'] = [{'status': statistic[1], 'count': statistic[0]} for statistic in statistics]
+        created_at = dateutil.parser.parse(job_data['created_at']).replace(tzinfo=None)
+        if created_at < midnight_n_days_ago(3):
+            # ft_notification_status table
+            statistics = fetch_notification_statuses_for_job(job_data['id'])
+        else:
+            # notifications table
+            statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
+        job_data['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in statistics]
return {
'data': data,