Add an endpoint for stats about scheduled jobs

At the moment we display the count of scheduled jobs on the dashboard
by sending all the scheduled jobs to the admin app and letting it work
out the stats.

This is inefficient and, because the get-jobs response is paginated with a
page size of 50, the count becomes incorrect if a service schedules more
than 50 jobs.

This commit adds a separate endpoint which gives the admin app the stats
it needs directly and correctly.
commit 2fcde009ac
parent f651176343
Author: Chris Hill-Scott
Date: 2020-09-28 09:57:32 +01:00

3 changed files with 59 additions and 0 deletions
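
For illustration, a minimal sketch of how the admin app could consume the
new endpoint instead of fetching every job and counting client-side. The
`api_client.get` helper and the URL prefix are assumptions, not part of
this commit:

# Hypothetical admin-app helper, assuming an API client with a `get(url)`
# method that returns the parsed JSON body. The `/service/<id>/job` prefix
# is assumed from where the job blueprint is usually mounted.
def get_scheduled_job_stats(api_client, service_id):
    # One small request, however many jobs the service has scheduled
    stats = api_client.get(f'/service/{service_id}/job/scheduled-job-stats')
    return stats['count'], stats['soonest_scheduled_for']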


@@ -84,6 +84,18 @@ def dao_get_jobs_by_service_id(
        .paginate(page=page, per_page=page_size)


def dao_get_scheduled_job_stats(
    service_id,
):
    return db.session.query(
        func.count(Job.id),
        func.min(Job.scheduled_for),
    ).filter(
        Job.service_id == service_id,
        Job.job_status == JOB_STATUS_SCHEDULED,
    ).one()


def dao_get_job_by_id(job_id):
    return Job.query.filter_by(id=job_id).one()


@@ -1,4 +1,5 @@
import dateutil
import pytz
from flask import (
    Blueprint,
    jsonify,
@@ -14,6 +15,7 @@ from app.dao.jobs_dao import (
    dao_get_jobs_by_service_id,
    dao_get_future_scheduled_job_by_id_and_service_id,
    dao_get_notification_outcomes_for_job,
    dao_get_scheduled_job_stats,
    dao_cancel_letter_job,
    can_letter_job_be_cancelled
)
@@ -188,6 +190,18 @@ def create_job(service_id):
    return jsonify(data=job_json), 201


@job_blueprint.route('/scheduled-job-stats', methods=['GET'])
def get_scheduled_job_stats(service_id):
    count, soonest_scheduled_for = dao_get_scheduled_job_stats(service_id)
    return jsonify(
        count=count,
        soonest_scheduled_for=(
            soonest_scheduled_for.replace(tzinfo=pytz.UTC).isoformat()
            if soonest_scheduled_for else None
        ),
    ), 200


def get_paginated_jobs(
    service_id,
    *,


@@ -910,3 +910,36 @@ def test_get_jobs_should_retrieve_from_ft_notification_status_for_old_jobs(admin
    assert resp_json['data'][1]['statistics'] == [{'status': 'created', 'count': 1}]
    assert resp_json['data'][2]['id'] == str(job_1.id)
    assert resp_json['data'][2]['statistics'] == [{'status': 'delivered', 'count': 6}]


@freeze_time('2017-07-17 07:17')
def test_get_scheduled_job_stats_when_no_scheduled_jobs(admin_request, sample_template):
    # This sets up a bunch of regular, non-scheduled jobs
    _setup_jobs(sample_template)

    service_id = sample_template.service.id

    resp_json = admin_request.get('job.get_scheduled_job_stats', service_id=service_id)

    assert resp_json == {
        'count': 0,
        'soonest_scheduled_for': None,
    }


@freeze_time('2017-07-17 07:17')
def test_get_scheduled_job_stats(admin_request, sample_template):
    create_job(sample_template, job_status='scheduled', scheduled_for='2017-07-17 09:00')
    create_job(sample_template, job_status='scheduled', scheduled_for='2017-07-17 10:00')
    create_job(sample_template, job_status='scheduled', scheduled_for='2017-07-17 11:00')

    service_id = sample_template.service.id

    resp_json = admin_request.get(
        'job.get_scheduled_job_stats',
        service_id=service_id,
    )

    assert resp_json == {
        'count': 3,
        'soonest_scheduled_for': '2017-07-17T09:00:00+00:00',
    }