Merge pull request #1033 from alphagov/job-stats-for-dashboard

Job stats for dashboard
This commit is contained in:
Rebecca Law
2017-06-13 14:35:58 +01:00
committed by GitHub
4 changed files with 332 additions and 4 deletions

View File

@@ -8,7 +8,7 @@ from app.dao import days_ago
from app.models import (
Job, JobStatistics, Notification, NotificationHistory, Template,
JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING,
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
LETTER_TYPE
)
from app.statsd_decorators import statsd
@@ -142,3 +142,56 @@ def dao_get_all_letter_jobs():
return db.session.query(Job).join(Job.template).filter(
Template.template_type == LETTER_TYPE
).order_by(desc(Job.created_at)).all()
def _job_stats_query(service_id):
    """Build the base Job↔JobStatistics query for one service.

    Joins each job to its statistics row and selects the columns the
    serializers need. Shared by the single-job and per-service lookups
    below so the column list is defined in exactly one place.
    """
    return Job.query.join(
        JobStatistics, Job.id == JobStatistics.job_id
    ).filter(
        Job.service_id == service_id
    ).add_columns(
        JobStatistics.job_id,
        Job.original_file_name,
        Job.created_at,
        Job.scheduled_for,
        Job.template_id,
        Job.template_version,
        Job.job_status,
        Job.service_id,
        Job.notification_count,
        JobStatistics.sent,
        JobStatistics.delivered,
        JobStatistics.failed
    )


@statsd(namespace="dao")
def dao_get_job_statistics_for_job(service_id, job_id):
    """Return the statistics row for a single job.

    Raises sqlalchemy.orm.exc.NoResultFound when the job does not exist
    or does not belong to the given service (surfaces as a 404 upstream).
    """
    return _job_stats_query(service_id).filter(Job.id == job_id).one()


@statsd(namespace="dao")
def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=None, statuses=None):
    """Return a paginated set of job statistics for a service, newest first.

    :param limit_days: if truthy, only include jobs created within the last
        `limit_days` days (inclusive, via `days_ago`).
    :param statuses: optional list of job statuses to filter on; `None` and
        the `['']` produced by parsing an empty query string both mean
        "no status filter".
    """
    query = _job_stats_query(service_id)
    if limit_days:
        query = query.filter(Job.created_at >= days_ago(limit_days))
    if statuses is not None and statuses != ['']:
        query = query.filter(Job.job_status.in_(statuses))
    return query.order_by(Job.created_at.desc()).paginate(page=page, per_page=page_size)

View File

@@ -5,14 +5,16 @@ from flask import (
current_app
)
from app import DATETIME_FORMAT
from app.dao.jobs_dao import (
dao_create_job,
dao_update_job,
dao_get_job_by_service_id_and_job_id,
dao_get_jobs_by_service_id,
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job
)
dao_get_notification_outcomes_for_job,
dao_get_job_stats_for_service,
dao_get_job_statistics_for_job)
from app.dao.services_dao import (
dao_fetch_service_by_id
@@ -57,6 +59,13 @@ def get_job_by_service_and_job_id(service_id, job_id):
return jsonify(data=data)
@job_blueprint.route('/job-stats/<job_id>', methods=['GET'])
def get_job_stats_by_service_and_job_id(service_id, job_id):
    """Return the serialized statistics for one job of a service (404 if absent)."""
    row = dao_get_job_statistics_for_job(service_id=service_id, job_id=job_id)
    return jsonify(_serialize_job_stats(row))
@job_blueprint.route('/<job_id>/cancel', methods=['POST'])
def cancel_job(service_id, job_id):
job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id)
@@ -117,6 +126,57 @@ def get_jobs_by_service(service_id):
return jsonify(**get_paginated_jobs(service_id, limit_days, statuses, page))
@job_blueprint.route('/job-stats', methods=['GET'])
def get_jobs_for_service(service_id):
    """Return a page of job statistics for a service.

    Query string:
        limit_days -- optional int; only jobs created that recently are included.
        statuses -- optional comma-separated job statuses to filter by.
        page -- page number, defaults to 1.

    Raises InvalidRequest (400) when limit_days is not an integer.
    """
    if request.args.get('limit_days'):
        try:
            limit_days = int(request.args['limit_days'])
        except ValueError:
            errors = {'limit_days': ['{} is not an integer'.format(request.args['limit_days'])]}
            raise InvalidRequest(errors, status_code=400)
    else:
        limit_days = None

    statuses = _parse_statuses(request.args.get('statuses', ''))
    page = int(request.args.get('page', 1))

    pagination = dao_get_job_stats_for_service(service_id=service_id,
                                               page=page,
                                               page_size=current_app.config['PAGE_SIZE'],
                                               limit_days=limit_days,
                                               statuses=statuses)
    return jsonify({
        'data': [_serialize_job_stats(x) for x in pagination.items],
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            # Fix: next/prev links must point at THIS job-stats endpoint;
            # previously they referenced '.get_jobs_by_service', the plain
            # jobs listing, so paging links led to a different resource.
            '.get_jobs_for_service',
            service_id=service_id
        )
    })
def _parse_statuses(statuses):
return [x.strip() for x in statuses.split(',')]
def _serialize_job_stats(stat):
    """Turn one Job+JobStatistics result row into a JSON-safe dict."""
    serialized = {
        field: getattr(stat, field)
        for field in (
            "job_id", "original_file_name", "scheduled_for", "template_id",
            "template_version", "job_status", "service_id",
            "sent", "delivered", "failed",
        )
    }
    # created_at is rendered with the app-wide datetime format; the row's
    # notification_count is exposed under the API name "requested".
    serialized["created_at"] = stat.created_at.strftime(DATETIME_FORMAT)
    serialized["requested"] = stat.notification_count
    return serialized
@job_blueprint.route('', methods=['POST'])
def create_job(service_id):
service = dao_fetch_service_by_id(service_id)

View File

@@ -16,7 +16,10 @@ from app.dao.jobs_dao import (
all_notifications_are_created_for_job,
dao_update_job_status,
dao_get_all_notifications_for_job,
dao_get_jobs_older_than_limited_by)
dao_get_jobs_older_than_limited_by,
dao_get_job_statistics_for_job,
dao_get_job_stats_for_service)
from app.dao.statistics_dao import create_or_update_job_sending_statistics, update_job_stats_outcome_count
from app.models import (
Job, JobStatistics,
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
@@ -411,3 +414,128 @@ def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_sessio
assert len(jobs) == 2
assert job_to_remain.id not in [job.id for job in jobs]
def test_dao_get_job_statistics_for_job_aggregates_notification_outcomes(
        notify_db, notify_db_session, sample_job):
    # Renamed: this test previously had the same name as the one below it,
    # so the later definition shadowed this one and it never ran.
    notification = create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
    notification_delivered = create_notification(notify_db=notify_db, notify_db_session=notify_db_session,
                                                 job=sample_job, status='delivered')
    notification_failed = create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job,
                                              status='permanent-failure')

    create_or_update_job_sending_statistics(notification)
    create_or_update_job_sending_statistics(notification_delivered)
    create_or_update_job_sending_statistics(notification_failed)
    update_job_stats_outcome_count(notification_delivered)
    update_job_stats_outcome_count(notification_failed)

    result = dao_get_job_statistics_for_job(sample_job.service_id, sample_job.id)
    assert_job_stat(job=sample_job, result=result, sent=3, delivered=1, failed=1)
def test_dao_get_job_statistics_for_job(notify_db, notify_db_session, sample_service):
    """Each job's stats row reflects only that job's own notifications."""
    first_job, second_job = stats_set_up(notify_db, notify_db_session, sample_service)

    first_stats = dao_get_job_statistics_for_job(sample_service.id, first_job.id)
    assert_job_stat(job=first_job, result=first_stats, sent=2, delivered=1, failed=0)

    second_stats = dao_get_job_statistics_for_job(sample_service.id, second_job.id)
    assert_job_stat(job=second_job, result=second_stats, sent=1, delivered=0, failed=1)
def test_dao_get_job_stats_for_service(notify_db, notify_db_session, sample_service):
    """The service listing returns one row per job, newest job first."""
    older_job, newer_job = stats_set_up(notify_db, notify_db_session, sample_service)

    rows = dao_get_job_stats_for_service(sample_service.id).items

    assert len(rows) == 2
    assert_job_stat(newer_job, rows[0], 1, 0, 1)
    assert_job_stat(older_job, rows[1], 2, 1, 0)
def test_dao_get_job_stats_for_service_only_returns_stats_for_service(notify_db, notify_db_session, sample_service):
    """A service's listing never includes another service's jobs."""
    job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)
    another_service = create_service(notify_db=notify_db, notify_db_session=notify_db_session,
                                     service_name='Another Service')
    job_3, job_4 = stats_set_up(notify_db, notify_db_session, service=another_service)

    # Both services were seeded identically, so each should see exactly
    # its own two jobs, newest first.
    for service, older_job, newer_job in ((sample_service, job_1, job_2),
                                          (another_service, job_3, job_4)):
        rows = dao_get_job_stats_for_service(service.id).items
        assert len(rows) == 2
        assert_job_stat(newer_job, rows[0], 1, 0, 1)
        assert_job_stat(older_job, rows[1], 2, 1, 0)
def test_dao_get_job_stats_for_service_only_returns_jobs_created_within_limited_days(
        notify_db, notify_db_session, sample_service):
    # The first seeded job is two days old, so a one-day window excludes it.
    _, recent_job = stats_set_up(notify_db, notify_db_session, sample_service)

    page = dao_get_job_stats_for_service(sample_service.id, limit_days=1)

    assert page.total == 1
    assert_job_stat(recent_job, page.items[0], 1, 0, 1)
def test_dao_get_job_stats_for_service_only_returns_jobs_created_within_limited_days_inclusive(
        notify_db, notify_db_session, sample_service):
    # The window is inclusive: a job created exactly two days ago still
    # appears when limit_days=2.
    old_job, new_job = stats_set_up(notify_db, notify_db_session, sample_service)

    rows = dao_get_job_stats_for_service(sample_service.id, limit_days=2).items

    assert len(rows) == 2
    assert_job_stat(new_job, rows[0], 1, 0, 1)
    assert_job_stat(old_job, rows[1], 2, 1, 0)
def test_dao_get_job_stats_paginates_results(
        notify_db, notify_db_session, sample_service):
    """With page_size=1 each page holds exactly one job, newest first."""
    old_job, new_job = stats_set_up(notify_db, notify_db_session, sample_service)

    first_page = dao_get_job_stats_for_service(sample_service.id, page=1, page_size=1).items
    assert len(first_page) == 1
    assert_job_stat(new_job, first_page[0], 1, 0, 1)

    second_page = dao_get_job_stats_for_service(sample_service.id, page=2, page_size=1).items
    assert len(second_page) == 1
    assert_job_stat(old_job, second_page[0], 2, 1, 0)
def test_dao_get_job_returns_jobs_for_status(
        notify_db, notify_db_session, sample_service):
    """The statuses filter narrows the listing; extra statuses widen it."""
    stats_set_up(notify_db, notify_db_session, sample_service)

    # Only one of the two seeded jobs is 'pending'; both match when
    # 'finished' is allowed as well.
    assert dao_get_job_stats_for_service(sample_service.id, statuses=['pending']).total == 1
    assert dao_get_job_stats_for_service(sample_service.id,
                                         statuses=['pending', 'finished']).total == 2
def assert_job_stat(job, result, sent, delivered, failed):
    """Assert that a job-stats row matches its source job and expected counts."""
    assert result.job_id == job.id
    # Every job column is copied straight onto the result row.
    for attribute in ('original_file_name', 'created_at', 'scheduled_for',
                      'template_id', 'template_version', 'job_status',
                      'service_id', 'notification_count'):
        assert getattr(result, attribute) == getattr(job, attribute)
    assert result.sent == sent
    assert result.delivered == delivered
    assert result.failed == failed
def stats_set_up(notify_db, notify_db_session, service):
    """Seed two jobs with statistics for *service* and return (job_1, job_2).

    job_1: created two days ago; two notifications, one of them delivered.
    job_2: created now with status 'finished'; one notification which failed.
    """
    job_1 = create_job(notify_db=notify_db, notify_db_session=notify_db_session,
                       service=service, created_at=datetime.utcnow() - timedelta(days=2))
    job_2 = create_job(notify_db=notify_db, notify_db_session=notify_db_session,
                       service=service, original_file_name='Another job', job_status='finished')

    sending = create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job_1)
    delivered = create_notification(notify_db=notify_db, notify_db_session=notify_db_session,
                                    job=job_1, status='delivered')
    failed = create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job_2,
                                 status='permanent-failure')

    # All three count towards 'sent'; only terminal outcomes update the
    # delivered/failed counters.
    for notification in (sending, delivered, failed):
        create_or_update_job_sending_statistics(notification)
    for notification in (delivered, failed):
        update_job_stats_outcome_count(notification)

    return job_1, job_2

View File

@@ -6,6 +6,7 @@ from freezegun import freeze_time
import pytest
import pytz
import app.celery.tasks
from app import DATETIME_FORMAT
from tests import create_authorization_header
from tests.conftest import set_config
@@ -762,3 +763,89 @@ def test_get_all_notifications_for_job_returns_csv_format(
notification = resp['notifications'][0]
assert set(notification.keys()) == \
set(['created_at', 'template_type', 'template_name', 'job_name', 'status', 'row_number', 'recipient'])
# New endpoint to get job statistics; the old tests will be refactored away.
def test_get_jobs_for_service_new_endpoint(client, notify_db, notify_db_session, sample_template):
    """The job-stats listing serializes every job with zeroed outcome counts."""
    _setup_jobs(notify_db, notify_db_session, sample_template)
    path = '/service/{}/job/job-stats'.format(sample_template.service.id)

    response = client.get(path, headers=[create_authorization_header()])
    assert response.status_code == 200

    rows = json.loads(response.get_data(as_text=True))['data']
    assert len(rows) == 5

    first = rows[0]
    for populated_field in ("job_id", "created_at", "template_id",
                            "template_version", "service_id", "requested"):
        assert first[populated_field]
    assert not first["scheduled_for"]
    # No notifications were created, so every outcome counter is zero.
    assert first["sent"] == 0
    assert first["delivered"] == 0
    assert first["failed"] == 0
def test_get_jobs_raises_for_bad_limit_days(client, sample_service):
    """A non-numeric limit_days is rejected with a 400 and a field-level error."""
    path = '/service/{}/job/job-stats'.format(sample_service.id)

    response = client.get(path,
                          query_string={'limit_days': 'bad_number'},
                          headers=[create_authorization_header()])

    assert response.status_code == 400
    body = json.loads(response.get_data(as_text=True))
    assert body["result"] == "error"
    assert body["message"] == {'limit_days': ['bad_number is not an integer']}
def test_parse_status_turns_comma_sep_strings_into_list():
    from app.job.rest import _parse_statuses

    # Whitespace around each comma-separated entry is stripped.
    assert _parse_statuses("started, finished, pending") == ["started", "finished", "pending"]
def test_parse_status_turns_empty_string_into_list_containing_empty_string():
    # Renamed: _parse_statuses('') returns [''], not an empty list — the
    # old name contradicted its own assertion. The [''] sentinel is what
    # dao_get_job_stats_for_service checks to mean "no status filter".
    statuses = ""
    from app.job.rest import _parse_statuses
    assert _parse_statuses(statuses) == ['']
def test_get_job_stats_by_service_id_and_job_id(client, sample_job):
    """Fetching one job's stats serializes its fields with zeroed counts."""
    url = "/service/{}/job/job-stats/{}".format(sample_job.service_id, sample_job.id)

    response = client.get(url, headers=[create_authorization_header()])
    assert response.status_code == 200

    body = json.loads(response.get_data(as_text=True))
    assert body["job_id"] == str(sample_job.id)
    assert body["created_at"] == sample_job.created_at.strftime(DATETIME_FORMAT)
    assert not body["scheduled_for"]
    assert body["template_id"] == str(sample_job.template_id)
    assert body["template_version"] == sample_job.template_version
    assert body["service_id"] == str(sample_job.service_id)
    assert body["requested"] == sample_job.notification_count
    # sample_job has no notifications yet, so every outcome counter is zero.
    assert body["sent"] == 0
    assert body["delivered"] == 0
    assert body["failed"] == 0
def test_get_job_stats_with_invalid_job_id_returns404(client, sample_template):
    """An unknown job id under a real service yields the standard 404 payload."""
    path = '/service/{}/job/job-stats/{}'.format(sample_template.service.id, uuid.uuid4())

    response = client.get(path, headers=[create_authorization_header()])

    assert response.status_code == 404
    body = json.loads(response.get_data(as_text=True))
    assert body['result'] == 'error'
    assert body['message'] == 'No result found'
def test_get_job_stats_with_invalid_service_id_returns404(client, sample_job):
    """A real job queried under the wrong service id yields the 404 payload."""
    path = '/service/{}/job/job-stats/{}'.format(uuid.uuid4(), sample_job.id)

    response = client.get(path, headers=[create_authorization_header()])

    assert response.status_code == 404
    body = json.loads(response.get_data(as_text=True))
    assert body['result'] == 'error'
    assert body['message'] == 'No result found'