mirror of
https://github.com/GSA/notifications-api.git
synced 2025-12-20 15:31:15 -05:00
Added new endpoint for job statistics.
The structure has been flattened, so I need to create a new endpoint, start using that endpoint, and then change the name back. Also added template_id and template_version to the get-job-statistics-by-id query.
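
For orientation before the diff: a minimal sketch of how a client might call the new job-stats endpoint, based on the route and serializer added below. The base URL, service id, and bearer token are placeholders rather than values from this commit, and real calls need the API's own authorization scheme.

import requests  # sketch only; not part of this commit

BASE_URL = "https://notify-api.example.gov"            # placeholder
SERVICE_ID = "00000000-0000-0000-0000-000000000000"    # placeholder service UUID

response = requests.get(
    "{}/service/{}/job/job-stats".format(BASE_URL, SERVICE_ID),
    params={"limit_days": 7, "statuses": "pending,finished", "page": 1},
    headers={"Authorization": "Bearer <token>"},        # placeholder auth header
)
payload = response.json()
# Each entry in 'data' mirrors _serialize_job_stats: job_id, original_file_name,
# created_at, scheduled_for, sent, delivered, failed; the envelope adds
# page_size, total and links.
for stat in payload["data"]:
    print(stat["job_id"], stat["sent"], stat["delivered"], stat["failed"])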
@@ -145,20 +145,31 @@ def dao_get_all_letter_jobs():


 @statsd(namespace="dao")
-def dao_get_job_statistics_for_job(job_id):
-    query = db.session.query(
+def dao_get_job_statistics_for_job(service_id, job_id):
+    query = Job.query.join(
+        JobStatistics, Job.id == JobStatistics.job_id
+    ).filter(
+        Job.id == job_id
+    ).join(
+        Template, Template.id == Job.template_id and Template.version == Job.template_version
+    ).add_columns(
         JobStatistics.job_id,
         Job.original_file_name,
         Job.created_at,
         Job.scheduled_for,
+        Job.template_id,
+        Job.template_version,
         JobStatistics.sent,
         JobStatistics.delivered,
         JobStatistics.failed
-    ).filter(JobStatistics.job_id == job_id).filter(Job.id == JobStatistics.job_id)
+    ).filter(
+        Job.service_id == service_id
+    )
     return query.one()


 @statsd(namespace="dao")
-def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=None):
+def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=None, statuses=None):
     query = Job.query.join(
         JobStatistics, Job.id == JobStatistics.job_id
     ).filter(
@@ -167,11 +178,15 @@ def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=N
         JobStatistics.job_id,
         Job.original_file_name,
         Job.created_at,
         Job.scheduled_for,
         JobStatistics.sent,
         JobStatistics.delivered,
         JobStatistics.failed
     )
     if limit_days:
         query = query.filter(Job.created_at >= days_ago(limit_days))
+    if statuses is not None and statuses != ['']:
+        query = query.filter(Job.job_status.in_(statuses))
+
     query = query.order_by(Job.created_at.desc())
-    return query.paginate(page=page, per_page=page_size).items
+    return query.paginate(page=page, per_page=page_size)
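
One consequence of this DAO change worth calling out: dao_get_job_stats_for_service now returns the query's paginate(...) result (a Flask-SQLAlchemy Pagination object) rather than its .items list, which is what lets the REST layer report totals and build pagination links. A hedged sketch of the new calling pattern, assuming an application and database context are already set up; the service id below is a placeholder, not a value from this commit.

from app.dao.jobs_dao import dao_get_job_stats_for_service  # sketch; assumes app/db context

some_service_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

pagination = dao_get_job_stats_for_service(
    service_id=some_service_id,
    page=1,
    page_size=50,
    limit_days=7,
    statuses=['pending', 'finished'],
)
rows = pagination.items        # the job-stats rows for this page
print(pagination.total)        # total matching jobs across all pages
print(pagination.per_page)     # echoes the page_size argument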
@@ -11,8 +11,8 @@ from app.dao.jobs_dao import (
     dao_get_job_by_service_id_and_job_id,
     dao_get_jobs_by_service_id,
     dao_get_future_scheduled_job_by_id_and_service_id,
-    dao_get_notification_outcomes_for_job
-)
+    dao_get_notification_outcomes_for_job,
+    dao_get_job_stats_for_service)

 from app.dao.services_dao import (
     dao_fetch_service_by_id
@@ -117,6 +117,52 @@ def get_jobs_by_service(service_id):
     return jsonify(**get_paginated_jobs(service_id, limit_days, statuses, page))


+@job_blueprint.route('/job-stats', methods=['GET'])
+def get_jobs_for_service(service_id):
+    if request.args.get('limit_days'):
+        try:
+            limit_days = int(request.args['limit_days'])
+        except ValueError:
+            errors = {'limit_days': ['{} is not an integer'.format(request.args['limit_days'])]}
+            raise InvalidRequest(errors, status_code=400)
+    else:
+        limit_days = None
+    statuses = _parse_statuses(request.args.get('statuses', ''))
+    page = int(request.args.get('page', 1))
+
+    pagination = dao_get_job_stats_for_service(service_id=service_id,
+                                               page=page,
+                                               page_size=current_app.config['PAGE_SIZE'],
+                                               limit_days=limit_days,
+                                               statuses=statuses)
+    return jsonify({
+        'data': [_serialize_job_stats(x) for x in pagination.items],
+        'page_size': pagination.per_page,
+        'total': pagination.total,
+        'links': pagination_links(
+            pagination,
+            '.get_jobs_by_service',
+            service_id=service_id
+        )
+    })
+
+
+def _parse_statuses(statuses):
+    return [x.strip() for x in statuses.split(',')]
+
+
+def _serialize_job_stats(stat):
+    return {
+        "job_id": stat.job_id,
+        "original_file_name": stat.original_file_name,
+        "created_at": stat.created_at,
+        "scheduled_for": stat.scheduled_for,
+        "sent": stat.sent,
+        "delivered": stat.delivered,
+        "failed": stat.failed
+    }
+
+
 @job_blueprint.route('', methods=['POST'])
 def create_job(service_id):
     service = dao_fetch_service_by_id(service_id)
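
The statuses handling is the one subtle part of the new view: _parse_statuses always returns a list, and an absent or empty parameter becomes [''], which the DAO's statuses != [''] guard treats as "no status filter". A small self-contained illustration of that round trip; the function body is copied from the diff so the example runs on its own.

def _parse_statuses(statuses):
    return [x.strip() for x in statuses.split(',')]

assert _parse_statuses('started, finished, pending') == ['started', 'finished', 'pending']
# An empty query string becomes [''], which dao_get_job_stats_for_service skips via
# its `statuses is not None and statuses != ['']` check, i.e. no status filter applied.
assert _parse_statuses('') == ['']
assert _parse_statuses('pending') == ['pending']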
@@ -428,26 +428,23 @@ def test_dao_get_job_statistics_for_job(notify_db, notify_db_session, sample_job
     create_or_update_job_sending_statistics(notification_failed)
     update_job_stats_outcome_count(notification_delivered)
     update_job_stats_outcome_count(notification_failed)
-    results = dao_get_job_statistics_for_job(sample_job.id)
-    assert results.job_id == sample_job.id
-    assert results.sent == 3
-    assert results.delivered == 1
-    assert results.failed == 1
+    result = dao_get_job_statistics_for_job(sample_job.service_id, sample_job.id)
+    assert_job_stat(job=sample_job, result=result, sent=3, delivered=1, failed=1, with_template=True)


 def test_dao_get_job_statistics_for_job(notify_db, notify_db_session, sample_service):
     job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)
-    results = dao_get_job_statistics_for_job(job_1.id)
-    assert_job_stat(job_1, results, 2, 1, 0)
+    result = dao_get_job_statistics_for_job(sample_service.id, job_1.id)
+    assert_job_stat(job=job_1, result=result, sent=2, delivered=1, failed=0, with_template=True)

-    results_2 = dao_get_job_statistics_for_job(job_2.id)
-    assert_job_stat(job_2, results_2, 1, 0, 1)
+    result_2 = dao_get_job_statistics_for_job(sample_service.id, job_2.id)
+    assert_job_stat(job=job_2, result=result_2, sent=1, delivered=0, failed=1, with_template=True)


 def test_dao_get_job_stats_for_service(notify_db, notify_db_session, sample_service):
     job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)

-    results = dao_get_job_stats_for_service(sample_service.id)
+    results = dao_get_job_stats_for_service(sample_service.id).items
     assert len(results) == 2
     assert_job_stat(job_2, results[0], 1, 0, 1)
     assert_job_stat(job_1, results[1], 2, 1, 0)
@@ -459,12 +456,12 @@ def test_dao_get_job_stats_for_service_only_returns_stats_for_service(notify_db,
                                            service_name='Another Service')
     job_3, job_4 = stats_set_up(notify_db, notify_db_session, service=another_service)

-    results = dao_get_job_stats_for_service(sample_service.id)
+    results = dao_get_job_stats_for_service(sample_service.id).items
     assert len(results) == 2
     assert_job_stat(job_2, results[0], 1, 0, 1)
     assert_job_stat(job_1, results[1], 2, 1, 0)

-    results = dao_get_job_stats_for_service(another_service.id)
+    results = dao_get_job_stats_for_service(another_service.id).items
     assert len(results) == 2
     assert_job_stat(job_4, results[0], 1, 0, 1)
     assert_job_stat(job_3, results[1], 2, 1, 0)
@@ -475,15 +472,15 @@ def test_dao_get_job_stats_for_service_only_returns_jobs_created_within_limited_
     job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)

     results = dao_get_job_stats_for_service(sample_service.id, limit_days=1)
-    assert len(results) == 1
-    assert_job_stat(job_2, results[0], 1, 0, 1)
+    assert results.total == 1
+    assert_job_stat(job_2, results.items[0], 1, 0, 1)


 def test_dao_get_job_stats_for_service_only_returns_jobs_created_within_limited_days_inclusive(
         notify_db, notify_db_session, sample_service):
     job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)

-    results = dao_get_job_stats_for_service(sample_service.id, limit_days=2)
+    results = dao_get_job_stats_for_service(sample_service.id, limit_days=2).items
     assert len(results) == 2
     assert_job_stat(job_2, results[0], 1, 0, 1)
     assert_job_stat(job_1, results[1], 2, 1, 0)
@@ -493,28 +490,42 @@ def test_dao_get_job_stats_paginates_results(
         notify_db, notify_db_session, sample_service):
     job_1, job_2 = stats_set_up(notify_db, notify_db_session, sample_service)

-    results = dao_get_job_stats_for_service(sample_service.id, page=1, page_size=1)
+    results = dao_get_job_stats_for_service(sample_service.id, page=1, page_size=1).items
     assert len(results) == 1
     assert_job_stat(job_2, results[0], 1, 0, 1)
-    results_2 = dao_get_job_stats_for_service(sample_service.id, page=2, page_size=1)
+    results_2 = dao_get_job_stats_for_service(sample_service.id, page=2, page_size=1).items
     assert len(results_2) == 1
     assert_job_stat(job_1, results_2[0], 2, 1, 0)


-def assert_job_stat(job, result, sent, delivered, failed):
+def test_dao_get_job_returns_jobs_for_status(
+        notify_db, notify_db_session, sample_service):
+    stats_set_up(notify_db, notify_db_session, sample_service)
+
+    results = dao_get_job_stats_for_service(sample_service.id, statuses=['pending'])
+    assert results.total == 1
+    results_2 = dao_get_job_stats_for_service(sample_service.id, statuses=['pending', 'finished'])
+    assert results_2.total == 2
+
+
+def assert_job_stat(job, result, sent, delivered, failed, with_template=False):
     assert result.job_id == job.id
     assert result.original_file_name == job.original_file_name
     assert result.created_at == job.created_at
     assert result.scheduled_for == job.scheduled_for
     assert result.sent == sent
     assert result.delivered == delivered
     assert result.failed == failed
+    if with_template:
+        assert result.template_id == job.template_id
+        assert result.template_version == job.template_version


 def stats_set_up(notify_db, notify_db_session, service):
     job_1 = create_job(notify_db=notify_db, notify_db_session=notify_db_session,
                        service=service, created_at=datetime.utcnow() - timedelta(days=2))
     job_2 = create_job(notify_db=notify_db, notify_db_session=notify_db_session,
-                       service=service, original_file_name='Another job')
+                       service=service, original_file_name='Another job', job_status='finished')
     notification = create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=job_1)
     notification_delivered = create_notification(notify_db=notify_db, notify_db_session=notify_db_session,
                                                  job=job_1, status='delivered')
@@ -762,3 +762,46 @@ def test_get_all_notifications_for_job_returns_csv_format(
     notification = resp['notifications'][0]
     assert set(notification.keys()) == \
         set(['created_at', 'template_type', 'template_name', 'job_name', 'status', 'row_number', 'recipient'])
+
+
+# New endpoint to get job statistics the old tests will be refactored away.
+def test_get_jobs_for_service_new_endpoint(client, notify_db, notify_db_session, sample_template):
+    _setup_jobs(notify_db, notify_db_session, sample_template)
+
+    service_id = sample_template.service.id
+
+    path = '/service/{}/job/job-stats'.format(service_id)
+    auth_header = create_authorization_header()
+    response = client.get(path, headers=[auth_header])
+    assert response.status_code == 200
+    resp_json = json.loads(response.get_data(as_text=True))
+    assert len(resp_json['data']) == 5
+    assert resp_json['data'][0]["job_id"]
+    assert resp_json['data'][0]["created_at"]
+    assert not resp_json['data'][0]["scheduled_for"]
+    assert resp_json['data'][0]["sent"] == 0
+    assert resp_json['data'][0]["delivered"] == 0
+    assert resp_json['data'][0]["failed"] == 0
+
+
+def test_get_jobs_raises_for_bad_limit_days(client, sample_service):
+    path = '/service/{}/job/job-stats'.format(sample_service.id)
+    auth_header = create_authorization_header()
+    response = client.get(path,
+                          query_string={'limit_days': 'bad_number'},
+                          headers=[auth_header])
+    assert response.status_code == 400
+    assert response.get_data(as_text=True) == '{\n "message": {\n "limit_days": [\n ' \
+                                              '"bad_number is not an integer"\n ]\n },\n "result": "error"\n}'
+
+
+def test_parse_status_turns_comma_sep_strings_into_list():
+    statuses = "started, finished, pending"
+    from app.job.rest import _parse_statuses
+    assert _parse_statuses(statuses) == ["started", "finished", "pending"]
+
+
+def test_parse_status_turns_empty_string_into_empty_list():
+    statuses = ""
+    from app.job.rest import _parse_statuses
+    assert _parse_statuses(statuses) == ['']