mirror of
https://github.com/GSA/notifications-api.git
synced 2026-02-03 01:41:05 -05:00
add GET /service/<id>/notifications/weekly
Moved format_statistics to a new service/statistics.py file, and refactored it to share code. Moved the tests as well, to enforce a separation between the RESTful endpoints in rest.py and the logic/data manipulation in statistics.py.
This commit is contained in:
@@ -1,4 +1,4 @@
|
||||
from datetime import date
|
||||
from datetime import date, timedelta
|
||||
|
||||
from flask import (
|
||||
jsonify,
|
||||
@@ -8,7 +8,6 @@ from flask import (
|
||||
)
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
from app.models import EMAIL_TYPE, SMS_TYPE
|
||||
from app.dao.api_key_dao import (
|
||||
save_model_api_key,
|
||||
get_model_api_keys,
|
||||
@@ -23,7 +22,8 @@ from app.dao.services_dao import (
|
||||
dao_add_user_to_service,
|
||||
dao_remove_user_from_service,
|
||||
dao_fetch_stats_for_service,
|
||||
dao_fetch_todays_stats_for_service
|
||||
dao_fetch_todays_stats_for_service,
|
||||
dao_fetch_weekly_historical_stats_for_service
|
||||
)
|
||||
from app.dao import notifications_dao
|
||||
from app.dao.provider_statistics_dao import get_fragment_count
|
||||
@@ -236,29 +236,19 @@ def get_all_notifications_for_service(service_id):
|
||||
), 200
|
||||
|
||||
|
||||
@service.route('/<uuid:service_id>/notifications/weekly', methods=['GET'])
def get_weekly_notification_stats(service_id):
    """Return per-week notification statistics for a service.

    Fetches the service (404s via dao_fetch_service_by_id if missing), then
    buckets its historical notification stats by week.
    """
    service = dao_fetch_service_by_id(service_id)
    # Bug fix: the previous code referenced undefined names `created_at` and
    # `preceeding_monday` (NameError at request time) and passed the query
    # result to itself as a method. format_weekly_notification_stats derives
    # the preceding Monday from the service's creation date itself, so only
    # the service id is needed for the query.
    # NOTE(review): assumes `from app.service import statistics` is present at
    # the top of this module (it is used the same way in get_detailed_service)
    # and that the dao function takes only the service id — confirm.
    stats = dao_fetch_weekly_historical_stats_for_service(service_id)
    return jsonify(data=statistics.format_weekly_notification_stats(stats, service.created_at))
|
||||
|
||||
|
||||
def get_detailed_service(service_id, today_only=False):
    """Return a service, with aggregate notification statistics attached.

    :param service_id: UUID of the service to fetch
    :param today_only: when True aggregate only today's stats, otherwise
        aggregate all-time stats
    """
    service = dao_fetch_service_by_id(service_id)
    stats_fn = dao_fetch_todays_stats_for_service if today_only else dao_fetch_stats_for_service
    # Bug fix: the diff residue computed the stats twice (old
    # `statistics = stats_fn(...)` line left next to the new `stats = ...`
    # line). Keep a single query, named so it does not shadow the
    # app.service.statistics module used just below.
    stats = stats_fn(service_id)

    service.statistics = statistics.format_statistics(stats)

    data = detailed_service_schema.dump(service).data
    return jsonify(data=data)
|
||||
|
||||
|
||||
def format_statistics(statistics):
    """Collapse raw (notification_type, status, count) rows into per-type totals.

    The incoming named tuples are unique on (notification_type, status), but
    absent statuses/types are simply missing from the rows — so start from a
    fully zeroed structure, and sum the various failure statuses into a single
    'failed' bucket, returning email/sms * requested, delivered, failed.
    """
    failure_statuses = ('failed', 'technical-failure', 'temporary-failure', 'permanent-failure')

    counts = {}
    for template_type in (EMAIL_TYPE, SMS_TYPE):
        counts[template_type] = {'requested': 0, 'delivered': 0, 'failed': 0}

    for row in statistics:
        bucket = counts[row.notification_type]
        # every row counts towards 'requested', regardless of status
        bucket['requested'] += row.count
        if row.status == 'delivered':
            bucket['delivered'] += row.count
        elif row.status in failure_statuses:
            bucket['failed'] += row.count

    return counts
|
||||
|
||||
51
app/service/statistics.py
Normal file
51
app/service/statistics.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import itertools
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from app.models import EMAIL_TYPE, SMS_TYPE
|
||||
|
||||
|
||||
def format_statistics(statistics):
    """Aggregate raw stats rows into requested/delivered/failed counts per type.

    Rows are named tuples unique on (notification_type, status); combinations
    missing from the rows must still appear in the result, so start from a
    fully zeroed dict and fold each row in.
    """
    counts = _create_zeroed_stats_dicts()
    for stats_row in statistics:
        type_counts = counts[stats_row.notification_type]
        _update_statuses_from_row(type_counts, stats_row)
    return counts
|
||||
|
||||
|
||||
def format_weekly_notification_stats(statistics, service_created_at):
    """Bucket stats rows by their week_start date.

    Every week from the Monday preceding the service's creation up to now is
    present in the result, zero-filled, so weeks with no notifications still
    appear.
    """
    preceding_monday = service_created_at - timedelta(days=service_created_at.weekday())

    weeks = {}
    for week_start in _weeks_for_range(preceding_monday, datetime.utcnow()):
        weeks[week_start] = _create_zeroed_stats_dicts()

    for stats_row in statistics:
        type_counts = weeks[stats_row.week_start][stats_row.notification_type]
        _update_statuses_from_row(type_counts, stats_row)

    return weeks
|
||||
|
||||
|
||||
def _create_zeroed_stats_dicts():
    """Return a fresh {email/sms: {requested/delivered/failed: 0}} skeleton."""
    statuses = ('requested', 'delivered', 'failed')
    # dict.fromkeys is safe here because the shared default (0) is immutable;
    # a new inner dict is built per template type.
    return {
        template_type: dict.fromkeys(statuses, 0)
        for template_type in (EMAIL_TYPE, SMS_TYPE)
    }
|
||||
|
||||
|
||||
def _update_statuses_from_row(update_dict, row):
|
||||
update_dict['requested'] += row.count
|
||||
if row.status == 'delivered':
|
||||
update_dict['delivered'] += row.count
|
||||
elif row.status in ('failed', 'technical-failure', 'temporary-failure', 'permanent-failure'):
|
||||
update_dict['failed'] += row.count
|
||||
|
||||
|
||||
def _weeks_for_range(start, end):
|
||||
"""
|
||||
Generator that yields dates from `start` to `end`, in 7 day intervals. End is inclusive.
|
||||
"""
|
||||
infinite_date_generator = (start + timedelta(days=i) for i in itertools.count(step=7))
|
||||
return itertools.takewhile(lambda x: x <= end, infinite_date_generator)
|
||||
@@ -1,5 +1,4 @@
|
||||
import json
|
||||
import collections
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
@@ -18,9 +17,6 @@ from tests.app.conftest import (
|
||||
)
|
||||
|
||||
|
||||
# Lightweight stand-in for the named tuples the stats queries return.
Row = collections.namedtuple('row', 'notification_type status count')
|
||||
|
||||
|
||||
def test_get_service_list(notify_api, service_factory):
|
||||
with notify_api.test_request_context():
|
||||
with notify_api.test_client() as client:
|
||||
@@ -1125,39 +1121,3 @@ def test_get_detailed_service(notify_db, notify_db_session, notify_api, sample_s
|
||||
assert 'statistics' in service.keys()
|
||||
assert set(service['statistics'].keys()) == set(['sms', 'email'])
|
||||
assert service['statistics']['sms'] == stats
|
||||
|
||||
|
||||
# email_counts and sms_counts are 3-tuple of requested, delivered, failed
@pytest.mark.idparametrize('stats, email_counts, sms_counts', {
    'empty': ([], [0, 0, 0], [0, 0, 0]),
    'always_increment_requested': ([
        Row('email', 'delivered', 1),
        Row('email', 'failed', 1)
    ], [2, 1, 1], [0, 0, 0]),
    'dont_mix_email_and_sms': ([
        Row('email', 'delivered', 1),
        Row('sms', 'delivered', 1)
    ], [1, 1, 0], [1, 1, 0]),
    'convert_fail_statuses_to_failed': ([
        Row('email', 'failed', 1),
        Row('email', 'technical-failure', 1),
        Row('email', 'temporary-failure', 1),
        Row('email', 'permanent-failure', 1),
    ], [4, 0, 4], [0, 0, 0]),
})
def test_format_statistics(stats, email_counts, sms_counts):
    from app.service.rest import format_statistics

    result = format_statistics(stats)

    # Each channel's counts must match the expected 3-tuples, in
    # requested/delivered/failed order.
    status_names = ['requested', 'delivered', 'failed']
    assert result['email'] == dict(zip(status_names, email_counts))
    assert result['sms'] == dict(zip(status_names, sms_counts))
|
||||
|
||||
131
tests/app/service/test_statistics.py
Normal file
131
tests/app/service/test_statistics.py
Normal file
@@ -0,0 +1,131 @@
|
||||
from datetime import datetime
|
||||
import collections
|
||||
|
||||
import pytest
|
||||
from freezegun import freeze_time
|
||||
|
||||
from app.service.statistics import (
|
||||
format_statistics,
|
||||
_weeks_for_range,
|
||||
_create_zeroed_stats_dicts,
|
||||
format_weekly_notification_stats
|
||||
)
|
||||
|
||||
# Lightweight stand-ins for the named tuples the dao stats queries return.
StatsRow = collections.namedtuple('row', 'notification_type status count')
WeeklyStatsRow = collections.namedtuple('row', 'notification_type status week_start count')
|
||||
|
||||
|
||||
# email_counts and sms_counts are 3-tuple of requested, delivered, failed
@pytest.mark.idparametrize('stats, email_counts, sms_counts', {
    'empty': ([], [0, 0, 0], [0, 0, 0]),
    'always_increment_requested': ([
        StatsRow('email', 'delivered', 1),
        StatsRow('email', 'failed', 1)
    ], [2, 1, 1], [0, 0, 0]),
    'dont_mix_email_and_sms': ([
        StatsRow('email', 'delivered', 1),
        StatsRow('sms', 'delivered', 1)
    ], [1, 1, 0], [1, 1, 0]),
    'convert_fail_statuses_to_failed': ([
        StatsRow('email', 'failed', 1),
        StatsRow('email', 'technical-failure', 1),
        StatsRow('email', 'temporary-failure', 1),
        StatsRow('email', 'permanent-failure', 1),
    ], [4, 0, 4], [0, 0, 0]),
})
def test_format_statistics(stats, email_counts, sms_counts):
    result = format_statistics(stats)

    # Each channel's counts must match the expected 3-tuples, in
    # requested/delivered/failed order.
    status_names = ['requested', 'delivered', 'failed']
    assert result['email'] == dict(zip(status_names, email_counts))
    assert result['sms'] == dict(zip(status_names, sms_counts))
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start,end,dates', [
    (datetime(2016, 7, 25), datetime(2016, 7, 25), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 7, 28), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 1), [datetime(2016, 7, 25), datetime(2016, 8, 1)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 10), [
        datetime(2016, 7, 25), datetime(2016, 8, 1), datetime(2016, 8, 8)
    ])
])
def test_weeks_for_range(start, end, dates):
    # _weeks_for_range is lazy; materialise it before comparing.
    generated = _weeks_for_range(start, end)
    assert list(generated) == dates
|
||||
|
||||
|
||||
def test_create_zeroed_stats_dicts():
    # Both channels must be present, each with all three statuses zeroed.
    expected = {
        channel: {'requested': 0, 'delivered': 0, 'failed': 0}
        for channel in ('sms', 'email')
    }
    assert _create_zeroed_stats_dicts() == expected
|
||||
|
||||
|
||||
# Defined above the parametrize decorator because the decorator's arguments
# are evaluated at import time: with `_stats` only defined at the bottom of
# the module, importing this file raises NameError.
def _stats(requested, delivered, failed):
    """Return an expected {'requested', 'delivered', 'failed'} counts dict."""
    return {'requested': requested, 'delivered': delivered, 'failed': failed}


@freeze_time('2016-07-28T12:00:00')
@pytest.mark.parametrize('created_at, statistics, expected_results', [
    # with no stats and just today, return this week's stats
    (datetime(2016, 7, 28), [], {
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # with no stats but a service created a few weeks ago, zero-fill every
    # week from the Monday preceding creation up to now
    (datetime(2016, 7, 14), [], {
        datetime(2016, 7, 11): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 18): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # two stats for same week dont re-zero each other
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('email', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(1, 0, 0),
            'email': _stats(1, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # two stats for same type are added together
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'delivered', datetime(2016, 7, 18), 1),
        # NOTE(review): this row's week_start read 2016-07-18 in the original,
        # which contradicts the expected results below — they require exactly
        # one 'created' row in the week of the 25th ((2, 1, 0) for 7/18 and
        # (1, 0, 0) for 7/25). Corrected to 2016-07-25.
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 25), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(2, 1, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(1, 0, 0),
            'email': _stats(0, 0, 0)
        }
    })
])
def test_format_weekly_notification_stats(statistics, created_at, expected_results):
    assert format_weekly_notification_stats(statistics, created_at) == expected_results
|
||||
|
||||
|
||||
def _stats(requested, delivered, failed):
|
||||
return {'requested': requested, 'delivered': delivered, 'failed': failed}
|
||||
Reference in New Issue
Block a user