Mirror of https://github.com/GSA/notifications-api.git, synced 2026-02-02 09:26:08 -05:00
Merge pull request #821 from alphagov/remove-weekly-stats-endpoints
Remove weekly stats endpoints
@@ -224,25 +224,6 @@ def _stats_for_service_query(service_id):
    )


@statsd(namespace="dao")
def dao_fetch_weekly_historical_stats_for_service(service_id):
    monday_of_notification_week = func.date_trunc('week', NotificationHistory.created_at).label('week_start')
    return db.session.query(
        NotificationHistory.notification_type,
        NotificationHistory.status,
        monday_of_notification_week,
        func.count(NotificationHistory.id).label('count')
    ).filter(
        NotificationHistory.service_id == service_id
    ).group_by(
        NotificationHistory.notification_type,
        NotificationHistory.status,
        monday_of_notification_week
    ).order_by(
        asc(monday_of_notification_week), NotificationHistory.status
    ).all()


@statsd(namespace="dao")
def dao_fetch_monthly_historical_stats_for_service(service_id, year):
    monday_of_notification_week = func.date_trunc('week', NotificationHistory.created_at).label('week_start')

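The removed DAO function buckets NotificationHistory rows by the Monday of the week in which they were created, using Postgres date_trunc('week', ...). As a minimal sketch of that bucketing rule in plain Python (week_start is a hypothetical helper written for illustration, not part of the codebase):

from datetime import datetime, timedelta

def week_start(created_at):
    # Mirrors date_trunc('week', created_at): midnight on the Monday of that week.
    monday = created_at.date() - timedelta(days=created_at.weekday())
    return datetime.combine(monday, datetime.min.time())

# Matches the expectations in the removed DAO tests further down:
assert week_start(datetime(2016, 7, 24, 23, 59)) == datetime(2016, 7, 18)  # a Sunday maps to the previous Monday
assert week_start(datetime(2016, 7, 25, 0, 0)) == datetime(2016, 7, 25)    # a Monday maps to itself
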
@@ -25,7 +25,6 @@ from app.dao.services_dao import (
    dao_remove_user_from_service,
    dao_fetch_stats_for_service,
    dao_fetch_todays_stats_for_service,
    dao_fetch_weekly_historical_stats_for_service,
    dao_fetch_todays_stats_for_all_services,
    dao_archive_service,
    fetch_stats_by_date_range_for_all_services,

@@ -270,14 +269,6 @@ def get_all_notifications_for_service(service_id):
    ), 200


@service_blueprint.route('/<uuid:service_id>/notifications/weekly', methods=['GET'])
def get_weekly_notification_stats(service_id):
    service = dao_fetch_service_by_id(service_id)
    stats = dao_fetch_weekly_historical_stats_for_service(service_id)
    stats = statistics.format_weekly_notification_stats(stats, service.created_at)
    return jsonify(data={week.date().isoformat(): statistics for week, statistics in stats.items()})


@service_blueprint.route('/<uuid:service_id>/notifications/monthly', methods=['GET'])
def get_monthly_notification_stats(service_id):
    service = dao_fetch_service_by_id(service_id)

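For reference, the removed weekly route returned JSON keyed by the ISO date of each week's Monday, with per-type requested/delivered/failed counts (see the removed test_get_weekly_notification_stats further down). A hedged sketch of an external caller; the base URL, port and pre-built JWT header are assumptions, not taken from this diff:

import requests

SERVICE_ID = '11111111-1111-1111-1111-111111111111'  # placeholder UUID, not a real service
AUTH_HEADER = {'Authorization': 'Bearer <jwt>'}      # token creation not shown here

resp = requests.get(
    'http://localhost:6011/service/{}/notifications/weekly'.format(SERVICE_ID),
    headers=AUTH_HEADER,
)
resp.raise_for_status()
# e.g. {'data': {'1999-12-27': {'sms': {'requested': 1, 'delivered': 0, 'failed': 0}, ...}}}
print(resp.json()['data'])
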
@@ -15,20 +15,6 @@ def format_statistics(statistics):
    return counts


def format_weekly_notification_stats(statistics, service_created_at):
    preceeding_monday = (service_created_at - timedelta(days=service_created_at.weekday()))
    # turn a datetime into midnight that day http://stackoverflow.com/a/1937636
    preceeding_monday_midnight = datetime.combine(preceeding_monday.date(), datetime.min.time())
    week_dict = {
        week: create_zeroed_stats_dicts()
        for week in _weeks_for_range(preceeding_monday_midnight, datetime.utcnow())
    }
    for row in statistics:
        _update_statuses_from_row(week_dict[row.week_start][row.notification_type], row)

    return week_dict


def create_zeroed_stats_dicts():
    return {
        template_type: {

@@ -43,11 +29,3 @@ def _update_statuses_from_row(update_dict, row):
        update_dict['delivered'] += row.count
    elif row.status in ('failed', 'technical-failure', 'temporary-failure', 'permanent-failure'):
        update_dict['failed'] += row.count


def _weeks_for_range(start, end):
    """
    Generator that yields dates from `start` to `end`, in 7 day intervals. End is inclusive.
    """
    infinite_date_generator = (start + timedelta(days=i) for i in itertools.count(step=7))
    return itertools.takewhile(lambda x: x <= end, infinite_date_generator)

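Because _weeks_for_range is deleted by this change, here is a self-contained copy of it (taken verbatim from the hunk above, with the imports it needs) plus a check drawn from one of the removed test_weeks_for_range cases further down:

import itertools
from datetime import datetime, timedelta

def _weeks_for_range(start, end):
    """
    Generator that yields dates from `start` to `end`, in 7 day intervals. End is inclusive.
    """
    infinite_date_generator = (start + timedelta(days=i) for i in itertools.count(step=7))
    return itertools.takewhile(lambda x: x <= end, infinite_date_generator)

assert list(_weeks_for_range(datetime(2016, 7, 25), datetime(2016, 8, 10))) == [
    datetime(2016, 7, 25), datetime(2016, 8, 1), datetime(2016, 8, 8)
]
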
@@ -19,7 +19,6 @@ from app.dao.services_dao import (
    delete_service_and_all_associated_db_objects,
    dao_fetch_stats_for_service,
    dao_fetch_todays_stats_for_service,
    dao_fetch_weekly_historical_stats_for_service,
    dao_fetch_monthly_historical_stats_for_service,
    fetch_todays_total_message_count,
    dao_fetch_todays_stats_for_all_services,

@@ -56,7 +55,7 @@ from tests.app.conftest import (
def test_should_have_decorated_services_dao_functions():
    assert dao_fetch_weekly_historical_stats_for_service.__wrapped__.__name__ == 'dao_fetch_weekly_historical_stats_for_service'  # noqa
    assert dao_fetch_monthly_historical_stats_for_service.__wrapped__.__name__ == 'dao_fetch_monthly_historical_stats_for_service'  # noqa
    assert dao_fetch_todays_stats_for_service.__wrapped__.__name__ == 'dao_fetch_todays_stats_for_service'  # noqa
    assert dao_fetch_stats_for_service.__wrapped__.__name__ == 'dao_fetch_stats_for_service'  # noqa

@@ -480,35 +479,7 @@ def test_fetch_stats_for_today_only_includes_today(notify_db, notify_db_session,
    assert stats['created'] == 1


def test_fetch_weekly_historical_stats_separates_weeks(notify_db, notify_db_session, sample_template):
    notification_history = functools.partial(
        create_notification_history,
        notify_db,
        notify_db_session,
        sample_template
    )
    week_53_last_yr = notification_history(created_at=datetime(2016, 1, 1))
    week_1_last_yr = notification_history(created_at=datetime(2016, 1, 5))
    last_sunday = notification_history(created_at=datetime(2016, 7, 24, 23, 59))
    last_monday_morning = notification_history(created_at=datetime(2016, 7, 25, 0, 0))
    last_monday_evening = notification_history(created_at=datetime(2016, 7, 25, 23, 59))

    with freeze_time('Wed 27th July 2016'):
        today = notification_history(created_at=datetime.now(), status='delivered')
        ret = dao_fetch_weekly_historical_stats_for_service(sample_template.service_id)

    assert [(row.week_start, row.status) for row in ret] == [
        (datetime(2015, 12, 28), 'created'),
        (datetime(2016, 1, 4), 'created'),
        (datetime(2016, 7, 18), 'created'),
        (datetime(2016, 7, 25), 'created'),
        (datetime(2016, 7, 25), 'delivered')
    ]
    assert ret[-2].count == 2
    assert ret[-1].count == 1


def test_fetch_monthly_historical_stats_separates_weeks(notify_db, notify_db_session, sample_template):
def test_fetch_monthly_historical_stats_separates_months(notify_db, notify_db_session, sample_template):
    notification_history = functools.partial(
        create_notification_history,
        notify_db,

@@ -556,52 +527,6 @@ def test_fetch_monthly_historical_stats_separates_weeks(notify_db, notify_db_ses
    }


def test_fetch_weekly_historical_stats_ignores_second_service(notify_db, notify_db_session, service_factory):
    template_1 = service_factory.get('1').templates[0]
    template_2 = service_factory.get('2').templates[0]

    notification_history = functools.partial(
        create_notification_history,
        notify_db,
        notify_db_session
    )
    last_sunday = notification_history(template_1, created_at=datetime(2016, 7, 24, 23, 59))
    last_monday_morning = notification_history(template_2, created_at=datetime(2016, 7, 25, 0, 0))

    with freeze_time('Wed 27th July 2016'):
        ret = dao_fetch_weekly_historical_stats_for_service(template_1.service_id)

    assert len(ret) == 1
    assert ret[0].week_start == datetime(2016, 7, 18)
    assert ret[0].count == 1


def test_fetch_weekly_historical_stats_separates_types(notify_db,
                                                       notify_db_session,
                                                       sample_template,
                                                       sample_email_template):
    notification_history = functools.partial(
        create_notification_history,
        notify_db,
        notify_db_session,
        created_at=datetime(2016, 7, 25)
    )

    notification_history(sample_template)
    notification_history(sample_email_template)

    with freeze_time('Wed 27th July 2016'):
        ret = dao_fetch_weekly_historical_stats_for_service(sample_template.service_id)

    assert len(ret) == 2
    assert ret[0].week_start == datetime(2016, 7, 25)
    assert ret[0].count == 1
    assert ret[0].notification_type == 'email'
    assert ret[1].week_start == datetime(2016, 7, 25)
    assert ret[1].count == 1
    assert ret[1].notification_type == 'sms'


def test_dao_fetch_todays_total_message_count_returns_count_for_today(notify_db,
                                                                      notify_db_session,
                                                                      sample_notification):

@@ -1202,26 +1202,6 @@ def test_get_detailed_service(notify_db, notify_db_session, notify_api, sample_s
    assert service['statistics']['sms'] == stats


def test_get_weekly_notification_stats(notify_api, notify_db, notify_db_session):
    with freeze_time('2000-01-01T12:00:00'):
        noti = create_sample_notification(notify_db, notify_db_session)
    with notify_api.test_request_context(), notify_api.test_client() as client, freeze_time('2000-01-02T12:00:00'):
        resp = client.get(
            '/service/{}/notifications/weekly'.format(noti.service_id),
            headers=[create_authorization_header()]
        )

        assert resp.status_code == 200
        data = json.loads(resp.get_data(as_text=True))['data']
        assert data == {
            '1999-12-27': {
                'sms': {'requested': 1, 'delivered': 0, 'failed': 0},
                'email': {'requested': 0, 'delivered': 0, 'failed': 0},
                'letter': {'delivered': 0, 'failed': 0, 'requested': 0}
            }
        }


@pytest.mark.parametrize(
    'url, expected_status, expected_json', [
        (

@@ -6,13 +6,10 @@ from freezegun import freeze_time
from app.service.statistics import (
    format_statistics,
    _weeks_for_range,
    create_zeroed_stats_dicts,
    format_weekly_notification_stats
)

StatsRow = collections.namedtuple('row', ('notification_type', 'status', 'count'))
WeeklyStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'week_start', 'count'))


# email_counts and sms_counts are 3-tuple of requested, delivered, failed

@@ -57,18 +54,6 @@ def test_format_statistics(stats, email_counts, sms_counts, letter_counts):
    }


@pytest.mark.parametrize('start,end,dates', [
    (datetime(2016, 7, 25), datetime(2016, 7, 25), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 7, 28), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 1), [datetime(2016, 7, 25), datetime(2016, 8, 1)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 10), [
        datetime(2016, 7, 25), datetime(2016, 8, 1), datetime(2016, 8, 8)
    ])
])
def test_weeks_for_range(start, end, dates):
    assert list(_weeks_for_range(start, end)) == dates


def test_create_zeroed_stats_dicts():
    assert create_zeroed_stats_dicts() == {
        'sms': {'requested': 0, 'delivered': 0, 'failed': 0},

@@ -79,78 +64,3 @@ def test_create_zeroed_stats_dicts():
def _stats(requested, delivered, failed):
    return {'requested': requested, 'delivered': delivered, 'failed': failed}


@freeze_time('2016-07-28T12:00:00')
@pytest.mark.parametrize('created_at, statistics, expected_results', [
    # with no stats and just today, return this week's stats
    (datetime(2016, 7, 28), [], {
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        }
    }),
    # with a random created time, still create the dict for midnight
    (datetime(2016, 7, 28, 12, 13, 14), [], {
        datetime(2016, 7, 25, 0, 0, 0): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        }
    }),
    # with no stats but a service
    (datetime(2016, 7, 14), [], {
        datetime(2016, 7, 11): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        },
        datetime(2016, 7, 18): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        }
    }),
    # two stats for same week dont re-zero each other
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('email', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('letter', 'created', datetime(2016, 7, 18), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(1, 0, 0),
            'email': _stats(1, 0, 0),
            'letter': _stats(1, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        }
    }),
    # two stats for same type are added together
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'delivered', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 25), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(2, 1, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(1, 0, 0),
            'email': _stats(0, 0, 0),
            'letter': _stats(0, 0, 0)
        }
    })
])
def test_format_weekly_notification_stats(statistics, created_at, expected_results):
    assert format_weekly_notification_stats(statistics, created_at) == expected_results