group results by service using itertools

This allows us to reuse the existing format_statistics function.
Leo Hemsted
2016-08-19 16:36:20 +01:00
parent ebb13a1251
commit 00d19f63f0
3 changed files with 14 additions and 10 deletions
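The pattern at the heart of this change: sort today's notification rows by service_id, then let itertools.groupby slice them into one run of rows per service, so each run can be handed to the existing formatter unchanged. A minimal sketch of that idea, with made-up row tuples and a stand-in for the real statistics.format_statistics (none of this is the app's actual code):

```python
# Sketch of the grouping pattern; Row and the formatter are illustrative stand-ins.
import itertools
from collections import namedtuple

Row = namedtuple('Row', ['service_id', 'notification_type', 'status', 'count'])

# Rows as a query ordered by service_id might return them.
rows = [
    Row(1, 'sms', 'delivered', 10),
    Row(1, 'email', 'failed', 2),
    Row(2, 'sms', 'sending', 5),
]

def format_statistics(rows):
    # Stand-in for the real formatter: tally counts per (type, status).
    return {(r.notification_type, r.status): r.count for r in rows}

# groupby yields (key, iterator-of-rows) pairs, one per consecutive run of keys.
stats_by_service = {
    service_id: format_statistics(group)
    for service_id, group in itertools.groupby(rows, key=lambda r: r.service_id)
}
print(stats_by_service)
# {1: {('sms', 'delivered'): 10, ('email', 'failed'): 2},
#  2: {('sms', 'sending'): 5}}
```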


@@ -187,6 +187,7 @@ def dao_fetch_todays_stats_for_all_services():
     return db.session.query(
         Notification.notification_type,
         Notification.status,
+        Notification.service_id,
         func.count(Notification.id).label('count')
     ).select_from(
         Service
@@ -199,4 +200,6 @@ def dao_fetch_todays_stats_for_all_services():
         Notification.notification_type,
         Notification.status,
         Notification.service_id
+    ).order_by(
+        Notification.service_id
     )

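The order_by(Notification.service_id) added above is what makes the grouping safe: itertools.groupby only merges adjacent equal keys, so unsorted rows would produce multiple groups for the same service. A two-line demonstration:

```python
import itertools

ids = [1, 2, 1]  # same service appearing non-consecutively
print([k for k, _ in itertools.groupby(ids)])          # [1, 2, 1] -- split groups
print([k for k, _ in itertools.groupby(sorted(ids))])  # [1, 2]
```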

@@ -1,3 +1,5 @@
+import itertools
+
 from flask import (
     jsonify,
     request,
@@ -21,7 +23,8 @@ from app.dao.services_dao import (
     dao_remove_user_from_service,
     dao_fetch_stats_for_service,
     dao_fetch_todays_stats_for_service,
-    dao_fetch_weekly_historical_stats_for_service
+    dao_fetch_weekly_historical_stats_for_service,
+    dao_fetch_todays_stats_for_all_services
 )
 from app.dao import notifications_dao
 from app.dao.provider_statistics_dao import get_fragment_count
@@ -51,9 +54,9 @@ def get_services():
     user_id = request.args.get('user_id', None)
     if user_id:
         services = dao_fetch_all_services_by_user(user_id)
+    elif request.args.get('detailed') == 'True':
+        return get_detailed_services()
     else:
-        if request.args.get('detailed') == 'True':
-            return get_detailed_services()
         services = dao_fetch_all_services()
     data = service_schema.dump(services, many=True).data
     return jsonify(data=data)
@@ -251,12 +254,10 @@ def get_detailed_service(service_id, today_only=False):
 def get_detailed_services():
     services = {service.id: service for service in dao_fetch_all_services()}
-    stats = dao_fetch_todays_stats_for_all_services(service_id)
-    for row in stats:
-        services[row.service_id].statistics
-    # todo: how do we separate rows of statistics by service?
-    service.statistics = statistics.format_statistics(stats)
-    data = detailed_service_schema.dump(service).data
+    stats = dao_fetch_todays_stats_for_all_services()
+    for service_id, rows in itertools.groupby(stats, lambda x: x.service_id):
+        services[service_id].statistics = statistics.format_statistics(rows)
+    data = detailed_service_schema.dump(service, many=True).data
     return jsonify(data=data)

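One subtlety of the new loop in get_detailed_services: each rows group that groupby yields is a lazy view over the underlying result set and is invalidated as soon as the loop advances, so it must be consumed immediately, which passing it straight into format_statistics does. This is a general itertools.groupby property, not something the diff spells out; if a group were needed later, it would have to be copied first:

```python
import itertools

pairs = [(k, g) for k, g in itertools.groupby('aabb')]
print([list(g) for _, g in pairs])   # [[], []] -- the lazy groups are already spent
pairs = [(k, list(g)) for k, g in itertools.groupby('aabb')]
print(pairs)                         # [('a', ['a', 'a']), ('b', ['b', 'b'])]
```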

@@ -207,7 +207,7 @@ def test_normal_api_key_returns_notifications_created_from_jobs_and_from_api(
     notifications = json.loads(response.get_data(as_text=True))['notifications']
     assert len(notifications) == 2
-    assert set(x['id'] for x in notifications) == set([str(sample_notification.id), str(api_notification.id)])
+    assert set(x['id'] for x in notifications) == {str(sample_notification.id), str(api_notification.id)}


 @pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST])
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST]) @pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST])