Merge branch 'master' into scheduled-delivery-of-jobs

Conflicts:
	app/dao/jobs_dao.py
	tests/app/dao/test_jobs_dao.py
	tests/app/job/test_rest.py
This commit is contained in:
Martyn Inglis
2016-08-25 14:53:00 +01:00
21 changed files with 403 additions and 670 deletions

View File

@@ -58,7 +58,7 @@ def create_app(app_name=None):
encryption.init_app(application)
clients.init_app(sms_clients=[firetext_client, mmg_client, loadtest_client], email_clients=[aws_ses_client])
from app.service.rest import service as service_blueprint
from app.service.rest import service_blueprint
from app.user.rest import user as user_blueprint
from app.template.rest import template as template_blueprint
from app.status.healthcheck import status as status_blueprint
@@ -66,7 +66,6 @@ def create_app(app_name=None):
from app.notifications.rest import notifications as notifications_blueprint
from app.invite.rest import invite as invite_blueprint
from app.accept_invite.rest import accept_invite
from app.notifications_statistics.rest import notifications_statistics as notifications_statistics_blueprint
from app.template_statistics.rest import template_statistics as template_statistics_blueprint
from app.events.rest import events as events_blueprint
from app.provider_details.rest import provider_details as provider_details_blueprint
@@ -81,7 +80,7 @@ def create_app(app_name=None):
application.register_blueprint(job_blueprint)
application.register_blueprint(invite_blueprint)
application.register_blueprint(accept_invite, url_prefix='/invite')
application.register_blueprint(notifications_statistics_blueprint)
application.register_blueprint(template_statistics_blueprint)
application.register_blueprint(events_blueprint)
application.register_blueprint(provider_details_blueprint, url_prefix='/provider-details')

View File

@@ -1,9 +1,25 @@
from datetime import date, timedelta, datetime
from sqlalchemy import desc, asc, cast, Date as sql_date
from app import db
from app.dao import days_ago
from app.models import Job
from app.models import Job, NotificationHistory
from app.statsd_decorators import statsd
from sqlalchemy import func, asc
@statsd(namespace="dao")
def dao_get_notification_outcomes_for_job(service_id, job_id):
    """Return (count, status) rows for one job's notifications, grouped by status.

    Rows are ordered by status ascending; counts come from NotificationHistory.
    """
    status_column = NotificationHistory.status
    counted = db.session.query(
        func.count(status_column).label('count'),
        status_column.label('status')
    )
    scoped = counted.filter(
        NotificationHistory.service_id == service_id,
        NotificationHistory.job_id == job_id
    )
    return scoped.group_by(status_column).order_by(asc(status_column)).all()
def dao_get_job_by_service_id_and_job_id(service_id, job_id):

View File

@@ -7,9 +7,8 @@ from datetime import (
from flask import current_app
from werkzeug.datastructures import MultiDict
from sqlalchemy import (desc, func, Integer, or_, and_, asc)
from sqlalchemy import (desc, func, or_, and_, asc)
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.expression import cast
from app import db
from app.dao import days_ago
@@ -34,29 +33,14 @@ from app.dao.dao_utils import transactional
from app.statsd_decorators import statsd
@statsd(namespace="dao")
def dao_get_notification_statistics_for_service(service_id, limit_days=None):
    """Fetch per-day notification statistics for a service, newest day first.

    When limit_days is given, only rows from the last limit_days days are
    included (via the days_ago helper).
    """
    query = NotificationStatistics.query.filter(
        NotificationStatistics.service_id == service_id
    )
    if limit_days is not None:
        query = query.filter(NotificationStatistics.day >= days_ago(limit_days))
    return query.order_by(desc(NotificationStatistics.day)).all()
@statsd(namespace="dao")
def dao_get_notification_statistics_for_service_and_day(service_id, day):
    """Return the statistics row for a service on a given day, or None.

    Only used by stat-updating code in tasks.py.
    """
    matching = NotificationStatistics.query.filter(
        NotificationStatistics.service_id == service_id,
        NotificationStatistics.day == day
    )
    return matching.order_by(desc(NotificationStatistics.day)).first()
@statsd(namespace="dao")
def dao_get_notification_statistics_for_day(day):
    """Return every service's statistics row recorded for the given day."""
    rows = NotificationStatistics.query.filter(NotificationStatistics.day == day)
    return rows.all()
@statsd(namespace="dao")
def dao_get_potential_notification_statistics_for_day(day):
all_services = db.session.query(
@@ -105,34 +89,6 @@ def create_notification_statistics_dict(service_id, day):
}
@statsd(namespace="dao")
def dao_get_7_day_agg_notification_statistics_for_service(service_id,
                                                          date_from,
                                                          week_count=52):
    """Aggregate a service's notification statistics into 7-day buckets.

    Buckets are counted from date_from and at most week_count buckets are
    returned, most recent bucket first. Each row is
    (week_index, emails_requested, emails_delivered, emails_failed,
     sms_requested, sms_delivered, sms_failed).
    """
    doy = date_from.timetuple().tm_yday
    # Day-of-year distance from date_from, folded into 7-day buckets.
    week_index = func.floor((func.extract('doy', NotificationStatistics.day) - doy) / 7)
    summed = [
        cast(func.sum(column), Integer)
        for column in (
            NotificationStatistics.emails_requested,
            NotificationStatistics.emails_delivered,
            NotificationStatistics.emails_failed,
            NotificationStatistics.sms_requested,
            NotificationStatistics.sms_delivered,
            NotificationStatistics.sms_failed,
        )
    ]
    query = db.session.query(cast(week_index, Integer), *summed)
    query = query.filter(NotificationStatistics.service_id == service_id)
    query = query.filter(NotificationStatistics.day >= date_from)
    query = query.filter(
        NotificationStatistics.day < date_from + timedelta(days=7 * week_count)
    )
    return query.group_by(week_index).order_by(desc(week_index)).limit(week_count)
@statsd(namespace="dao")
def dao_get_template_usage(service_id, limit_days=None):
table = NotificationHistory

View File

@@ -180,3 +180,25 @@ def dao_fetch_weekly_historical_stats_for_service(service_id):
).order_by(
asc(monday_of_notification_week), NotificationHistory.status
).all()
@statsd(namespace='dao')
def dao_fetch_todays_stats_for_all_services():
    """Count today's notifications for every service.

    Produces rows of (notification_type, status, service_id, count), grouped
    by those three columns and ordered by service_id.
    """
    group_columns = (
        Notification.notification_type,
        Notification.status,
        Notification.service_id,
    )
    selected = list(group_columns) + [func.count(Notification.id).label('count')]
    query = db.session.query(*selected).select_from(Service).join(Notification)
    # Compare on the date part only so all of today's notifications match.
    query = query.filter(func.date(Notification.created_at) == date.today())
    return query.group_by(*group_columns).order_by(Notification.service_id)

View File

@@ -8,7 +8,8 @@ from flask import (
from app.dao.jobs_dao import (
dao_create_job,
dao_get_job_by_service_id_and_job_id,
dao_get_jobs_by_service_id
dao_get_jobs_by_service_id,
dao_get_notification_outcomes_for_job
)
from app.dao.services_dao import (
@@ -44,7 +45,11 @@ register_errors(job)
@job.route('/<job_id>', methods=['GET'])
def get_job_by_service_and_job_id(service_id, job_id):
    """Return a single job plus per-status notification outcome counts."""
    fetched_job = dao_get_job_by_service_id_and_job_id(service_id, job_id)
    outcome_rows = dao_get_notification_outcomes_for_job(service_id, job_id)
    data = job_schema.dump(fetched_job).data
    # Each row is (count, status); expose it as a list of labelled dicts.
    data['statistics'] = [
        {'status': row[1], 'count': row[0]} for row in outcome_rows
    ]
    return jsonify(data=data)

View File

@@ -1,105 +0,0 @@
from datetime import (
date,
timedelta,
datetime
)
from flask import (
Blueprint,
jsonify,
request,
current_app
)
from app import DATE_FORMAT
from app.dao.notifications_dao import (
dao_get_notification_statistics_for_service,
dao_get_7_day_agg_notification_statistics_for_service,
dao_get_notification_statistics_for_service_and_day
)
from app.schemas import (
notifications_statistics_schema,
week_aggregate_notification_statistics_schema
)
notifications_statistics = Blueprint(
'notifications-statistics',
__name__, url_prefix='/service/<service_id>/notifications-statistics'
)
from app.errors import (
register_errors,
InvalidRequest
)
register_errors(notifications_statistics)
@notifications_statistics.route('', methods=['GET'])
def get_all_notification_statistics_for_service(service_id):
    """Return daily notification statistics for a service.

    Accepts an optional ``limit_days`` query argument restricting how far
    back to look; a non-integer value yields a 400 InvalidRequest.
    """
    if request.args.get('limit_days'):
        try:
            statistics = dao_get_notification_statistics_for_service(
                service_id=service_id,
                limit_days=int(request.args['limit_days'])
            )
        # Fix: the exception was bound to an unused name `e`.
        except ValueError:
            message = '{} is not an integer'.format(request.args['limit_days'])
            errors = {'limit_days': [message]}
            raise InvalidRequest(errors, status_code=400)
    else:
        statistics = dao_get_notification_statistics_for_service(service_id=service_id)
    # Fix: use `.data` like the sibling handlers instead of unpacking and
    # discarding the marshmallow errors tuple element.
    data = notifications_statistics_schema.dump(statistics, many=True).data
    return jsonify(data=data)
@notifications_statistics.route('/day/<day>', methods=['GET'])
def get_notification_statistics_for_service_for_day(service_id, day):
    """Return one service's statistics for one day.

    400 if the day is not in DATE_FORMAT; 404 if no row exists for that day.
    """
    try:
        datetime.strptime(day, DATE_FORMAT)
    except ValueError:
        raise InvalidRequest('Invalid date {}'.format(day), status_code=400)

    service_stats = dao_get_notification_statistics_for_service_and_day(
        service_id,
        day
    )
    if not service_stats:
        not_found = 'No statistics found for service id: {} on day: {} '.format(service_id, day)
        raise InvalidRequest({'not found': [not_found]}, status_code=404)
    return jsonify(data=notifications_statistics_schema.dump(service_stats).data)
@notifications_statistics.route('/seven_day_aggregate')
def get_notification_statistics_for_service_seven_day_aggregate(service_id):
    """Return weekly (7-day-bucket) aggregated statistics for a service.

    Query args (validated by the schema) may supply date_from and week_count;
    defaults are 1 April of the current year and 52 weeks. Weeks with no
    recorded statistics are filled with zero rows; future weeks are omitted.
    """
    args = week_aggregate_notification_statistics_schema.load(request.args).data
    date_from = args.get('date_from', date(date.today().year, 4, 1))
    week_count = args.get('week_count', 52)
    remaining = dao_get_7_day_agg_notification_statistics_for_service(
        service_id,
        date_from,
        week_count).all()
    weekly = []
    # Rows come back most-recent bucket first; walk the bucket indices
    # downwards and substitute a zero row for any bucket with no data.
    for index in range(week_count - 1, -1, -1):
        if len(remaining) > 0 and remaining[0][0] == index:
            row = remaining.pop(0)
        else:
            row = [index, 0, 0, 0, 0, 0, 0]
        week_start = date_from + timedelta(days=row[0] * 7)
        if week_start <= date.today():
            weekly.append({
                'week_start': week_start.strftime('%Y-%m-%d'),
                'week_end': (date_from + timedelta(days=(row[0] * 7) + 6)).strftime('%Y-%m-%d'),
                'emails_requested': row[1],
                'emails_delivered': row[2],
                'emails_failed': row[3],
                'sms_requested': row[4],
                'sms_delivered': row[5],
                'sms_failed': row[6]
            })
    return jsonify(data=weekly)

View File

@@ -228,7 +228,11 @@ class JobSchema(BaseSchema):
class Meta:
model = models.Job
exclude = ('notifications',)
exclude = (
'notifications',
'notifications_sent',
'notifications_delivered',
'notifications_failed')
strict = True
@@ -487,30 +491,6 @@ class OrganisationSchema(BaseSchema):
strict = True
class FromToDateSchema(ma.Schema):
    """Validates an optional date range: neither bound may be in the future,
    and date_from must not be after date_to."""

    class Meta:
        strict = True

    date_from = fields.Date()
    date_to = fields.Date()

    @validates('date_from')
    def validate_date_from(self, value):
        _validate_not_in_future(value)

    @validates('date_to')
    def validate_date_to(self, value):
        _validate_not_in_future(value)

    @validates_schema
    def validate_dates(self, data):
        start = data.get('date_from')
        end = data.get('date_to')
        # NOTE(review): the message wording reads backwards relative to the
        # check (which rejects date_from > date_to); preserved verbatim here.
        if start and end and start > end:
            raise ValidationError("date_from needs to be greater than date_to")
class DaySchema(ma.Schema):
class Meta:
@@ -523,23 +503,6 @@ class DaySchema(ma.Schema):
_validate_not_in_future(value)
class WeekAggregateNotificationStatisticsSchema(ma.Schema):
    """Validates query arguments for the weekly aggregate statistics endpoint."""

    class Meta:
        strict = True

    date_from = fields.Date()
    week_count = fields.Int()

    @validates('date_from')
    def validate_date_from(self, value):
        # Aggregation windows cannot start in the future.
        _validate_not_in_future(value)

    @validates('week_count')
    def validate_week_count(self, value):
        # A zero or negative week count would produce an empty range.
        _validate_positive_number(value)
class UnarchivedTemplateSchema(BaseSchema):
archived = fields.Boolean(required=True)
@@ -580,8 +543,6 @@ api_key_history_schema = ApiKeyHistorySchema()
template_history_schema = TemplateHistorySchema()
event_schema = EventSchema()
organisation_schema = OrganisationSchema()
from_to_date_schema = FromToDateSchema()
provider_details_schema = ProviderDetailsSchema()
week_aggregate_notification_statistics_schema = WeekAggregateNotificationStatisticsSchema()
day_schema = DaySchema()
unarchived_template_schema = UnarchivedTemplateSchema()

View File

@@ -1,4 +1,4 @@
from datetime import date, timedelta
import itertools
from flask import (
jsonify,
@@ -23,7 +23,8 @@ from app.dao.services_dao import (
dao_remove_user_from_service,
dao_fetch_stats_for_service,
dao_fetch_todays_stats_for_service,
dao_fetch_weekly_historical_stats_for_service
dao_fetch_weekly_historical_stats_for_service,
dao_fetch_todays_stats_for_all_services
)
from app.dao import notifications_dao
from app.dao.provider_statistics_dao import get_fragment_count
@@ -32,7 +33,6 @@ from app.schemas import (
service_schema,
api_key_schema,
user_schema,
from_to_date_schema,
permission_schema,
notification_with_template_schema,
notifications_filter_schema,
@@ -45,25 +45,28 @@ from app.errors import (
)
from app.service import statistics
service = Blueprint('service', __name__)
register_errors(service)
service_blueprint = Blueprint('service', __name__)
register_errors(service_blueprint)
@service.route('', methods=['GET'])
@service_blueprint.route('', methods=['GET'])
def get_services():
    """List services: filtered by user, detailed with stats, or plain."""
    requested_user = request.args.get('user_id', None)
    if requested_user:
        fetched = dao_fetch_all_services_by_user(requested_user)
    elif request.args.get('detailed') == 'True':
        # Detailed listing is serialised by its own helper.
        return jsonify(data=get_detailed_services())
    else:
        fetched = dao_fetch_all_services()
    serialised = service_schema.dump(fetched, many=True).data
    return jsonify(data=serialised)
@service.route('/<uuid:service_id>', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>', methods=['GET'])
def get_service_by_id(service_id):
if request.args.get('detailed') == 'True':
return get_detailed_service(service_id, today_only=request.args.get('today_only') == 'True')
data = get_detailed_service(service_id, today_only=request.args.get('today_only') == 'True')
return jsonify(data=data)
else:
fetched = dao_fetch_service_by_id(service_id)
@@ -71,7 +74,7 @@ def get_service_by_id(service_id):
return jsonify(data=data)
@service.route('', methods=['POST'])
@service_blueprint.route('', methods=['POST'])
def create_service():
data = request.get_json()
if not data.get('user_id', None):
@@ -85,7 +88,7 @@ def create_service():
return jsonify(data=service_schema.dump(valid_service).data), 201
@service.route('/<uuid:service_id>', methods=['POST'])
@service_blueprint.route('/<uuid:service_id>', methods=['POST'])
def update_service(service_id):
fetched_service = dao_fetch_service_by_id(service_id)
current_data = dict(service_schema.dump(fetched_service).data.items())
@@ -95,7 +98,7 @@ def update_service(service_id):
return jsonify(data=service_schema.dump(fetched_service).data), 200
@service.route('/<uuid:service_id>/api-key', methods=['POST'])
@service_blueprint.route('/<uuid:service_id>/api-key', methods=['POST'])
def create_api_key(service_id=None):
fetched_service = dao_fetch_service_by_id(service_id=service_id)
valid_api_key = api_key_schema.load(request.get_json()).data
@@ -105,14 +108,14 @@ def create_api_key(service_id=None):
return jsonify(data=unsigned_api_key), 201
@service.route('/<uuid:service_id>/api-key/revoke/<uuid:api_key_id>', methods=['POST'])
@service_blueprint.route('/<uuid:service_id>/api-key/revoke/<uuid:api_key_id>', methods=['POST'])
def revoke_api_key(service_id, api_key_id):
    """Expire one of a service's API keys; responds 202 with an empty body."""
    expire_api_key(service_id=service_id, api_key_id=api_key_id)
    return jsonify(), 202
@service.route('/<uuid:service_id>/api-keys', methods=['GET'])
@service.route('/<uuid:service_id>/api-keys/<uuid:key_id>', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/api-keys', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/api-keys/<uuid:key_id>', methods=['GET'])
def get_api_keys(service_id, key_id=None):
dao_fetch_service_by_id(service_id=service_id)
@@ -128,14 +131,14 @@ def get_api_keys(service_id, key_id=None):
return jsonify(apiKeys=api_key_schema.dump(api_keys, many=True).data), 200
@service.route('/<uuid:service_id>/users', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/users', methods=['GET'])
def get_users_for_service(service_id):
    """Return the users attached to a service."""
    found_service = dao_fetch_service_by_id(service_id)
    serialised = user_schema.dump(found_service.users, many=True)
    return jsonify(data=serialised.data)
@service.route('/<uuid:service_id>/users/<user_id>', methods=['POST'])
@service_blueprint.route('/<uuid:service_id>/users/<user_id>', methods=['POST'])
def add_user_to_service(service_id, user_id):
service = dao_fetch_service_by_id(service_id)
user = get_model_users(user_id=user_id)
@@ -150,7 +153,7 @@ def add_user_to_service(service_id, user_id):
return jsonify(data=data), 201
@service.route('/<uuid:service_id>/users/<user_id>', methods=['DELETE'])
@service_blueprint.route('/<uuid:service_id>/users/<user_id>', methods=['DELETE'])
def remove_user_from_service(service_id, user_id):
service = dao_fetch_service_by_id(service_id)
user = get_model_users(user_id=user_id)
@@ -166,7 +169,7 @@ def remove_user_from_service(service_id, user_id):
return jsonify({}), 204
@service.route('/<uuid:service_id>/fragment/aggregate_statistics')
@service_blueprint.route('/<uuid:service_id>/fragment/aggregate_statistics')
def get_service_provider_aggregate_statistics(service_id):
    """Return the service's aggregate fragment counts per provider."""
    counts = get_fragment_count(service_id)
    return jsonify(data=counts)
@@ -174,7 +177,7 @@ def get_service_provider_aggregate_statistics(service_id):
# This is placeholder get method until more thought
# goes into how we want to fetch and view various items in history
# tables. This is so product owner can pass stories as done
@service.route('/<uuid:service_id>/history', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/history', methods=['GET'])
def get_service_history(service_id):
from app.models import (Service, ApiKey, Template, TemplateHistory, Event)
from app.schemas import (
@@ -204,7 +207,7 @@ def get_service_history(service_id):
return jsonify(data=data)
@service.route('/<uuid:service_id>/notifications', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/notifications', methods=['GET'])
def get_all_notifications_for_service(service_id):
data = notifications_filter_schema.load(request.args).data
page = data['page'] if 'page' in data else 1
@@ -231,7 +234,7 @@ def get_all_notifications_for_service(service_id):
), 200
@service.route('/<uuid:service_id>/notifications/weekly', methods=['GET'])
@service_blueprint.route('/<uuid:service_id>/notifications/weekly', methods=['GET'])
def get_weekly_notification_stats(service_id):
service = dao_fetch_service_by_id(service_id)
stats = dao_fetch_weekly_historical_stats_for_service(service_id)
@@ -246,5 +249,19 @@ def get_detailed_service(service_id, today_only=False):
service.statistics = statistics.format_statistics(stats)
data = detailed_service_schema.dump(service).data
return jsonify(data=data)
return detailed_service_schema.dump(service).data
def get_detailed_services():
    """Serialise every service together with today's notification statistics."""
    by_id = {svc.id: svc for svc in dao_fetch_all_services()}
    todays_rows = dao_fetch_todays_stats_for_all_services()
    # Stats rows arrive ordered by service_id, so groupby yields one run
    # per service.
    for svc_id, rows in itertools.groupby(todays_rows, lambda row: row.service_id):
        by_id[svc_id].statistics = statistics.format_statistics(rows)
    # Services that sent nothing today never appear in the stats query;
    # give them zeroed statistics so the schema always has something to dump.
    for svc in by_id.values():
        if not hasattr(svc, 'statistics'):
            svc.statistics = statistics.create_zeroed_stats_dicts()
    return detailed_service_schema.dump(by_id.values(), many=True).data

View File

@@ -8,7 +8,7 @@ def format_statistics(statistics):
# statistics come in a named tuple with uniqueness from 'notification_type', 'status' - however missing
# statuses/notification types won't be represented and the status types need to be simplified/summed up
# so we can return emails/sms * created, sent, and failed
counts = _create_zeroed_stats_dicts()
counts = create_zeroed_stats_dicts()
for row in statistics:
_update_statuses_from_row(counts[row.notification_type], row)
@@ -20,7 +20,7 @@ def format_weekly_notification_stats(statistics, service_created_at):
# turn a datetime into midnight that day http://stackoverflow.com/a/1937636
preceeding_monday_midnight = datetime.combine(preceeding_monday.date(), datetime.min.time())
week_dict = {
week: _create_zeroed_stats_dicts()
week: create_zeroed_stats_dicts()
for week in _weeks_for_range(preceeding_monday_midnight, datetime.utcnow())
}
for row in statistics:
@@ -29,7 +29,7 @@ def format_weekly_notification_stats(statistics, service_created_at):
return week_dict
def _create_zeroed_stats_dicts():
def create_zeroed_stats_dicts():
return {
template_type: {
status: 0 for status in ('requested', 'delivered', 'failed')