2016-01-15 15:48:05 +00:00
|
|
|
from flask import (
|
|
|
|
|
Blueprint,
|
|
|
|
|
jsonify,
|
2016-06-28 15:17:36 +01:00
|
|
|
request,
|
|
|
|
|
current_app
|
2016-06-15 16:19:28 +01:00
|
|
|
)
|
2016-01-15 15:48:05 +00:00
|
|
|
|
|
|
|
|
from app.dao.jobs_dao import (
|
2016-02-24 17:12:30 +00:00
|
|
|
dao_create_job,
|
2016-09-01 14:31:01 +01:00
|
|
|
dao_update_job,
|
2016-02-24 17:12:30 +00:00
|
|
|
dao_get_job_by_service_id_and_job_id,
|
2016-08-23 16:46:58 +01:00
|
|
|
dao_get_jobs_by_service_id,
|
2016-09-01 14:31:01 +01:00
|
|
|
dao_get_future_scheduled_job_by_id_and_service_id,
|
2016-08-23 16:46:58 +01:00
|
|
|
dao_get_notification_outcomes_for_job
|
2016-01-15 15:48:05 +00:00
|
|
|
)
|
|
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
from app.dao.services_dao import (
|
|
|
|
|
dao_fetch_service_by_id
|
|
|
|
|
)
|
2016-02-09 14:17:42 +00:00
|
|
|
|
2016-05-11 17:04:51 +01:00
|
|
|
from app.dao.templates_dao import (dao_get_template_by_id)
|
2016-06-28 15:17:36 +01:00
|
|
|
from app.dao.notifications_dao import get_notifications_for_job
|
2016-05-11 17:04:51 +01:00
|
|
|
|
2016-07-26 12:34:39 +01:00
|
|
|
from app.schemas import (
|
|
|
|
|
job_schema,
|
|
|
|
|
unarchived_template_schema,
|
|
|
|
|
notifications_filter_schema,
|
2016-07-26 14:33:14 +01:00
|
|
|
notification_with_template_schema
|
2016-07-26 12:34:39 +01:00
|
|
|
)
|
2016-01-15 15:48:05 +00:00
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
from app.celery.tasks import process_job
|
2016-01-15 15:48:05 +00:00
|
|
|
|
2016-09-01 14:31:01 +01:00
|
|
|
from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED
|
2016-08-24 16:00:21 +01:00
|
|
|
|
2016-06-28 15:17:36 +01:00
|
|
|
from app.utils import pagination_links
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
# Blueprint for job endpoints; every route is scoped to a single service via
# the uuid-typed service_id captured in the URL prefix.
job_blueprint = Blueprint('job', __name__, url_prefix='/service/<uuid:service_id>/job')

from app.errors import (
    register_errors,
    InvalidRequest
)

# Attach the app's shared error handlers (e.g. for InvalidRequest) to this
# blueprint so validation failures become JSON error responses.
register_errors(job_blueprint)
|
2016-02-17 17:04:50 +00:00
|
|
|
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
@job_blueprint.route('/<job_id>', methods=['GET'])
def get_job_by_service_and_job_id(service_id, job_id):
    """Return a single job for a service, including per-status notification counts."""
    fetched_job = dao_get_job_by_service_id_and_job_id(service_id, job_id)
    outcomes = dao_get_notification_outcomes_for_job(service_id, job_id)

    job_json = job_schema.dump(fetched_job).data
    # Each outcome row carries (count, status) — presumably one row per
    # notification status for this job; verify against the DAO if in doubt.
    job_json['statistics'] = [
        {'status': row[1], 'count': row[0]} for row in outcomes
    ]

    return jsonify(data=job_json)
|
|
|
|
|
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
@job_blueprint.route('/<job_id>/cancel', methods=['POST'])
def cancel_job(service_id, job_id):
    """Cancel a job and return its updated representation."""
    # Lookup is restricted (per the DAO's name) to jobs still scheduled for
    # the future — already-started jobs can't be cancelled this way.
    scheduled_job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id)

    scheduled_job.job_status = JOB_STATUS_CANCELLED
    dao_update_job(scheduled_job)

    # Reuse the GET handler so the cancelled job is returned in the same shape.
    return get_job_by_service_and_job_id(service_id, job_id)
|
|
|
|
|
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
@job_blueprint.route('/<job_id>/notifications', methods=['GET'])
def get_all_notifications_for_service_job(service_id, job_id):
    """Return a paginated list of notifications for one job of a service.

    Query-string arguments are validated by notifications_filter_schema and
    may include page, page_size and notification filters; the validated dict
    is also passed through to the DAO as filter_dict.
    """
    data = notifications_filter_schema.load(request.args).data
    # dict.get with a default replaces the previous "x['k'] if 'k' in x" form;
    # behaviour is identical (PAGE_SIZE config default is side-effect free).
    page = data.get('page', 1)
    page_size = data.get('page_size', current_app.config.get('PAGE_SIZE'))

    pagination = get_notifications_for_job(
        service_id,
        job_id,
        filter_dict=data,
        page=page,
        page_size=page_size)

    kwargs = request.args.to_dict()

    # Extra logging for unusually large page sizes, to help investigate
    # pagination behaviour on big queries.
    if page_size > 50:
        current_app.logger.info('Current page: {}'.format(page))
        current_app.logger.info('Page size: {}'.format(page_size))
        current_app.logger.info('Total pages in pagination: {}'.format(pagination.pages))
        current_app.logger.info('Total notifications in pagination query: {}'.format(pagination.total))
        current_app.logger.info('Arguments for next query: {}'.format(kwargs))

    # Carry the route parameters into the next/prev links.
    kwargs['service_id'] = service_id
    kwargs['job_id'] = job_id
    return jsonify(
        notifications=notification_with_template_schema.dump(pagination.items, many=True).data,
        page_size=page_size,
        total=pagination.total,
        links=pagination_links(
            pagination,
            '.get_all_notifications_for_service_job',
            **kwargs
        )
    ), 200
|
|
|
|
|
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
@job_blueprint.route('', methods=['GET'])
def get_jobs_by_service(service_id):
    """Return a paginated list of a service's jobs.

    Query-string arguments:
        limit_days: only include jobs from the last N days (integer).
        statuses:   comma-separated job statuses to include.
        page:       1-based page number (integer, default 1).

    Raises:
        InvalidRequest: 400 when limit_days or page is not an integer.
    """
    if request.args.get('limit_days'):
        try:
            limit_days = int(request.args['limit_days'])
        except ValueError:
            errors = {'limit_days': ['{} is not an integer'.format(request.args['limit_days'])]}
            raise InvalidRequest(errors, status_code=400)
    else:
        limit_days = None

    statuses = [x.strip() for x in request.args.get('statuses', '').split(',')]

    try:
        page = int(request.args.get('page', 1))
    except ValueError:
        # Previously a non-integer ?page= raised an unhandled ValueError (500);
        # validate it the same way as limit_days and return a 400 instead.
        errors = {'page': ['{} is not an integer'.format(request.args['page'])]}
        raise InvalidRequest(errors, status_code=400)

    return jsonify(**get_paginated_jobs(service_id, limit_days, statuses, page))
|
2016-01-15 15:48:05 +00:00
|
|
|
|
|
|
|
|
|
2017-03-16 18:15:49 +00:00
|
|
|
@job_blueprint.route('', methods=['POST'])
def create_job(service_id):
    """Create a new job for a service.

    Rejects inactive services (403) and archived templates (400), persists
    the job, and queues it for processing unless it is scheduled for later.
    Returns 201 with the serialized job (statistics start empty).
    """
    service = dao_fetch_service_by_id(service_id)
    if not service.active:
        raise InvalidRequest("Create job is not allowed: service is inactive ", 403)

    payload = request.get_json()
    payload['service'] = service_id

    template = dao_get_template_by_id(payload['template'])

    # Archived templates must not be used for new jobs.
    template_errors = unarchived_template_schema.validate({'archived': template.archived})
    if template_errors:
        raise InvalidRequest(template_errors, status_code=400)

    # Pin the job to the template version that was current at creation time.
    payload['template_version'] = template.version

    new_job = job_schema.load(payload).data

    # A job with a scheduled_for time is marked scheduled instead of being
    # dispatched immediately.
    if new_job.scheduled_for:
        new_job.job_status = JOB_STATUS_SCHEDULED

    dao_create_job(new_job)

    if new_job.job_status == JOB_STATUS_PENDING:
        process_job.apply_async([str(new_job.id)], queue="process-job")

    job_json = job_schema.dump(new_job).data
    job_json['statistics'] = []

    return jsonify(data=job_json), 201
|
2016-09-21 16:54:02 +01:00
|
|
|
|
|
|
|
|
|
2016-09-23 16:34:13 +01:00
|
|
|
def get_paginated_jobs(service_id, limit_days, statuses, page):
    """Build the JSON-serializable payload for one page of a service's jobs."""
    pagination = dao_get_jobs_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
        statuses=statuses
    )

    jobs = job_schema.dump(pagination.items, many=True).data

    # Attach per-status notification counts to each serialized job.
    # Each outcome row carries (count, status).
    for job_data in jobs:
        outcomes = dao_get_notification_outcomes_for_job(service_id, job_data['id'])
        job_data['statistics'] = [
            {'status': row[1], 'count': row[0]} for row in outcomes
        ]

    links = pagination_links(
        pagination,
        '.get_jobs_by_service',
        service_id=service_id
    )

    return {
        'data': jobs,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': links
    }
|