from flask import Blueprint, current_app, jsonify, request

from app.dao.fact_notification_status_dao import (
    fetch_notification_statuses_for_job,
)
from app.dao.jobs_dao import dao_get_notification_outcomes_for_job
from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.errors import register_errors
from app.utils import midnight_n_days_ago, pagination_links

# Blueprint serving the per-service uploads listing; every route is scoped
# under a service id taken from the URL.
upload_blueprint = Blueprint(
    'upload',
    __name__,
    url_prefix='/service/<uuid:service_id>/upload',
)

# Attach the application's shared error handlers to this blueprint.
register_errors(upload_blueprint)

@upload_blueprint.route('', methods=['GET'])
def get_uploads_by_service(service_id):
    """Return a paginated JSON listing of a service's uploads.

    Optional query parameters: ``limit_days`` (int) restricts how far back
    to look, ``page`` (int) selects the results page.
    """
    limit_days = request.args.get('limit_days', type=int)
    page = request.args.get('page', type=int)
    return jsonify(**get_paginated_uploads(service_id, limit_days, page))


def get_paginated_uploads(service_id, limit_days, page):
    """Build the paginated uploads payload for a service.

    Returns a dict with ``data`` (one dict per upload), ``page_size``,
    ``total`` and pagination ``links``. For 'job' uploads that have started
    processing, per-status notification counts are attached under
    ``statistics``; for everything else ``statistics`` is an empty list.
    """
    timestamp_format = "%Y-%m-%d %H:%M:%S"

    pagination = dao_get_uploads_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE'],
    )

    data = []
    for upload in pagination.items:
        # Show the scheduled time when the upload was scheduled, otherwise
        # the actual creation time.
        displayed_time = upload.scheduled_for or upload.created_at
        upload_dict = {
            'id': upload.id,
            'original_file_name': upload.original_file_name,
            'notification_count': upload.notification_count,
            'created_at': displayed_time.strftime(timestamp_format),
            'upload_type': upload.upload_type,
            'template_type': upload.template_type,
            'recipient': upload.recipient,
        }

        statistics = []
        if upload.upload_type == 'job' and upload.processing_started is not None:
            started = upload.processing_started
            # NOTE(review): comparison is done on naive datetimes — presumably
            # both sides are UTC; confirm against midnight_n_days_ago.
            if started.replace(tzinfo=None) < midnight_n_days_ago(3):
                # Old jobs: counts have been rolled up into the
                # ft_notification_status table.
                rows = fetch_notification_statuses_for_job(upload.id)
            else:
                # Recent jobs: read straight from the notifications table.
                rows = dao_get_notification_outcomes_for_job(service_id, upload.id)
            statistics = [
                {'status': row.status, 'count': row.count} for row in rows
            ]
        upload_dict['statistics'] = statistics

        data.append(upload_dict)

    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_uploads_by_service',
            service_id=service_id,
        ),
    }