notify-api-412 use black to enforce python style standards

This commit is contained in:
Kenneth Kehl
2023-08-23 10:35:43 -07:00
parent a7898118d7
commit 026dc14021
586 changed files with 33990 additions and 23461 deletions

View File

@@ -8,16 +8,22 @@ from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.errors import register_errors
from app.utils import midnight_n_days_ago, pagination_links
# Blueprint for upload-related endpoints, mounted per-service:
# every route below is prefixed with /service/<uuid:service_id>/upload.
upload_blueprint = Blueprint(
    "upload", __name__, url_prefix="/service/<uuid:service_id>/upload"
)
# Attach the app's shared error handlers so this blueprint returns
# the same error response shapes as the rest of the API.
register_errors(upload_blueprint)
@upload_blueprint.route("", methods=["GET"])
def get_uploads_by_service(service_id):
    """Return a paginated JSON listing of uploads for one service.

    Query parameters (both optional):
        limit_days: int — restrict results to uploads from the last N days.
        page: int — which page of results to return.

    The heavy lifting (querying, shaping, pagination links) is done by
    get_paginated_uploads; its dict is splatted straight into jsonify so
    the response body has the keys data/page_size/total/links.
    """
    return jsonify(
        **get_paginated_uploads(
            service_id,
            # request.args.get with type=int yields None when the
            # parameter is absent or not a valid integer.
            request.args.get("limit_days", type=int),
            request.args.get("page", type=int),
        )
    )
def get_paginated_uploads(service_id, limit_days, page):
@@ -25,22 +31,23 @@ def get_paginated_uploads(service_id, limit_days, page):
service_id,
limit_days=limit_days,
page=page,
page_size=current_app.config['PAGE_SIZE']
page_size=current_app.config["PAGE_SIZE"],
)
uploads = pagination.items
data = []
for upload in uploads:
upload_dict = {
'id': upload.id,
'original_file_name': upload.original_file_name,
'notification_count': upload.notification_count,
'created_at': upload.scheduled_for.strftime(
"%Y-%m-%d %H:%M:%S") if upload.scheduled_for else upload.created_at.strftime("%Y-%m-%d %H:%M:%S"),
'upload_type': upload.upload_type,
'template_type': upload.template_type,
'recipient': upload.recipient,
"id": upload.id,
"original_file_name": upload.original_file_name,
"notification_count": upload.notification_count,
"created_at": upload.scheduled_for.strftime("%Y-%m-%d %H:%M:%S")
if upload.scheduled_for
else upload.created_at.strftime("%Y-%m-%d %H:%M:%S"),
"upload_type": upload.upload_type,
"template_type": upload.template_type,
"recipient": upload.recipient,
}
if upload.upload_type == 'job':
if upload.upload_type == "job":
start = upload.processing_started
if start is None:
@@ -50,20 +57,22 @@ def get_paginated_uploads(service_id, limit_days, page):
statistics = fetch_notification_statuses_for_job(upload.id)
else:
# notifications table
statistics = dao_get_notification_outcomes_for_job(service_id, upload.id)
upload_dict['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in
statistics]
statistics = dao_get_notification_outcomes_for_job(
service_id, upload.id
)
upload_dict["statistics"] = [
{"status": statistic.status, "count": statistic.count}
for statistic in statistics
]
else:
upload_dict['statistics'] = []
upload_dict["statistics"] = []
data.append(upload_dict)
return {
'data': data,
'page_size': pagination.per_page,
'total': pagination.total,
'links': pagination_links(
pagination,
'.get_uploads_by_service',
service_id=service_id
)
"data": data,
"page_size": pagination.per_page,
"total": pagination.total,
"links": pagination_links(
pagination, ".get_uploads_by_service", service_id=service_id
),
}