mirror of
https://github.com/GSA/notifications-api.git
synced 2026-01-30 06:21:50 -05:00
Add uploads blueprint; the endpoint returns a combination of uploaded letters and jobs. The endpoint returns data about the uploaded letter or job, including notification statistics for the upload. The data is ordered by scheduled_for and created_at.
It is likely this endpoint will need additional data for the UI to display; for the first iteration this will enable the /uploads page to show both letters and jobs. Only letters uploaded via the UI are included in the resultset. Add file name to resultset.
This commit is contained in:
committed by
Rebecca Law
parent
570eb5e340
commit
203e19bef3
@@ -124,6 +124,7 @@ def register_blueprint(application):
|
||||
from app.platform_stats.rest import platform_stats_blueprint
|
||||
from app.template_folder.rest import template_folder_blueprint
|
||||
from app.letter_branding.letter_branding_rest import letter_branding_blueprint
|
||||
from app.upload.rest import upload_blueprint
|
||||
|
||||
service_blueprint.before_request(requires_admin_auth)
|
||||
application.register_blueprint(service_blueprint, url_prefix='/service')
|
||||
@@ -206,6 +207,9 @@ def register_blueprint(application):
|
||||
letter_branding_blueprint.before_request(requires_admin_auth)
|
||||
application.register_blueprint(letter_branding_blueprint)
|
||||
|
||||
upload_blueprint.before_request(requires_admin_auth)
|
||||
application.register_blueprint(upload_blueprint)
|
||||
|
||||
|
||||
def register_v2_blueprints(application):
|
||||
from app.v2.inbound_sms.get_inbound_sms import v2_inbound_sms_blueprint as get_inbound_sms
|
||||
|
||||
66
app/dao/uploads_dao.py
Normal file
66
app/dao/uploads_dao.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from flask import current_app
|
||||
from sqlalchemy import desc, literal
|
||||
|
||||
from app import db
|
||||
from app.models import (
|
||||
Job, Notification, Template, LETTER_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED,
|
||||
NOTIFICATION_CANCELLED
|
||||
)
|
||||
from app.utils import midnight_n_days_ago
|
||||
|
||||
|
||||
def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
    """Return one page of a service's uploads, newest first.

    An "upload" is either a CSV job or a one-off letter sent through the UI.
    The two result sets are built with an identical column shape
    (id, original_file_name, notification_count, created_at, scheduled_for,
    processing_started, status, upload_type) so they can be combined with
    UNION ALL and sorted together.

    :param service_id: id of the service whose uploads to fetch
    :param limit_days: if given, only include uploads created on or after
        midnight this many days ago
    :param page: 1-based page number
    :param page_size: rows per page
    :return: a Flask-SQLAlchemy Pagination of the combined rows, ordered by
        processing_started then created_at, descending
    """
    created_since = midnight_n_days_ago(limit_days) if limit_days is not None else None

    # Jobs: exclude the internal test / one-off placeholder files and any
    # job that was cancelled or is still only scheduled.
    job_filters = [
        Job.service_id == service_id,
        Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
        Job.original_file_name != current_app.config['ONE_OFF_MESSAGE_FILENAME'],
        Job.job_status.notin_([JOB_STATUS_CANCELLED, JOB_STATUS_SCHEDULED]),
    ]
    if created_since is not None:
        job_filters.append(Job.created_at >= created_since)

    jobs = db.session.query(
        Job.id,
        Job.original_file_name,
        Job.notification_count,
        Job.created_at.label("created_at"),
        Job.scheduled_for.label("scheduled_for"),
        Job.processing_started.label('processing_started'),
        Job.job_status.label("status"),
        literal('job').label('upload_type'),
    ).filter(*job_filters)

    # Letters: only those sent from the UI (no api_key_id, hidden
    # precompiled-letter template) and not cancelled.
    letter_filters = [
        Notification.service_id == service_id,
        Notification.notification_type == LETTER_TYPE,
        Notification.api_key_id == None,  # noqa
        Notification.status != NOTIFICATION_CANCELLED,
        Template.hidden == True,  # noqa
    ]
    if created_since is not None:
        letter_filters.append(Notification.created_at >= created_since)

    letters = db.session.query(
        Notification.id,
        Notification.client_reference.label('original_file_name'),
        literal('1').label('notification_count'),
        Notification.created_at.label("created_at"),
        literal(None).label('scheduled_for'),
        # letters don't have a processing_started date but we want
        # created_at to be used for sorting
        Notification.created_at.label('processing_started'),
        Notification.status,
        literal('letter').label('upload_type'),
    ).join(
        Template, Notification.template_id == Template.id
    ).filter(*letter_filters)

    combined = jobs.union_all(letters).order_by(
        desc("processing_started"), desc("created_at")
    )
    return combined.paginate(page=page, per_page=page_size)
|
||||
0
app/upload/__init__.py
Normal file
0
app/upload/__init__.py
Normal file
83
app/upload/rest.py
Normal file
83
app/upload/rest.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from flask import (
|
||||
Blueprint,
|
||||
jsonify,
|
||||
request,
|
||||
current_app
|
||||
)
|
||||
|
||||
from app.dao.fact_notification_status_dao import fetch_notification_statuses_for_job
|
||||
from app.dao.jobs_dao import (
|
||||
dao_get_notification_outcomes_for_job
|
||||
)
|
||||
from app.dao.uploads_dao import dao_get_uploads_by_service_id
|
||||
from app.errors import (
|
||||
register_errors,
|
||||
InvalidRequest
|
||||
)
|
||||
from app.utils import pagination_links, midnight_n_days_ago
|
||||
|
||||
upload_blueprint = Blueprint('upload', __name__, url_prefix='/service/<uuid:service_id>/upload')
|
||||
|
||||
register_errors(upload_blueprint)
|
||||
|
||||
|
||||
@upload_blueprint.route('', methods=['GET'])
def get_uploads_by_service(service_id):
    """Return a paginated list of a service's uploads (jobs and one-off letters).

    Query string:
        limit_days: optional int — only include uploads created on or after
            midnight this many days ago.
        page: optional int (default 1) — which page of results to return.

    Raises InvalidRequest (400) if limit_days or page is not an integer.
    """
    if request.args.get('limit_days'):
        try:
            limit_days = int(request.args['limit_days'])
        except ValueError:
            errors = {'limit_days': ['{} is not an integer'.format(request.args['limit_days'])]}
            raise InvalidRequest(errors, status_code=400)
    else:
        limit_days = None

    # Fix: a non-integer `page` previously raised an unhandled ValueError
    # (HTTP 500); reject it with a 400, consistent with limit_days above.
    try:
        page = int(request.args.get('page', 1))
    except ValueError:
        errors = {'page': ['{} is not an integer'.format(request.args['page'])]}
        raise InvalidRequest(errors, status_code=400)

    return jsonify(**get_paginated_uploads(service_id, limit_days, page))
|
||||
|
||||
|
||||
def get_paginated_uploads(service_id, limit_days, page):
    """Fetch one page of a service's uploads and serialise it for the API.

    Each upload row (a job or a one-off letter) becomes a dict with its id,
    file name, notification count, display timestamp, upload type and a list
    of {'status', 'count'} statistics. Returns the serialised page together
    with pagination metadata and links.
    """
    pagination = dao_get_uploads_by_service_id(
        service_id,
        limit_days=limit_days,
        page=page,
        page_size=current_app.config['PAGE_SIZE']
    )

    data = []
    for upload in pagination.items:
        # Show scheduled jobs at their scheduled time rather than the time
        # the file was uploaded.
        shown_at = upload.scheduled_for or upload.created_at

        serialised = {
            'id': upload.id,
            'original_file_name': upload.original_file_name,
            'notification_count': upload.notification_count,
            'created_at': shown_at.strftime("%Y-%m-%d %H:%M:%S"),
            'upload_type': upload.upload_type
        }

        if upload.upload_type != 'job':
            # A one-off letter is a single notification, so its statistics
            # are just that notification's own status.
            serialised['statistics'] = [{'status': upload.status, 'count': 1}]
        else:
            started = upload.processing_started
            if started is None:
                outcomes = []
            elif started.replace(tzinfo=None) < midnight_n_days_ago(3):
                # ft_notification_status table
                outcomes = fetch_notification_statuses_for_job(upload.id)
            else:
                # notifications table
                outcomes = dao_get_notification_outcomes_for_job(service_id, upload.id)
            serialised['statistics'] = [
                {'status': outcome.status, 'count': outcome.count}
                for outcome in outcomes
            ]

        data.append(serialised)

    return {
        'data': data,
        'page_size': pagination.per_page,
        'total': pagination.total,
        'links': pagination_links(
            pagination,
            '.get_uploads_by_service',
            service_id=service_id
        )
    }
|
||||
Reference in New Issue
Block a user