Add type=int to request.args.get: if the arg is an int it's returned, else None. This means we ignore the arg if it's the wrong data type, and we don't need to handle the error.

This commit is contained in:
Rebecca Law
2019-12-06 13:10:38 +00:00
parent 203e19bef3
commit 921b90cdec
4 changed files with 28 additions and 37 deletions

View File

@@ -10,6 +10,8 @@ from app.utils import midnight_n_days_ago
def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50): def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
# Hardcoded filter to exclude cancelled or scheduled jobs
# for the moment, but we may want to change this method take 'statuses' as a argument in the future
jobs_query_filter = [ jobs_query_filter = [
Job.service_id == service_id, Job.service_id == service_id,
Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'], Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],

View File

@@ -12,7 +12,6 @@ from app.dao.jobs_dao import (
from app.dao.uploads_dao import dao_get_uploads_by_service_id from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.errors import ( from app.errors import (
register_errors, register_errors,
InvalidRequest
) )
from app.utils import pagination_links, midnight_n_days_ago from app.utils import pagination_links, midnight_n_days_ago
@@ -23,17 +22,9 @@ register_errors(upload_blueprint)
@upload_blueprint.route('', methods=['GET']) @upload_blueprint.route('', methods=['GET'])
def get_uploads_by_service(service_id): def get_uploads_by_service(service_id):
if request.args.get('limit_days'): return jsonify(**get_paginated_uploads(service_id,
try: request.args.get('limit_days', type=int),
limit_days = int(request.args['limit_days']) request.args.get('page', type=int)))
except ValueError:
errors = {'limit_days': ['{} is not an integer'.format(request.args['limit_days'])]}
raise InvalidRequest(errors, status_code=400)
else:
limit_days = None
page = int(request.args.get('page', 1))
return jsonify(**get_paginated_uploads(service_id, limit_days, page))
def get_paginated_uploads(service_id, limit_days, page): def get_paginated_uploads(service_id, limit_days, page):

View File

@@ -18,7 +18,7 @@ def create_uploaded_letter(letter_template, service, status='created', created_a
) )
def create_precompiled_template(service): def create_uploaded_template(service):
return create_template( return create_template(
service, service,
template_type=LETTER_TYPE, template_type=LETTER_TYPE,
@@ -32,13 +32,13 @@ def create_precompiled_template(service):
def test_get_uploads_for_service(sample_template): def test_get_uploads_for_service(sample_template):
job = create_job(sample_template, processing_started=datetime.utcnow()) job = create_job(sample_template, processing_started=datetime.utcnow())
letter_template = create_precompiled_template(sample_template.service) letter_template = create_uploaded_template(sample_template.service)
letter = create_uploaded_letter(letter_template, sample_template.service) letter = create_uploaded_letter(letter_template, sample_template.service)
other_service = create_service(service_name="other service") other_service = create_service(service_name="other service")
other_template = create_template(service=other_service) other_template = create_template(service=other_service)
other_job = create_job(other_template, processing_started=datetime.utcnow()) other_job = create_job(other_template, processing_started=datetime.utcnow())
other_letter_template = create_precompiled_template(other_service) other_letter_template = create_uploaded_template(other_service)
other_letter = create_uploaded_letter(other_letter_template, other_service) other_letter = create_uploaded_letter(other_letter_template, other_service)
uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items
@@ -75,14 +75,14 @@ def test_get_uploads_for_service(sample_template):
def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template): def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template):
create_job(sample_template, job_status='scheduled') create_job(sample_template, job_status='scheduled')
create_job(sample_template, job_status='cancelled') create_job(sample_template, job_status='cancelled')
letter_template = create_precompiled_template(sample_template.service) letter_template = create_uploaded_template(sample_template.service)
create_uploaded_letter(letter_template, sample_template.service, status='cancelled') create_uploaded_letter(letter_template, sample_template.service, status='cancelled')
assert len(dao_get_uploads_by_service_id(sample_template.service_id).items) == 0 assert len(dao_get_uploads_by_service_id(sample_template.service_id).items) == 0
def test_get_uploads_orders_by_created_at_desc(sample_template): def test_get_uploads_orders_by_created_at_desc(sample_template):
letter_template = create_precompiled_template(sample_template.service) letter_template = create_uploaded_template(sample_template.service)
upload_1 = create_job(sample_template, processing_started=datetime.utcnow(), upload_1 = create_job(sample_template, processing_started=datetime.utcnow(),
job_status=JOB_STATUS_IN_PROGRESS) job_status=JOB_STATUS_IN_PROGRESS)
@@ -115,30 +115,30 @@ def test_get_uploads_orders_by_processing_started_desc(sample_template):
def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template): def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template):
letter_template = create_precompiled_template(sample_template.service) letter_template = create_uploaded_template(sample_template.service)
days_ago = datetime.utcnow() - timedelta(days=4) days_ago = datetime.utcnow() - timedelta(days=4)
upload_0 = create_uploaded_letter(letter_template, service=letter_template.service) upload_1 = create_uploaded_letter(letter_template, service=letter_template.service)
upload_1 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1), upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1),
created_at=days_ago, created_at=days_ago,
job_status=JOB_STATUS_IN_PROGRESS) job_status=JOB_STATUS_IN_PROGRESS)
upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2), upload_3 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2),
created_at=days_ago, created_at=days_ago,
job_status=JOB_STATUS_IN_PROGRESS) job_status=JOB_STATUS_IN_PROGRESS)
upload_3 = create_uploaded_letter(letter_template, service=letter_template.service, upload_4 = create_uploaded_letter(letter_template, service=letter_template.service,
created_at=datetime.utcnow() - timedelta(days=3)) created_at=datetime.utcnow() - timedelta(days=3))
results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items
assert len(results) == 4 assert len(results) == 4
assert results[0].id == upload_0.id assert results[0].id == upload_1.id
assert results[1].id == upload_1.id assert results[1].id == upload_2.id
assert results[2].id == upload_2.id assert results[2].id == upload_3.id
assert results[3].id == upload_3.id assert results[3].id == upload_4.id
def test_get_uploads_is_paginated(sample_template): def test_get_uploads_is_paginated(sample_template):
letter_template = create_precompiled_template(sample_template.service) letter_template = create_uploaded_template(sample_template.service)
upload_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered', upload_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=3)) created_at=datetime.utcnow() - timedelta(minutes=3))

View File

@@ -92,33 +92,31 @@ def test_get_uploads_should_return_statistics(admin_request, sample_template):
earlier = datetime.utcnow() - timedelta(days=1) earlier = datetime.utcnow() - timedelta(days=1)
job_1 = create_job(template=sample_template, job_status='pending') job_1 = create_job(template=sample_template, job_status='pending')
job_2 = create_job(sample_template, processing_started=earlier) job_2 = create_job(sample_template, processing_started=earlier)
for _ in range(0, 3): for _ in range(3):
create_notification(template=sample_template, job=job_2, status='created') create_notification(template=sample_template, job=job_2, status='created')
job_3 = create_job(sample_template, processing_started=now) job_3 = create_job(sample_template, processing_started=now)
for _ in range(0, 4): for _ in range(4):
create_notification(template=sample_template, job=job_3, status='sending') create_notification(template=sample_template, job=job_3, status='sending')
letter_template = create_precompiled_template(sample_template.service) letter_template = create_precompiled_template(sample_template.service)
letter_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered', letter_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(days=3)) created_at=datetime.utcnow() - timedelta(days=3))
letter_2 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(days=2))
resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id)['data'] resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id)['data']
assert len(resp_json) == 5 assert len(resp_json) == 4
assert resp_json[0]['id'] == str(job_1.id) assert resp_json[0]['id'] == str(job_1.id)
assert resp_json[0]['statistics'] == [] assert resp_json[0]['statistics'] == []
assert resp_json[1]['id'] == str(job_3.id) assert resp_json[1]['id'] == str(job_3.id)
assert resp_json[1]['statistics'] == [{'status': 'sending', 'count': 4}] assert resp_json[1]['statistics'] == [{'status': 'sending', 'count': 4}]
assert resp_json[2]['id'] == str(job_2.id) assert resp_json[2]['id'] == str(job_2.id)
assert resp_json[2]['statistics'] == [{'status': 'created', 'count': 3}] assert resp_json[2]['statistics'] == [{'status': 'created', 'count': 3}]
assert resp_json[3]['id'] == str(letter_2.id) assert resp_json[3]['id'] == str(letter_1.id)
assert resp_json[3]['statistics'] == [{'status': 'delivered', 'count': 1}] assert resp_json[3]['statistics'] == [{'status': 'delivered', 'count': 1}]
assert resp_json[4]['id'] == str(letter_1.id)
assert resp_json[4]['statistics'] == [{'status': 'delivered', 'count': 1}]
def test_get_uploads_should_paginate(admin_request, sample_template): def test_get_uploads_should_paginate(admin_request, sample_template):
for _ in range(0, 10): for _ in range(10):
create_job(sample_template) create_job(sample_template)
with set_config(admin_request.app, 'PAGE_SIZE', 2): with set_config(admin_request.app, 'PAGE_SIZE', 2):
@@ -132,7 +130,7 @@ def test_get_uploads_should_paginate(admin_request, sample_template):
def test_get_uploads_accepts_page_parameter(admin_request, sample_template): def test_get_uploads_accepts_page_parameter(admin_request, sample_template):
for _ in range(0, 10): for _ in range(10):
create_job(sample_template) create_job(sample_template)
with set_config(admin_request.app, 'PAGE_SIZE', 2): with set_config(admin_request.app, 'PAGE_SIZE', 2):