Merge pull request #2839 from alphagov/group-letter-uploads

Group letter uploads by printing day
Chris Hill-Scott
2020-05-19 11:05:14 +01:00
committed by GitHub
4 changed files with 367 additions and 101 deletions

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from flask import current_app
from sqlalchemy import and_, desc, func, literal, String
from sqlalchemy import and_, desc, func, literal, text, String
from app import db
from app.models import (
@@ -10,6 +10,28 @@ from app.models import (
from app.utils import midnight_n_days_ago
def _get_printing_day(created_at):
return func.date_trunc(
'day',
func.timezone('Europe/London', func.timezone('UTC', created_at)) + text(
# We add 6 hours 30 minutes to the local created_at time so that
# any letters created at or after 5:30pm get shifted into the next day
"interval '6 hours 30 minutes'"
)
)
def _get_printing_datetime(created_at):
return _get_printing_day(created_at) + text(
# Letters are printed from 5:30pm each day
"interval '17 hours 30 minutes'"
)
def _naive_gmt_to_utc(column):
return func.timezone('UTC', func.timezone('Europe/London', column))
def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50):
# Hardcoded filter to exclude cancelled or scheduled jobs
# for the moment, but we may want to change this method to take 'statuses' as an argument in the future
@@ -56,24 +78,13 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
Notification.status != NOTIFICATION_CANCELLED,
Template.hidden == True,
Notification.created_at >= today - func.coalesce(ServiceDataRetention.days_of_retention, 7)
]
if limit_days is not None:
letters_query_filter.append(Notification.created_at >= midnight_n_days_ago(limit_days))
letters_query = db.session.query(
Notification.id,
Notification.client_reference.label('original_file_name'),
literal('1').label('notification_count'),
literal(None).label('template_type'),
func.coalesce(ServiceDataRetention.days_of_retention, 7).label('days_of_retention'),
Notification.created_at.label("created_at"),
literal(None).label('scheduled_for'),
# letters don't have a processing_started date but we want created_at to be used for sorting
Notification.created_at.label('processing_started'),
Notification.status,
literal('letter').label('upload_type'),
Notification.to.label('recipient'),
letters_subquery = db.session.query(
func.count().label('notification_count'),
_naive_gmt_to_utc(_get_printing_datetime(Notification.created_at)).label('printing_at'),
).join(
Template, Notification.template_id == Template.id
).outerjoin(
@@ -83,6 +94,25 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
)
).filter(
*letters_query_filter
).group_by(
'printing_at'
).subquery()
letters_query = db.session.query(
literal(None).label('id'),
literal('Uploaded letters').label('original_file_name'),
letters_subquery.c.notification_count.label('notification_count'),
literal('letter').label('template_type'),
literal(None).label('days_of_retention'),
letters_subquery.c.printing_at.label('created_at'),
literal(None).label('scheduled_for'),
letters_subquery.c.printing_at.label('processing_started'),
literal(None).label('status'),
literal('letter_day').label('upload_type'),
literal(None).label('recipient'),
).group_by(
letters_subquery.c.notification_count,
letters_subquery.c.printing_at,
)
return jobs_query.union_all(
@@ -90,3 +120,23 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size
).order_by(
desc("processing_started"), desc("created_at")
).paginate(page=page, per_page=page_size)
def dao_get_uploaded_letters_by_print_date(service_id, letter_print_date, page=1, page_size=50):
return db.session.query(
Notification,
).join(
Template, Notification.template_id == Template.id
).filter(
Notification.service_id == service_id,
Notification.notification_type == LETTER_TYPE,
Notification.api_key_id.is_(None),
Notification.status != NOTIFICATION_CANCELLED,
Template.hidden.is_(True),
_get_printing_day(Notification.created_at) == letter_print_date.date(),
).order_by(
desc(Notification.created_at)
).paginate(
page=page,
per_page=page_size,
)
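
For orientation, a rough Python sketch of the cutoff these SQL helpers express (not part of the change; the names below are illustrative and zoneinfo needs Python 3.9+):

from datetime import date, datetime, time, timedelta
from zoneinfo import ZoneInfo

def printing_day(created_at_utc):
    # Interpret the naive UTC timestamp as Europe/London local time, then shift it
    # by 6h30m so anything created at or after 17:30 rolls into the next day's run
    local = created_at_utc.replace(tzinfo=ZoneInfo('UTC')).astimezone(ZoneInfo('Europe/London'))
    return (local + timedelta(hours=6, minutes=30)).date()

def printing_datetime(created_at_utc):
    # Letters are printed from 17:30 local time on their printing day
    return datetime.combine(printing_day(created_at_utc), time(17, 30))

# In winter Europe/London equals UTC, so:
assert printing_day(datetime(2020, 2, 1, 17, 29, 59)) == date(2020, 2, 1)
assert printing_day(datetime(2020, 2, 1, 17, 30)) == date(2020, 2, 2)

The real query does this arithmetic in Postgres and then converts the 17:30 local printing time back to naive UTC via _naive_gmt_to_utc, which is why the summer-time assertions in the tests below expect 16:30 UTC.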

View File

@@ -1,5 +1,7 @@
from datetime import datetime
from flask import (
Blueprint,
abort,
jsonify,
request,
current_app
@@ -9,10 +11,14 @@ from app.dao.fact_notification_status_dao import fetch_notification_statuses_for
from app.dao.jobs_dao import (
dao_get_notification_outcomes_for_job
)
from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.dao.uploads_dao import (
dao_get_uploaded_letters_by_print_date,
dao_get_uploads_by_service_id,
)
from app.errors import (
register_errors,
)
from app.schemas import notification_with_template_schema
from app.utils import pagination_links, midnight_n_days_ago
upload_blueprint = Blueprint('upload', __name__, url_prefix='/service/<uuid:service_id>/upload')
@@ -61,7 +67,7 @@ def get_paginated_uploads(service_id, limit_days, page):
upload_dict['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in
statistics]
else:
upload_dict['statistics'] = [{'status': upload.status, 'count': 1}]
upload_dict['statistics'] = []
data.append(upload_dict)
return {
@@ -74,3 +80,31 @@ def get_paginated_uploads(service_id, limit_days, page):
service_id=service_id
)
}
@upload_blueprint.route('/uploaded-letters/<letter_print_date>', methods=['GET'])
def get_uploaded_letter_by_service_and_print_day(service_id, letter_print_date):
try:
letter_print_datetime = datetime.strptime(letter_print_date, '%Y-%m-%d')
except ValueError:
abort(400)
pagination = dao_get_uploaded_letters_by_print_date(
service_id,
letter_print_date=letter_print_datetime,
page=request.args.get('page', type=int),
page_size=current_app.config['PAGE_SIZE']
)
return jsonify({
'notifications': notification_with_template_schema.dump(
pagination.items,
many=True,
).data,
'page_size': pagination.per_page,
'total': pagination.total,
'links': pagination_links(
pagination,
'.get_uploaded_letter_by_service_and_print_day',
service_id=service_id,
letter_print_date=letter_print_date,
),
})
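
A usage sketch for the new route (the base URL and auth header below are placeholders, not defined by this change):

import requests

service_id = '11111111-1111-1111-1111-111111111111'  # placeholder UUID
response = requests.get(
    # blueprint prefix + new route: /service/<uuid:service_id>/upload/uploaded-letters/<letter_print_date>
    f'https://notify-api.example/service/{service_id}/upload/uploaded-letters/2020-05-18',
    params={'page': 2},  # optional
    headers={'Authorization': 'Bearer <admin token>'},  # placeholder auth
)
response.raise_for_status()
body = response.json()
# body['notifications'] holds up to PAGE_SIZE letters for that print day;
# body['total'], body['page_size'] and body['links'] (prev/next) drive pagination.
# A letter_print_date that is not YYYY-MM-DD gets a 400 response.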

View File

@@ -1,6 +1,7 @@
from datetime import datetime, timedelta
from freezegun import freeze_time
from app.dao.uploads_dao import dao_get_uploads_by_service_id
from app.dao.uploads_dao import dao_get_uploads_by_service_id, dao_get_uploaded_letters_by_print_date
from app.models import LETTER_TYPE, JOB_STATUS_IN_PROGRESS
from tests.app.db import create_job, create_service, create_service_data_retention, create_template, create_notification
@@ -30,6 +31,7 @@ def create_uploaded_template(service):
)
@freeze_time("2020-02-02 14:00") # GMT time
def test_get_uploads_for_service(sample_template):
create_service_data_retention(sample_template.service, 'sms', days_of_retention=9)
job = create_job(sample_template, processing_started=datetime.utcnow())
@@ -40,7 +42,7 @@ def test_get_uploads_for_service(sample_template):
other_template = create_template(service=other_service)
other_job = create_job(other_template, processing_started=datetime.utcnow())
other_letter_template = create_uploaded_template(other_service)
other_letter = create_uploaded_letter(other_letter_template, other_service)
create_uploaded_letter(other_letter_template, other_service)
uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items
other_uploads_from_db = dao_get_uploads_by_service_id(other_job.service_id).items
@@ -48,17 +50,17 @@ def test_get_uploads_for_service(sample_template):
assert len(uploads_from_db) == 2
assert uploads_from_db[0] == (
letter.id,
letter.client_reference,
None,
'Uploaded letters',
1,
'letter',
None,
7,
letter.created_at,
letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
None,
letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
None,
'letter_day',
None,
letter.created_at,
letter.status,
"letter",
"file-name",
)
assert uploads_from_db[1] == (
job.id,
@@ -75,17 +77,19 @@ def test_get_uploads_for_service(sample_template):
)
assert len(other_uploads_from_db) == 2
assert other_uploads_from_db[0] == (other_letter.id,
other_letter.client_reference,
1,
None,
7,
other_letter.created_at,
None,
other_letter.created_at,
other_letter.status,
"letter",
"file-name")
assert other_uploads_from_db[0] == (
None,
'Uploaded letters',
1,
'letter',
None,
letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
None,
letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
None,
"letter_day",
None,
)
assert other_uploads_from_db[1] == (other_job.id,
other_job.original_file_name,
other_job.notification_count,
@@ -98,10 +102,48 @@ def test_get_uploads_for_service(sample_template):
"job",
None)
assert uploads_from_db[0] != other_uploads_from_db[0]
assert uploads_from_db[1] != other_uploads_from_db[1]
@freeze_time("2020-02-02 18:00")
def test_get_uploads_for_service_groups_letters(sample_template):
letter_template = create_uploaded_template(sample_template.service)
# Just gets into yesterday's print run
create_uploaded_letter(letter_template, sample_template.service, created_at=(
datetime(2020, 2, 1, 17, 29, 59)
))
# Yesterday but in today's print run
create_uploaded_letter(letter_template, sample_template.service, created_at=(
datetime(2020, 2, 1, 17, 30)
))
# First thing today
create_uploaded_letter(letter_template, sample_template.service, created_at=(
datetime(2020, 2, 2, 0, 0)
))
# Just before today's print deadline
create_uploaded_letter(letter_template, sample_template.service, created_at=(
datetime(2020, 2, 2, 17, 29, 59)
))
# Just missed today's print deadline
create_uploaded_letter(letter_template, sample_template.service, created_at=(
datetime(2020, 2, 2, 17, 30)
))
uploads_from_db = dao_get_uploads_by_service_id(sample_template.service_id).items
assert [
(upload.notification_count, upload.created_at)
for upload in uploads_from_db
] == [
(1, datetime(2020, 2, 3, 17, 30)),
(3, datetime(2020, 2, 2, 17, 30)),
(1, datetime(2020, 2, 1, 17, 30)),
]
def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template):
create_job(sample_template, job_status='scheduled')
create_job(sample_template, job_status='cancelled')
@@ -118,14 +160,17 @@ def test_get_uploads_orders_by_created_at_desc(sample_template):
job_status=JOB_STATUS_IN_PROGRESS)
upload_2 = create_job(sample_template, processing_started=datetime.utcnow(),
job_status=JOB_STATUS_IN_PROGRESS)
upload_3 = create_uploaded_letter(letter_template, sample_template.service, status='delivered')
create_uploaded_letter(letter_template, sample_template.service, status='delivered')
results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items
assert len(results) == 3
assert results[0].id == upload_3.id
assert results[1].id == upload_2.id
assert results[2].id == upload_1.id
assert [
(result.id, result.upload_type) for result in results
] == [
(None, 'letter_day'),
(upload_2.id, 'job'),
(upload_1.id, 'job'),
]
def test_get_uploads_orders_by_processing_started_desc(sample_template):
@@ -148,25 +193,26 @@ def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_tem
letter_template = create_uploaded_template(sample_template.service)
days_ago = datetime.utcnow() - timedelta(days=4)
upload_1 = create_uploaded_letter(letter_template, service=letter_template.service)
create_uploaded_letter(letter_template, service=letter_template.service)
upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1),
created_at=days_ago,
job_status=JOB_STATUS_IN_PROGRESS)
upload_3 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2),
created_at=days_ago,
job_status=JOB_STATUS_IN_PROGRESS)
upload_4 = create_uploaded_letter(letter_template, service=letter_template.service,
created_at=datetime.utcnow() - timedelta(days=3))
create_uploaded_letter(letter_template, service=letter_template.service,
created_at=datetime.utcnow() - timedelta(days=3))
results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items
assert len(results) == 4
assert results[0].id == upload_1.id
assert results[0].id is None
assert results[1].id == upload_2.id
assert results[2].id == upload_3.id
assert results[3].id == upload_4.id
assert results[3].id is None
@freeze_time('2020-04-02 14:00') # A few days after the clocks go forward
def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template):
letter_template = create_uploaded_template(sample_template.service)
create_service_data_retention(sample_template.service, 'sms', days_of_retention=3)
@@ -199,38 +245,113 @@ def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_te
results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items
assert len(results) == 4
assert results[0].id == upload_1.id
assert results[1].id == upload_2.id
assert results[2].id == upload_3.id
assert results[3].id == upload_4.id
# Uploaded letters get their `created_at` shifted to the time of printing
# 17:30 BST == 16:30 UTC
assert results[0].created_at == upload_1.created_at.replace(hour=16, minute=30, second=0, microsecond=0)
# Jobs keep their original `created_at`
assert results[1].created_at == upload_2.created_at.replace(hour=14, minute=00, second=0, microsecond=0)
# Still in BST here…
assert results[2].created_at == upload_3.created_at.replace(hour=16, minute=30, second=0, microsecond=0)
# Now we've gone far enough back to be in GMT
# 17:30 GMT == 17:30 UTC
assert results[3].created_at == upload_4.created_at.replace(hour=17, minute=30, second=0, microsecond=0)
@freeze_time('2020-02-02 14:00')
def test_get_uploads_is_paginated(sample_template):
letter_template = create_uploaded_template(sample_template.service)
upload_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=3))
upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(minutes=2),
job_status=JOB_STATUS_IN_PROGRESS)
upload_3 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=1))
upload_4 = create_job(sample_template, processing_started=datetime.utcnow(), job_status=JOB_STATUS_IN_PROGRESS)
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=3),
)
create_job(
sample_template, processing_started=datetime.utcnow() - timedelta(minutes=2),
job_status=JOB_STATUS_IN_PROGRESS,
)
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=1),
)
create_job(
sample_template, processing_started=datetime.utcnow(),
job_status=JOB_STATUS_IN_PROGRESS,
)
results = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=2)
results = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=1)
assert results.per_page == 2
assert results.total == 4
assert len(results.items) == 2
assert results.items[0].id == upload_4.id
assert results.items[1].id == upload_3.id
assert results.per_page == 1
assert results.total == 3
assert len(results.items) == 1
assert results.items[0].created_at == datetime.utcnow().replace(hour=17, minute=30, second=0, microsecond=0)
assert results.items[0].notification_count == 2
assert results.items[0].upload_type == 'letter_day'
results = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=2)
results = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=1)
assert len(results.items) == 2
assert results.items[0].id == upload_2.id
assert results.items[1].id == upload_1.id
assert len(results.items) == 1
assert results.items[0].created_at == datetime.utcnow().replace(hour=14, minute=0, second=0, microsecond=0)
assert results.items[0].notification_count == 1
assert results.items[0].upload_type == 'job'
def test_get_uploads_returns_empty_list(sample_service):
items = dao_get_uploads_by_service_id(sample_service.id).items
assert items == []
@freeze_time('2020-02-02 14:00')
def test_get_uploaded_letters_by_print_date(sample_template):
letter_template = create_uploaded_template(sample_template.service)
# Letters for the previous day's run
for i in range(3):
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow().replace(day=1, hour=17, minute=29, second=59)
)
# Letters from yesterday that rolled into today's run
for i in range(30):
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow().replace(day=1, hour=17, minute=30, second=0)
)
# Letters that just made today's run
for i in range(30):
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow().replace(hour=17, minute=29, second=59)
)
# Letters that just missed today's run
for i in range(3):
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow().replace(hour=17, minute=30, second=0)
)
result = dao_get_uploaded_letters_by_print_date(
sample_template.service_id,
datetime.utcnow(),
)
assert result.total == 60
assert len(result.items) == 50
assert result.has_next is True
assert result.has_prev is False
result = dao_get_uploaded_letters_by_print_date(
sample_template.service_id,
datetime.utcnow(),
page=10,
page_size=2,
)
assert result.total == 60
assert len(result.items) == 2
assert result.has_next is True
assert result.has_prev is True
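
The daylight-saving behaviour the retention-period test above relies on, checked as a standalone sketch (zoneinfo, Python 3.9+; not part of the change):

from datetime import datetime
from zoneinfo import ZoneInfo

# During BST, 17:30 local time is 16:30 UTC...
bst = datetime(2020, 4, 1, 17, 30, tzinfo=ZoneInfo('Europe/London'))
assert bst.astimezone(ZoneInfo('UTC')).hour == 16

# ...during GMT, local time and UTC agree, so 17:30 stays 17:30.
gmt = datetime(2020, 2, 1, 17, 30, tzinfo=ZoneInfo('Europe/London'))
assert gmt.astimezone(ZoneInfo('UTC')).hour == 17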

View File

@@ -32,16 +32,17 @@ def create_precompiled_template(service):
)
@freeze_time('2020-02-02 14:00')
def test_get_uploads(admin_request, sample_template):
letter_template = create_precompiled_template(sample_template.service)
upload_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=4))
create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=4))
upload_2 = create_job(template=sample_template,
processing_started=datetime.utcnow() - timedelta(minutes=3),
job_status=JOB_STATUS_FINISHED)
upload_3 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=2))
create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=2))
upload_4 = create_job(template=sample_template,
processing_started=datetime.utcnow() - timedelta(minutes=1),
job_status=JOB_STATUS_FINISHED)
@@ -52,7 +53,7 @@ def test_get_uploads(admin_request, sample_template):
resp_json = admin_request.get('upload.get_uploads_by_service', service_id=service_id)
data = resp_json['data']
assert len(data) == 5
assert len(data) == 4
assert data[0] == {'id': str(upload_5.id),
'original_file_name': 'some.csv',
'recipient': None,
@@ -61,23 +62,15 @@ def test_get_uploads(admin_request, sample_template):
'created_at': upload_5.created_at.strftime("%Y-%m-%d %H:%M:%S"),
'statistics': [],
'upload_type': 'job'}
assert data[1] == {'id': str(upload_4.id),
'original_file_name': 'some.csv',
assert data[1] == {'id': None,
'original_file_name': 'Uploaded letters',
'recipient': None,
'notification_count': 1,
'template_type': 'sms',
'created_at': upload_4.created_at.strftime(
'notification_count': 2,
'template_type': 'letter',
'created_at': upload_4.created_at.replace(hour=17, minute=30).strftime(
"%Y-%m-%d %H:%M:%S"),
'statistics': [],
'upload_type': 'job'}
assert data[2] == {'id': str(upload_3.id),
'original_file_name': "file-name",
'recipient': '742 Evergreen Terrace',
'notification_count': 1,
'template_type': None,
'created_at': upload_3.created_at.strftime("%Y-%m-%d %H:%M:%S"),
'statistics': [{'count': 1, 'status': 'delivered'}],
'upload_type': 'letter'}
'upload_type': 'letter_day'}
assert data[3] == {'id': str(upload_2.id),
'original_file_name': "some.csv",
'recipient': None,
@@ -87,14 +80,6 @@ def test_get_uploads(admin_request, sample_template):
"%Y-%m-%d %H:%M:%S"),
'statistics': [],
'upload_type': 'job'}
assert data[4] == {'id': str(upload_1.id),
'original_file_name': "file-name",
'recipient': '742 Evergreen Terrace',
'notification_count': 1,
'template_type': None,
'created_at': upload_1.created_at.strftime("%Y-%m-%d %H:%M:%S"),
'statistics': [{'count': 1, 'status': 'delivered'}],
'upload_type': 'letter'}
def test_get_uploads_should_return_statistics(admin_request, sample_template):
@@ -110,8 +95,8 @@ def test_get_uploads_should_return_statistics(admin_request, sample_template):
create_notification(template=sample_template, job=job_3, status='sending')
letter_template = create_precompiled_template(sample_template.service)
letter_1 = create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(days=3))
create_uploaded_letter(letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(days=3))
resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id)['data']
assert len(resp_json) == 4
@@ -121,8 +106,8 @@ def test_get_uploads_should_return_statistics(admin_request, sample_template):
assert resp_json[1]['statistics'] == [{'status': 'sending', 'count': 4}]
assert resp_json[2]['id'] == str(job_2.id)
assert resp_json[2]['statistics'] == [{'status': 'created', 'count': 3}]
assert resp_json[3]['id'] == str(letter_1.id)
assert resp_json[3]['statistics'] == [{'status': 'delivered', 'count': 1}]
assert resp_json[3]['id'] is None
assert resp_json[3]['statistics'] == []
def test_get_uploads_should_paginate(admin_request, sample_template):
@@ -186,3 +171,79 @@ def test_get_uploads_should_retrieve_from_ft_notification_status_for_old_jobs(ad
assert resp_json[1]['statistics'] == [{'status': 'created', 'count': 1}]
assert resp_json[2]['id'] == str(job_1.id)
assert resp_json[2]['statistics'] == [{'status': 'delivered', 'count': 6}]
@freeze_time('2020-02-02 14:00')
def test_get_uploaded_letters_by_print_date(admin_request, sample_template):
letter_template = create_precompiled_template(sample_template.service)
letter_1 = create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=1)
)
letter_2 = create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=2)
)
service_id = sample_template.service.id
resp_json = admin_request.get(
'upload.get_uploaded_letter_by_service_and_print_day',
service_id=service_id,
letter_print_date='2020-02-02',
)
assert resp_json['total'] == 2
assert resp_json['page_size'] == 50
assert len(resp_json['notifications']) == 2
assert resp_json['notifications'][0]['id'] == str(letter_1.id)
assert resp_json['notifications'][0]['created_at'] == letter_1.created_at.strftime(
'%Y-%m-%dT%H:%M:%S+00:00'
)
assert resp_json['notifications'][1]['id'] == str(letter_2.id)
assert resp_json['notifications'][1]['created_at'] == letter_2.created_at.strftime(
'%Y-%m-%dT%H:%M:%S+00:00'
)
@freeze_time('2020-02-02 14:00')
def test_get_uploaded_letters_by_print_date_paginates(admin_request, sample_template):
letter_template = create_precompiled_template(sample_template.service)
for i in range(101):
create_uploaded_letter(
letter_template, sample_template.service, status='delivered',
created_at=datetime.utcnow() - timedelta(minutes=1)
)
service_id = sample_template.service.id
resp_json = admin_request.get(
'upload.get_uploaded_letter_by_service_and_print_day',
service_id=service_id,
letter_print_date='2020-02-02',
page=2,
)
assert resp_json['total'] == 101
assert resp_json['page_size'] == 50
assert len(resp_json['notifications']) == 50
assert resp_json['links']['prev'] == (
f'/service/{service_id}/upload/uploaded-letters/2020-02-02?page=1'
)
assert resp_json['links']['next'] == (
f'/service/{service_id}/upload/uploaded-letters/2020-02-02?page=3'
)
def test_get_uploaded_letters_by_print_date_400s_for_bad_date(
admin_request,
sample_service,
):
admin_request.get(
'upload.get_uploaded_letter_by_service_and_print_day',
service_id=sample_service.id,
letter_print_date='foo',
_expected_status=400,
)