from datetime import datetime, timedelta

from freezegun import freeze_time

from app.dao.uploads_dao import dao_get_uploads_by_service_id, dao_get_uploaded_letters_by_print_date
from app.models import LETTER_TYPE, JOB_STATUS_IN_PROGRESS
from tests.app.db import (
    create_job,
    create_notification,
    create_service,
    create_service_contact_list,
    create_service_data_retention,
    create_template,
)

def create_uploaded_letter(letter_template, service, status='created', created_at=None):
    return create_notification(
        template=letter_template,
        to_field="file-name",
        status=status,
        reference="dvla-reference",
        client_reference="file-name",
        one_off=True,
        created_by_id=service.users[0].id,
        created_at=created_at
    )


def create_uploaded_template(service):
    return create_template(
        service,
        template_type=LETTER_TYPE,
        template_name='Pre-compiled PDF',
        subject='Pre-compiled PDF',
        content="",
        hidden=True,
        postage="second",
    )

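# A note on timings, based on the assertions in the tests below: uploaded
# letters appear to be grouped into daily print runs with a 17:30
# Europe/London cut-off. A letter created before 17:30 local time falls into
# that day's run; a letter created at or after 17:30 rolls into the next
# day's run. During BST the 17:30 local deadline corresponds to 16:30 UTC.
# A rough sketch of that bucketing, purely for illustration (this helper is
# hypothetical and not part of the DAO under test):
#
#     def expected_print_run(created_at_local):
#         cutoff = created_at_local.replace(hour=17, minute=30, second=0, microsecond=0)
#         return cutoff if created_at_local < cutoff else cutoff + timedelta(days=1)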
@freeze_time("2020-02-02 14:00")  # GMT time
def test_get_uploads_for_service(sample_template):
    create_service_data_retention(sample_template.service, 'sms', days_of_retention=9)
    contact_list = create_service_contact_list()
    # Jobs created from contact lists should be filtered out
    create_job(sample_template, contact_list_id=contact_list.id)
    job = create_job(sample_template, processing_started=datetime.utcnow())
    letter_template = create_uploaded_template(sample_template.service)
    letter = create_uploaded_letter(letter_template, sample_template.service)

    other_service = create_service(service_name="other service")
    other_template = create_template(service=other_service)
    other_job = create_job(other_template, processing_started=datetime.utcnow())
    other_letter_template = create_uploaded_template(other_service)
    create_uploaded_letter(other_letter_template, other_service)

    uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items
    other_uploads_from_db = dao_get_uploads_by_service_id(other_job.service_id).items

    assert len(uploads_from_db) == 2

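    # Each row returned by the DAO appears to be a tuple of (id,
    # original_file_name, notification_count, template_type, days_of_retention,
    # created_at, scheduled_for, processing_started, status, upload_type) plus
    # a final column that is always None in these tests; this is inferred from
    # the job row asserted below.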
    assert uploads_from_db[0] == (
        None,
        'Uploaded letters',
        1,
        'letter',
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        'letter_day',
        None,
    )
    assert uploads_from_db[1] == (
        job.id,
        job.original_file_name,
        job.notification_count,
        'sms',
        9,
        job.created_at,
        job.scheduled_for,
        job.processing_started,
        job.job_status,
        "job",
        None,
    )

    assert len(other_uploads_from_db) == 2
    assert other_uploads_from_db[0] == (
        None,
        'Uploaded letters',
        1,
        'letter',
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        letter.created_at.replace(hour=17, minute=30, second=0, microsecond=0),
        None,
        "letter_day",
        None,
    )
    assert other_uploads_from_db[1] == (
        other_job.id,
        other_job.original_file_name,
        other_job.notification_count,
        other_job.template.template_type,
        7,
        other_job.created_at,
        other_job.scheduled_for,
        other_job.processing_started,
        other_job.job_status,
        "job",
        None,
    )

    assert uploads_from_db[1] != other_uploads_from_db[1]

@freeze_time("2020-02-02 18:00")
def test_get_uploads_for_service_groups_letters(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    # Just gets into yesterday’s print run
    create_uploaded_letter(letter_template, sample_template.service, created_at=(
        datetime(2020, 2, 1, 17, 29, 59)
    ))

    # Yesterday but in today’s print run
    create_uploaded_letter(letter_template, sample_template.service, created_at=(
        datetime(2020, 2, 1, 17, 30)
    ))

    # First thing today
    create_uploaded_letter(letter_template, sample_template.service, created_at=(
        datetime(2020, 2, 2, 0, 0)
    ))

    # Just before today’s print deadline
    create_uploaded_letter(letter_template, sample_template.service, created_at=(
        datetime(2020, 2, 2, 17, 29, 59)
    ))

    # Just missed today’s print deadline
    create_uploaded_letter(letter_template, sample_template.service, created_at=(
        datetime(2020, 2, 2, 17, 30)
    ))

    uploads_from_db = dao_get_uploads_by_service_id(sample_template.service_id).items

    assert [
        (upload.notification_count, upload.created_at)
        for upload in uploads_from_db
    ] == [
        (1, datetime(2020, 2, 3, 17, 30)),
        (3, datetime(2020, 2, 2, 17, 30)),
        (1, datetime(2020, 2, 1, 17, 30)),
    ]

def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template):
    create_job(sample_template, job_status='scheduled')
    create_job(sample_template, job_status='cancelled')
    letter_template = create_uploaded_template(sample_template.service)
    create_uploaded_letter(letter_template, sample_template.service, status='cancelled')

    assert len(dao_get_uploads_by_service_id(sample_template.service_id).items) == 0

def test_get_uploads_orders_by_created_at_desc(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    upload_1 = create_job(sample_template, processing_started=datetime.utcnow(),
                          job_status=JOB_STATUS_IN_PROGRESS)
    upload_2 = create_job(sample_template, processing_started=datetime.utcnow(),
                          job_status=JOB_STATUS_IN_PROGRESS)
    create_uploaded_letter(letter_template, sample_template.service, status='delivered')

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert [
        (result.id, result.upload_type) for result in results
    ] == [
        (None, 'letter_day'),
        (upload_2.id, 'job'),
        (upload_1.id, 'job'),
    ]

def test_get_uploads_orders_by_processing_started_desc(sample_template):
    days_ago = datetime.utcnow() - timedelta(days=3)
    upload_1 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert len(results) == 2
    assert results[0].id == upload_1.id
    assert results[1].id == upload_2.id

@freeze_time("2020-10-27 16:15")  # GMT time
def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    days_ago = datetime.utcnow() - timedelta(days=4)
    create_uploaded_letter(letter_template, service=letter_template.service)
    upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    upload_3 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    create_uploaded_letter(letter_template, service=letter_template.service,
                           created_at=datetime.utcnow() - timedelta(days=3))

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert len(results) == 4
    assert results[0].id is None
    assert results[1].id == upload_2.id
    assert results[2].id == upload_3.id
    assert results[3].id is None

@freeze_time('2020-04-02 14:00')  # Few days after the clocks go forward
def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template):
    letter_template = create_uploaded_template(sample_template.service)
    create_service_data_retention(sample_template.service, 'sms', days_of_retention=3)

    days_ago = datetime.utcnow() - timedelta(days=4)
    upload_1 = create_uploaded_letter(letter_template, service=letter_template.service)
    upload_2 = create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(days=1), created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS
    )
    # older than custom retention for sms:
    create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(days=5), created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS
    )
    upload_3 = create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=3)
    )

    # older than retention for sms but within letter retention:
    upload_4 = create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=6)
    )

    # older than default retention for letters:
    create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=8)
    )

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert len(results) == 4

    # Uploaded letters get their `created_at` shifted to the time of printing
    # 17:30 BST == 16:30 UTC
    assert results[0].created_at == upload_1.created_at.replace(hour=16, minute=30, second=0, microsecond=0)

    # Jobs keep their original `created_at`
    assert results[1].created_at == upload_2.created_at.replace(hour=14, minute=0, second=0, microsecond=0)

    # Still in BST here…
    assert results[2].created_at == upload_3.created_at.replace(hour=16, minute=30, second=0, microsecond=0)

    # Now we’ve gone far enough back to be in GMT
    # 17:30 GMT == 17:30 UTC
    assert results[3].created_at == upload_4.created_at.replace(hour=17, minute=30, second=0, microsecond=0)

@freeze_time('2020-02-02 14:00')
def test_get_uploads_is_paginated(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=3),
    )
    create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(minutes=2),
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=1),
    )
    create_job(
        sample_template, processing_started=datetime.utcnow(),
        job_status=JOB_STATUS_IN_PROGRESS,
    )

    results = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=1)

    assert results.per_page == 1
    assert results.total == 3
    assert len(results.items) == 1
    assert results.items[0].created_at == datetime.utcnow().replace(hour=17, minute=30, second=0, microsecond=0)
    assert results.items[0].notification_count == 2
    assert results.items[0].upload_type == 'letter_day'

    results = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=1)

    assert len(results.items) == 1
    assert results.items[0].created_at == datetime.utcnow().replace(hour=14, minute=0, second=0, microsecond=0)
    assert results.items[0].notification_count == 1
    assert results.items[0].upload_type == 'job'

def test_get_uploads_returns_empty_list(sample_service):
    items = dao_get_uploads_by_service_id(sample_service.id).items
    assert items == []

@freeze_time('2020-02-02 14:00')
def test_get_uploaded_letters_by_print_date(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    # Letters for the previous day’s run
    for _ in range(3):
        create_uploaded_letter(
            letter_template, sample_template.service, status='delivered',
            created_at=datetime.utcnow().replace(day=1, hour=17, minute=29, second=59)
        )

    # Letters from yesterday that rolled into today’s run
    for _ in range(30):
        create_uploaded_letter(
            letter_template, sample_template.service, status='delivered',
            created_at=datetime.utcnow().replace(day=1, hour=17, minute=30, second=0)
        )

    # Letters that just made today’s run
    for _ in range(30):
        create_uploaded_letter(
            letter_template, sample_template.service, status='delivered',
            created_at=datetime.utcnow().replace(hour=17, minute=29, second=59)
        )

    # Letters that just missed today’s run
    for _ in range(3):
        create_uploaded_letter(
            letter_template, sample_template.service, status='delivered',
            created_at=datetime.utcnow().replace(hour=17, minute=30, second=0)
        )

    result = dao_get_uploaded_letters_by_print_date(
        sample_template.service_id,
        datetime.utcnow(),
    )
    assert result.total == 60
    assert len(result.items) == 50
    assert result.has_next is True
    assert result.has_prev is False

    result = dao_get_uploaded_letters_by_print_date(
        sample_template.service_id,
        datetime.utcnow(),
        page=10,
        page_size=2,
    )
    assert result.total == 60
    assert len(result.items) == 2
    assert result.has_next is True
    assert result.has_prev is True