mirror of
https://github.com/GSA/notifications-api.git
synced 2026-02-01 15:46:07 -05:00
Add uploads blueprint; the endpoint returns a combination of uploaded letters and jobs. The endpoint returns data about the uploaded letter or job, including notification statistics for the upload. The data is ordered by scheduled_for and created_at.
It is likely this endpoint will need additional data for the UI to display; for the first iteration this will enable the /uploads page to show both letters and jobs. Only letters uploaded via the UI are included in the result set. Add file name to result set.
This commit is contained in:
committed by
Rebecca Law
parent
570eb5e340
commit
203e19bef3
168
tests/app/dao/test_uploads_dao.py
Normal file
168
tests/app/dao/test_uploads_dao.py
Normal file
@@ -0,0 +1,168 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from app.dao.uploads_dao import dao_get_uploads_by_service_id
|
||||
from app.models import LETTER_TYPE, JOB_STATUS_IN_PROGRESS
|
||||
from tests.app.db import create_job, create_service, create_template, create_notification
|
||||
|
||||
|
||||
def create_uploaded_letter(letter_template, service, status='created', created_at=None):
    """Create a one-off letter notification as the admin-UI upload flow would."""
    notification_kwargs = dict(
        template=letter_template,
        to_field="file-name",
        status=status,
        reference="dvla-reference",
        client_reference="file-name",
        one_off=True,
        created_by_id=service.users[0].id,
        created_at=created_at,
    )
    return create_notification(**notification_kwargs)
|
||||
|
||||
|
||||
def create_precompiled_template(service):
    """Create the hidden precompiled-PDF letter template for *service*."""
    template_kwargs = dict(
        template_type=LETTER_TYPE,
        template_name='Pre-compiled PDF',
        subject='Pre-compiled PDF',
        content="",
        hidden=True,
        postage="second",
    )
    return create_template(service, **template_kwargs)
|
||||
|
||||
|
||||
def test_get_uploads_for_service(sample_template):
    """Uploads are scoped per service: each service sees only its own letter and job."""
    job = create_job(sample_template, processing_started=datetime.utcnow())
    letter_template = create_precompiled_template(sample_template.service)
    letter = create_uploaded_letter(letter_template, sample_template.service)

    other_service = create_service(service_name="other service")
    other_template = create_template(service=other_service)
    other_job = create_job(other_template, processing_started=datetime.utcnow())
    other_letter_template = create_precompiled_template(other_service)
    other_letter = create_uploaded_letter(other_letter_template, other_service)

    uploads = dao_get_uploads_by_service_id(job.service_id).items
    other_uploads = dao_get_uploads_by_service_id(other_job.service_id).items

    # Each service gets exactly its own letter and job back, letter first.
    assert len(uploads) == 2
    assert uploads[0] == (
        letter.id, letter.client_reference, 1, letter.created_at,
        None, letter.created_at, letter.status, "letter",
    )
    assert uploads[1] == (
        job.id, job.original_file_name, job.notification_count, job.created_at,
        job.scheduled_for, job.processing_started, job.job_status, "job",
    )

    assert len(other_uploads) == 2
    assert other_uploads[0] == (
        other_letter.id, other_letter.client_reference, 1, other_letter.created_at,
        None, other_letter.created_at, other_letter.status, "letter",
    )
    assert other_uploads[1] == (
        other_job.id, other_job.original_file_name, other_job.notification_count,
        other_job.created_at, other_job.scheduled_for, other_job.processing_started,
        other_job.job_status, "job",
    )

    # No cross-service leakage.
    assert uploads[0] != other_uploads[0]
    assert uploads[1] != other_uploads[1]
|
||||
|
||||
|
||||
def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template):
    """Scheduled and cancelled jobs, and cancelled letters, are excluded from uploads."""
    for job_status in ('scheduled', 'cancelled'):
        create_job(sample_template, job_status=job_status)
    letter_template = create_precompiled_template(sample_template.service)
    create_uploaded_letter(letter_template, sample_template.service, status='cancelled')

    assert dao_get_uploads_by_service_id(sample_template.service_id).items == []
|
||||
|
||||
|
||||
def test_get_uploads_orders_by_created_at_desc(sample_template):
    """With identical processing timestamps, uploads come back newest-created first."""
    letter_template = create_precompiled_template(sample_template.service)

    upload_1 = create_job(
        sample_template, processing_started=datetime.utcnow(), job_status=JOB_STATUS_IN_PROGRESS
    )
    upload_2 = create_job(
        sample_template, processing_started=datetime.utcnow(), job_status=JOB_STATUS_IN_PROGRESS
    )
    upload_3 = create_uploaded_letter(letter_template, sample_template.service, status='delivered')

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert [r.id for r in results] == [upload_3.id, upload_2.id, upload_1.id]
|
||||
|
||||
|
||||
def test_get_uploads_orders_by_processing_started_desc(sample_template):
    """Jobs with equal created_at are ordered by processing_started, newest first."""
    days_ago = datetime.utcnow() - timedelta(days=3)
    upload_1 = create_job(
        sample_template,
        processing_started=datetime.utcnow() - timedelta(days=1),
        created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    upload_2 = create_job(
        sample_template,
        processing_started=datetime.utcnow() - timedelta(days=2),
        created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS,
    )

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert [r.id for r in results] == [upload_1.id, upload_2.id]
|
||||
|
||||
|
||||
def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template):
    """Ordering interleaves letters (by created_at) and jobs (by processing_started)."""
    letter_template = create_precompiled_template(sample_template.service)

    days_ago = datetime.utcnow() - timedelta(days=4)
    # Letter created "now" sorts first; the jobs sort by when processing started;
    # the old letter sorts last.
    upload_0 = create_uploaded_letter(letter_template, service=letter_template.service)
    upload_1 = create_job(
        sample_template,
        processing_started=datetime.utcnow() - timedelta(days=1),
        created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    upload_2 = create_job(
        sample_template,
        processing_started=datetime.utcnow() - timedelta(days=2),
        created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    upload_3 = create_uploaded_letter(
        letter_template,
        service=letter_template.service,
        created_at=datetime.utcnow() - timedelta(days=3),
    )

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert [r.id for r in results] == [upload_0.id, upload_1.id, upload_2.id, upload_3.id]
|
||||
|
||||
|
||||
def test_get_uploads_is_paginated(sample_template):
    """The DAO paginates: page 1 holds the two newest uploads, page 2 the rest."""
    letter_template = create_precompiled_template(sample_template.service)

    upload_1 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=3),
    )
    upload_2 = create_job(
        sample_template,
        processing_started=datetime.utcnow() - timedelta(minutes=2),
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    upload_3 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=1),
    )
    upload_4 = create_job(
        sample_template, processing_started=datetime.utcnow(), job_status=JOB_STATUS_IN_PROGRESS
    )

    page_one = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=2)

    assert page_one.per_page == 2
    assert page_one.total == 4
    assert [item.id for item in page_one.items] == [upload_4.id, upload_3.id]

    page_two = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=2)

    assert [item.id for item in page_two.items] == [upload_2.id, upload_1.id]
|
||||
|
||||
|
||||
def test_get_uploads_returns_empty_list(sample_service):
    """A service with no uploads yields an empty items list, not an error."""
    assert dao_get_uploads_by_service_id(sample_service.id).items == []
|
||||
0
tests/app/upload/__init__.py
Normal file
0
tests/app/upload/__init__.py
Normal file
180
tests/app/upload/test_rest.py
Normal file
180
tests/app/upload/test_rest.py
Normal file
@@ -0,0 +1,180 @@
|
||||
from datetime import datetime, timedelta, date
|
||||
|
||||
from freezegun import freeze_time
|
||||
|
||||
from app.models import LETTER_TYPE, JOB_STATUS_FINISHED, JOB_STATUS_PENDING
|
||||
from tests.app.db import create_job, create_notification, create_template, create_ft_notification_status
|
||||
from tests.conftest import set_config
|
||||
|
||||
|
||||
def create_uploaded_letter(letter_template, service, status='created', created_at=None):
    """Create a one-off letter notification as the admin-UI upload flow would."""
    return create_notification(
        template=letter_template,
        to_field="file-name",
        reference="dvla-reference",
        client_reference="file-name",
        status=status,
        created_at=created_at,
        created_by_id=service.users[0].id,
        one_off=True,
    )
|
||||
|
||||
|
||||
def create_precompiled_template(service):
    """Create the hidden precompiled-PDF letter template for *service*."""
    return create_template(
        service,
        template_type=LETTER_TYPE,
        template_name='Pre-compiled PDF',
        subject='Pre-compiled PDF',
        content="",
        postage="second",
        hidden=True,
    )
|
||||
|
||||
|
||||
def test_get_uploads(admin_request, sample_template):
    """The endpoint merges uploaded letters and jobs, most recent activity first."""
    letter_template = create_precompiled_template(sample_template.service)

    upload_1 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=4),
    )
    upload_2 = create_job(
        template=sample_template,
        processing_started=datetime.utcnow() - timedelta(minutes=3),
        job_status=JOB_STATUS_FINISHED,
    )
    upload_3 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=2),
    )
    upload_4 = create_job(
        template=sample_template,
        processing_started=datetime.utcnow() - timedelta(minutes=1),
        job_status=JOB_STATUS_FINISHED,
    )
    upload_5 = create_job(
        template=sample_template, processing_started=None,
        job_status=JOB_STATUS_PENDING, notification_count=10,
    )

    service_id = sample_template.service.id

    data = admin_request.get('upload.get_uploads_by_service', service_id=service_id)['data']

    # Newest first: the pending job, then the later job/letter pairs.
    expected = [
        (upload_5, 'some.csv', 10, [], 'job'),
        (upload_4, 'some.csv', 1, [], 'job'),
        (upload_3, 'file-name', 1, [{'count': 1, 'status': 'delivered'}], 'letter'),
        (upload_2, 'some.csv', 1, [], 'job'),
        (upload_1, 'file-name', 1, [{'count': 1, 'status': 'delivered'}], 'letter'),
    ]
    assert len(data) == len(expected)
    for actual, (upload, file_name, count, stats, upload_type) in zip(data, expected):
        assert actual == {
            'id': str(upload.id),
            'original_file_name': file_name,
            'notification_count': count,
            'created_at': upload.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            'statistics': stats,
            'upload_type': upload_type,
        }
|
||||
|
||||
|
||||
def test_get_uploads_should_return_statistics(admin_request, sample_template):
    """Each upload carries per-status notification counts; uploads with no notifications get []."""
    now = datetime.utcnow()
    earlier = datetime.utcnow() - timedelta(days=1)

    job_1 = create_job(template=sample_template, job_status='pending')
    job_2 = create_job(sample_template, processing_started=earlier)
    for _ in range(3):
        create_notification(template=sample_template, job=job_2, status='created')
    job_3 = create_job(sample_template, processing_started=now)
    for _ in range(4):
        create_notification(template=sample_template, job=job_3, status='sending')

    letter_template = create_precompiled_template(sample_template.service)
    letter_1 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(days=3),
    )
    letter_2 = create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(days=2),
    )

    resp_json = admin_request.get(
        'upload.get_uploads_by_service', service_id=sample_template.service_id
    )['data']

    expected = [
        (job_1, []),
        (job_3, [{'status': 'sending', 'count': 4}]),
        (job_2, [{'status': 'created', 'count': 3}]),
        (letter_2, [{'status': 'delivered', 'count': 1}]),
        (letter_1, [{'status': 'delivered', 'count': 1}]),
    ]
    assert len(resp_json) == len(expected)
    for row, (upload, stats) in zip(resp_json, expected):
        assert row['id'] == str(upload.id)
        assert row['statistics'] == stats
|
||||
|
||||
|
||||
def test_get_uploads_should_paginate(admin_request, sample_template):
    """With PAGE_SIZE=2 the first page holds 2 of 10 uploads and links to next/last."""
    for _ in range(10):
        create_job(sample_template)

    with set_config(admin_request.app, 'PAGE_SIZE', 2):
        resp_json = admin_request.get(
            'upload.get_uploads_by_service', service_id=sample_template.service_id
        )

    assert len(resp_json['data']) == 2
    assert resp_json['page_size'] == 2
    assert resp_json['total'] == 10
    assert 'links' in resp_json
    # First page: no 'prev' link.
    assert set(resp_json['links'].keys()) == {'next', 'last'}
|
||||
|
||||
|
||||
def test_get_uploads_accepts_page_parameter(admin_request, sample_template):
    """Requesting page=2 returns the second page, with prev/next/last links."""
    for _ in range(10):
        create_job(sample_template)

    with set_config(admin_request.app, 'PAGE_SIZE', 2):
        resp_json = admin_request.get(
            'upload.get_uploads_by_service', service_id=sample_template.service_id, page=2
        )

    assert len(resp_json['data']) == 2
    assert resp_json['page_size'] == 2
    assert resp_json['total'] == 10
    assert 'links' in resp_json
    # A middle page links backwards as well as forwards.
    assert set(resp_json['links'].keys()) == {'prev', 'next', 'last'}
|
||||
|
||||
|
||||
@freeze_time('2017-06-10 12:00')
def test_get_uploads_should_retrieve_from_ft_notification_status_for_old_jobs(admin_request, sample_template):
    """Statistics for jobs that started processing more than three days ago come from
    the ft_notification_status fact table; newer jobs read the notifications table."""
    # it's the 10th today, so 3 days should include all of 7th, 8th, 9th, and some of 10th.
    just_three_days_ago = datetime(2017, 6, 6, 22, 59, 59)
    not_quite_three_days_ago = just_three_days_ago + timedelta(seconds=1)

    job_1 = create_job(sample_template, created_at=just_three_days_ago, processing_started=just_three_days_ago)
    job_2 = create_job(sample_template, created_at=just_three_days_ago, processing_started=not_quite_three_days_ago)
    # is old but hasn't started yet (probably a scheduled job). We don't have any stats for this job yet.
    job_3 = create_job(sample_template, created_at=just_three_days_ago, processing_started=None)

    # some notifications created more than three days ago, some created after the midnight cutoff
    create_ft_notification_status(date(2017, 6, 6), job=job_1, notification_status='delivered', count=2)
    create_ft_notification_status(date(2017, 6, 7), job=job_1, notification_status='delivered', count=4)
    # job2's new enough
    create_notification(job=job_2, status='created', created_at=not_quite_three_days_ago)

    # this isn't picked up because the job is too new
    create_ft_notification_status(date(2017, 6, 7), job=job_2, notification_status='delivered', count=8)
    # this isn't picked up - while the job is old, it started in last 3 days so we look at notification table instead
    create_ft_notification_status(date(2017, 6, 7), job=job_3, notification_status='delivered', count=16)

    # this isn't picked up because we're using the ft status table for job_1 as it's old
    create_notification(job=job_1, status='created', created_at=not_quite_three_days_ago)

    resp_json = admin_request.get(
        'upload.get_uploads_by_service', service_id=sample_template.service_id
    )['data']

    expected = [
        (job_3, []),
        (job_2, [{'status': 'created', 'count': 1}]),
        (job_1, [{'status': 'delivered', 'count': 6}]),  # 2 + 4 from the fact table
    ]
    for row, (job, stats) in zip(resp_json, expected):
        assert row['id'] == str(job.id)
        assert row['statistics'] == stats
|
||||
Reference in New Issue
Block a user