2019-02-12 13:17:08 +00:00
|
|
|
|
from collections import Counter, OrderedDict
|
2016-07-11 10:49:01 +01:00
|
|
|
|
from csv import DictReader
|
2018-02-20 11:22:17 +00:00
|
|
|
|
from io import StringIO
|
|
|
|
|
|
from pathlib import Path
|
2016-10-26 15:44:24 +01:00
|
|
|
|
|
2017-07-27 16:30:26 +01:00
|
|
|
|
import pytest
|
2018-02-20 11:22:17 +00:00
|
|
|
|
from freezegun import freeze_time
|
2018-05-08 15:12:46 +01:00
|
|
|
|
from notifications_utils.recipients import validate_email_address
|
2017-05-04 09:30:55 +01:00
|
|
|
|
|
2018-03-21 16:01:26 +00:00
|
|
|
|
from app import format_datetime_relative
|
2017-01-13 11:37:14 +00:00
|
|
|
|
from app.utils import (
|
2018-03-09 14:53:04 +00:00
|
|
|
|
AgreementInfo,
|
2018-05-08 15:12:46 +01:00
|
|
|
|
GovernmentEmailDomain,
|
2018-02-20 11:22:17 +00:00
|
|
|
|
Spreadsheet,
|
2017-01-13 11:37:14 +00:00
|
|
|
|
email_safe,
|
2018-02-20 11:22:17 +00:00
|
|
|
|
generate_next_dict,
|
2017-01-13 11:37:14 +00:00
|
|
|
|
generate_notifications_csv,
|
|
|
|
|
|
generate_previous_dict,
|
2018-11-29 11:41:13 +00:00
|
|
|
|
get_logo_cdn_domain,
|
2018-11-27 16:49:01 +00:00
|
|
|
|
printing_today_or_tomorrow,
|
2017-01-13 11:37:14 +00:00
|
|
|
|
)
|
2018-04-25 14:12:58 +01:00
|
|
|
|
from tests.conftest import fake_uuid
|
2017-01-13 11:37:14 +00:00
|
|
|
|
|
2016-03-31 15:17:05 +01:00
|
|
|
|
|
2017-04-20 14:55:14 +01:00
|
|
|
|
def _get_notifications_csv(
    row_number=1,
    recipient='foo@bar.com',
    template_name='foo',
    template_type='sms',
    job_name='bar.csv',
    status='Delivered',
    created_at='1943-04-19 12:00:00',
    rows=1,
    with_links=False,
    job_id=fake_uuid,
    created_by_name=None,
    created_by_email_address=None,
):
    """Build a stub for `notification_api_client.get_notifications_for_service`.

    Returns a callable (suitable as a mock's `side_effect`) which produces a
    fake paginated API response containing `rows` notifications that all share
    the supplied field values. When `with_links` is true the response carries
    prev/next/last pagination links, which makes `generate_notifications_csv`
    go on to fetch a second page.
    """

    def _get(
        service_id,
        page=1,
        job_id=None,
        template_type=template_type,
    ):
        # Pagination links are only included when the caller asked for them.
        links = {}
        if with_links:
            links = {
                'prev': '/service/{}/notifications?page=0'.format(service_id),
                'next': '/service/{}/notifications?page=1'.format(service_id),
                'last': '/service/{}/notifications?page=2'.format(service_id)
            }

        data = {
            # One notification per requested row; row numbers are sequential
            # starting from `row_number`.
            'notifications': [{
                "row_number": row_number + i,
                "to": recipient,
                "recipient": recipient,
                "template_name": template_name,
                "template_type": template_type,
                "template": {"name": template_name, "template_type": template_type},
                "job_name": job_name,
                "status": status,
                "created_at": created_at,
                "updated_at": None,
                "created_by_name": created_by_name,
                "created_by_email_address": created_by_email_address,
            } for i in range(rows)],
            'total': rows,
            'page_size': 50,
            'links': links
        }

        return data

    return _get
|
2017-04-20 14:55:14 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope='function')
def _get_notifications_csv_mock(
    mocker,
    api_user_active,
    job_id=fake_uuid
):
    """Patch the notifications API client with a default single-row response."""
    patch_target = 'app.notification_api_client.get_notifications_for_service'
    return mocker.patch(patch_target, side_effect=_get_notifications_csv())
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-10-27 17:31:13 +01:00
|
|
|
|
@pytest.mark.parametrize('service_name, safe_email', [
    ('name with spaces', 'name.with.spaces'),
    ('singleword', 'singleword'),
    ('UPPER CASE', 'upper.case'),
    ('Service - with dash', 'service.with.dash'),
    ('lots of spaces', 'lots.of.spaces'),
    ('name.with.dots', 'name.with.dots'),
    ('name-with-other-delimiters', 'namewithotherdelimiters'),
    ('.leading', 'leading'),
    ('trailing.', 'trailing'),
    ('üńïçödë wördś', 'unicode.words'),
])
def test_email_safe_return_dot_separated_email_domain(service_name, safe_email):
    """email_safe lowercases, strips punctuation/accents, and dot-separates words."""
    assert email_safe(service_name) == safe_email
|
2016-07-11 10:49:01 +01:00
|
|
|
|
|
|
|
|
|
|
|
2016-10-10 17:15:57 +01:00
|
|
|
|
def test_generate_previous_dict(client):
    """The previous-page dict should point at page 1 when we are on page 2."""
    previous_page = generate_previous_dict('main.view_jobs', 'foo', 2, {})
    assert 'page=1' in previous_page['url']
    assert previous_page['title'] == 'Previous page'
    assert previous_page['label'] == 'page 1'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_generate_next_dict(client):
    """The next-page dict should point at page 3 when we are on page 2."""
    next_page = generate_next_dict('main.view_jobs', 'foo', 2, {})
    assert 'page=3' in next_page['url']
    assert next_page['title'] == 'Next page'
    assert next_page['label'] == 'page 3'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_generate_previous_next_dict_adds_other_url_args(client):
    """Extra URL arguments should be carried through into the generated link."""
    next_page = generate_next_dict(
        'main.view_notifications', 'foo', 2, {'message_type': 'blah'}
    )
    assert 'notifications/blah' in next_page['url']
|
2016-10-26 15:44:24 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_can_create_spreadsheet_from_large_excel_file():
    """Spreadsheet.from_file should cope with a real (large) .xlsx upload."""
    # Path objects can be opened directly (Python 3.6+); the original wrapped
    # the path in str() before calling open(), which is redundant.
    excel_path = Path.cwd() / 'tests' / 'spreadsheet_files' / 'excel 2007.xlsx'
    with excel_path.open('rb') as xl:
        ret = Spreadsheet.from_file(xl, filename='xl.xlsx')
        assert ret.as_csv_data
|
2017-01-13 11:37:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
2017-05-04 09:30:55 +01:00
|
|
|
|
def test_can_create_spreadsheet_from_dict():
    """from_dict should emit CSV with the keys as the header row."""
    spreadsheet = Spreadsheet.from_dict(OrderedDict(
        foo='bar',
        name='Jane',
    ))
    assert spreadsheet.as_csv_data == (
        "foo,name\r\n"
        "bar,Jane\r\n"
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_can_create_spreadsheet_from_dict_with_filename():
    """An explicit filename should come back out of the spreadsheet's dict form."""
    spreadsheet = Spreadsheet.from_dict({}, filename='empty.csv')
    assert spreadsheet.as_dict['file_name'] == "empty.csv"
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-06 14:41:55 +01:00
|
|
|
|
@pytest.mark.parametrize('created_by_name, expected_content', [
    (
        None, [
            'Recipient,Template,Type,Sent by,Sent by email,Job,Status,Time\n',
            'foo@bar.com,foo,sms,,sender@email.gov.uk,,Delivered,1943-04-19 12:00:00\r\n',
        ]
    ),
    (
        'Anne Example', [
            'Recipient,Template,Type,Sent by,Sent by email,Job,Status,Time\n',
            'foo@bar.com,foo,sms,Anne Example,sender@email.gov.uk,,Delivered,1943-04-19 12:00:00\r\n',
        ]
    ),
])
def test_generate_notifications_csv_without_job(
    app_,
    mocker,
    created_by_name,
    expected_content,
):
    """When no job is involved, the CSV has no row-number column, the Job
    column is empty, and 'Sent by' is blank unless created_by_name is set."""
    mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        side_effect=_get_notifications_csv(
            created_by_name=created_by_name,
            created_by_email_address="sender@email.gov.uk",
            job_id=None,
            job_name=None,
        )
    )
    assert list(generate_notifications_csv(service_id=fake_uuid)) == expected_content
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-02-16 11:35:36 +00:00
|
|
|
|
@pytest.mark.parametrize('original_file_contents, expected_column_headers, expected_1st_row', [
    (
        # Simplest upload: just a phone number column.
        """
            phone_number
            07700900123
        """,
        ['Row number', 'phone_number', 'Template', 'Type', 'Job', 'Status', 'Time'],
        ['1', '07700900123', 'foo', 'sms', 'bar.csv', 'Delivered', '1943-04-19 12:00:00'],
    ),
    (
        # Extra personalisation columns are preserved in the output.
        """
            phone_number, a, b, c
            07700900123,  🐜,🐝,🦀
        """,
        ['Row number', 'phone_number', 'a', 'b', 'c', 'Template', 'Type', 'Job', 'Status', 'Time'],
        ['1', '07700900123', '🐜', '🐝', '🦀', 'foo', 'sms', 'bar.csv', 'Delivered', '1943-04-19 12:00:00'],
    ),
    (
        # Quoted fields containing commas must survive the round trip.
        """
            "phone_number", "a", "b", "c"
            "07700900123","🐜,🐜","🐝,🐝","🦀"
        """,
        ['Row number', 'phone_number', 'a', 'b', 'c', 'Template', 'Type', 'Job', 'Status', 'Time'],
        ['1', '07700900123', '🐜,🐜', '🐝,🐝', '🦀', 'foo', 'sms', 'bar.csv', 'Delivered', '1943-04-19 12:00:00'],
    ),
])
def test_generate_notifications_csv_returns_correct_csv_file(
    app_,
    mocker,
    _get_notifications_csv_mock,
    original_file_contents,
    expected_column_headers,
    expected_1st_row,
):
    """The report CSV combines the uploaded file's columns with notification
    metadata columns, keeping the original personalisation values."""
    mocker.patch(
        'app.s3_client.s3_csv_client.s3download',
        return_value=original_file_contents,
    )
    csv_content = generate_notifications_csv(service_id='1234', job_id=fake_uuid, template_type='sms')
    csv_file = DictReader(StringIO('\n'.join(csv_content)))
    assert csv_file.fieldnames == expected_column_headers
    assert next(csv_file) == dict(zip(expected_column_headers, expected_1st_row))
|
2017-01-13 11:37:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-02-16 11:35:36 +00:00
|
|
|
|
def test_generate_notifications_csv_only_calls_once_if_no_next_link(
    app_,
    _get_notifications_csv_mock,
):
    """With no 'next' link in the API response only one page is fetched."""
    # Consume the generator fully so any further page fetches would happen.
    list(generate_notifications_csv(service_id='1234'))

    assert _get_notifications_csv_mock.call_count == 1
|
2017-01-13 11:37:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-01-12 14:03:31 +00:00
|
|
|
|
@pytest.mark.parametrize("job_id", ["some", None])
def test_generate_notifications_csv_calls_twice_if_next_link(
    app_,
    mocker,
    job_id,
):
    """A response carrying a 'next' link must trigger a second page fetch,
    and the rows from both pages appear in the CSV in order."""

    mocker.patch(
        'app.s3_client.s3_csv_client.s3download',
        return_value="""
            phone_number
            07700900000
            07700900001
            07700900002
            07700900003
            07700900004
            07700900005
            07700900006
            07700900007
            07700900008
            07700900009
        """
    )

    service_id = '1234'
    # First page: 7 rows plus pagination links; second page: the final 3 rows.
    response_with_links = _get_notifications_csv(rows=7, with_links=True)
    response_with_no_links = _get_notifications_csv(rows=3, row_number=8, with_links=False)

    mock_get_notifications = mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        side_effect=[
            response_with_links(service_id),
            response_with_no_links(service_id),
        ]
    )

    csv_content = generate_notifications_csv(
        service_id=service_id,
        job_id=job_id or fake_uuid,
        template_type='sms',
    )
    csv = list(DictReader(StringIO('\n'.join(csv_content))))

    assert len(csv) == 10
    assert csv[0]['phone_number'] == '07700900000'
    assert csv[9]['phone_number'] == '07700900009'
    assert mock_get_notifications.call_count == 2
    # mock_calls[0][2] is the kwargs from first call
    assert mock_get_notifications.mock_calls[0][2]['page'] == 1
    assert mock_get_notifications.mock_calls[1][2]['page'] == 2
|
2017-07-11 17:06:15 +01:00
|
|
|
|
|
|
|
|
|
|
|
2017-07-24 15:20:40 +01:00
|
|
|
|
def test_get_cdn_domain_on_localhost(client, mocker):
    """Running locally, the logo CDN falls back to the notify.tools host."""
    mocker.patch.dict(
        'app.current_app.config',
        values={'ADMIN_BASE_URL': 'http://localhost:6012'},
    )
    assert get_logo_cdn_domain() == 'static-logos.notify.tools'
|
|
|
|
|
|
|
|
|
|
|
|
|
2017-07-28 15:19:20 +01:00
|
|
|
|
def test_get_cdn_domain_on_non_localhost(client, mocker):
    """On a deployed host, the logo CDN is derived from the admin base URL."""
    mocker.patch.dict(
        'app.current_app.config',
        values={'ADMIN_BASE_URL': 'https://some.admintest.com'},
    )
    assert get_logo_cdn_domain() == 'static-logos.admintest.com'
|
2018-02-06 10:55:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("domain_or_email_address", (
    "test@dclgdatamart.co.uk", "test@communities.gsi.gov.uk", "test@communities.gov.uk",
))
def test_get_valid_agreement_info_known_details(domain_or_email_address):
    """All MHCLG addresses resolve to the same signed agreement."""
    info = AgreementInfo(domain_or_email_address)
    assert info.crown_status is None
    assert info.owner == "Ministry of Housing, Communities & Local Government"
    assert info.agreement_signed is True
    assert info.as_human_readable == (
        'Yes, on behalf of Ministry of Housing, Communities & Local Government'
    )
|
2018-02-06 10:55:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-09-03 10:46:52 +01:00
|
|
|
|
@pytest.mark.parametrize("domain_or_email_address, is_canonical", (
    ("test@dclgdatamart.co.uk", False),
    ("test@communities.gsi.gov.uk", False),
    ("test@communities.gov.uk", True),
))
def test_get_canonical_domain(domain_or_email_address, is_canonical):
    """Aliases resolve to communities.gov.uk; only the real domain is canonical."""
    info = AgreementInfo(domain_or_email_address)
    assert info.canonical_domain == 'communities.gov.uk'
    assert info.is_canonical == is_canonical
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_canonical_domain_passes_through_unknown_domain():
    """A domain we know nothing about has no canonical form."""
    unknown = AgreementInfo('example.com')
    assert unknown.canonical_domain is None
    assert unknown.is_canonical is False
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-02-06 10:55:29 +00:00
|
|
|
|
@pytest.mark.parametrize("domain_or_email_address", (
    "test@police.gov.uk", "police.gov.uk",
))
def test_get_valid_agreement_info_unknown_details(domain_or_email_address):
    """Domains not in the data give completely unknown agreement details."""
    info = AgreementInfo(domain_or_email_address)
    assert info.crown_status is None
    assert info.owner is None
    assert info.agreement_signed is None
    assert info.as_human_readable == 'Can’t tell (domain is police.gov.uk)'
|
2018-03-08 16:44:09 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-03-09 14:53:04 +00:00
|
|
|
|
def test_get_valid_agreement_info_only_org_known():
    """nhs.net identifies the NHS but neither crown status nor an agreement."""
    info = AgreementInfo('nhs.net')
    # Some parts of the NHS are Crown, some aren't — so status is unknowable.
    assert info.crown_status is None
    assert info.owner == 'NHS'
    assert info.agreement_signed is None
    assert info.as_human_readable == 'Can’t tell (organisation is NHS, crown status unknown)'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_valid_agreement_info_some_known_details():
    """An organisation with a signed agreement but unknown crown status."""
    info = AgreementInfo("marinemanagement.org.uk")
    assert info.crown_status is None
    assert info.owner == "Marine Management Organisation"
    assert info.agreement_signed is True
    assert info.as_human_readable == 'Yes, on behalf of Marine Management Organisation'
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-03-09 14:53:04 +00:00
|
|
|
|
def test_get_valid_local_agreement_info_some_known_details():
    """A known council that hasn't signed: owner known, non-crown, not signed."""
    # This example may need swapping for a different council if Babergh
    # ever signs the agreement.
    info = AgreementInfo("babergh.gov.uk")
    assert info.crown_status is False
    assert info.owner == "Babergh District Council"
    assert info.agreement_signed is False
    assert info.as_human_readable == (
        'No (organisation is Babergh District Council, a non-crown body)'
    )
|
2018-02-06 11:02:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-02-06 13:07:21 +00:00
|
|
|
|
def test_get_valid_government_domain_gets_most_specific_first():
    """Lookups must prefer the most specific domain over the generic gov.uk."""

    # The catch-all gov.uk domain tells us nothing on its own…
    generic = AgreementInfo("gov.uk")
    assert generic.crown_status is None
    assert generic.owner is None
    assert generic.agreement_signed is None
    assert generic.as_human_readable == (
        'Can’t tell (domain is gov.uk)'
    )

    # …but a subdomain of it can still match a specific organisation.
    specific = AgreementInfo("dacorum.gov.uk")
    assert specific.crown_status is False
    assert specific.owner == 'Dacorum Borough Council'
    assert specific.agreement_signed is True
    assert specific.as_human_readable == (
        'Yes, on behalf of Dacorum Borough Council'
    )
|
2018-02-06 13:07:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-07-10 17:18:50 +01:00
|
|
|
|
def test_get_domain_info_for_branding_request():
    """Branding requests show the owner when known, or a can't-tell message."""
    generic = AgreementInfo("gov.uk").as_info_for_branding_request
    assert generic == 'Can’t tell (domain is gov.uk)'

    specific = AgreementInfo("dacorum.gov.uk").as_info_for_branding_request
    assert specific == 'Dacorum Borough Council'
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-02-12 11:18:10 +00:00
|
|
|
|
def test_domains_are_lowercased():
    """Domain keys in the agreement data must be stored lowercase."""
    for domain in AgreementInfo.domains:
        assert domain == domain.lower()
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-02-08 10:51:59 +00:00
|
|
|
|
def test_validate_government_domain_data():
    """Sanity-check every entry in the agreement domain data."""
    for domain in AgreementInfo.domains:

        # Each domain must itself form a deliverable email address.
        validate_email_address('test@{}'.format(domain))

        info = AgreementInfo(domain)

        assert info.crown_status in {True, False, None}

        # The owner must be a non-empty, non-whitespace string.
        assert isinstance(info.owner, str) and info.owner.strip()

        assert info.agreement_signed in {True, False, None}
|
2018-03-21 16:01:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
2019-02-12 13:17:08 +00:00
|
|
|
|
def test_domain_data_is_canonicalized():
    """No organisation may have more than one canonical domain entry."""
    canonical_owner_counts = Counter(
        AgreementInfo(domain).owner
        for domain in AgreementInfo.domains.keys()
        if AgreementInfo(domain).is_canonical
    )
    for owner, count in canonical_owner_counts.most_common():
        if count > 1:
            raise ValueError(
                '{} entries in domains.yml for {}'.format(count, owner)
            )
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-05-08 15:12:46 +01:00
|
|
|
|
def test_validate_email_domain_data():
    """Every known government email domain must form a valid email address."""
    for domain in GovernmentEmailDomain.domains:
        validate_email_address('test@{}'.format(domain))
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-03-21 16:01:26 +00:00
|
|
|
|
@pytest.mark.parametrize('time, human_readable_datetime', [
    # More than a week ago: absolute date.
    ('2018-03-14 09:00', '14 March at 9:00am'),
    ('2018-03-14 15:00', '14 March at 3:00pm'),

    ('2018-03-15 09:00', '15 March at 9:00am'),
    ('2018-03-15 15:00', '15 March at 3:00pm'),

    # The day before yesterday still gets an absolute date.
    ('2018-03-19 09:00', '19 March at 9:00am'),
    ('2018-03-19 15:00', '19 March at 3:00pm'),
    ('2018-03-19 23:59', '19 March at 11:59pm'),

    # Yesterday (relative to the frozen 2018-03-21 below).
    ('2018-03-20 00:00', '19 March at midnight'),  # we specifically refer to 00:00 as belonging to the day before.
    ('2018-03-20 00:01', 'yesterday at 12:01am'),
    ('2018-03-20 09:00', 'yesterday at 9:00am'),
    ('2018-03-20 15:00', 'yesterday at 3:00pm'),
    ('2018-03-20 23:59', 'yesterday at 11:59pm'),

    # Today.
    ('2018-03-21 00:00', 'yesterday at midnight'),  # we specifically refer to 00:00 as belonging to the day before.
    ('2018-03-21 00:01', 'today at 12:01am'),
    ('2018-03-21 09:00', 'today at 9:00am'),
    ('2018-03-21 12:00', 'today at midday'),
    ('2018-03-21 15:00', 'today at 3:00pm'),
    ('2018-03-21 23:59', 'today at 11:59pm'),

    # Tomorrow.
    ('2018-03-22 00:00', 'today at midnight'),  # we specifically refer to 00:00 as belonging to the day before.
    ('2018-03-22 00:01', 'tomorrow at 12:01am'),
    ('2018-03-22 09:00', 'tomorrow at 9:00am'),
    ('2018-03-22 15:00', 'tomorrow at 3:00pm'),
    ('2018-03-22 23:59', 'tomorrow at 11:59pm'),

    # Beyond tomorrow: back to absolute dates.
    ('2018-03-23 00:01', '23 March at 12:01am'),
    ('2018-03-23 09:00', '23 March at 9:00am'),
    ('2018-03-23 15:00', '23 March at 3:00pm'),
])
def test_format_datetime_relative(time, human_readable_datetime):
    """Relative datetime formatting, frozen at midday on 21 March 2018."""
    with freeze_time('2018-03-21 12:00'):
        assert format_datetime_relative(time) == human_readable_datetime
|
2018-11-27 16:49:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('utc_datetime', [
    # NOTE(review): boundary values suggest a 5:30pm local-time print
    # deadline (16:29 UTC in BST, 17:29 UTC in GMT) — confirm against
    # printing_today_or_tomorrow's implementation.
    '2018-08-01 23:00',
    '2018-08-01 16:29',
    '2018-11-01 00:00',
    '2018-11-01 10:00',
    '2018-11-01 17:29',
])
def test_printing_today_or_tomorrow_returns_today(utc_datetime):
    """UTC instants before the print deadline count as printing 'today'."""
    with freeze_time(utc_datetime):
        assert printing_today_or_tomorrow() == 'today'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('utc_datetime', [
    '2018-08-01 22:59',
    '2018-08-01 16:30',
    '2018-11-01 17:30',
    '2018-11-01 21:00',
    '2018-11-01 23:59',
])
def test_printing_today_or_tomorrow_returns_tomorrow(utc_datetime):
    """UTC instants at or after the print deadline count as 'tomorrow'.

    The parameter was renamed from ``datetime`` to ``utc_datetime``: the old
    name shadowed the stdlib ``datetime`` module and was inconsistent with
    the sibling test_printing_today_or_tomorrow_returns_today.
    """
    with freeze_time(utc_datetime):
        assert printing_today_or_tomorrow() == 'tomorrow'
|