import datetime
import uuid
from collections import namedtuple
from unittest.mock import call

import pytest
from boto3.exceptions import Boto3Error
from freezegun import freeze_time
from notifications_utils.recipients import (
    validate_and_format_email_address,
    validate_and_format_phone_number,
)
from sqlalchemy.exc import SQLAlchemyError

from app.models import SMS_TYPE, Notification, NotificationHistory
from app.notifications.process_notifications import (
    create_content_for_notification,
    persist_notification,
    send_notification_to_queue,
    simulated_recipient,
)
from app.serialised_models import SerialisedTemplate
from app.v2.errors import BadRequestError
from tests.app.db import create_api_key, create_service, create_template
from tests.conftest import set_config


def test_create_content_for_notification_passes(sample_email_template):
    template = SerialisedTemplate.from_id_and_service_id(
        sample_email_template.id, sample_email_template.service_id
    )
    content = create_content_for_notification(template, None)
    assert str(content) == template.content + '\n'


def test_create_content_for_notification_with_placeholders_passes(sample_template_with_placeholders):
    template = SerialisedTemplate.from_id_and_service_id(
        sample_template_with_placeholders.id, sample_template_with_placeholders.service_id
    )
    content = create_content_for_notification(template, {'name': 'Bobby'})
    assert content.content == template.content
    assert 'Bobby' in str(content)


def test_create_content_for_notification_fails_with_missing_personalisation(sample_template_with_placeholders):
    template = SerialisedTemplate.from_id_and_service_id(
        sample_template_with_placeholders.id, sample_template_with_placeholders.service_id
    )
    with pytest.raises(BadRequestError):
        create_content_for_notification(template, None)


def test_create_content_for_notification_allows_additional_personalisation(sample_template_with_placeholders):
    template = SerialisedTemplate.from_id_and_service_id(
        sample_template_with_placeholders.id, sample_template_with_placeholders.service_id
    )
    create_content_for_notification(template, {'name': 'Bobby', 'Additional placeholder': 'Data'})


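# persist_notification should return the saved object and write a single Notification row
# whose fields mirror it.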
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_creates_and_save_to_db(sample_template, sample_api_key, sample_job):
assert Notification.query.count() == 0
assert NotificationHistory.query.count() == 0
notification = persist_notification(
template_id=sample_template.id,
template_version=sample_template.version,
recipient='+447111111111',
service=sample_template.service,
personalisation={},
notification_type='sms',
api_key_id=sample_api_key.id,
key_type=sample_api_key.key_type,
job_id=sample_job.id,
job_row_number=100,
2017-11-27 14:45:34 +00:00
reference="ref",
reply_to_text=sample_template.service.get_default_sms_sender())
assert Notification.query.get(notification.id) is not None
notification_from_db = Notification.query.one()
assert notification_from_db.id == notification.id
assert notification_from_db.template_id == notification.template_id
assert notification_from_db.template_version == notification.template_version
assert notification_from_db.api_key_id == notification.api_key_id
    assert notification_from_db.key_type == notification.key_type
    assert notification_from_db.billable_units == notification.billable_units
    assert notification_from_db.notification_type == notification.notification_type
    assert notification_from_db.created_at == notification.created_at
    assert not notification_from_db.sent_at
    assert notification_from_db.updated_at == notification.updated_at
    assert notification_from_db.status == notification.status
    assert notification_from_db.reference == notification.reference
    assert notification_from_db.client_reference == notification.client_reference
    assert notification_from_db.created_by_id == notification.created_by_id
    assert notification_from_db.reply_to_text == sample_template.service.get_default_sms_sender()


def test_persist_notification_throws_exception_when_missing_template(sample_api_key):
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
    with pytest.raises(SQLAlchemyError):
        persist_notification(
            template_id=None,
            template_version=None,
            recipient='+447111111111',
            service=sample_api_key.service,
            personalisation=None,
            notification_type='sms',
            api_key_id=sample_api_key.id,
            key_type=sample_api_key.key_type)
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0


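# Optional arguments (an explicit notification id, created_at, job details and client reference)
# should be stored exactly as supplied rather than generated or defaulted.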
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_with_optionals(sample_job, sample_api_key):
assert Notification.query.count() == 0
assert NotificationHistory.query.count() == 0
n_id = uuid.uuid4()
created_at = datetime.datetime(2016, 11, 11, 16, 8, 18)
2017-04-26 10:22:20 +01:00
persist_notification(
2017-04-26 11:52:03 +01:00
template_id=sample_job.template.id,
template_version=sample_job.template.version,
2023-01-04 16:35:25 -05:00
recipient='+12028675309',
2017-04-26 11:52:03 +01:00
service=sample_job.service,
personalisation=None,
notification_type='sms',
api_key_id=sample_api_key.id,
key_type=sample_api_key.key_type,
created_at=created_at,
job_id=sample_job.id,
job_row_number=10,
client_reference="ref from client",
notification_id=n_id,
created_by_id=sample_job.created_by_id
2017-04-26 11:52:03 +01:00
)
assert Notification.query.count() == 1
assert NotificationHistory.query.count() == 0
persisted_notification = Notification.query.all()[0]
assert persisted_notification.id == n_id
assert persisted_notification.job_id == sample_job.id
assert persisted_notification.job_row_number == 10
assert persisted_notification.created_at == created_at
assert persisted_notification.client_reference == "ref from client"
assert persisted_notification.reference is None
2017-04-26 10:22:20 +01:00
assert persisted_notification.international is False
2023-01-04 16:35:25 -05:00
assert persisted_notification.phone_prefix == '1'
2017-04-27 12:41:10 +01:00
assert persisted_notification.rate_multiplier == 1
assert persisted_notification.created_by_id == sample_job.created_by_id
2017-11-27 14:45:34 +00:00
assert not persisted_notification.reply_to_text
2017-04-26 10:22:20 +01:00
def test_persist_notification_cache_is_not_incremented_on_failure_to_create_notification(
    notify_api, sample_api_key, mocker
):
    mocked_redis = mocker.patch('app.redis_store.incr')
    with pytest.raises(SQLAlchemyError):
        persist_notification(
            template_id=None,
            template_version=None,
            recipient='+447111111111',
            service=sample_api_key.service,
            personalisation=None,
            notification_type='sms',
            api_key_id=sample_api_key.id,
            key_type=sample_api_key.key_type)
    mocked_redis.assert_not_called()


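# Notifications sent with a test API key are still persisted, but the Redis daily-limit
# counter must not be incremented for them.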
def test_persist_notification_does_not_increment_cache_if_test_key(
    notify_api, sample_template, sample_job, mocker, sample_test_api_key
):
    daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr')
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
    with set_config(notify_api, 'REDIS_ENABLED', True):
        persist_notification(
            template_id=sample_template.id,
            template_version=sample_template.version,
            recipient='+447111111111',
            service=sample_template.service,
            personalisation={},
            notification_type='sms',
            api_key_id=sample_test_api_key.id,
            key_type=sample_test_api_key.key_type,
            job_id=sample_job.id,
            job_row_number=100,
            reference="ref",
        )
    assert Notification.query.count() == 1
    assert not daily_limit_cache.called


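# When the daily count key already exists in Redis (get returns a value), persisting a
# notification increments it once; trial (restricted) and live services behave the same.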
@pytest.mark.parametrize('restricted_service', [True, False])
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_increments_cache_for_trial_or_live_service(
    notify_api, notify_db_session, mocker, restricted_service
):
    service = create_service(restricted=restricted_service)
    template = create_template(service=service)
    api_key = create_api_key(service=service)
    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1)
    mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr')
    with set_config(notify_api, 'REDIS_ENABLED', True):
        persist_notification(
            template_id=template.id,
            template_version=template.version,
            recipient='+447111111122',
            service=template.service,
            personalisation={},
            notification_type='sms',
            api_key_id=api_key.id,
            key_type=api_key.key_type,
            reference="ref2")
    assert mock_incr.call_count == 1
    mock_incr.assert_has_calls([
        # call(str(service.id) + "-2016-01-01-count", ),
        call("2016-01-01-total", )
    ])


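# When no daily count key exists yet (get returns None), persisting a notification creates
# it with an initial value of 1 and a 24-hour (86400 second) expiry.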
@pytest.mark.parametrize('restricted_service', [True, False])
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_sets_daily_limit_cache_if_one_does_not_exists(
    notify_api, notify_db_session, mocker, restricted_service
):
    service = create_service(restricted=restricted_service)
    template = create_template(service=service)
    api_key = create_api_key(service=service)
    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None)
    mock_set = mocker.patch('app.notifications.process_notifications.redis_store.set')
    with set_config(notify_api, 'REDIS_ENABLED', True):
        persist_notification(
            template_id=template.id,
            template_version=template.version,
            recipient='+447111111122',
            service=template.service,
            personalisation={},
            notification_type='sms',
            api_key_id=api_key.id,
            key_type=api_key.key_type,
            reference="ref2")
    assert mock_set.call_count == 1
    mock_set.assert_has_calls([
        # call(str(service.id) + "-2016-01-01-count", 1, ex=86400),
        call("2016-01-01-total", 1, ex=86400)
    ])


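# A lightweight namedtuple stands in for a persisted Notification, so only the routing
# logic is exercised: the delivery task and queue are chosen from the notification type
# and any explicitly requested queue.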
@pytest.mark.parametrize((
    'requested_queue, notification_type, key_type, expected_queue, expected_task'
), [
    (None, 'sms', 'normal', 'send-sms-tasks', 'provider_tasks.deliver_sms'),
    (None, 'email', 'normal', 'send-email-tasks', 'provider_tasks.deliver_email'),
    (None, 'sms', 'team', 'send-sms-tasks', 'provider_tasks.deliver_sms'),
    ('notify-internal-tasks', 'sms', 'normal', 'notify-internal-tasks', 'provider_tasks.deliver_sms'),
    ('notify-internal-tasks', 'email', 'normal', 'notify-internal-tasks', 'provider_tasks.deliver_email'),
])
def test_send_notification_to_queue(
    notify_db_session,
    requested_queue,
    notification_type,
    key_type,
    expected_queue,
    expected_task,
    mocker,
):
    mocked = mocker.patch('app.celery.{}.apply_async'.format(expected_task))
    Notification = namedtuple('Notification', ['id', 'key_type', 'notification_type', 'created_at'])
    notification = Notification(
        id=uuid.uuid4(),
        key_type=key_type,
        notification_type=notification_type,
        created_at=datetime.datetime(2016, 11, 11, 16, 8, 18),
    )
    send_notification_to_queue(notification=notification, queue=requested_queue)
    mocked.assert_called_once_with([str(notification.id)], queue=expected_queue)


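# If enqueueing the delivery task fails, the error propagates and the freshly persisted
# notification is deleted rather than being left behind in the database.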
def test_send_notification_to_queue_throws_exception_deletes_notification(sample_notification, mocker):
    mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async', side_effect=Boto3Error("EXPECTED"))
    with pytest.raises(Boto3Error):
        send_notification_to_queue(sample_notification, False)
    mocked.assert_called_once_with([(str(sample_notification.id))], queue='send-sms-tasks')
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0


@pytest.mark.parametrize("to_address, notification_type, expected", [
2023-01-04 16:35:25 -05:00
("+12028675000", "sms", True),
("+12028675111", "sms", True),
("+12028675222", "sms", True),
("2028675000", "sms", True),
("2028675111", "sms", True),
("simulate-delivered@notifications.service.gov.uk", "email", True),
("simulate-delivered-2@notifications.service.gov.uk", "email", True),
("simulate-delivered-3@notifications.service.gov.uk", "email", True),
2023-01-04 16:35:25 -05:00
("2028675309", "sms", False),
("valid_email@test.com", "email", False)
])
def test_simulated_recipient(notify_api, to_address, notification_type, expected):
"""
The values where the expected = 'research-mode' are listed in the config['SIMULATED_EMAIL_ADDRESSES']
and config['SIMULATED_SMS_NUMBERS']. These values should result in using the research mode queue.
SIMULATED_EMAIL_ADDRESSES = (
'simulate-delivered@notifications.service.gov.uk',
'simulate-delivered-2@notifications.service.gov.uk',
'simulate-delivered-2@notifications.service.gov.uk'
)
2023-01-04 16:35:25 -05:00
SIMULATED_SMS_NUMBERS = ('+12028675000', '+12028675111', '+12028675222')
"""
    formatted_address = None
    if notification_type == 'email':
        formatted_address = validate_and_format_email_address(to_address)
    else:
        formatted_address = validate_and_format_phone_number(to_address)
    is_simulated_address = simulated_recipient(formatted_address, notification_type)
    assert is_simulated_address == expected


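# International numbers should be flagged and stored with their country prefix; the US
# number is treated as domestic (prefix '1', rate multiplier 1).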
@pytest.mark.parametrize('recipient, expected_international, expected_prefix, expected_units', [
    ('+447900900123', True, '44', 1),  # UK
    ('+73122345678', True, '7', 1),  # Russia
    ('+360623400400', True, '36', 1),  # Hungary
    ('2028675309', False, '1', 1)  # USA
])
def test_persist_notification_with_international_info_stores_correct_info(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_international,
    expected_prefix,
    expected_units
):
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=10,
        client_reference="ref from client"
    )
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.international is expected_international
    assert persisted_notification.phone_prefix == expected_prefix
    assert persisted_notification.rate_multiplier == expected_units


def test_persist_notification_with_international_info_does_not_store_for_email(
    sample_job,
    sample_api_key,
    mocker
):
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient='foo@bar.com',
        service=sample_job.service,
        personalisation=None,
        notification_type='email',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=10,
        client_reference="ref from client"
    )
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.international is False
    assert persisted_notification.phone_prefix is None
    assert persisted_notification.rate_multiplier is None


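# Different input formats of the same US number should all normalise to the E.164 form in
# normalised_to, while the original input is preserved in to.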
@pytest.mark.parametrize('recipient, expected_recipient_normalised', [
    ('+4407900900123', '+447900900123'),
    ('202-867-5309', '+12028675309'),
    ('1 202-867-5309', '+12028675309'),
    ('+1 (202) 867-5309', '+12028675309'),
    ('(202) 867-5309', '+12028675309'),
    ('2028675309', '+12028675309')
])
def test_persist_sms_notification_stores_normalised_number(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_recipient_normalised
):
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
    )
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.to == recipient
    assert persisted_notification.normalised_to == expected_recipient_normalised


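# Email addresses are lower-cased in normalised_to; the original casing is kept in to.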
@pytest.mark.parametrize('recipient, expected_recipient_normalised', [
    ('FOO@bar.com', 'foo@bar.com'),
    ('BAR@foo.com', 'bar@foo.com')
])
def test_persist_email_notification_stores_normalised_email(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_recipient_normalised
):
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='email',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
    )
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.to == recipient
    assert persisted_notification.normalised_to == expected_recipient_normalised


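# An explicitly supplied billable_units value should be stored on the notification as-is.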
def test_persist_notification_with_billable_units_stores_correct_info(
    mocker
):
    service = create_service(service_permissions=[SMS_TYPE])
    template = create_template(service, template_type=SMS_TYPE)
    mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template)
    persist_notification(
        template_id=template.id,
        template_version=template.version,
        recipient="+12028675309",
        service=template.service,
        personalisation=None,
        notification_type=template.template_type,
        api_key_id=None,
        key_type="normal",
        billable_units=3,
    )
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.billable_units == 3