2016-11-11 14:56:33 +00:00
|
|
|
import datetime
|
2016-11-25 17:32:01 +00:00
|
|
|
import uuid
|
2017-04-27 12:41:10 +01:00
|
|
|
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BTC time so it lines up nicely with ft_billing table
* Incremented from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL to be so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
import pytest
|
2016-10-28 17:10:00 +01:00
|
|
|
from boto3.exceptions import Boto3Error
|
2016-10-27 17:34:54 +01:00
|
|
|
from sqlalchemy.exc import SQLAlchemyError
|
2016-11-22 12:53:20 +00:00
|
|
|
from freezegun import freeze_time
|
2016-12-09 17:37:18 +00:00
|
|
|
from collections import namedtuple
|
2016-10-27 11:46:37 +01:00
|
|
|
|
2017-11-01 11:01:20 +00:00
|
|
|
from app.models import (
|
|
|
|
|
Notification,
|
|
|
|
|
NotificationHistory,
|
|
|
|
|
ScheduledNotification,
|
2018-12-17 17:49:51 +00:00
|
|
|
Template,
|
2019-02-05 12:48:40 +00:00
|
|
|
LETTER_TYPE
|
2017-11-01 11:01:20 +00:00
|
|
|
)
|
2017-04-26 17:26:06 +01:00
|
|
|
from app.notifications.process_notifications import (
|
|
|
|
|
create_content_for_notification,
|
|
|
|
|
persist_notification,
|
2017-10-05 11:33:20 +01:00
|
|
|
persist_scheduled_notification,
|
2017-11-01 11:01:20 +00:00
|
|
|
send_notification_to_queue,
|
|
|
|
|
simulated_recipient
|
|
|
|
|
)
|
2017-04-26 17:26:06 +01:00
|
|
|
from notifications_utils.recipients import validate_and_format_phone_number, validate_and_format_email_address
|
2016-10-27 11:46:37 +01:00
|
|
|
from app.v2.errors import BadRequestError
|
2018-12-17 17:49:51 +00:00
|
|
|
from tests.app.db import create_service, create_template
|
|
|
|
|
|
2016-10-27 11:46:37 +01:00
|
|
|
|
|
|
|
|
def test_create_content_for_notification_passes(sample_email_template):
    """Rendering a template with no personalisation succeeds; output gains a trailing newline."""
    fetched = Template.query.get(sample_email_template.id)
    rendered = create_content_for_notification(fetched, None)
    assert str(rendered) == fetched.content + '\n'
|
2016-10-28 17:10:00 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_create_content_for_notification_with_placeholders_passes(sample_template_with_placeholders):
    """Personalisation values are substituted into the rendered output."""
    fetched = Template.query.get(sample_template_with_placeholders.id)
    rendered = create_content_for_notification(fetched, {'name': 'Bobby'})
    # the raw template text is untouched; substitution happens on render
    assert rendered.content == fetched.content
    assert 'Bobby' in str(rendered)
|
2016-10-27 11:46:37 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_create_content_for_notification_fails_with_missing_personalisation(sample_template_with_placeholders):
    """A template with placeholders and no personalisation raises BadRequestError."""
    fetched = Template.query.get(sample_template_with_placeholders.id)
    with pytest.raises(BadRequestError):
        create_content_for_notification(fetched, None)
|
2016-10-27 17:34:54 +01:00
|
|
|
|
|
|
|
|
|
2017-03-07 16:03:10 +00:00
|
|
|
def test_create_content_for_notification_allows_additional_personalisation(sample_template_with_placeholders):
    """Personalisation keys the template never uses are accepted without error."""
    fetched = Template.query.get(sample_template_with_placeholders.id)
    create_content_for_notification(fetched, {'name': 'Bobby', 'Additional placeholder': 'Data'})
|
2016-10-28 17:10:00 +01:00
|
|
|
|
|
|
|
|
|
2016-11-22 12:53:20 +00:00
|
|
|
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_creates_and_save_to_db(sample_template, sample_api_key, sample_job, mocker):
    """persist_notification stores a Notification row whose fields mirror the
    returned object, and reads the service's daily-count key from redis.

    Fix: ``key_type`` was asserted twice; the duplicate assertion is removed.
    """
    mocked_redis = mocker.patch('app.notifications.process_notifications.redis_store.get')

    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
    notification = persist_notification(
        template_id=sample_template.id,
        template_version=sample_template.version,
        recipient='+447111111111',
        service=sample_template.service,
        personalisation={},
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=100,
        reference="ref",
        reply_to_text=sample_template.service.get_default_sms_sender())

    assert Notification.query.get(notification.id) is not None

    notification_from_db = Notification.query.one()

    # every persisted field should round-trip through the DB unchanged
    assert notification_from_db.id == notification.id
    assert notification_from_db.template_id == notification.template_id
    assert notification_from_db.template_version == notification.template_version
    assert notification_from_db.api_key_id == notification.api_key_id
    assert notification_from_db.key_type == notification.key_type
    assert notification_from_db.billable_units == notification.billable_units
    assert notification_from_db.notification_type == notification.notification_type
    assert notification_from_db.created_at == notification.created_at
    assert not notification_from_db.sent_at
    assert notification_from_db.updated_at == notification.updated_at
    assert notification_from_db.status == notification.status
    assert notification_from_db.reference == notification.reference
    assert notification_from_db.client_reference == notification.client_reference
    assert notification_from_db.created_by_id == notification.created_by_id
    assert notification_from_db.reply_to_text == sample_template.service.get_default_sms_sender()

    # the date segment of the redis key comes from the frozen clock above
    mocked_redis.assert_called_once_with(str(sample_template.service_id) + "-2016-01-01-count")
|
2016-10-27 17:34:54 +01:00
|
|
|
|
|
|
|
|
|
2016-11-11 14:56:33 +00:00
|
|
|
def test_persist_notification_throws_exception_when_missing_template(sample_api_key):
    """Without a template id/version the insert fails and nothing is persisted."""
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0

    with pytest.raises(SQLAlchemyError):
        persist_notification(
            template_id=None,
            template_version=None,
            recipient='+447111111111',
            service=sample_api_key.service,
            personalisation=None,
            notification_type='sms',
            api_key_id=sample_api_key.id,
            key_type=sample_api_key.key_type,
        )

    # both tables must still be empty after the failed insert
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
|
2016-10-27 17:34:54 +01:00
|
|
|
|
|
|
|
|
|
2016-11-22 12:53:20 +00:00
|
|
|
def test_cache_is_not_incremented_on_failure_to_persist_notification(sample_api_key, mocker):
    """Redis is left untouched when the database insert raises."""
    redis_get = mocker.patch('app.redis_store.get')
    template_cache = mocker.patch('app.redis_store.get_all_from_hash')

    with pytest.raises(SQLAlchemyError):
        persist_notification(
            template_id=None,
            template_version=None,
            recipient='+447111111111',
            service=sample_api_key.service,
            personalisation=None,
            notification_type='sms',
            api_key_id=sample_api_key.id,
            key_type=sample_api_key.key_type,
        )

    redis_get.assert_not_called()
    template_cache.assert_not_called()
|
2016-11-22 12:53:20 +00:00
|
|
|
|
|
|
|
|
|
2017-03-30 13:43:44 +01:00
|
|
|
def test_persist_notification_does_not_increment_cache_if_test_key(
    sample_template, sample_job, mocker, sample_test_api_key
):
    """Messages sent with a test API key must not count towards the caches."""
    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value="cache")
    mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value="cache")
    daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr')
    template_usage_cache = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value')

    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
    persist_notification(
        template_id=sample_template.id,
        template_version=sample_template.version,
        recipient='+447111111111',
        service=sample_template.service,
        personalisation={},
        notification_type='sms',
        api_key_id=sample_test_api_key.id,
        key_type=sample_test_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=100,
        reference="ref",
    )

    # the row is written, but neither cache counter moves
    assert Notification.query.count() == 1
    daily_limit_cache.assert_not_called()
    template_usage_cache.assert_not_called()
|
|
|
|
|
|
|
|
|
|
|
2016-11-22 12:53:20 +00:00
|
|
|
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_with_optionals(sample_job, sample_api_key, mocker):
    """Optional arguments (notification id, created_at, job info, client
    reference, creator) are stored on the persisted notification.

    Fix: the ``job_id`` comparison was a bare expression with no ``assert``,
    so it never verified anything; it is now a real assertion.
    """
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
    mocked_redis = mocker.patch('app.notifications.process_notifications.redis_store.get')
    n_id = uuid.uuid4()
    created_at = datetime.datetime(2016, 11, 11, 16, 8, 18)
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient='+447111111111',
        service=sample_job.service,
        personalisation=None,
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        created_at=created_at,
        job_id=sample_job.id,
        job_row_number=10,
        client_reference="ref from client",
        notification_id=n_id,
        created_by_id=sample_job.created_by_id
    )
    assert Notification.query.count() == 1
    assert NotificationHistory.query.count() == 0
    persisted_notification = Notification.query.all()[0]
    assert persisted_notification.id == n_id
    # previously a no-op bare expression — now asserted
    assert persisted_notification.job_id == sample_job.id
    assert persisted_notification.job_row_number == 10
    assert persisted_notification.created_at == created_at
    mocked_redis.assert_called_once_with(str(sample_job.service_id) + "-2016-01-01-count")
    assert persisted_notification.client_reference == "ref from client"
    assert persisted_notification.reference is None
    assert persisted_notification.international is False
    assert persisted_notification.phone_prefix == '44'
    assert persisted_notification.rate_multiplier == 1
    assert persisted_notification.created_by_id == sample_job.created_by_id
    assert not persisted_notification.reply_to_text
|
2017-04-26 10:22:20 +01:00
|
|
|
|
2016-11-11 14:56:33 +00:00
|
|
|
|
2017-02-15 11:49:19 +00:00
|
|
|
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(sample_template, sample_api_key, mocker):
    """A missing cache key is never (re)created as a side effect of persisting."""
    mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr')
    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None)
    mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value=None)

    persist_notification(
        template_id=sample_template.id,
        template_version=sample_template.version,
        recipient='+447111111111',
        service=sample_template.service,
        personalisation={},
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        reference="ref",
    )

    mock_incr.assert_not_called()
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BTC time so it lines up nicely with ft_billing table
* Incremented to from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented to.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL to so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
|
2017-02-15 11:49:19 +00:00
|
|
|
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BTC time so it lines up nicely with ft_billing table
* Incremented to from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented to.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL to so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
@freeze_time("2016-01-01 11:09:00.061258")
def test_persist_notification_increments_cache_if_key_exists(sample_template, sample_api_key, mocker):
    """When the daily-count key already exists in redis it is incremented.

    Fix: the mocked ``get_all_from_hash`` return value was the *set*
    ``{sample_template.id, 1}``; a redis hash read yields a mapping, so the
    mock now returns the dict ``{sample_template.id: 1}``. Also removed a
    stray trailing comma in the final assertion call.
    """
    mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr')
    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1)
    mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash',
                 return_value={sample_template.id: 1})

    persist_notification(
        template_id=sample_template.id,
        template_version=sample_template.version,
        recipient='+447111111122',
        service=sample_template.service,
        personalisation={},
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        reference="ref2")

    mock_incr.assert_called_once_with(str(sample_template.service_id) + "-2016-01-01-count")
|
|
|
|
|
|
|
|
|
|
|
2018-10-31 14:30:46 +00:00
|
|
|
@pytest.mark.parametrize((
    'research_mode, requested_queue, notification_type, key_type, expected_queue, expected_task'
), [
    (True, None, 'sms', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_sms'),
    (True, None, 'email', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_email'),
    (True, None, 'email', 'team', 'research-mode-tasks', 'provider_tasks.deliver_email'),
    (True, None, 'letter', 'normal', 'research-mode-tasks', 'letters_pdf_tasks.get_pdf_for_templated_letter'),
    (False, None, 'sms', 'normal', 'send-sms-tasks', 'provider_tasks.deliver_sms'),
    (False, None, 'email', 'normal', 'send-email-tasks', 'provider_tasks.deliver_email'),
    (False, None, 'sms', 'team', 'send-sms-tasks', 'provider_tasks.deliver_sms'),
    (False, None, 'letter', 'normal', 'create-letters-pdf-tasks', 'letters_pdf_tasks.get_pdf_for_templated_letter'),
    (False, None, 'sms', 'test', 'research-mode-tasks', 'provider_tasks.deliver_sms'),
    (True, 'notify-internal-tasks', 'email', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_email'),
    (False, 'notify-internal-tasks', 'sms', 'normal', 'notify-internal-tasks', 'provider_tasks.deliver_sms'),
    (False, 'notify-internal-tasks', 'email', 'normal', 'notify-internal-tasks', 'provider_tasks.deliver_email'),
    (False, 'notify-internal-tasks', 'sms', 'test', 'research-mode-tasks', 'provider_tasks.deliver_sms'),
])
def test_send_notification_to_queue(
    notify_db,
    notify_db_session,
    research_mode,
    requested_queue,
    notification_type,
    key_type,
    expected_queue,
    expected_task,
    mocker,
):
    """The right celery task is enqueued on the right queue for each combination
    of research mode, requested queue, notification type and key type.

    Fix: the local namedtuple was named ``Notification``, shadowing the
    ``app.models.Notification`` import; renamed to ``FakeNotification``.
    """
    mocked = mocker.patch('app.celery.{}.apply_async'.format(expected_task))
    # lightweight stand-in for the model — only the fields the queueing code reads
    FakeNotification = namedtuple('Notification', ['id', 'key_type', 'notification_type', 'created_at'])
    notification = FakeNotification(
        id=uuid.uuid4(),
        key_type=key_type,
        notification_type=notification_type,
        created_at=datetime.datetime(2016, 11, 11, 16, 8, 18),
    )

    send_notification_to_queue(notification=notification, research_mode=research_mode, queue=requested_queue)

    mocked.assert_called_once_with([str(notification.id)], queue=expected_queue)
|
2016-10-27 17:34:54 +01:00
|
|
|
|
|
|
|
|
|
2016-10-28 17:10:00 +01:00
|
|
|
def test_send_notification_to_queue_throws_exception_deletes_notification(sample_notification, mocker):
    """An enqueue failure propagates and leaves no notification rows behind."""
    mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async', side_effect=Boto3Error("EXPECTED"))
    with pytest.raises(Boto3Error):
        send_notification_to_queue(sample_notification, False)
    mocked.assert_called_once_with([str(sample_notification.id)], queue='send-sms-tasks')

    # the failed notification was cleaned up entirely
    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
|
2017-01-17 12:08:24 +00:00
|
|
|
|
|
|
|
|
|
2017-04-26 17:26:06 +01:00
|
|
|
@pytest.mark.parametrize("to_address, notification_type, expected", [
    ("+447700900000", "sms", True),
    ("+447700900111", "sms", True),
    ("+447700900222", "sms", True),
    ("07700900000", "sms", True),
    ("7700900111", "sms", True),
    ("simulate-delivered@notifications.service.gov.uk", "email", True),
    ("simulate-delivered-2@notifications.service.gov.uk", "email", True),
    ("simulate-delivered-3@notifications.service.gov.uk", "email", True),
    ("07515896969", "sms", False),
    ("valid_email@test.com", "email", False)
])
def test_simulated_recipient(notify_api, to_address, notification_type, expected):
    """
    Addresses listed in config['SIMULATED_EMAIL_ADDRESSES'] and
    config['SIMULATED_SMS_NUMBERS'] are treated as simulated recipients
    (routed to the research-mode queue); anything else is not.
    """
    # normalise the address exactly as the sending path would
    formatted_address = (
        validate_and_format_email_address(to_address)
        if notification_type == 'email'
        else validate_and_format_phone_number(to_address)
    )

    assert simulated_recipient(formatted_address, notification_type) == expected
|
2017-04-27 12:41:10 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('recipient, expected_international, expected_prefix, expected_units', [
    ('7900900123', False, '44', 1),  # UK
    ('+447900900123', False, '44', 1),  # UK
    ('07700900222', False, '44', 1),  # UK
    ('73122345678', True, '7', 1),  # Russia
    ('360623400400', True, '36', 3)]  # Hungary
)
def test_persist_notification_with_international_info_stores_correct_info(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_international,
    expected_prefix,
    expected_units
):
    """International flag, phone prefix and rate multiplier are derived from the recipient number."""
    persist_notification(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=10,
        client_reference="ref from client"
    )
    saved = Notification.query.all()[0]

    assert saved.international is expected_international
    assert saved.phone_prefix == expected_prefix
    assert saved.rate_multiplier == expected_units
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_persist_notification_with_international_info_does_not_store_for_email(
    sample_job,
    sample_api_key,
    mocker
):
    """Email notifications get no phone-related international info (flag False, prefix/multiplier unset)."""
    email_kwargs = dict(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient='foo@bar.com',
        service=sample_job.service,
        personalisation=None,
        notification_type='email',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
        job_row_number=10,
        client_reference="ref from client",
    )
    persist_notification(**email_kwargs)

    saved = Notification.query.all()[0]

    assert saved.international is False
    assert saved.phone_prefix is None
    assert saved.rate_multiplier is None
|
2017-05-17 15:06:15 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_persist_scheduled_notification(sample_notification):
    """A ScheduledNotification row is created linking the notification to its scheduled time."""
    persist_scheduled_notification(sample_notification.id, '2017-05-12 14:15')

    rows = ScheduledNotification.query.all()

    assert len(rows) == 1
    assert rows[0].notification_id == sample_notification.id
    # NOTE(review): input 14:15 is stored as 13:15 — presumably a UK-local-time
    # to UTC conversion inside persist_scheduled_notification; confirm there.
    assert rows[0].scheduled_for == datetime.datetime(2017, 5, 12, 13, 15)
|
2017-05-23 14:47:55 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('recipient, expected_recipient_normalised', [
    ('7900900123', '447900900123'),
    ('+447900 900 123', '447900900123'),
    (' 07700900222', '447700900222'),
    ('07700900222', '447700900222'),
    (' 73122345678', '73122345678'),
    ('360623400400', '360623400400'),
    ('-077-00900222-', '447700900222'),
    ('(360623(400400)', '360623400400'),
])
def test_persist_sms_notification_stores_normalised_number(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_recipient_normalised
):
    """The raw recipient is stored in `to`; a normalised number is stored in `normalised_to`."""
    sms_kwargs = dict(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='sms',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
    )
    persist_notification(**sms_kwargs)

    saved = Notification.query.all()[0]

    assert saved.to == recipient
    assert saved.normalised_to == expected_recipient_normalised
|
2017-05-23 15:45:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('recipient, expected_recipient_normalised', [
    ('FOO@bar.com', 'foo@bar.com'),
    ('BAR@foo.com', 'bar@foo.com'),
])
def test_persist_email_notification_stores_normalised_email(
    sample_job,
    sample_api_key,
    mocker,
    recipient,
    expected_recipient_normalised
):
    """The raw email address is stored in `to`; a lower-cased form is stored in `normalised_to`."""
    email_kwargs = dict(
        template_id=sample_job.template.id,
        template_version=sample_job.template.version,
        recipient=recipient,
        service=sample_job.service,
        personalisation=None,
        notification_type='email',
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        job_id=sample_job.id,
    )
    persist_notification(**email_kwargs)

    saved = Notification.query.all()[0]

    assert saved.to == recipient
    assert saved.normalised_to == expected_recipient_normalised
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BTC time so it lines up nicely with ft_billing table
* Incremented to from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented to.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
|
|
|
|
|
|
2018-12-17 17:49:51 +00:00
|
|
|
@pytest.mark.parametrize(
    "postage_argument, template_postage, expected_postage",
    [
        # an explicit postage argument wins over the template's postage;
        # with no argument (None) the template's postage is used
        ("second", "first", "second"),
        ("first", "first", "first"),
        ("first", "second", "first"),
        (None, "second", "second"),
    ]
)
def test_persist_letter_notification_finds_correct_postage(
    mocker,
    postage_argument,
    template_postage,
    expected_postage,
    sample_service_full_permissions,
    sample_api_key,
):
    """Letter notifications use the explicit postage argument, falling back to the template's postage."""
    template = create_template(sample_service_full_permissions, template_type=LETTER_TYPE, postage=template_postage)
    mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template)

    persist_notification(
        template_id=template.id,
        template_version=template.version,
        template_postage=template.postage,
        recipient="Jane Doe, 10 Downing Street, London",
        service=sample_service_full_permissions,
        personalisation=None,
        notification_type=LETTER_TYPE,
        api_key_id=sample_api_key.id,
        key_type=sample_api_key.key_type,
        postage=postage_argument,
    )

    saved = Notification.query.all()[0]

    assert saved.postage == expected_postage
|
|
|
|
|
|
|
|
|
|
|
2018-09-20 14:47:24 +01:00
|
|
|
def test_persist_notification_with_billable_units_stores_correct_info(
    mocker
):
    """An explicitly supplied billable_units value is stored on the notification as-is."""
    service = create_service(service_permissions=[LETTER_TYPE])
    template = create_template(service, template_type=LETTER_TYPE)
    mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template)

    letter_kwargs = dict(
        template_id=template.id,
        template_version=template.version,
        recipient="123 Main Street",
        service=template.service,
        personalisation=None,
        notification_type=template.template_type,
        api_key_id=None,
        key_type="normal",
        billable_units=3,
        template_postage=template.postage,
    )
    persist_notification(**letter_kwargs)

    saved = Notification.query.all()[0]

    assert saved.billable_units == 3
|