Merge daily limit functions into one and refactor the daily limit check call in process_job

- merge get_service_daily_limit_cache_value into check_service_over_daily_message_limit
- refactor tests to standardise test names
- refactor some tests to be clearer
- remove unnecessary tests
- add a missing test
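
The daily limit check is now a single function that raises TooManyRequestsError instead of returning a count, so process_job treats the limit as an exception path. Assembled from the hunks below, the refactored helper reads roughly as follows (a sketch, not a verbatim copy of the file):

def __sending_limits_for_job_exceeded(service, job, job_id):
    try:
        check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service)
        return False
    except TooManyRequestsError:
        # mark the job as stopped by the daily limit (the real helper also logs this) and report True
        job.job_status = 'sending limits exceeded'
        job.processing_finished = datetime.utcnow()
        dao_update_job(job)
        return True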
Rebecca Law
2021-06-24 11:05:22 +01:00
parent 57fb9da414
commit fd7486d751
5 changed files with 126 additions and 255 deletions

View File

@@ -54,10 +54,11 @@ from app.models import (
     DailySortedLetter,
 )
 from app.notifications.process_notifications import persist_notification
-from app.notifications.validators import get_service_daily_limit_cache_value
+from app.notifications.validators import check_service_over_daily_message_limit
 from app.serialised_models import SerialisedService, SerialisedTemplate
 from app.service.utils import service_allowed_to_send_to
 from app.utils import DATETIME_FORMAT, get_reference_from_personalisation
+from app.v2.errors import TooManyRequestsError


 @notify_celery.task(name="process-job")
@@ -159,9 +160,10 @@ def process_row(row, template, job, service, sender_id=None):


 def __sending_limits_for_job_exceeded(service, job, job_id):
-    total_sent = get_service_daily_limit_cache_value(KEY_TYPE_NORMAL, service)
-    print(total_sent)
-    if total_sent + job.notification_count > service.message_limit:
+    try:
+        check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service)
+        return False
+    except TooManyRequestsError:
         job.job_status = 'sending limits exceeded'
         job.processing_finished = datetime.utcnow()
         dao_update_job(job)
@@ -170,7 +172,6 @@ def __sending_limits_for_job_exceeded(service, job, job_id):
                 job_id, job.notification_count, service.message_limit)
         )
         return True
-    return False


 @notify_celery.task(bind=True, name="save-sms", max_retries=5, default_retry_delay=300)

View File

@@ -58,35 +58,23 @@ def check_service_over_api_rate_limit(service, api_key):


 def check_service_over_daily_message_limit(key_type, service):
-    if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']:
-        cache_key = daily_limit_cache_key(service.id)
-        service_stats = redis_store.get(cache_key)
-        if service_stats is None:
-            # first message of the day, set the cache to 0 and the expiry to 24 hours
-            service_stats = 0
-            redis_store.set(cache_key, service_stats, ex=86400)
-        if int(service_stats) >= service.message_limit:
-            current_app.logger.info(
-                "service {} has been rate limited for daily use sent {} limit {}".format(
-                    service.id, int(service_stats), service.message_limit)
-            )
-            raise TooManyRequestsError(service.message_limit)
-
-
-def get_service_daily_limit_cache_value(key_type, service):
-    if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']:
-        cache_key = daily_limit_cache_key(service.id)
-        service_stats = redis_store.get(cache_key)
-        if not service_stats:
-            # first message of the day, set the cache to 0 and the expiry to 24 hours
-            service_stats = 0
-            redis_store.set(cache_key, service_stats, ex=86400)
-            return 0
-        else:
-            return int(service_stats)
-    else:
+    if key_type == KEY_TYPE_TEST or not current_app.config['REDIS_ENABLED']:
         return 0
+
+    cache_key = daily_limit_cache_key(service.id)
+    service_stats = redis_store.get(cache_key)
+    if service_stats is None:
+        # first message of the day, set the cache to 0 and the expiry to 24 hours
+        service_stats = 0
+        redis_store.set(cache_key, service_stats, ex=86400)
+        return service_stats
+    if int(service_stats) >= service.message_limit:
+        current_app.logger.info(
+            "service {} has been rate limited for daily use sent {} limit {}".format(
+                service.id, int(service_stats), service.message_limit)
+        )
+        raise TooManyRequestsError(service.message_limit)


 def check_rate_limiting(service, api_key):
     check_service_over_api_rate_limit(service, api_key)
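
For orientation, check_rate_limiting (whose body is otherwise unchanged and mostly outside this hunk) is expected to run both checks, which is what the updated tests below assert; a minimal sketch consistent with those tests:

def check_rate_limiting(service, api_key):
    check_service_over_api_rate_limit(service, api_key)
    check_service_over_daily_message_limit(api_key.key_type, service)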

View File

@@ -51,6 +51,7 @@ from app.models import (
 )
 from app.serialised_models import SerialisedService, SerialisedTemplate
 from app.utils import DATETIME_FORMAT
+from app.v2.errors import TooManyRequestsError
 from tests.app import load_example_csv
 from tests.app.db import (
     create_api_key,
@@ -150,68 +151,6 @@ def test_should_process_sms_job_with_sender_id(sample_job, mocker, fake_uuid):
     )


-@freeze_time("2016-01-01 11:09:00.061258")
-def test_should_not_process_sms_job_if_would_exceed_send_limits(
-    notify_db_session, mocker
-):
-    service = create_service(message_limit=9)
-    template = create_template(service=service)
-    job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv')
-
-    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
-                 return_value=(load_example_csv('multiple_sms'), {'sender_id': None}))
-    mocker.patch('app.celery.tasks.process_row')
-    mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=8)
-
-    process_job(job.id)
-
-    job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.job_status == 'sending limits exceeded'
-    assert s3.get_job_and_metadata_from_s3.called is False
-    assert tasks.process_row.called is False
-
-
-def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(
-    notify_db_session, mocker
-):
-    service = create_service(message_limit=1)
-    template = create_template(service=service)
-    job = create_job(template=template)
-    create_notification(template=template, job=job)
-
-    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
-                 return_value=(load_example_csv('sms'), {'sender_id': None}))
-    mocker.patch('app.celery.tasks.process_row')
-    mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2)
-
-    process_job(job.id)
-
-    job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.job_status == 'sending limits exceeded'
-    assert s3.get_job_and_metadata_from_s3.called is False
-    assert tasks.process_row.called is False
-
-
-@pytest.mark.parametrize('template_type', ['sms', 'email'])
-def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db_session, template_type, mocker):
-    service = create_service(message_limit=1)
-    template = create_template(service=service, template_type=template_type)
-    job = create_job(template=template)
-    create_notification(template=template, job=job)
-
-    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3')
-    mocker.patch('app.celery.tasks.process_row')
-    mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2)
-
-    process_job(job.id)
-
-    job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.job_status == 'sending limits exceeded'
-    assert s3.get_job_and_metadata_from_s3.called is False
-    assert tasks.process_row.called is False
-
-
 def test_should_not_process_job_if_already_pending(sample_template, mocker):
     job = create_job(template=sample_template, job_status='scheduled')
@@ -224,8 +163,28 @@ def test_should_not_process_job_if_already_pending(sample_template, mocker):
     assert tasks.process_row.called is False


-def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session,
-                                                            mocker):
+def test_should_not_process_if_send_limit_is_exceeded(
+    notify_api, notify_db_session, mocker
+):
+    service = create_service(message_limit=9)
+    template = create_template(service=service)
+    job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv')
+    mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3',
+                 return_value=(load_example_csv('multiple_sms'), {'sender_id': None}))
+    mocker.patch('app.celery.tasks.process_row')
+    mocker.patch('app.celery.tasks.check_service_over_daily_message_limit',
+                 side_effect=TooManyRequestsError("exceeded limit"))
+
+    process_job(job.id)
+    job = jobs_dao.dao_get_job_by_id(job.id)
+    assert job.job_status == 'sending limits exceeded'
+    assert s3.get_job_and_metadata_from_s3.called is False
+    assert tasks.process_row.called is False
+
+
+def test_should_process_job_if_send_limits_are_not_exceeded(
+    notify_api, notify_db_session, mocker
+):
     service = create_service(message_limit=10)
     template = create_template(service=service, template_type='email')
     job = create_job(template=template, notification_count=10)
@@ -235,8 +194,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session,
     mocker.patch('app.celery.tasks.save_email.apply_async')
     mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
-    mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=0)
+    mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', return_value=0)

     process_job(job.id)

     s3.get_job_and_metadata_from_s3.assert_called_once_with(
View File

@@ -113,49 +113,6 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_
     assert NotificationHistory.query.count() == 0


-def test_cache_is_not_incremented_on_failure_to_persist_notification(notify_api, sample_api_key, mocker):
-    mocked_redis = mocker.patch('app.redis_store.incr')
-    with pytest.raises(SQLAlchemyError):
-        with set_config(notify_api, 'REDIS_ENABLED', True):
-            persist_notification(template_id=None,
-                                 template_version=None,
-                                 recipient='+447111111111',
-                                 service=sample_api_key.service,
-                                 personalisation=None,
-                                 notification_type='sms',
-                                 api_key_id=sample_api_key.id,
-                                 key_type=sample_api_key.key_type)
-    mocked_redis.assert_not_called()
-
-
-def test_persist_notification_does_not_increment_cache_if_test_key(
-    notify_api, sample_template, sample_job, mocker, sample_test_api_key
-):
-    daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr')
-
-    assert Notification.query.count() == 0
-    assert NotificationHistory.query.count() == 0
-
-    with set_config(notify_api, 'REDIS_ENABLED', True):
-        persist_notification(
-            template_id=sample_template.id,
-            template_version=sample_template.version,
-            recipient='+447111111111',
-            service=sample_template.service,
-            personalisation={},
-            notification_type='sms',
-            api_key_id=sample_test_api_key.id,
-            key_type=sample_test_api_key.key_type,
-            job_id=sample_job.id,
-            job_row_number=100,
-            reference="ref",
-        )
-
-    assert Notification.query.count() == 1
-    assert not daily_limit_cache.called
-
-
 @freeze_time("2016-01-01 11:09:00.061258")
 def test_persist_notification_with_optionals(sample_job, sample_api_key):
     assert Notification.query.count() == 0
@@ -195,11 +152,55 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key):
     assert not persisted_notification.reply_to_text


-@freeze_time("2016-01-01 11:09:00.061258")
-def test_persist_notification_increments_cache_for_trial_service(
-    notify_api, notify_db_session, mocker
+def test_persist_notification_cache_is_not_incremented_on_failure_to_create_notification(
+    notify_api, sample_api_key, mocker
 ):
-    service = create_service(restricted=True)
+    mocked_redis = mocker.patch('app.redis_store.incr')
+    with pytest.raises(SQLAlchemyError):
+        persist_notification(template_id=None,
+                             template_version=None,
+                             recipient='+447111111111',
+                             service=sample_api_key.service,
+                             personalisation=None,
+                             notification_type='sms',
+                             api_key_id=sample_api_key.id,
+                             key_type=sample_api_key.key_type)
+    mocked_redis.assert_not_called()
+
+
+def test_persist_notification_does_not_increment_cache_if_test_key(
+    notify_api, sample_template, sample_job, mocker, sample_test_api_key
+):
+    daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr')
+
+    assert Notification.query.count() == 0
+    assert NotificationHistory.query.count() == 0
+
+    with set_config(notify_api, 'REDIS_ENABLED', True):
+        persist_notification(
+            template_id=sample_template.id,
+            template_version=sample_template.version,
+            recipient='+447111111111',
+            service=sample_template.service,
+            personalisation={},
+            notification_type='sms',
+            api_key_id=sample_test_api_key.id,
+            key_type=sample_test_api_key.key_type,
+            job_id=sample_job.id,
+            job_row_number=100,
+            reference="ref",
+        )
+
+    assert Notification.query.count() == 1
+    assert not daily_limit_cache.called
+
+
+@pytest.mark.parametrize('restricted_service', [True, False])
+@freeze_time("2016-01-01 11:09:00.061258")
+def test_persist_notification_increments_cache_for_trial_or_live_service(
+    notify_api, notify_db_session, mocker, restricted_service
+):
+    service = create_service(restricted=restricted_service)
     template = create_template(service=service)
     api_key = create_api_key(service=service)
     mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1)
@@ -219,14 +220,16 @@ def test_persist_notification_increments_cache_for_trial_service(
     mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", )


-def test_persist_notification_increments_cache_live_service(
-    notify_api, notify_db_session, mocker
+@pytest.mark.parametrize('restricted_service', [True, False])
+@freeze_time("2016-01-01 11:09:00.061258")
+def test_persist_notification_sets_daily_limit_cache_if_one_does_not_exists(
+    notify_api, notify_db_session, mocker, restricted_service
 ):
-    service = create_service(restricted=False)
+    service = create_service(restricted=restricted_service)
     template = create_template(service=service)
     api_key = create_api_key(service=service)
-    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1)
-    mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr')
+    mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None)
+    mock_set = mocker.patch('app.notifications.process_notifications.redis_store.set')
     with set_config(notify_api, 'REDIS_ENABLED', True):
         persist_notification(
             template_id=template.id,
@@ -239,7 +242,7 @@ def test_persist_notification_increments_cache_live_service(
             key_type=api_key.key_type,
             reference="ref2")

-    assert mock_incr.called
+    mock_set.assert_called_once_with(str(service.id) + "-2016-01-01-count", 1, ex=86400)


 @pytest.mark.parametrize((

View File

@@ -24,7 +24,6 @@ from app.notifications.validators import (
     check_service_sms_sender_id,
     check_template_is_active,
     check_template_is_for_notification_type,
-    get_service_daily_limit_cache_value,
     service_can_send_to_recipient,
     validate_address,
     validate_and_format_recipient,
@@ -70,17 +69,17 @@ def test_check_service_message_limit_in_cache_under_message_limit_passes(
     assert not mock_set.called


-def test_should_not_interact_with_cache_for_test_key(sample_service, mocker):
+def test_check_service_over_daily_message_limit_should_not_interact_with_cache_for_test_key(sample_service, mocker):
     mocker.patch('app.notifications.validators.redis_store')
     mock_get = mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None])
     serialised_service = SerialisedService.from_id(sample_service.id)
     check_service_over_daily_message_limit('test', serialised_service)
-    assert not app.notifications.validators.redis_store.mock_calls
-    assert not mock_get.called
+    app.notifications.validators.redis_store.assert_not_called
+    mock_get.assert_not_called()


 @pytest.mark.parametrize('key_type', ['team', 'normal'])
-def test_should_set_cache_value_as_zero_if_cache_not_set(
+def test_check_service_over_daily_message_limit_should_set_cache_value_as_zero_if_cache_not_set(
     key_type,
     sample_template,
     sample_service,
@@ -88,7 +87,6 @@ def test_should_set_cache_value_as_zero_if_cache_not_set(
 ):
     serialised_service = SerialisedService.from_id(sample_service.id)
     with freeze_time("2016-01-01 12:00:00.000000"):
         mocker.patch('app.notifications.validators.redis_store.get', return_value=None)
         mocker.patch('app.notifications.validators.redis_store.set')
         check_service_over_daily_message_limit(key_type, serialised_service)
         app.notifications.validators.redis_store.set.assert_called_with(
@@ -96,7 +94,7 @@ def test_should_set_cache_value_as_zero_if_cache_not_set(
         )


-def test_does_nothing_if_redis_disabled(notify_api, sample_service, mocker):
+def test_check_service_over_daily_message_limit_does_nothing_if_redis_disabled(notify_api, sample_service, mocker):
     serialised_service = SerialisedService.from_id(sample_service.id)
     with set_config(notify_api, 'REDIS_ENABLED', False):
         mock_cache_key = mocker.patch('notifications_utils.clients.redis.daily_limit_cache_key')
@@ -107,15 +105,14 @@ def test_does_nothing_if_redis_disabled(notify_api, sample_service, mocker):
 @pytest.mark.parametrize('key_type', ['team', 'normal'])
 def test_check_service_message_limit_over_message_limit_fails(key_type, mocker, notify_db_session):
-    with freeze_time("2016-01-01 12:00:00.000000"):
-        service = create_service(restricted=True, message_limit=4)
-        mocker.patch('app.redis_store.get', return_value=5)
+    service = create_service(restricted=True, message_limit=4)
+    mocker.patch('app.redis_store.get', return_value=5)

-        with pytest.raises(TooManyRequestsError) as e:
-            check_service_over_daily_message_limit(key_type, service)
-        assert e.value.status_code == 429
-        assert e.value.message == 'Exceeded send limits (4) for today'
-        assert e.value.fields == []
+    with pytest.raises(TooManyRequestsError) as e:
+        check_service_over_daily_message_limit(key_type, service)
+    assert e.value.status_code == 429
+    assert e.value.message == 'Exceeded send limits (4) for today'
+    assert e.value.fields == []


 @pytest.mark.parametrize('template_type, notification_type',
@@ -371,7 +368,7 @@ def test_validate_template_calls_all_validators_exception_message_too_long(mocke


 @pytest.mark.parametrize('key_type', ['team', 'live', 'test'])
-def test_that_when_exceed_rate_limit_request_fails(
+def test_check_service_over_api_rate_limit_when_exceed_rate_limit_request_fails_raises_error(
         key_type,
         sample_service,
         mocker):
@@ -404,7 +401,7 @@ def test_that_when_exceed_rate_limit_request_fails(
     assert e.value.fields == []


-def test_that_when_not_exceeded_rate_limit_request_succeeds(
+def test_check_service_over_api_rate_limit_when_rate_limit_has_not_exceeded_limit_succeeds(
         sample_service,
         mocker):
     with freeze_time("2016-01-01 12:00:00.000000"):
@@ -423,7 +420,7 @@ def test_that_when_not_exceeded_rate_limit_request_succeeds(
     )


-def test_should_not_rate_limit_if_limiting_is_disabled(
+def test_check_service_over_api_rate_limit_should_do_nothing_if_limiting_is_disabled(
         sample_service,
         mocker):
     with freeze_time("2016-01-01 12:00:00.000000"):
@@ -437,101 +434,25 @@ def test_should_not_rate_limit_if_limiting_is_disabled(
     serialised_api_key = SerialisedAPIKeyCollection.from_service_id(serialised_service.id)[0]
     check_service_over_api_rate_limit(serialised_service, serialised_api_key)
-    assert not app.redis_store.exceeded_rate_limit.called
+    app.redis_store.exceeded_rate_limit.assert_not_called()


-def test_check_rate_limiting_checks_daily_limit_if_trial_mode_service(
-    notify_db_session, mocker
-):
-    mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit')
-    trial_mode_service = create_service(restricted=True, message_limit=5)
-    team_key = create_api_key(service=trial_mode_service, key_type='team')
-
-    check_rate_limiting(trial_mode_service, team_key)
-
-    mock_daily_limit.assert_called_once_with('team', trial_mode_service)
-
-
-def test_check_rate_limiting_checks_daily_limit_if_live_service(
-    notify_db_session, mocker
-):
-    mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit')
-    live_service = create_service(restricted=False, message_limit=5)
-    live_key = create_api_key(service=live_service, key_type='normal')
-
-    check_rate_limiting(service=live_service, api_key=live_key)
-
-    mock_daily_limit.assert_called_once_with('normal', live_service)
-
-
-def test_check_rate_limiting_checks_rate_limit_if_trial_mode_service(
+def test_check_rate_limiting_validates_api_rate_limit_and_daily_limit(
     notify_db_session, mocker
 ):
     mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit')
-    trial_mode_service = create_service(restricted=True, message_limit=5)
-    team_key = create_api_key(service=trial_mode_service, key_type='team')
-
-    check_rate_limiting(trial_mode_service, team_key)
-
-    mock_rate_limit.assert_called_once_with(trial_mode_service, team_key)
-
-
-def test_check_rate_limiting_checks_rate_limit_if_live_service(
-    notify_db_session, mocker
-):
-    mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit')
-    live_service = create_service(restricted=False, message_limit=5)
-    live_key = create_api_key(service=live_service, key_type='normal')
-
-    check_rate_limiting(service=live_service, api_key=live_key)
-
-    mock_rate_limit.assert_called_once_with(live_service, live_key)
-
-
-def test_get_service_daily_limit_cache_value(
-    notify_api, notify_db_session, mocker
-):
-    mock_get = mocker.patch('app.redis_store.get', return_value=5)
     mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit')
     service = create_service()
-
-    with set_config(notify_api, 'REDIS_ENABLED', True):
-        get_service_daily_limit_cache_value('normal', service)
-
-    mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count')
+    api_key = create_api_key(service=service)

+    check_rate_limiting(service, api_key)

-def test_get_service_daily_limit_cache_value_return_zero_and_sets_cache_if_key_not_found_in_cache(
-    notify_api, notify_db_session, mocker
-):
-    mock_get = mocker.patch('app.redis_store.get', return_value=None)
-    mock_set = mocker.patch('app.redis_store.set')
-    service = create_service()
-
-    with set_config(notify_api, 'REDIS_ENABLED', True):
-        assert get_service_daily_limit_cache_value('normal', service) == 0
-
-    mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count')
-    mock_set.assert_called_once_with(
-        f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count',
-        0,
-        ex=86400
-    )
-
-
-def test_get_service_daily_limit_cache_value_returns_zero_if_test_api_key(
-    notify_api, notify_db_session, mocker
-):
-    mock_get = mocker.patch('app.redis_store.get', return_value=None)
-    mock_set = mocker.patch('app.redis_store.set')
-    service = create_service()
-
-    with set_config(notify_api, 'REDIS_ENABLED', True):
-        assert get_service_daily_limit_cache_value('test', service) == 0
-
-    assert not mock_get.called
-    assert not mock_set.called
-
-
-def test_get_service_daily_limit_cache_value_returns_zero_if_redis_is_not_enabled(
-    notify_api, notify_db_session, mocker
-):
-    mock_get = mocker.patch('app.redis_store.get', return_value=None)
-    mock_set = mocker.patch('app.redis_store.set')
-    service = create_service()
-
-    # redis is not enabled for the test environment but is always enabled for production
-    assert get_service_daily_limit_cache_value('test', service) == 0
-
-    assert not mock_get.called
-    assert not mock_set.called
+    mock_rate_limit.assert_called_once_with(service, api_key)
+    mock_daily_limit.assert_called_once_with(api_key.key_type, service)


 @pytest.mark.parametrize('key_type', ['test', 'normal'])
-def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_int_sms(
+def test_validate_and_format_recipient_fails_when_international_number_and_service_does_not_allow_int_sms(
     key_type,
     notify_db_session,
 ):
@@ -545,14 +466,14 @@ def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_


 @pytest.mark.parametrize('key_type', ['test', 'normal'])
-def test_allows_api_calls_with_international_numbers_if_service_does_allow_int_sms(
+def test_validate_and_format_recipient_succeeds_with_international_numbers_if_service_does_allow_int_sms(
         key_type, sample_service_full_permissions):
     service_model = SerialisedService.from_id(sample_service_full_permissions.id)
     result = validate_and_format_recipient('20-12-1234-1234', key_type, service_model, SMS_TYPE)
     assert result == '201212341234'


-def test_rejects_api_calls_with_no_recipient():
+def test_validate_and_format_recipient_fails_when_no_recipient():
     with pytest.raises(BadRequestError) as e:
         validate_and_format_recipient(None, 'key_type', 'service', 'SMS_TYPE')
     assert e.value.status_code == 400