From 35b20ba363a1d994bb5512dbe86ad5a5400f473d Mon Sep 17 00:00:00 2001
From: Rebecca Law
Date: Mon, 21 Jun 2021 16:03:24 +0100
Subject: [PATCH 1/4] Correct the daily limits cache.

Last year we had an issue with the daily limit cache and the query that was
populating it. As a result we have not been checking the daily limit properly.
This PR should correct all that.

The daily limit cache is now incremented in
app.notifications.process_notifications.persist_notification; this method is,
and should always be, the only method used to create a notification. We
increment the daily limit cache if Redis is enabled (it is always enabled for
production) and the key type for the notification is team or normal.

We check whether the daily limit is exceeded in several places:
- app.celery.tasks.process_job
- app.v2.notifications.post_notifications.post_notification
- app.v2.notifications.post_notifications.post_precompiled_letter_notification
- app.service.send_notification.send_one_off_notification
- app.service.send_notification.send_pdf_letter_notification

If the daily limit cache is not found, we set the cache to 0 with an expiry of
24 hours. The daily limit cache key is service_id-yyyy-mm-dd-count, so a new
cache key is created each day.

The best thing about this PR is that the
app.dao.services_dao.fetch_todays_total_message_count query has been removed.
This query was not performant and had been wrong for ages.

--- app/celery/tasks.py | 6 +- app/dao/services_dao.py | 12 -- app/notifications/process_notifications.py | 6 +- app/notifications/validators.py | 24 ++- tests/app/celery/test_tasks.py | 4 + tests/app/dao/test_services_dao.py | 26 --- .../test_process_notification.py | 147 +++++-------- tests/app/notifications/test_validators.py | 201 ++++++++++-------- 8 files changed, 196 insertions(+), 230 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 6d3b31b94..bb184bade 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -33,7 +33,6 @@ from app.dao.returned_letters_dao import insert_or_update_returned_letters from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id -from app.dao.services_dao import fetch_todays_total_message_count from app.dao.templates_dao import dao_get_template_by_id from app.exceptions import DVLAException, NotificationTechnicalFailureException from app.models import ( @@ -55,6 +54,7 @@ from app.models import ( DailySortedLetter, ) from app.notifications.process_notifications import persist_notification +from app.notifications.validators import get_service_daily_limit_cache_value from app.serialised_models import SerialisedService, SerialisedTemplate from app.service.utils import service_allowed_to_send_to from app.utils import DATETIME_FORMAT, get_reference_from_personalisation @@ -159,8 +159,8 @@ def process_row(row, template, job, service, sender_id=None): def __sending_limits_for_job_exceeded(service, job, job_id): - total_sent = fetch_todays_total_message_count(service.id) - + total_sent = get_service_daily_limit_cache_value(KEY_TYPE_NORMAL, service) + print(total_sent) if total_sent + job.notification_count > service.message_limit: job.job_status = 'sending limits exceeded' job.processing_finished = datetime.utcnow() dao_update_job(job) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 68d5ed2cc..374022004 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -436,18
+436,6 @@ def dao_fetch_todays_stats_for_service(service_id): ).all() -def fetch_todays_total_message_count(service_id): - start_date = get_london_midnight_in_utc(date.today()) - result = db.session.query( - func.count(Notification.id).label('count') - ).filter( - Notification.service_id == service_id, - Notification.key_type != KEY_TYPE_TEST, - Notification.created_at >= start_date - ).first() - return 0 if result is None else result.count - - def _stats_for_service_query(service_id): return db.session.query( Notification.notification_type, diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index ed4334d29..b6cdc26fe 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -148,10 +148,8 @@ def persist_notification( # if simulated create a Notification model to return but do not persist the Notification to the dB if not simulated: dao_create_notification(notification) - # Only keep track of the daily limit for trial mode services. - if service.restricted and key_type != KEY_TYPE_TEST: - if redis_store.get(redis.daily_limit_cache_key(service.id)): - redis_store.incr(redis.daily_limit_cache_key(service.id)) + if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: + redis_store.incr(redis.daily_limit_cache_key(service.id)) current_app.logger.info( "{} {} created at {}".format(notification_type, notification_id, notification_created_at) diff --git a/app/notifications/validators.py b/app/notifications/validators.py index bcdf15534..b33bc79a8 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -14,7 +14,6 @@ from notifications_utils.recipients import ( from sqlalchemy.orm.exc import NoResultFound from app import redis_store -from app.dao import services_dao from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id @@ -63,8 +62,9 @@ def check_service_over_daily_message_limit(key_type, service): cache_key = daily_limit_cache_key(service.id) service_stats = redis_store.get(cache_key) if not service_stats: - service_stats = services_dao.fetch_todays_total_message_count(service.id) - redis_store.set(cache_key, service_stats, ex=3600) + # first message of the day, set the cache to 0 and the expiry to 24 hours + service_stats = 0 + redis_store.set(cache_key, service_stats, ex=86400) if int(service_stats) >= service.message_limit: current_app.logger.info( "service {} has been rate limited for daily use sent {} limit {}".format( @@ -73,10 +73,24 @@ def check_service_over_daily_message_limit(key_type, service): raise TooManyRequestsError(service.message_limit) +def get_service_daily_limit_cache_value(key_type, service): + if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: + cache_key = daily_limit_cache_key(service.id) + service_stats = redis_store.get(cache_key) + if not service_stats: + # first message of the day, set the cache to 0 and the expiry to 24 hours + service_stats = 0 + redis_store.set(cache_key, service_stats, ex=86400) + return 0 + else: + return int(service_stats) + else: + return 0 + + def check_rate_limiting(service, api_key): check_service_over_api_rate_limit(service, api_key) - # Reduce queries to the notifications table - # check_service_over_daily_message_limit(api_key.key_type, service) + check_service_over_daily_message_limit(api_key.key_type, service) 
def check_template_is_for_notification_type(notification_type, template_type): diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index ba0443bad..c3a9f1d6b 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -160,6 +160,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits( mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=8) process_job(job.id) @@ -181,6 +182,7 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today( mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', return_value=(load_example_csv('sms'), {'sender_id': None})) mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2) process_job(job.id) @@ -200,6 +202,7 @@ def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(noti mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3') mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2) process_job(job.id) @@ -232,6 +235,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, mocker.patch('app.celery.tasks.save_email.apply_async') mocker.patch('app.encryption.encrypt', return_value="something_encrypted") mocker.patch('app.celery.tasks.create_uuid', return_value="uuid") + mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=0) process_job(job.id) diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index a2a716c70..d34184305 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -41,7 +41,6 @@ from app.dao.services_dao import ( dao_suspend_service, dao_update_service, delete_service_and_all_associated_db_objects, - fetch_todays_total_message_count, get_live_services_with_organisation, get_services_by_partial_name, ) @@ -984,31 +983,6 @@ def test_fetch_stats_should_not_gather_notifications_older_than_7_days( assert len(stats) == rows_returned -def test_dao_fetch_todays_total_message_count_returns_count_for_today(notify_db_session): - template = create_template(service=create_service()) - notification = create_notification(template=template) - # don't include notifications earlier than today - create_notification(template=template, created_at=datetime.utcnow()-timedelta(days=2)) - assert fetch_todays_total_message_count(notification.service.id) == 1 - - -def test_dao_fetch_todays_total_message_count_returns_count_for_all_notification_type_and_selected_service( - notify_db_session -): - service = create_service() - different_service = create_service(service_name='different service') - create_notification(template=create_template(service=service)) - create_notification(template=create_template(service=service, template_type='email')) - create_notification(template=create_template(service=service, template_type='letter')) - create_notification(template=create_template(service=different_service)) - assert fetch_todays_total_message_count(service.id) == 3 - - -def test_dao_fetch_todays_total_message_count_returns_0_when_no_messages_for_today(notify_db, - notify_db_session): - assert fetch_todays_total_message_count(uuid.uuid4()) == 0 - - def 
test_dao_fetch_todays_stats_for_all_services_includes_all_services(notify_db_session): # two services, each with an email and sms notification service1 = create_service(service_name='service 1', email_from='service.1') diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 7b22282fa..cb9703fb0 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -21,6 +21,7 @@ from app.notifications.process_notifications import ( from app.serialised_models import SerialisedTemplate from app.v2.errors import BadRequestError from tests.app.db import create_api_key, create_service, create_template +from tests.conftest import set_config def test_create_content_for_notification_passes(sample_email_template): @@ -112,50 +113,47 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ assert NotificationHistory.query.count() == 0 -def test_cache_is_not_incremented_on_failure_to_persist_notification(sample_api_key, mocker): - mocked_redis = mocker.patch('app.redis_store.get') - mock_service_template_cache = mocker.patch('app.redis_store.get_all_from_hash') +def test_cache_is_not_incremented_on_failure_to_persist_notification(notify_api, sample_api_key, mocker): + mocked_redis = mocker.patch('app.redis_store.incr') with pytest.raises(SQLAlchemyError): - persist_notification(template_id=None, - template_version=None, - recipient='+447111111111', - service=sample_api_key.service, - personalisation=None, - notification_type='sms', - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type) + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification(template_id=None, + template_version=None, + recipient='+447111111111', + service=sample_api_key.service, + personalisation=None, + notification_type='sms', + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type) mocked_redis.assert_not_called() - mock_service_template_cache.assert_not_called() def test_persist_notification_does_not_increment_cache_if_test_key( - sample_template, sample_job, mocker, sample_test_api_key + notify_api, sample_template, sample_job, mocker, sample_test_api_key ): - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value="cache") - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value="cache") daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr') - template_usage_cache = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value') assert Notification.query.count() == 0 assert NotificationHistory.query.count() == 0 - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient='+447111111111', - service=sample_template.service, - personalisation={}, - notification_type='sms', - api_key_id=sample_test_api_key.id, - key_type=sample_test_api_key.key_type, - job_id=sample_job.id, - job_row_number=100, - reference="ref", - ) - assert Notification.query.count() == 1 + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=sample_template.id, + template_version=sample_template.version, + recipient='+447111111111', + service=sample_template.service, + personalisation={}, + notification_type='sms', + api_key_id=sample_test_api_key.id, + key_type=sample_test_api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + 
reference="ref", + ) - assert not daily_limit_cache.called - assert not template_usage_cache.called + assert Notification.query.count() == 1 + + assert not daily_limit_cache.called @freeze_time("2016-01-01 11:09:00.061258") @@ -198,77 +196,48 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key): @freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(notify_db_session, mocker): - service = create_service(restricted=True) - template = create_template(service=service) - api_key = create_api_key(service=service) - mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value=None) - - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111111', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref" - ) - mock_incr.assert_not_called() - - -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_increments_cache_if_key_exists_and_for_trial_service( - notify_db_session, mocker +def test_persist_notification_increments_cache_for_trial_service( + notify_api, notify_db_session, mocker ): service = create_service(restricted=True) template = create_template(service=service) api_key = create_api_key(service=service) mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', - return_value={template.id, 1}) + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=template.id, + template_version=template.version, + recipient='+447111111122', + service=template.service, + personalisation={}, + notification_type='sms', + api_key_id=api_key.id, + key_type=api_key.key_type, + reference="ref2") - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111122', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref2") - - mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", ) + mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", ) -def test_persist_notification_does_not_increments_cache_live_service( - notify_db_session, mocker +def test_persist_notification_increments_cache_live_service( + notify_api, notify_db_session, mocker ): service = create_service(restricted=False) template = create_template(service=service) api_key = create_api_key(service=service) mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', - return_value={template.id, 1}) + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=template.id, + template_version=template.version, + recipient='+447111111122', + service=template.service, + personalisation={}, + notification_type='sms', + api_key_id=api_key.id, + 
key_type=api_key.key_type, + reference="ref2") - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111122', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref2") - - assert not mock_incr.called + assert mock_incr.called @pytest.mark.parametrize(( diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index a99de221a..d253c2a25 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -1,4 +1,4 @@ -from unittest.mock import ANY +from datetime import datetime import pytest from flask import current_app @@ -15,6 +15,7 @@ from app.notifications.validators import ( check_if_service_can_send_files_by_email, check_is_message_too_long, check_notification_content_is_not_empty, + check_rate_limiting, check_reply_to, check_service_email_reply_to_id, check_service_letter_contact_id, @@ -23,6 +24,7 @@ from app.notifications.validators import ( check_service_sms_sender_id, check_template_is_active, check_template_is_for_notification_type, + get_service_daily_limit_cache_value, service_can_send_to_recipient, validate_address, validate_and_format_recipient, @@ -38,7 +40,6 @@ from app.v2.errors import BadRequestError, RateLimitError, TooManyRequestsError from tests.app.db import ( create_api_key, create_letter_contact, - create_notification, create_reply_to_email, create_service, create_service_guest_list, @@ -55,59 +56,31 @@ def enable_redis(notify_api): yield -@pytest.mark.parametrize('key_type', ['test', 'team', 'normal']) -def test_check_service_message_limit_in_cache_with_unrestricted_service_is_allowed( - key_type, - sample_service, - mocker): - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[ - None, # The serialised service - 1, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - mocker.patch('app.notifications.validators.services_dao') - serialised_service = SerialisedService.from_id(sample_service.id) - - check_service_over_daily_message_limit(key_type, serialised_service) - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls - - -@pytest.mark.parametrize('key_type', ['test', 'team', 'normal']) +@pytest.mark.parametrize('key_type', ['team', 'normal']) def test_check_service_message_limit_in_cache_under_message_limit_passes( key_type, sample_service, mocker): - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[ - None, # The serialised service - 1, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - mocker.patch('app.notifications.validators.services_dao') serialised_service = SerialisedService.from_id(sample_service.id) + mock_get = mocker.patch('app.notifications.validators.redis_store.get', return_value=1) + mock_set = mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls + mock_get.assert_called_once_with(f'{serialised_service.id}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') + assert mock_get.called + assert not 
mock_set.called def test_should_not_interact_with_cache_for_test_key(sample_service, mocker): mocker.patch('app.notifications.validators.redis_store') - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) + mock_get = mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) serialised_service = SerialisedService.from_id(sample_service.id) check_service_over_daily_message_limit('test', serialised_service) assert not app.notifications.validators.redis_store.mock_calls + assert not mock_get.called @pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_should_set_cache_value_as_value_from_database_if_cache_not_set( +def test_should_set_cache_value_as_zero_if_cache_not_set( key_type, sample_template, sample_service, @@ -115,75 +88,34 @@ def test_should_set_cache_value_as_value_from_database_if_cache_not_set( ): serialised_service = SerialisedService.from_id(sample_service.id) with freeze_time("2016-01-01 12:00:00.000000"): - for _ in range(5): - create_notification(sample_template) mocker.patch('app.notifications.validators.redis_store.get', return_value=None) mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) app.notifications.validators.redis_store.set.assert_called_with( - str(sample_service.id) + "-2016-01-01-count", 5, ex=3600 + str(sample_service.id) + "-2016-01-01-count", 0, ex=86400 ) -def test_should_not_access_database_if_redis_disabled(notify_api, sample_service, mocker): +def test_does_nothing_if_redis_disabled(notify_api, sample_service, mocker): serialised_service = SerialisedService.from_id(sample_service.id) with set_config(notify_api, 'REDIS_ENABLED', False): - db_mock = mocker.patch('app.notifications.validators.services_dao') - + mock_cache_key = mocker.patch('notifications_utils.clients.redis.daily_limit_cache_key') check_service_over_daily_message_limit('normal', serialised_service) - assert db_mock.method_calls == [] + assert mock_cache_key.method_calls == [] @pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_check_service_message_limit_over_message_limit_fails(key_type, sample_service, mocker): +def test_check_service_message_limit_over_message_limit_fails(key_type, mocker, notify_db_session): with freeze_time("2016-01-01 12:00:00.000000"): - mocker.patch('app.redis_store.get', return_value=None) - mocker.patch('app.notifications.validators.redis_store.set') - - sample_service.restricted = True - sample_service.message_limit = 4 - template = create_template(sample_service) - serialised_service = SerialisedService.from_id(sample_service.id) - - for _ in range(5): - create_notification(template) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, serialised_service) - assert e.value.status_code == 429 - assert e.value.message == 'Exceeded send limits (4) for today' - assert e.value.fields == [] - app.notifications.validators.redis_store.set.assert_called_with( - str(sample_service.id) + "-2016-01-01-count", 5, ex=3600 - ) - - -@pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_check_service_message_limit_in_cache_over_message_limit_fails( - notify_db_session, - key_type, - mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - mocker.patch('app.redis_store.get', side_effect=[ - None, # The serialised service - 5, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - 
mocker.patch('app.notifications.validators.services_dao') - service = create_service(restricted=True, message_limit=4) - serialised_service = SerialisedService.from_id(service.id) + mocker.patch('app.redis_store.get', return_value=5) + with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, serialised_service) + check_service_over_daily_message_limit(key_type, service) assert e.value.status_code == 429 assert e.value.message == 'Exceeded send limits (4) for today' assert e.value.fields == [] - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls @pytest.mark.parametrize('template_type, notification_type', @@ -451,7 +383,6 @@ def test_that_when_exceed_rate_limit_request_fails( api_key_type = key_type mocker.patch('app.redis_store.exceeded_rate_limit', return_value=True) - mocker.patch('app.notifications.validators.services_dao') sample_service.restricted = True api_key = create_api_key(sample_service, key_type=api_key_type) @@ -478,7 +409,6 @@ def test_that_when_not_exceeded_rate_limit_request_succeeds( mocker): with freeze_time("2016-01-01 12:00:00.000000"): mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False) - mocker.patch('app.notifications.validators.services_dao') sample_service.restricted = True api_key = create_api_key(sample_service) @@ -500,7 +430,6 @@ def test_should_not_rate_limit_if_limiting_is_disabled( current_app.config['API_RATE_LIMIT_ENABLED'] = False mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False) - mocker.patch('app.notifications.validators.services_dao') sample_service.restricted = True create_api_key(sample_service) @@ -511,6 +440,96 @@ def test_should_not_rate_limit_if_limiting_is_disabled( assert not app.redis_store.exceeded_rate_limit.called +def test_check_rate_limiting_checks_daily_limit_if_trial_mode_service( + notify_db_session, mocker +): + mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') + trial_mode_service = create_service(restricted=True, message_limit=5) + team_key = create_api_key(service=trial_mode_service, key_type='team') + check_rate_limiting(trial_mode_service, team_key) + mock_daily_limit.assert_called_once_with('team', trial_mode_service) + + +def test_check_rate_limiting_checks_daily_limit_if_live_service( + notify_db_session, mocker +): + mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') + live_service = create_service(restricted=False, message_limit=5) + live_key = create_api_key(service=live_service, key_type='normal') + check_rate_limiting(service=live_service, api_key=live_key) + mock_daily_limit.assert_called_once_with('normal', live_service) + + +def test_check_rate_limiting_checks_rate_limit_if_trial_mode_service( + notify_db_session, mocker +): + mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit') + trial_mode_service = create_service(restricted=True, message_limit=5) + team_key = create_api_key(service=trial_mode_service, key_type='team') + check_rate_limiting(trial_mode_service, team_key) + mock_rate_limit.assert_called_once_with(trial_mode_service, team_key) + + +def test_check_rate_limiting_checks_rate_limit_if_live_service( + notify_db_session, mocker +): + mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit') + 
live_service = create_service(restricted=False, message_limit=5) + live_key = create_api_key(service=live_service, key_type='normal') + check_rate_limiting(service=live_service, api_key=live_key) + mock_rate_limit.assert_called_once_with(live_service, live_key) + + +def test_get_service_daily_limit_cache_value( + notify_api, notify_db_session, mocker +): + mock_get = mocker.patch('app.redis_store.get', return_value=5) + service = create_service() + with set_config(notify_api, 'REDIS_ENABLED', True): + get_service_daily_limit_cache_value('normal', service) + mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') + + +def test_get_service_daily_limit_cache_value_return_zero_and_sets_cache_if_key_not_found_in_cache( + notify_api, notify_db_session, mocker +): + mock_get = mocker.patch('app.redis_store.get', return_value=None) + mock_set = mocker.patch('app.redis_store.set') + service = create_service() + with set_config(notify_api, 'REDIS_ENABLED', True): + assert get_service_daily_limit_cache_value('normal', service) == 0 + mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') + mock_set.assert_called_once_with( + f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count', + 0, + ex=86400 + ) + + +def test_get_service_daily_limit_cache_value_returns_zero_if_test_api_key( + notify_api, notify_db_session, mocker +): + mock_get = mocker.patch('app.redis_store.get', return_value=None) + mock_set = mocker.patch('app.redis_store.set') + service = create_service() + with set_config(notify_api, 'REDIS_ENABLED', True): + assert get_service_daily_limit_cache_value('test', service) == 0 + assert not mock_get.called + assert not mock_set.called + + +def test_get_service_daily_limit_cache_value_returns_zero_if_redis_is_not_enabled( + notify_api, notify_db_session, mocker +): + mock_get = mocker.patch('app.redis_store.get', return_value=None) + mock_set = mocker.patch('app.redis_store.set') + service = create_service() + # redis is not enabled for the test environment but is always enabled for production + assert get_service_daily_limit_cache_value('test', service) == 0 + assert not mock_get.called + assert not mock_set.called + + @pytest.mark.parametrize('key_type', ['test', 'normal']) def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_int_sms( key_type, From 57fb9da4140f84275982a897b587e22ee61346fd Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 23 Jun 2021 15:09:09 +0100 Subject: [PATCH 2/4] - change the condition so that we don't reset the cache if it's zero - set the cache if it doesn't exist so there is an expiry of 24 hours. 
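
For readers following along, the set-or-increment behaviour this commit
describes can be sketched in plain Python as below. This is an illustration
only, not part of the diff: the FakeRedis stub and the bump_daily_limit helper
are invented for the sketch, while the cache key format and the 86400-second
(24 hour) expiry are taken from the patch itself.

    from datetime import date

    class FakeRedis:
        """In-memory stand-in for the real redis client (get/set/incr only)."""
        def __init__(self):
            self.data = {}

        def get(self, key):
            return self.data.get(key)

        def set(self, key, value, ex=None):
            # `ex` is the TTL in seconds; this stub just ignores it
            self.data[key] = value

        def incr(self, key):
            self.data[key] = int(self.data.get(key, 0)) + 1

    redis_store = FakeRedis()

    def daily_limit_cache_key(service_id):
        # key format used throughout the PR: <service_id>-YYYY-MM-DD-count
        return f"{service_id}-{date.today().isoformat()}-count"

    def bump_daily_limit(service_id):
        # If the key does not exist yet, create it at 1 with a 24 hour expiry
        # so it rolls over each day; a bare INCR would create the key with no
        # TTL at all.
        cache_key = daily_limit_cache_key(service_id)
        if redis_store.get(cache_key) is None:
            redis_store.set(cache_key, 1, ex=86400)
        else:
            redis_store.incr(cache_key)

    bump_daily_limit("1234")
    bump_daily_limit("1234")
    assert redis_store.get(daily_limit_cache_key("1234")) == 2
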
--- app/notifications/process_notifications.py | 11 +++++++++-- app/notifications/validators.py | 2 +- tests/app/notifications/test_process_notification.py | 2 ++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index b6cdc26fe..4aea1ec05 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -149,8 +149,15 @@ def persist_notification( if not simulated: dao_create_notification(notification) if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: - redis_store.incr(redis.daily_limit_cache_key(service.id)) - + cache_key = redis.daily_limit_cache_key(service.id) + if redis_store.get(cache_key) is None: + # if cache does not exist set the cache to 1 with an expiry of 24 hours, + # The cache should be set by the time we create the notification + # but in case it is this will make sure the expiry is set to 24 hours, + # where if we let the incr method create the cache it will be set a ttl. + redis_store.set(cache_key, 1, ex=86400) + else: + redis_store.incr(cache_key) current_app.logger.info( "{} {} created at {}".format(notification_type, notification_id, notification_created_at) ) diff --git a/app/notifications/validators.py b/app/notifications/validators.py index b33bc79a8..60bf639e1 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -61,7 +61,7 @@ def check_service_over_daily_message_limit(key_type, service): if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: cache_key = daily_limit_cache_key(service.id) service_stats = redis_store.get(cache_key) - if not service_stats: + if service_stats is None: # first message of the day, set the cache to 0 and the expiry to 24 hours service_stats = 0 redis_store.set(cache_key, service_stats, ex=86400) diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index cb9703fb0..051e2953b 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -202,6 +202,7 @@ def test_persist_notification_increments_cache_for_trial_service( service = create_service(restricted=True) template = create_template(service=service) api_key = create_api_key(service=service) + mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') with set_config(notify_api, 'REDIS_ENABLED', True): persist_notification( @@ -224,6 +225,7 @@ def test_persist_notification_increments_cache_live_service( service = create_service(restricted=False) template = create_template(service=service) api_key = create_api_key(service=service) + mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') with set_config(notify_api, 'REDIS_ENABLED', True): persist_notification( From fd7486d751965fd48edfdbcc3712abafc0b3ea58 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 24 Jun 2021 11:05:22 +0100 Subject: [PATCH 3/4] - Merge daily limit functions into one, refactor call for daily limit check from process_job - refactor tests to standardise test names - refactor some tests to be more clear - remove unnecessary tests - include missing test --- app/celery/tasks.py | 11 +- app/notifications/validators.py | 42 ++---- 
tests/app/celery/test_tasks.py | 90 ++++-------- .../test_process_notification.py | 109 ++++++++------- tests/app/notifications/test_validators.py | 129 ++++-------------- 5 files changed, 126 insertions(+), 255 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index bb184bade..2498d5d88 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -54,10 +54,11 @@ from app.models import ( DailySortedLetter, ) from app.notifications.process_notifications import persist_notification -from app.notifications.validators import get_service_daily_limit_cache_value +from app.notifications.validators import check_service_over_daily_message_limit from app.serialised_models import SerialisedService, SerialisedTemplate from app.service.utils import service_allowed_to_send_to from app.utils import DATETIME_FORMAT, get_reference_from_personalisation +from app.v2.errors import TooManyRequestsError @notify_celery.task(name="process-job") @@ -159,9 +160,10 @@ def process_row(row, template, job, service, sender_id=None): def __sending_limits_for_job_exceeded(service, job, job_id): - total_sent = get_service_daily_limit_cache_value(KEY_TYPE_NORMAL, service) - print(total_sent) - if total_sent + job.notification_count > service.message_limit: + try: + check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) + return False + except TooManyRequestsError: job.job_status = 'sending limits exceeded' job.processing_finished = datetime.utcnow() dao_update_job(job) @@ -170,7 +172,6 @@ def __sending_limits_for_job_exceeded(service, job, job_id): job_id, job.notification_count, service.message_limit) ) return True - return False @notify_celery.task(bind=True, name="save-sms", max_retries=5, default_retry_delay=300) diff --git a/app/notifications/validators.py b/app/notifications/validators.py index 60bf639e1..c63aad5af 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -58,35 +58,23 @@ def check_service_over_api_rate_limit(service, api_key): def check_service_over_daily_message_limit(key_type, service): - if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: - cache_key = daily_limit_cache_key(service.id) - service_stats = redis_store.get(cache_key) - if service_stats is None: - # first message of the day, set the cache to 0 and the expiry to 24 hours - service_stats = 0 - redis_store.set(cache_key, service_stats, ex=86400) - if int(service_stats) >= service.message_limit: - current_app.logger.info( - "service {} has been rate limited for daily use sent {} limit {}".format( - service.id, int(service_stats), service.message_limit) - ) - raise TooManyRequestsError(service.message_limit) - - -def get_service_daily_limit_cache_value(key_type, service): - if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: - cache_key = daily_limit_cache_key(service.id) - service_stats = redis_store.get(cache_key) - if not service_stats: - # first message of the day, set the cache to 0 and the expiry to 24 hours - service_stats = 0 - redis_store.set(cache_key, service_stats, ex=86400) - return 0 - else: - return int(service_stats) - else: + if key_type == KEY_TYPE_TEST or not current_app.config['REDIS_ENABLED']: return 0 + cache_key = daily_limit_cache_key(service.id) + service_stats = redis_store.get(cache_key) + if service_stats is None: + # first message of the day, set the cache to 0 and the expiry to 24 hours + service_stats = 0 + redis_store.set(cache_key, service_stats, ex=86400) + return service_stats + if int(service_stats) >= 
service.message_limit: + current_app.logger.info( + "service {} has been rate limited for daily use sent {} limit {}".format( + service.id, int(service_stats), service.message_limit) + ) + raise TooManyRequestsError(service.message_limit) + def check_rate_limiting(service, api_key): check_service_over_api_rate_limit(service, api_key) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index c3a9f1d6b..55a0bdccc 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -51,6 +51,7 @@ from app.models import ( ) from app.serialised_models import SerialisedService, SerialisedTemplate from app.utils import DATETIME_FORMAT +from app.v2.errors import TooManyRequestsError from tests.app import load_example_csv from tests.app.db import ( create_api_key, @@ -150,68 +151,6 @@ def test_should_process_sms_job_with_sender_id(sample_job, mocker, fake_uuid): ) -@freeze_time("2016-01-01 11:09:00.061258") -def test_should_not_process_sms_job_if_would_exceed_send_limits( - notify_db_session, mocker -): - service = create_service(message_limit=9) - template = create_template(service=service) - job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv') - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) - mocker.patch('app.celery.tasks.process_row') - mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=8) - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == 'sending limits exceeded' - assert s3.get_job_and_metadata_from_s3.called is False - assert tasks.process_row.called is False - - -def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today( - notify_db_session, mocker -): - service = create_service(message_limit=1) - template = create_template(service=service) - job = create_job(template=template) - - create_notification(template=template, job=job) - - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=(load_example_csv('sms'), {'sender_id': None})) - mocker.patch('app.celery.tasks.process_row') - mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2) - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == 'sending limits exceeded' - assert s3.get_job_and_metadata_from_s3.called is False - assert tasks.process_row.called is False - - -@pytest.mark.parametrize('template_type', ['sms', 'email']) -def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db_session, template_type, mocker): - service = create_service(message_limit=1) - template = create_template(service=service, template_type=template_type) - job = create_job(template=template) - - create_notification(template=template, job=job) - - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3') - mocker.patch('app.celery.tasks.process_row') - mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=2) - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == 'sending limits exceeded' - assert s3.get_job_and_metadata_from_s3.called is False - assert tasks.process_row.called is False - - def test_should_not_process_job_if_already_pending(sample_template, mocker): job = create_job(template=sample_template, job_status='scheduled') @@ -224,8 +163,28 @@ def 
test_should_not_process_job_if_already_pending(sample_template, mocker): assert tasks.process_row.called is False -def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, - mocker): +def test_should_not_process_if_send_limit_is_exceeded( + notify_api, notify_db_session, mocker +): + service = create_service(message_limit=9) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv') + mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', + return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) + mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', + side_effect=TooManyRequestsError("exceeded limit")) + process_job(job.id) + + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == 'sending limits exceeded' + assert s3.get_job_and_metadata_from_s3.called is False + assert tasks.process_row.called is False + + +def test_should_process_job_if_send_limits_are_not_exceeded( + notify_api, notify_db_session, mocker +): service = create_service(message_limit=10) template = create_template(service=service, template_type='email') job = create_job(template=template, notification_count=10) @@ -235,8 +194,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, mocker.patch('app.celery.tasks.save_email.apply_async') mocker.patch('app.encryption.encrypt', return_value="something_encrypted") mocker.patch('app.celery.tasks.create_uuid', return_value="uuid") - mocker.patch('app.celery.tasks.get_service_daily_limit_cache_value', return_value=0) - + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', return_value=0) process_job(job.id) s3.get_job_and_metadata_from_s3.assert_called_once_with( diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 051e2953b..266bfa9c1 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -113,49 +113,6 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ assert NotificationHistory.query.count() == 0 -def test_cache_is_not_incremented_on_failure_to_persist_notification(notify_api, sample_api_key, mocker): - mocked_redis = mocker.patch('app.redis_store.incr') - with pytest.raises(SQLAlchemyError): - with set_config(notify_api, 'REDIS_ENABLED', True): - persist_notification(template_id=None, - template_version=None, - recipient='+447111111111', - service=sample_api_key.service, - personalisation=None, - notification_type='sms', - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type) - mocked_redis.assert_not_called() - - -def test_persist_notification_does_not_increment_cache_if_test_key( - notify_api, sample_template, sample_job, mocker, sample_test_api_key -): - daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr') - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - - with set_config(notify_api, 'REDIS_ENABLED', True): - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient='+447111111111', - service=sample_template.service, - personalisation={}, - notification_type='sms', - api_key_id=sample_test_api_key.id, - key_type=sample_test_api_key.key_type, - job_id=sample_job.id, - job_row_number=100, - 
reference="ref", - ) - - assert Notification.query.count() == 1 - - assert not daily_limit_cache.called - - @freeze_time("2016-01-01 11:09:00.061258") def test_persist_notification_with_optionals(sample_job, sample_api_key): assert Notification.query.count() == 0 @@ -195,11 +152,55 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key): assert not persisted_notification.reply_to_text -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_increments_cache_for_trial_service( - notify_api, notify_db_session, mocker +def test_persist_notification_cache_is_not_incremented_on_failure_to_create_notification( + notify_api, sample_api_key, mocker ): - service = create_service(restricted=True) + mocked_redis = mocker.patch('app.redis_store.incr') + with pytest.raises(SQLAlchemyError): + persist_notification(template_id=None, + template_version=None, + recipient='+447111111111', + service=sample_api_key.service, + personalisation=None, + notification_type='sms', + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type) + mocked_redis.assert_not_called() + + +def test_persist_notification_does_not_increment_cache_if_test_key( + notify_api, sample_template, sample_job, mocker, sample_test_api_key +): + daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr') + + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=sample_template.id, + template_version=sample_template.version, + recipient='+447111111111', + service=sample_template.service, + personalisation={}, + notification_type='sms', + api_key_id=sample_test_api_key.id, + key_type=sample_test_api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + ) + + assert Notification.query.count() == 1 + + assert not daily_limit_cache.called + + +@pytest.mark.parametrize('restricted_service', [True, False]) +@freeze_time("2016-01-01 11:09:00.061258") +def test_persist_notification_increments_cache_for_trial_or_live_service( + notify_api, notify_db_session, mocker, restricted_service +): + service = create_service(restricted=restricted_service) template = create_template(service=service) api_key = create_api_key(service=service) mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) @@ -219,14 +220,16 @@ def test_persist_notification_increments_cache_for_trial_service( mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", ) -def test_persist_notification_increments_cache_live_service( - notify_api, notify_db_session, mocker +@pytest.mark.parametrize('restricted_service', [True, False]) +@freeze_time("2016-01-01 11:09:00.061258") +def test_persist_notification_sets_daily_limit_cache_if_one_does_not_exists( + notify_api, notify_db_session, mocker, restricted_service ): - service = create_service(restricted=False) + service = create_service(restricted=restricted_service) template = create_template(service=service) api_key = create_api_key(service=service) - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) - mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') + mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None) + mock_set = mocker.patch('app.notifications.process_notifications.redis_store.set') with set_config(notify_api, 'REDIS_ENABLED', True): 
persist_notification( template_id=template.id, @@ -239,7 +242,7 @@ def test_persist_notification_increments_cache_live_service( key_type=api_key.key_type, reference="ref2") - assert mock_incr.called + mock_set.assert_called_once_with(str(service.id) + "-2016-01-01-count", 1, ex=86400) @pytest.mark.parametrize(( diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index d253c2a25..7a0fb0ca3 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -24,7 +24,6 @@ from app.notifications.validators import ( check_service_sms_sender_id, check_template_is_active, check_template_is_for_notification_type, - get_service_daily_limit_cache_value, service_can_send_to_recipient, validate_address, validate_and_format_recipient, @@ -70,17 +69,17 @@ def test_check_service_message_limit_in_cache_under_message_limit_passes( assert not mock_set.called -def test_should_not_interact_with_cache_for_test_key(sample_service, mocker): +def test_check_service_over_daily_message_limit_should_not_interact_with_cache_for_test_key(sample_service, mocker): mocker.patch('app.notifications.validators.redis_store') mock_get = mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) serialised_service = SerialisedService.from_id(sample_service.id) check_service_over_daily_message_limit('test', serialised_service) - assert not app.notifications.validators.redis_store.mock_calls - assert not mock_get.called + app.notifications.validators.redis_store.assert_not_called + mock_get.assert_not_called() @pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_should_set_cache_value_as_zero_if_cache_not_set( +def test_check_service_over_daily_message_limit_should_set_cache_value_as_zero_if_cache_not_set( key_type, sample_template, sample_service, @@ -88,7 +87,6 @@ def test_should_set_cache_value_as_zero_if_cache_not_set( ): serialised_service = SerialisedService.from_id(sample_service.id) with freeze_time("2016-01-01 12:00:00.000000"): - mocker.patch('app.notifications.validators.redis_store.get', return_value=None) mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) app.notifications.validators.redis_store.set.assert_called_with( @@ -96,7 +94,7 @@ def test_should_set_cache_value_as_zero_if_cache_not_set( ) -def test_does_nothing_if_redis_disabled(notify_api, sample_service, mocker): +def test_check_service_over_daily_message_limit_does_nothing_if_redis_disabled(notify_api, sample_service, mocker): serialised_service = SerialisedService.from_id(sample_service.id) with set_config(notify_api, 'REDIS_ENABLED', False): mock_cache_key = mocker.patch('notifications_utils.clients.redis.daily_limit_cache_key') @@ -107,15 +105,14 @@ def test_does_nothing_if_redis_disabled(notify_api, sample_service, mocker): @pytest.mark.parametrize('key_type', ['team', 'normal']) def test_check_service_message_limit_over_message_limit_fails(key_type, mocker, notify_db_session): - with freeze_time("2016-01-01 12:00:00.000000"): - service = create_service(restricted=True, message_limit=4) - mocker.patch('app.redis_store.get', return_value=5) + service = create_service(restricted=True, message_limit=4) + mocker.patch('app.redis_store.get', return_value=5) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, service) - assert e.value.status_code == 429 - assert e.value.message == 'Exceeded send limits (4) for 
today' - assert e.value.fields == [] + with pytest.raises(TooManyRequestsError) as e: + check_service_over_daily_message_limit(key_type, service) + assert e.value.status_code == 429 + assert e.value.message == 'Exceeded send limits (4) for today' + assert e.value.fields == [] @pytest.mark.parametrize('template_type, notification_type', @@ -371,7 +368,7 @@ def test_validate_template_calls_all_validators_exception_message_too_long(mocke @pytest.mark.parametrize('key_type', ['team', 'live', 'test']) -def test_that_when_exceed_rate_limit_request_fails( +def test_check_service_over_api_rate_limit_when_exceed_rate_limit_request_fails_raises_error( key_type, sample_service, mocker): @@ -404,7 +401,7 @@ def test_that_when_exceed_rate_limit_request_fails( assert e.value.fields == [] -def test_that_when_not_exceeded_rate_limit_request_succeeds( +def test_check_service_over_api_rate_limit_when_rate_limit_has_not_exceeded_limit_succeeds( sample_service, mocker): with freeze_time("2016-01-01 12:00:00.000000"): @@ -423,7 +420,7 @@ def test_that_when_not_exceeded_rate_limit_request_succeeds( ) -def test_should_not_rate_limit_if_limiting_is_disabled( +def test_check_service_over_api_rate_limit_should_do_nothing_if_limiting_is_disabled( sample_service, mocker): with freeze_time("2016-01-01 12:00:00.000000"): @@ -437,101 +434,25 @@ def test_should_not_rate_limit_if_limiting_is_disabled( serialised_api_key = SerialisedAPIKeyCollection.from_service_id(serialised_service.id)[0] check_service_over_api_rate_limit(serialised_service, serialised_api_key) - assert not app.redis_store.exceeded_rate_limit.called + app.redis_store.exceeded_rate_limit.assert_not_called() -def test_check_rate_limiting_checks_daily_limit_if_trial_mode_service( - notify_db_session, mocker -): - mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') - trial_mode_service = create_service(restricted=True, message_limit=5) - team_key = create_api_key(service=trial_mode_service, key_type='team') - check_rate_limiting(trial_mode_service, team_key) - mock_daily_limit.assert_called_once_with('team', trial_mode_service) - - -def test_check_rate_limiting_checks_daily_limit_if_live_service( - notify_db_session, mocker -): - mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') - live_service = create_service(restricted=False, message_limit=5) - live_key = create_api_key(service=live_service, key_type='normal') - check_rate_limiting(service=live_service, api_key=live_key) - mock_daily_limit.assert_called_once_with('normal', live_service) - - -def test_check_rate_limiting_checks_rate_limit_if_trial_mode_service( +def test_check_rate_limiting_validates_api_rate_limit_and_daily_limit( notify_db_session, mocker ): mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit') - trial_mode_service = create_service(restricted=True, message_limit=5) - team_key = create_api_key(service=trial_mode_service, key_type='team') - check_rate_limiting(trial_mode_service, team_key) - mock_rate_limit.assert_called_once_with(trial_mode_service, team_key) - - -def test_check_rate_limiting_checks_rate_limit_if_live_service( - notify_db_session, mocker -): - mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit') - live_service = create_service(restricted=False, message_limit=5) - live_key = create_api_key(service=live_service, key_type='normal') - check_rate_limiting(service=live_service, 
api_key=live_key) - mock_rate_limit.assert_called_once_with(live_service, live_key) - - -def test_get_service_daily_limit_cache_value( - notify_api, notify_db_session, mocker -): - mock_get = mocker.patch('app.redis_store.get', return_value=5) + mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') service = create_service() - with set_config(notify_api, 'REDIS_ENABLED', True): - get_service_daily_limit_cache_value('normal', service) - mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') + api_key = create_api_key(service=service) + check_rate_limiting(service, api_key) -def test_get_service_daily_limit_cache_value_return_zero_and_sets_cache_if_key_not_found_in_cache( - notify_api, notify_db_session, mocker -): - mock_get = mocker.patch('app.redis_store.get', return_value=None) - mock_set = mocker.patch('app.redis_store.set') - service = create_service() - with set_config(notify_api, 'REDIS_ENABLED', True): - assert get_service_daily_limit_cache_value('normal', service) == 0 - mock_get.assert_called_once_with(f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') - mock_set.assert_called_once_with( - f'{str(service.id)}-{datetime.utcnow().strftime("%Y-%m-%d")}-count', - 0, - ex=86400 - ) - - -def test_get_service_daily_limit_cache_value_returns_zero_if_test_api_key( - notify_api, notify_db_session, mocker -): - mock_get = mocker.patch('app.redis_store.get', return_value=None) - mock_set = mocker.patch('app.redis_store.set') - service = create_service() - with set_config(notify_api, 'REDIS_ENABLED', True): - assert get_service_daily_limit_cache_value('test', service) == 0 - assert not mock_get.called - assert not mock_set.called - - -def test_get_service_daily_limit_cache_value_returns_zero_if_redis_is_not_enabled( - notify_api, notify_db_session, mocker -): - mock_get = mocker.patch('app.redis_store.get', return_value=None) - mock_set = mocker.patch('app.redis_store.set') - service = create_service() - # redis is not enabled for the test environment but is always enabled for production - assert get_service_daily_limit_cache_value('test', service) == 0 - assert not mock_get.called - assert not mock_set.called + mock_rate_limit.assert_called_once_with(service, api_key) + mock_daily_limit.assert_called_once_with(api_key.key_type, service) @pytest.mark.parametrize('key_type', ['test', 'normal']) -def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_int_sms( +def test_validate_and_format_recipient_fails_when_international_number_and_service_does_not_allow_int_sms( key_type, notify_db_session, ): @@ -545,14 +466,14 @@ def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_ @pytest.mark.parametrize('key_type', ['test', 'normal']) -def test_allows_api_calls_with_international_numbers_if_service_does_allow_int_sms( +def test_validate_and_format_recipient_succeeds_with_international_numbers_if_service_does_allow_int_sms( key_type, sample_service_full_permissions): service_model = SerialisedService.from_id(sample_service_full_permissions.id) result = validate_and_format_recipient('20-12-1234-1234', key_type, service_model, SMS_TYPE) assert result == '201212341234' -def test_rejects_api_calls_with_no_recipient(): +def test_validate_and_format_recipient_fails_when_no_recipient(): with pytest.raises(BadRequestError) as e: validate_and_format_recipient(None, 'key_type', 'service', 'SMS_TYPE') assert e.value.status_code == 400 From 
18dd9050a4d8e6d70d46498ff9d9769f7296a485 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 28 Jun 2021 10:12:43 +0100 Subject: [PATCH 4/4] - make sure when processing a job that we check the total_sent + job.notification_count against the service.message_limit. --- app/celery/tasks.py | 7 +++++-- tests/app/celery/test_tasks.py | 19 +++++++++++++++++++ tests/app/notifications/test_validators.py | 6 ++---- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 2498d5d88..48e419781 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -161,8 +161,11 @@ def process_row(row, template, job, service, sender_id=None): def __sending_limits_for_job_exceeded(service, job, job_id): try: - check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) - return False + total_sent = check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) + if total_sent + job.notification_count > service.message_limit: + raise TooManyRequestsError(service.message_limit) + else: + return False except TooManyRequestsError: job.job_status = 'sending limits exceeded' job.processing_finished = datetime.utcnow() diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 55a0bdccc..deb82afc1 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -182,6 +182,25 @@ def test_should_not_process_if_send_limit_is_exceeded( assert tasks.process_row.called is False +def test_should_not_process_if_send_limit_is_exceeded_by_job_notification_count( + notify_api, notify_db_session, mocker +): + service = create_service(message_limit=9) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv') + mock_s3 = mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', + return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) + mock_process_row = mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', + return_value=0) + process_job(job.id) + + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == 'sending limits exceeded' + mock_s3.assert_not_called() + mock_process_row.assert_not_called() + + def test_should_process_job_if_send_limits_are_not_exceeded( notify_api, notify_db_session, mocker ): diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index 7a0fb0ca3..490666153 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -65,8 +65,7 @@ def test_check_service_message_limit_in_cache_under_message_limit_passes( mock_set = mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) mock_get.assert_called_once_with(f'{serialised_service.id}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') - assert mock_get.called - assert not mock_set.called + mock_set.assert_not_called() def test_check_service_over_daily_message_limit_should_not_interact_with_cache_for_test_key(sample_service, mocker): @@ -74,7 +73,6 @@ def test_check_service_over_daily_message_limit_should_not_interact_with_cache_f mock_get = mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) serialised_service = SerialisedService.from_id(sample_service.id) check_service_over_daily_message_limit('test', serialised_service) - app.notifications.validators.redis_store.assert_not_called 
mock_get.assert_not_called() @@ -105,7 +103,7 @@ def test_check_service_over_daily_message_limit_does_nothing_if_redis_disabled(n @pytest.mark.parametrize('key_type', ['team', 'normal']) def test_check_service_message_limit_over_message_limit_fails(key_type, mocker, notify_db_session): - service = create_service(restricted=True, message_limit=4) + service = create_service(message_limit=4) mocker.patch('app.redis_store.get', return_value=5) with pytest.raises(TooManyRequestsError) as e: