def __sending_limits_for_job_exceeded(service, job, job_id):
    """Return True if sending this job would exceed the service's daily limit.

    On failure, marks the job as 'sending limits exceeded', stamps
    ``processing_finished`` and persists it; otherwise returns False and
    leaves the job untouched.
    """
    try:
        # check_service_over_daily_message_limit raises TooManyRequestsError
        # when the service is already over its limit; otherwise it reports how
        # many messages were sent today. Guard with `or 0` because the checker
        # can return None on the cached under-limit path — without the guard
        # the arithmetic below raises TypeError.
        total_sent = check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) or 0
        if total_sent + job.notification_count > service.message_limit:
            # Raise the same exception type so both over-limit paths share the
            # bookkeeping in the except block below.
            raise TooManyRequestsError(service.message_limit)
        return False
    except TooManyRequestsError:
        job.job_status = 'sending limits exceeded'
        job.processing_finished = datetime.utcnow()
        dao_update_job(job)
        current_app.logger.info(
            "Job {} size {} error. Sending limits {} exceeded".format(
                job_id, job.notification_count, service.message_limit)
        )
        return True
False @notify_celery.task(bind=True, name="save-sms", max_retries=5, default_retry_delay=300) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 68d5ed2cc..374022004 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -436,18 +436,6 @@ def dao_fetch_todays_stats_for_service(service_id): ).all() -def fetch_todays_total_message_count(service_id): - start_date = get_london_midnight_in_utc(date.today()) - result = db.session.query( - func.count(Notification.id).label('count') - ).filter( - Notification.service_id == service_id, - Notification.key_type != KEY_TYPE_TEST, - Notification.created_at >= start_date - ).first() - return 0 if result is None else result.count - - def _stats_for_service_query(service_id): return db.session.query( Notification.notification_type, diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index ed4334d29..4aea1ec05 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -148,11 +148,16 @@ def persist_notification( # if simulated create a Notification model to return but do not persist the Notification to the dB if not simulated: dao_create_notification(notification) - # Only keep track of the daily limit for trial mode services. - if service.restricted and key_type != KEY_TYPE_TEST: - if redis_store.get(redis.daily_limit_cache_key(service.id)): - redis_store.incr(redis.daily_limit_cache_key(service.id)) - + if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']: + cache_key = redis.daily_limit_cache_key(service.id) + if redis_store.get(cache_key) is None: + # if cache does not exist set the cache to 1 with an expiry of 24 hours, + # The cache should be set by the time we create the notification + # but in case it is this will make sure the expiry is set to 24 hours, + # where if we let the incr method create the cache it will be set a ttl. 
def check_service_over_daily_message_limit(key_type, service):
    """Return the number of messages the service has sent so far today.

    Raises TooManyRequestsError when the service has already reached its
    daily message limit. Test API keys, and environments where Redis is
    disabled, are never limited and always report 0.
    """
    if key_type == KEY_TYPE_TEST or not current_app.config['REDIS_ENABLED']:
        return 0

    cache_key = daily_limit_cache_key(service.id)
    service_stats = redis_store.get(cache_key)
    if service_stats is None:
        # First message of the day: seed the counter at 0 with a 24 hour
        # expiry so the cache rolls over daily. (Letting incr create the key
        # would leave it with no TTL.)
        service_stats = 0
        redis_store.set(cache_key, service_stats, ex=86400)
        return service_stats
    if int(service_stats) >= service.message_limit:
        current_app.logger.info(
            "service {} has been rate limited for daily use sent {} limit {}".format(
                service.id, int(service_stats), service.message_limit)
        )
        raise TooManyRequestsError(service.message_limit)
    # BUG FIX: the under-limit path previously fell off the end and returned
    # None, which crashed callers that do arithmetic on the result (e.g. the
    # celery job sending-limit check). Always return the current count.
    return int(service_stats)


def check_rate_limiting(service, api_key):
    """Apply both the per-second API rate limit and the daily message limit."""
    check_service_over_api_rate_limit(service, api_key)
    check_service_over_daily_message_limit(api_key.key_type, service)
create_template(service=service) - job = create_job(template=template) - - create_notification(template=template, job=job) - - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=(load_example_csv('sms'), {'sender_id': None})) - mocker.patch('app.celery.tasks.process_row') - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == 'sending limits exceeded' - assert s3.get_job_and_metadata_from_s3.called is False - assert tasks.process_row.called is False - - -@pytest.mark.parametrize('template_type', ['sms', 'email']) -def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db_session, template_type, mocker): - service = create_service(message_limit=1) - template = create_template(service=service, template_type=template_type) - job = create_job(template=template) - - create_notification(template=template, job=job) - - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3') - mocker.patch('app.celery.tasks.process_row') - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == 'sending limits exceeded' - assert s3.get_job_and_metadata_from_s3.called is False - assert tasks.process_row.called is False - - def test_should_not_process_job_if_already_pending(sample_template, mocker): job = create_job(template=sample_template, job_status='scheduled') @@ -221,8 +163,47 @@ def test_should_not_process_job_if_already_pending(sample_template, mocker): assert tasks.process_row.called is False -def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, - mocker): +def test_should_not_process_if_send_limit_is_exceeded( + notify_api, notify_db_session, mocker +): + service = create_service(message_limit=9) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv') + mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', + 
return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) + mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', + side_effect=TooManyRequestsError("exceeded limit")) + process_job(job.id) + + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == 'sending limits exceeded' + assert s3.get_job_and_metadata_from_s3.called is False + assert tasks.process_row.called is False + + +def test_should_not_process_if_send_limit_is_exceeded_by_job_notification_count( + notify_api, notify_db_session, mocker +): + service = create_service(message_limit=9) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name='multiple_sms.csv') + mock_s3 = mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', + return_value=(load_example_csv('multiple_sms'), {'sender_id': None})) + mock_process_row = mocker.patch('app.celery.tasks.process_row') + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', + return_value=0) + process_job(job.id) + + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == 'sending limits exceeded' + mock_s3.assert_not_called() + mock_process_row.assert_not_called() + + +def test_should_process_job_if_send_limits_are_not_exceeded( + notify_api, notify_db_session, mocker +): service = create_service(message_limit=10) template = create_template(service=service, template_type='email') job = create_job(template=template, notification_count=10) @@ -232,7 +213,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, mocker.patch('app.celery.tasks.save_email.apply_async') mocker.patch('app.encryption.encrypt', return_value="something_encrypted") mocker.patch('app.celery.tasks.create_uuid', return_value="uuid") - + mocker.patch('app.celery.tasks.check_service_over_daily_message_limit', return_value=0) process_job(job.id) 
s3.get_job_and_metadata_from_s3.assert_called_once_with( diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index a2a716c70..d34184305 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -41,7 +41,6 @@ from app.dao.services_dao import ( dao_suspend_service, dao_update_service, delete_service_and_all_associated_db_objects, - fetch_todays_total_message_count, get_live_services_with_organisation, get_services_by_partial_name, ) @@ -984,31 +983,6 @@ def test_fetch_stats_should_not_gather_notifications_older_than_7_days( assert len(stats) == rows_returned -def test_dao_fetch_todays_total_message_count_returns_count_for_today(notify_db_session): - template = create_template(service=create_service()) - notification = create_notification(template=template) - # don't include notifications earlier than today - create_notification(template=template, created_at=datetime.utcnow()-timedelta(days=2)) - assert fetch_todays_total_message_count(notification.service.id) == 1 - - -def test_dao_fetch_todays_total_message_count_returns_count_for_all_notification_type_and_selected_service( - notify_db_session -): - service = create_service() - different_service = create_service(service_name='different service') - create_notification(template=create_template(service=service)) - create_notification(template=create_template(service=service, template_type='email')) - create_notification(template=create_template(service=service, template_type='letter')) - create_notification(template=create_template(service=different_service)) - assert fetch_todays_total_message_count(service.id) == 3 - - -def test_dao_fetch_todays_total_message_count_returns_0_when_no_messages_for_today(notify_db, - notify_db_session): - assert fetch_todays_total_message_count(uuid.uuid4()) == 0 - - def test_dao_fetch_todays_stats_for_all_services_includes_all_services(notify_db_session): # two services, each with an email and sms notification service1 
= create_service(service_name='service 1', email_from='service.1') diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 7b22282fa..266bfa9c1 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -21,6 +21,7 @@ from app.notifications.process_notifications import ( from app.serialised_models import SerialisedTemplate from app.v2.errors import BadRequestError from tests.app.db import create_api_key, create_service, create_template +from tests.conftest import set_config def test_create_content_for_notification_passes(sample_email_template): @@ -112,52 +113,6 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ assert NotificationHistory.query.count() == 0 -def test_cache_is_not_incremented_on_failure_to_persist_notification(sample_api_key, mocker): - mocked_redis = mocker.patch('app.redis_store.get') - mock_service_template_cache = mocker.patch('app.redis_store.get_all_from_hash') - with pytest.raises(SQLAlchemyError): - persist_notification(template_id=None, - template_version=None, - recipient='+447111111111', - service=sample_api_key.service, - personalisation=None, - notification_type='sms', - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type) - mocked_redis.assert_not_called() - mock_service_template_cache.assert_not_called() - - -def test_persist_notification_does_not_increment_cache_if_test_key( - sample_template, sample_job, mocker, sample_test_api_key -): - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value="cache") - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value="cache") - daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr') - template_usage_cache = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value') - - 
assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient='+447111111111', - service=sample_template.service, - personalisation={}, - notification_type='sms', - api_key_id=sample_test_api_key.id, - key_type=sample_test_api_key.key_type, - job_id=sample_job.id, - job_row_number=100, - reference="ref", - ) - - assert Notification.query.count() == 1 - - assert not daily_limit_cache.called - assert not template_usage_cache.called - - @freeze_time("2016-01-01 11:09:00.061258") def test_persist_notification_with_optionals(sample_job, sample_api_key): assert Notification.query.count() == 0 @@ -197,78 +152,97 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key): assert not persisted_notification.reply_to_text +def test_persist_notification_cache_is_not_incremented_on_failure_to_create_notification( + notify_api, sample_api_key, mocker +): + mocked_redis = mocker.patch('app.redis_store.incr') + with pytest.raises(SQLAlchemyError): + persist_notification(template_id=None, + template_version=None, + recipient='+447111111111', + service=sample_api_key.service, + personalisation=None, + notification_type='sms', + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type) + mocked_redis.assert_not_called() + + +def test_persist_notification_does_not_increment_cache_if_test_key( + notify_api, sample_template, sample_job, mocker, sample_test_api_key +): + daily_limit_cache = mocker.patch('app.notifications.process_notifications.redis_store.incr') + + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=sample_template.id, + template_version=sample_template.version, + recipient='+447111111111', + service=sample_template.service, + personalisation={}, + notification_type='sms', + 
api_key_id=sample_test_api_key.id, + key_type=sample_test_api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + ) + + assert Notification.query.count() == 1 + + assert not daily_limit_cache.called + + +@pytest.mark.parametrize('restricted_service', [True, False]) @freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(notify_db_session, mocker): - service = create_service(restricted=True) +def test_persist_notification_increments_cache_for_trial_or_live_service( + notify_api, notify_db_session, mocker, restricted_service +): + service = create_service(restricted=restricted_service) template = create_template(service=service) api_key = create_api_key(service=service) + mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=template.id, + template_version=template.version, + recipient='+447111111122', + service=template.service, + personalisation={}, + notification_type='sms', + api_key_id=api_key.id, + key_type=api_key.key_type, + reference="ref2") + + mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", ) + + +@pytest.mark.parametrize('restricted_service', [True, False]) +@freeze_time("2016-01-01 11:09:00.061258") +def test_persist_notification_sets_daily_limit_cache_if_one_does_not_exists( + notify_api, notify_db_session, mocker, restricted_service +): + service = create_service(restricted=restricted_service) + template = create_template(service=service) + api_key = create_api_key(service=service) mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value=None) + mock_set = 
mocker.patch('app.notifications.process_notifications.redis_store.set') + with set_config(notify_api, 'REDIS_ENABLED', True): + persist_notification( + template_id=template.id, + template_version=template.version, + recipient='+447111111122', + service=template.service, + personalisation={}, + notification_type='sms', + api_key_id=api_key.id, + key_type=api_key.key_type, + reference="ref2") - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111111', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref" - ) - mock_incr.assert_not_called() - - -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_increments_cache_if_key_exists_and_for_trial_service( - notify_db_session, mocker -): - service = create_service(restricted=True) - template = create_template(service=service) - api_key = create_api_key(service=service) - mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', - return_value={template.id, 1}) - - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111122', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref2") - - mock_incr.assert_called_once_with(str(service.id) + "-2016-01-01-count", ) - - -def test_persist_notification_does_not_increments_cache_live_service( - notify_db_session, mocker -): - service = create_service(restricted=False) - template = create_template(service=service) - api_key = create_api_key(service=service) - mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - 
mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', - return_value={template.id, 1}) - - persist_notification( - template_id=template.id, - template_version=template.version, - recipient='+447111111122', - service=template.service, - personalisation={}, - notification_type='sms', - api_key_id=api_key.id, - key_type=api_key.key_type, - reference="ref2") - - assert not mock_incr.called + mock_set.assert_called_once_with(str(service.id) + "-2016-01-01-count", 1, ex=86400) @pytest.mark.parametrize(( diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index a99de221a..490666153 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -1,4 +1,4 @@ -from unittest.mock import ANY +from datetime import datetime import pytest from flask import current_app @@ -15,6 +15,7 @@ from app.notifications.validators import ( check_if_service_can_send_files_by_email, check_is_message_too_long, check_notification_content_is_not_empty, + check_rate_limiting, check_reply_to, check_service_email_reply_to_id, check_service_letter_contact_id, @@ -38,7 +39,6 @@ from app.v2.errors import BadRequestError, RateLimitError, TooManyRequestsError from tests.app.db import ( create_api_key, create_letter_contact, - create_notification, create_reply_to_email, create_service, create_service_guest_list, @@ -55,59 +55,29 @@ def enable_redis(notify_api): yield -@pytest.mark.parametrize('key_type', ['test', 'team', 'normal']) -def test_check_service_message_limit_in_cache_with_unrestricted_service_is_allowed( - key_type, - sample_service, - mocker): - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[ - None, # The serialised service - 1, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - 
mocker.patch('app.notifications.validators.services_dao') - serialised_service = SerialisedService.from_id(sample_service.id) - - check_service_over_daily_message_limit(key_type, serialised_service) - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls - - -@pytest.mark.parametrize('key_type', ['test', 'team', 'normal']) +@pytest.mark.parametrize('key_type', ['team', 'normal']) def test_check_service_message_limit_in_cache_under_message_limit_passes( key_type, sample_service, mocker): - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[ - None, # The serialised service - 1, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - mocker.patch('app.notifications.validators.services_dao') serialised_service = SerialisedService.from_id(sample_service.id) + mock_get = mocker.patch('app.notifications.validators.redis_store.get', return_value=1) + mock_set = mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls + mock_get.assert_called_once_with(f'{serialised_service.id}-{datetime.utcnow().strftime("%Y-%m-%d")}-count') + mock_set.assert_not_called() -def test_should_not_interact_with_cache_for_test_key(sample_service, mocker): +def test_check_service_over_daily_message_limit_should_not_interact_with_cache_for_test_key(sample_service, mocker): mocker.patch('app.notifications.validators.redis_store') - mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) + mock_get = mocker.patch('app.notifications.validators.redis_store.get', side_effect=[None]) serialised_service = 
SerialisedService.from_id(sample_service.id) check_service_over_daily_message_limit('test', serialised_service) - assert not app.notifications.validators.redis_store.mock_calls + mock_get.assert_not_called() @pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_should_set_cache_value_as_value_from_database_if_cache_not_set( +def test_check_service_over_daily_message_limit_should_set_cache_value_as_zero_if_cache_not_set( key_type, sample_template, sample_service, @@ -115,75 +85,32 @@ def test_should_set_cache_value_as_value_from_database_if_cache_not_set( ): serialised_service = SerialisedService.from_id(sample_service.id) with freeze_time("2016-01-01 12:00:00.000000"): - for _ in range(5): - create_notification(sample_template) - mocker.patch('app.notifications.validators.redis_store.get', return_value=None) mocker.patch('app.notifications.validators.redis_store.set') check_service_over_daily_message_limit(key_type, serialised_service) app.notifications.validators.redis_store.set.assert_called_with( - str(sample_service.id) + "-2016-01-01-count", 5, ex=3600 + str(sample_service.id) + "-2016-01-01-count", 0, ex=86400 ) -def test_should_not_access_database_if_redis_disabled(notify_api, sample_service, mocker): +def test_check_service_over_daily_message_limit_does_nothing_if_redis_disabled(notify_api, sample_service, mocker): serialised_service = SerialisedService.from_id(sample_service.id) with set_config(notify_api, 'REDIS_ENABLED', False): - db_mock = mocker.patch('app.notifications.validators.services_dao') - + mock_cache_key = mocker.patch('notifications_utils.clients.redis.daily_limit_cache_key') check_service_over_daily_message_limit('normal', serialised_service) - assert db_mock.method_calls == [] + assert mock_cache_key.method_calls == [] @pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_check_service_message_limit_over_message_limit_fails(key_type, sample_service, mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - 
mocker.patch('app.redis_store.get', return_value=None) - mocker.patch('app.notifications.validators.redis_store.set') +def test_check_service_message_limit_over_message_limit_fails(key_type, mocker, notify_db_session): + service = create_service(message_limit=4) + mocker.patch('app.redis_store.get', return_value=5) - sample_service.restricted = True - sample_service.message_limit = 4 - template = create_template(sample_service) - serialised_service = SerialisedService.from_id(sample_service.id) - - for _ in range(5): - create_notification(template) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, serialised_service) - assert e.value.status_code == 429 - assert e.value.message == 'Exceeded send limits (4) for today' - assert e.value.fields == [] - app.notifications.validators.redis_store.set.assert_called_with( - str(sample_service.id) + "-2016-01-01-count", 5, ex=3600 - ) - - -@pytest.mark.parametrize('key_type', ['team', 'normal']) -def test_check_service_message_limit_in_cache_over_message_limit_fails( - notify_db_session, - key_type, - mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - mocker.patch('app.redis_store.get', side_effect=[ - None, # The serialised service - 5, # The rolling count - ]) - mocker.patch('app.notifications.validators.redis_store.set') - mocker.patch('app.notifications.validators.services_dao') - - service = create_service(restricted=True, message_limit=4) - serialised_service = SerialisedService.from_id(service.id) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, serialised_service) - assert e.value.status_code == 429 - assert e.value.message == 'Exceeded send limits (4) for today' - assert e.value.fields == [] - app.notifications.validators.redis_store.set.assert_called_once_with( - f'service-{serialised_service.id}', - ANY, - ex=ANY, - ) - assert not app.notifications.validators.services_dao.mock_calls + with 
pytest.raises(TooManyRequestsError) as e: + check_service_over_daily_message_limit(key_type, service) + assert e.value.status_code == 429 + assert e.value.message == 'Exceeded send limits (4) for today' + assert e.value.fields == [] @pytest.mark.parametrize('template_type, notification_type', @@ -439,7 +366,7 @@ def test_validate_template_calls_all_validators_exception_message_too_long(mocke @pytest.mark.parametrize('key_type', ['team', 'live', 'test']) -def test_that_when_exceed_rate_limit_request_fails( +def test_check_service_over_api_rate_limit_when_exceed_rate_limit_request_fails_raises_error( key_type, sample_service, mocker): @@ -451,7 +378,6 @@ def test_that_when_exceed_rate_limit_request_fails( api_key_type = key_type mocker.patch('app.redis_store.exceeded_rate_limit', return_value=True) - mocker.patch('app.notifications.validators.services_dao') sample_service.restricted = True api_key = create_api_key(sample_service, key_type=api_key_type) @@ -473,12 +399,11 @@ def test_that_when_exceed_rate_limit_request_fails( assert e.value.fields == [] -def test_that_when_not_exceeded_rate_limit_request_succeeds( +def test_check_service_over_api_rate_limit_when_rate_limit_has_not_exceeded_limit_succeeds( sample_service, mocker): with freeze_time("2016-01-01 12:00:00.000000"): mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False) - mocker.patch('app.notifications.validators.services_dao') sample_service.restricted = True api_key = create_api_key(sample_service) @@ -493,14 +418,13 @@ def test_that_when_not_exceeded_rate_limit_request_succeeds( ) -def test_should_not_rate_limit_if_limiting_is_disabled( +def test_check_service_over_api_rate_limit_should_do_nothing_if_limiting_is_disabled( sample_service, mocker): with freeze_time("2016-01-01 12:00:00.000000"): current_app.config['API_RATE_LIMIT_ENABLED'] = False mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False) - mocker.patch('app.notifications.validators.services_dao') 
sample_service.restricted = True create_api_key(sample_service) @@ -508,11 +432,25 @@ def test_should_not_rate_limit_if_limiting_is_disabled( serialised_api_key = SerialisedAPIKeyCollection.from_service_id(serialised_service.id)[0] check_service_over_api_rate_limit(serialised_service, serialised_api_key) - assert not app.redis_store.exceeded_rate_limit.called + app.redis_store.exceeded_rate_limit.assert_not_called() + + +def test_check_rate_limiting_validates_api_rate_limit_and_daily_limit( + notify_db_session, mocker +): + mock_rate_limit = mocker.patch('app.notifications.validators.check_service_over_api_rate_limit') + mock_daily_limit = mocker.patch('app.notifications.validators.check_service_over_daily_message_limit') + service = create_service() + api_key = create_api_key(service=service) + + check_rate_limiting(service, api_key) + + mock_rate_limit.assert_called_once_with(service, api_key) + mock_daily_limit.assert_called_once_with(api_key.key_type, service) @pytest.mark.parametrize('key_type', ['test', 'normal']) -def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_int_sms( +def test_validate_and_format_recipient_fails_when_international_number_and_service_does_not_allow_int_sms( key_type, notify_db_session, ): @@ -526,14 +464,14 @@ def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_ @pytest.mark.parametrize('key_type', ['test', 'normal']) -def test_allows_api_calls_with_international_numbers_if_service_does_allow_int_sms( +def test_validate_and_format_recipient_succeeds_with_international_numbers_if_service_does_allow_int_sms( key_type, sample_service_full_permissions): service_model = SerialisedService.from_id(sample_service_full_permissions.id) result = validate_and_format_recipient('20-12-1234-1234', key_type, service_model, SMS_TYPE) assert result == '201212341234' -def test_rejects_api_calls_with_no_recipient(): +def test_validate_and_format_recipient_fails_when_no_recipient(): with 
pytest.raises(BadRequestError) as e: validate_and_format_recipient(None, 'key_type', 'service', 'SMS_TYPE') assert e.value.status_code == 400