Merge pull request #3276 from alphagov/daily-limit-redis-cache

Correct the daily limits cache.
This commit is contained in:
Rebecca Law
2021-06-29 12:06:35 +01:00
committed by GitHub
8 changed files with 209 additions and 343 deletions

View File

@@ -33,7 +33,6 @@ from app.dao.returned_letters_dao import insert_or_update_returned_letters
from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service
from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
from app.dao.services_dao import fetch_todays_total_message_count
from app.dao.templates_dao import dao_get_template_by_id
from app.exceptions import DVLAException, NotificationTechnicalFailureException
from app.models import (
@@ -55,9 +54,11 @@ from app.models import (
DailySortedLetter,
)
from app.notifications.process_notifications import persist_notification
from app.notifications.validators import check_service_over_daily_message_limit
from app.serialised_models import SerialisedService, SerialisedTemplate
from app.service.utils import service_allowed_to_send_to
from app.utils import DATETIME_FORMAT, get_reference_from_personalisation
from app.v2.errors import TooManyRequestsError
@notify_celery.task(name="process-job")
@@ -159,9 +160,13 @@ def process_row(row, template, job, service, sender_id=None):
def __sending_limits_for_job_exceeded(service, job, job_id):
total_sent = fetch_todays_total_message_count(service.id)
if total_sent + job.notification_count > service.message_limit:
try:
total_sent = check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service)
if total_sent + job.notification_count > service.message_limit:
raise TooManyRequestsError(service.message_limit)
else:
return False
except TooManyRequestsError:
job.job_status = 'sending limits exceeded'
job.processing_finished = datetime.utcnow()
dao_update_job(job)
@@ -170,7 +175,6 @@ def __sending_limits_for_job_exceeded(service, job, job_id):
job_id, job.notification_count, service.message_limit)
)
return True
return False
@notify_celery.task(bind=True, name="save-sms", max_retries=5, default_retry_delay=300)

View File

@@ -436,18 +436,6 @@ def dao_fetch_todays_stats_for_service(service_id):
).all()
def fetch_todays_total_message_count(service_id):
    """Count the notifications a service has created so far today.

    "Today" begins at London midnight expressed in UTC. Notifications sent
    with a test API key are excluded. Returns 0 when nothing has been sent.
    """
    midnight_utc = get_london_midnight_in_utc(date.today())
    query = db.session.query(
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.created_at >= midnight_utc
    )
    row = query.first()
    if row is None:
        return 0
    return row.count
def _stats_for_service_query(service_id):
return db.session.query(
Notification.notification_type,

View File

@@ -148,11 +148,16 @@ def persist_notification(
# if simulated create a Notification model to return but do not persist the Notification to the dB
if not simulated:
dao_create_notification(notification)
# Only keep track of the daily limit for trial mode services.
if service.restricted and key_type != KEY_TYPE_TEST:
if redis_store.get(redis.daily_limit_cache_key(service.id)):
redis_store.incr(redis.daily_limit_cache_key(service.id))
if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']:
cache_key = redis.daily_limit_cache_key(service.id)
if redis_store.get(cache_key) is None:
# If the cache does not exist, set it to 1 with an expiry of 24 hours.
# The cache should already be set by the time we create the notification,
# but in case it is not, this makes sure the expiry is set to 24 hours —
# whereas if we let the incr method create the key it would have no TTL.
redis_store.set(cache_key, 1, ex=86400)
else:
redis_store.incr(cache_key)
current_app.logger.info(
"{} {} created at {}".format(notification_type, notification_id, notification_created_at)
)

View File

@@ -14,7 +14,6 @@ from notifications_utils.recipients import (
from sqlalchemy.orm.exc import NoResultFound
from app import redis_store
from app.dao import services_dao
from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id
from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
@@ -59,24 +58,27 @@ def check_service_over_api_rate_limit(service, api_key):
def check_service_over_daily_message_limit(key_type, service):
if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']:
cache_key = daily_limit_cache_key(service.id)
service_stats = redis_store.get(cache_key)
if not service_stats:
service_stats = services_dao.fetch_todays_total_message_count(service.id)
redis_store.set(cache_key, service_stats, ex=3600)
if int(service_stats) >= service.message_limit:
current_app.logger.info(
"service {} has been rate limited for daily use sent {} limit {}".format(
service.id, int(service_stats), service.message_limit)
)
raise TooManyRequestsError(service.message_limit)
if key_type == KEY_TYPE_TEST or not current_app.config['REDIS_ENABLED']:
return 0
cache_key = daily_limit_cache_key(service.id)
service_stats = redis_store.get(cache_key)
if service_stats is None:
# First message of the day: initialise the cache to 0 with a 24-hour expiry.
service_stats = 0
redis_store.set(cache_key, service_stats, ex=86400)
return service_stats
if int(service_stats) >= service.message_limit:
current_app.logger.info(
"service {} has been rate limited for daily use sent {} limit {}".format(
service.id, int(service_stats), service.message_limit)
)
raise TooManyRequestsError(service.message_limit)
def check_rate_limiting(service, api_key):
    """Enforce both limits for an API request.

    Checks the per-service API rate limit and then the daily message limit;
    each check raises (daily limit raises TooManyRequestsError) when the
    service is over its limit, otherwise this returns None.
    """
    # Removed commented-out dead code and the stale "Reduce queries to the
    # notifications table" note: the daily-limit check is live again and now
    # reads from the Redis cache rather than querying the notifications table.
    check_service_over_api_rate_limit(service, api_key)
    check_service_over_daily_message_limit(api_key.key_type, service)
def check_template_is_for_notification_type(notification_type, template_type):