Correct the daily limits cache.

Last year we had an issue with the daily limit cache and the query that was populating it. As a result we have not been checking the daily limit properly. This PR should correct all that.

The daily limit cache is not being incremented in app.notifications.process_notifications.persist_notification; this method is, and should always be, the only method used to create a notification.
We increment the daily limit cache if redis is enabled (and it is always enabled for production) and the key type for the notification is team or normal.

We check if the daily limit is exceeded in many places:
 - app.celery.tasks.process_job
 -  app.v2.notifications.post_notifications.post_notification
 - app.v2.notifications.post_notifications.post_precompiled_letter_notification
 - app.service.send_notification.send_one_off_notification
 - app.service.send_notification.send_pdf_letter_notification

If the daily limits cache is not found, set the cache to 0 with an expiry of 24 hours. The daily limit cache key is service_id-yyyy-mm-dd-count, so each day a new cache is created.

The best thing about this PR is that the app.service_dao.fetch_todays_total_message_count query has been removed. This query was not performant and had been wrong for ages.
This commit is contained in:
Rebecca Law
2021-06-21 16:03:24 +01:00
parent c44ec57c17
commit 35b20ba363
8 changed files with 196 additions and 230 deletions

View File

@@ -33,7 +33,6 @@ from app.dao.returned_letters_dao import insert_or_update_returned_letters
from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service
from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
from app.dao.services_dao import fetch_todays_total_message_count
from app.dao.templates_dao import dao_get_template_by_id
from app.exceptions import DVLAException, NotificationTechnicalFailureException
from app.models import (
@@ -55,6 +54,7 @@ from app.models import (
DailySortedLetter,
)
from app.notifications.process_notifications import persist_notification
from app.notifications.validators import get_service_daily_limit_cache_value
from app.serialised_models import SerialisedService, SerialisedTemplate
from app.service.utils import service_allowed_to_send_to
from app.utils import DATETIME_FORMAT, get_reference_from_personalisation
@@ -159,8 +159,8 @@ def process_row(row, template, job, service, sender_id=None):
def __sending_limits_for_job_exceeded(service, job, job_id):
total_sent = fetch_todays_total_message_count(service.id)
total_sent = get_service_daily_limit_cache_value(KEY_TYPE_NORMAL, service)
print(total_sent)
if total_sent + job.notification_count > service.message_limit:
job.job_status = 'sending limits exceeded'
job.processing_finished = datetime.utcnow()

View File

@@ -436,18 +436,6 @@ def dao_fetch_todays_stats_for_service(service_id):
).all()
def fetch_todays_total_message_count(service_id):
    """Count the notifications this service has sent so far today.

    Counts every Notification row for *service_id* created since London
    midnight (converted to UTC), excluding anything sent with a test API key.
    Returns 0 when the service has sent nothing yet today.
    """
    midnight_utc = get_london_midnight_in_utc(date.today())
    row = (
        db.session.query(func.count(Notification.id).label('count'))
        .filter(
            Notification.service_id == service_id,
            Notification.key_type != KEY_TYPE_TEST,
            Notification.created_at >= midnight_utc,
        )
        .first()
    )
    return row.count if row is not None else 0
def _stats_for_service_query(service_id):
return db.session.query(
Notification.notification_type,

View File

@@ -148,10 +148,8 @@ def persist_notification(
# if simulated create a Notification model to return but do not persist the Notification to the dB
if not simulated:
dao_create_notification(notification)
# Only keep track of the daily limit for trial mode services.
if service.restricted and key_type != KEY_TYPE_TEST:
if redis_store.get(redis.daily_limit_cache_key(service.id)):
redis_store.incr(redis.daily_limit_cache_key(service.id))
if key_type != KEY_TYPE_TEST and current_app.config['REDIS_ENABLED']:
redis_store.incr(redis.daily_limit_cache_key(service.id))
current_app.logger.info(
"{} {} created at {}".format(notification_type, notification_id, notification_created_at)

View File

@@ -14,7 +14,6 @@ from notifications_utils.recipients import (
from sqlalchemy.orm.exc import NoResultFound
from app import redis_store
from app.dao import services_dao
from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id
from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
@@ -63,8 +62,9 @@ def check_service_over_daily_message_limit(key_type, service):
cache_key = daily_limit_cache_key(service.id)
service_stats = redis_store.get(cache_key)
if not service_stats:
service_stats = services_dao.fetch_todays_total_message_count(service.id)
redis_store.set(cache_key, service_stats, ex=3600)
# first message of the day, set the cache to 0 and the expiry to 24 hours
service_stats = 0
redis_store.set(cache_key, service_stats, ex=86400)
if int(service_stats) >= service.message_limit:
current_app.logger.info(
"service {} has been rate limited for daily use sent {} limit {}".format(
@@ -73,10 +73,24 @@ def check_service_over_daily_message_limit(key_type, service):
raise TooManyRequestsError(service.message_limit)
def get_service_daily_limit_cache_value(key_type, service):
    """Return the number of messages the service has sent today, per redis.

    Test-key traffic is never counted against the daily limit, and when redis
    is disabled there is no counter to read — both cases report 0. A missing
    key means this is the first message of the day: seed the counter with 0
    and a 24-hour expiry so a fresh key is created each day.
    """
    if key_type == KEY_TYPE_TEST or not current_app.config['REDIS_ENABLED']:
        return 0

    cache_key = daily_limit_cache_key(service.id)
    cached_count = redis_store.get(cache_key)
    if cached_count:
        return int(cached_count)

    # First message of the day: initialise the counter so later increments
    # have a key to work against.
    redis_store.set(cache_key, 0, ex=86400)
    return 0
def check_rate_limiting(service, api_key):
    """Enforce both rate limits for an incoming API request.

    Checks the per-key API rate limit first, then today's daily message
    count against the service's message limit.

    Raises:
        TooManyRequestsError: when the service has already sent its daily
            message limit (raised by check_service_over_daily_message_limit).
        The per-key rate-limit check raises its own error type — see
        check_service_over_api_rate_limit.
    """
    check_service_over_api_rate_limit(service, api_key)
    check_service_over_daily_message_limit(api_key.key_type, service)
def check_template_is_for_notification_type(notification_type, template_type):