Added a redis cache for the template usage stats.

Cache expires every 10 minutes, but will help with the query that runs every 2 seconds, especially when a job is running.
There is still some clean-up and QA to do for this.
This commit is contained in:
Rebecca Law
2017-02-13 18:47:29 +00:00
parent b2267ae5fc
commit 458adefcb8
9 changed files with 351 additions and 218 deletions

View File

@@ -65,6 +65,8 @@ def create_app(app_name=None):
aws_ses_client.init_app(application.config['AWS_REGION'], statsd_client=statsd_client)
notify_celery.init_app(application)
encryption.init_app(application)
print(os.environ['REDIS_URL'])
print(application.config['REDIS_ENABLED'])
redis_store.init_app(application)
performance_platform_client.init_app(application)
clients.init_app(sms_clients=[firetext_client, mmg_client, loadtest_client], email_clients=[aws_ses_client])
@@ -176,3 +178,7 @@ def process_user_agent(user_agent_string):
return "non-notify-user-agent"
else:
return "unknown"
def cache_key_for_service_template_counter(service_id, limit_days=7):
    """Build the redis hash key that caches per-template notification
    counts for a service over the last ``limit_days`` days."""
    return "{service_id}-template-counter-limit-{limit_days}-days".format(
        service_id=service_id, limit_days=limit_days)

View File

@@ -49,6 +49,7 @@ class Config(object):
# URL of redis instance
REDIS_URL = os.getenv('REDIS_URL')
REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'
EXPIRE_CACHE_IN_SECONDS = 600
# Performance platform
PERFORMANCE_PLATFORM_ENABLED = os.getenv('PERFORMANCE_PLATFORM_ENABLED') == '1'
@@ -185,6 +186,7 @@ class Development(Config):
Queue('research-mode', Exchange('default'), routing_key='research-mode')
]
API_HOST_NAME = "http://localhost:6011"
REDIS_ENABLED = True
class Test(Config):

View File

@@ -1,6 +1,7 @@
import uuid
from sqlalchemy import (asc, desc)
import sqlalchemy
from sqlalchemy import (desc, cast, String, text)
from app import db
from app.models import (Template, TemplateHistory)
@@ -56,3 +57,39 @@ def dao_get_template_versions(service_id, template_id):
).order_by(
desc(TemplateHistory.version)
).all()
# def dao_get_templates_by_for_cache(cache):
# if not cache or len(cache) == 0:
# return []
# # First create a subquery that is a union select of the cache values
# # Then join templates to the subquery
# cache_queries = [
# db.session.query(sqlalchemy.sql.expression.bindparam("template_id" + str(i),
# template_id).label('template_id'),
# sqlalchemy.sql.expression.bindparam("count" + str(i), count).label('count'))
# for i, (template_id, count) in enumerate(cache)]
# cache_subq = cache_queries[0].union(*cache_queries[1:]).subquery()
# query = db.session.query(Template.id.label('template_id'),
# Template.template_type,
# Template.name,
# cache_subq.c.count.label('count')
# ).join(cache_subq,
# cast(Template.id, String) == cast(cache_subq.c.template_id, String)
# ).order_by(Template.name)
#
# return query.all()
def dao_get_templates_by_for_cache(cache):
if not cache or len(cache) == 0:
return []
txt = "( " + " Union all ".join(
"select '{}'::text as template_id, {} as count".format(x.decode(),
y.decode()) for x, y in cache) + " ) as cache"
txt = "Select t.id as template_id, t.template_type, t.name, cache.count from templates t, " + \
txt + " where t.id::text = cache.template_id order by t.name"
stmt = text(txt)
return db.session.execute(stmt).fetchall()

View File

@@ -2,7 +2,7 @@ from datetime import datetime
from flask import current_app
from app import redis_store
from app import redis_store, cache_key_for_service_template_counter
from app.celery import provider_tasks
from notifications_utils.clients import redis
from app.dao.notifications_dao import dao_create_notification, dao_delete_notifications_and_history_by_id
@@ -63,6 +63,7 @@ def persist_notification(template_id,
if not simulated:
dao_create_notification(notification)
redis_store.incr(redis.daily_limit_cache_key(service.id))
redis_store.increment_hash_value(cache_key_for_service_template_counter(service.id), template_id)
current_app.logger.info(
"{} {} created at {}".format(notification.notification_type, notification.id, notification.created_at)
)

View File

@@ -1,14 +1,16 @@
from flask import (
Blueprint,
jsonify,
request
)
request,
current_app)
from app import redis_store
from app.dao.notifications_dao import (
dao_get_template_usage,
dao_get_last_template_usage)
from app.dao.templates_dao import dao_get_templates_by_for_cache
from app.schemas import notifications_filter_schema, NotificationWithTemplateSchema, notification_with_template_schema
from app.schemas import notification_with_template_schema
template_statistics = Blueprint('template-statistics',
__name__,
@@ -30,7 +32,15 @@ def get_template_statistics_for_service_by_day(service_id):
raise InvalidRequest(message, status_code=400)
else:
limit_days = None
stats = dao_get_template_usage(service_id, limit_days=limit_days)
if limit_days == 7:
stats = get_template_statistics_for_7_days(limit_days, service_id)
print(stats)
# [(UUID('c2a331f8-e0b9-43de-9dd2-88300511a1d7'), 'Create with priority', 'sms', 1)]
else:
stats = dao_get_template_usage(service_id, limit_days=limit_days)
print(stats)
def serialize(data):
return {
@@ -52,3 +62,18 @@ def get_template_statistics_for_template_id(service_id, template_id):
raise InvalidRequest(errors, status_code=404)
data = notification_with_template_schema.dump(notification).data
return jsonify(data=data)
def get_template_statistics_for_7_days(limit_days, service_id):
    """Return per-template usage stats for a service, backed by a redis
    hash cache so the expensive dao query is not run on every 2-second poll.

    On a cache miss the stats come from the database and are written to
    redis with an expiry (``EXPIRE_CACHE_IN_SECONDS``, default 600s); on a
    hit the cached counts are joined back to template metadata.
    """
    # NOTE(review): this key format must stay in sync with
    # cache_key_for_service_template_counter() in the app package —
    # persist_notification increments the hash under that key.
    cache_key = "{}-template-counter-limit-7-days".format(service_id)
    template_stats_by_id = redis_store.get_all_from_hash(cache_key)
    if template_stats_by_id:
        current_app.logger.debug(
            "template stats cache hit: {}".format(cache_key))
        return dao_get_templates_by_for_cache(template_stats_by_id.items())
    stats = dao_get_template_usage(service_id, limit_days=limit_days)
    cache_values = {x.template_id: x.count for x in stats}
    # Only populate the hash when there is something to store: redis
    # rejects writing a hash with an empty mapping.
    if cache_values:
        redis_store.set_hash_and_expire(
            cache_key,
            cache_values,
            current_app.config.get('EXPIRE_CACHE_IN_SECONDS', 600))
    current_app.logger.info('use redis-client: {}'.format(cache_key))
    return stats