2017-01-27 12:21:28 +00:00
|
|
|
from datetime import datetime, timedelta
|
|
|
|
|
|
2017-01-27 15:57:25 +00:00
|
|
|
import pytz
|
2017-09-04 17:24:41 +01:00
|
|
|
from flask import url_for
|
2017-01-30 16:46:47 +00:00
|
|
|
from sqlalchemy import func
|
2018-04-09 16:30:24 +01:00
|
|
|
from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
|
2016-06-28 15:17:36 +01:00
|
|
|
|
2017-04-03 15:49:23 +01:00
|
|
|
# Timezone used for all local-time conversions in this module. "Europe/London"
# tracks both GMT and BST (daylight saving) automatically via pytz.
local_timezone = pytz.timezone("Europe/London")
|
|
|
|
|
|
2016-06-28 15:17:36 +01:00
|
|
|
|
|
|
|
|
def pagination_links(pagination, endpoint, **kwargs):
    """
    Build a dict of pagination URLs ('prev', 'next', 'last') for a
    flask-sqlalchemy style pagination object.

    :param pagination: object exposing has_prev/has_next/prev_num/next_num/pages
    :param endpoint: flask endpoint name passed to url_for
    :param kwargs: extra query-string arguments forwarded to url_for
    :return: dict of link name to URL; 'prev'/'next' included only when available
    """
    # Drop any caller-supplied 'page' so it can't clash with the explicit
    # page= argument below. pop with a default is a no-op when absent, so the
    # previous membership check was redundant.
    kwargs.pop('page', None)

    links = {}

    if pagination.has_prev:
        links['prev'] = url_for(endpoint, page=pagination.prev_num, **kwargs)

    if pagination.has_next:
        links['next'] = url_for(endpoint, page=pagination.next_num, **kwargs)

    links['last'] = url_for(endpoint, page=pagination.pages, **kwargs)

    return links
|
2016-10-13 11:59:47 +01:00
|
|
|
|
|
|
|
|
|
2018-02-09 14:16:10 +00:00
|
|
|
def url_with_token(data, url, config, base_url=None):
    """
    Build a full URL ending in a signed token for `data`.

    :param data: payload to sign into the token
    :param url: path component appended to the base URL
    :param config: mapping providing SECRET_KEY, DANGEROUS_SALT and ADMIN_BASE_URL
    :param base_url: optional override for config['ADMIN_BASE_URL']
    :return: base URL + url + signed token
    """
    from notifications_utils.url_safe_token import generate_token

    token = generate_token(data, config['SECRET_KEY'], config['DANGEROUS_SALT'])

    # Fall back to the configured admin base URL when no override is given.
    root = base_url if base_url else config['ADMIN_BASE_URL']

    return root + url + token
|
2016-12-09 15:56:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_template_instance(template, values):
    """
    Instantiate the right notifications_utils template class for a template
    dict, keyed on its 'template_type'. Raises KeyError for unknown types.
    """
    from app.models import SMS_TYPE, EMAIL_TYPE, LETTER_TYPE

    template_classes = {
        SMS_TYPE: SMSMessageTemplate,
        EMAIL_TYPE: WithSubjectTemplate,
        LETTER_TYPE: WithSubjectTemplate,
    }
    template_class = template_classes[template['template_type']]
    return template_class(template, values)
|
2017-01-27 12:21:28 +00:00
|
|
|
|
|
|
|
|
|
2017-01-27 15:57:25 +00:00
|
|
|
def get_london_midnight_in_utc(date):
    """
    Convert a day to its London (Europe/London) midnight, expressed in UTC.

    The tzinfo is stripped from the result because the database stores
    timestamps without a timezone.

    :param date: the day to calculate the London midnight in UTC for
    :return: naive UTC datetime, for example 2016-06-17 -> 2016-06-16 23:00:00
    """
    naive_midnight = datetime.combine(date, datetime.min.time())
    london_midnight = local_timezone.localize(naive_midnight)
    return london_midnight.astimezone(pytz.UTC).replace(tzinfo=None)
|
2017-01-27 12:21:28 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_midnight_for_day_before(date):
    """Return London midnight (as a naive UTC datetime) for the day before `date`."""
    return get_london_midnight_in_utc(date - timedelta(days=1))
|
2017-01-30 16:46:47 +00:00
|
|
|
|
|
|
|
|
|
2017-08-10 16:24:48 +01:00
|
|
|
def convert_utc_to_bst(utc_dt):
    """Convert a naive UTC datetime to a naive Europe/London local datetime."""
    aware_utc = pytz.utc.localize(utc_dt)
    return aware_utc.astimezone(local_timezone).replace(tzinfo=None)
|
2017-04-03 15:49:23 +01:00
|
|
|
|
|
|
|
|
|
2017-05-17 15:06:15 +01:00
|
|
|
def convert_bst_to_utc(date):
    """Convert a naive Europe/London local datetime to a naive UTC datetime."""
    localised = local_timezone.localize(date)
    return localised.astimezone(pytz.UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
|
2017-01-30 16:46:47 +00:00
|
|
|
def get_london_month_from_utc_column(column):
    """
    Truncate a UTC timestamp column to the month it falls in, London time.

    Queries that count notifications by month need the month in BST
    (British Summer Time). The database stores all timestamps as UTC
    without the timezone, so:
    - first set the timezone on the column to UTC
    - then convert the timezone to BST (Europe/London)
    - lastly truncate the datetime to month, which queries can group by
    """
    column_as_utc = func.timezone("UTC", column)
    column_as_london = func.timezone("Europe/London", column_as_utc)
    return func.date_trunc("month", column_as_london)
|
2017-02-14 14:22:52 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def cache_key_for_service_template_counter(service_id, limit_days=7):
    """Return the redis key caching a service's template counts over the last `limit_days` days."""
    key_template = "{}-template-counter-limit-{}-days"
    return key_template.format(service_id, limit_days)
|
2017-06-29 18:02:21 +01:00
|
|
|
|
|
|
|
|
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BST time so it lines up nicely with ft_billing table
* Incremented to from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented to.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL to so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
def cache_key_for_service_template_usage_per_day(service_id, datetime):
    """
    Return the redis key for a service's per-day template usage counts.

    You should pass a BST datetime into this function.

    NOTE(review): the `datetime` parameter shadows the datetime module inside
    this function; kept for backwards compatibility with keyword callers.
    """
    day = datetime.date().isoformat()
    return "service-{}-template-usage-{}".format(service_id, day)
|
add new redis template usage per day key
We've run into issues with redis expiring keys while we try and write
to them - short lived redis TTLs aren't really sustainable for keys
where we mutate the state. Template usage is a hash contained in redis
where we increment a count keyed by template_id each time a message is
sent for that template. But if the key expires, hincrby (redis command
for incrementing a value in a hash) will re-create an empty hash.
This is no good, as we need the hash to be populated with the last
seven days worth of data, which we then increment further. We can't
tell whether the hincrby created the key, so a different approach
entirely was needed:
* New redis key: <service_id>-template-usage-<YYYY-MM-DD>. Note: This
YYYY-MM-DD is BST time so it lines up nicely with ft_billing table
* Incremented to from process_notification - if it doesn't exist yet,
it'll be created then.
* Expiry set to 8 days every time it's incremented to.
Then, at read time, we'll just read the last eight days of keys from
Redis, and sum them up. This works because we're only ever incrementing
from that one place - never setting wholesale, never recreating the
data from scratch. So we know that if the data is in redis, then it is
good and accurate data.
One thing we *don't* know and *cannot* reason about is what no key in
redis means. It could be either of:
* This is the first message that the service has sent today.
* The key was deleted from redis for some reason.
Since we set the TTL to so long, we'll never be writing to a key that
previously expired. But if there is a redis (or operator) error and the
key is deleted, then we'll have bad data - after any data loss we'll
have to rebuild the data.
2018-03-29 13:55:22 +01:00
|
|
|
|
|
|
|
|
|
2017-06-29 18:02:21 +01:00
|
|
|
def get_public_notify_type_text(notify_type, plural=False):
    """
    Return the user-facing name for a notification type: 'sms' becomes
    'text message'; any other type is used as-is. Appends 's' when plural.
    """
    from app.models import SMS_TYPE

    if notify_type == SMS_TYPE:
        notify_type_text = 'text message'
    else:
        notify_type_text = notify_type

    suffix = 's' if plural else ''
    return '{}{}'.format(notify_type_text, suffix)
|
2018-04-12 10:47:16 +01:00
|
|
|
|
|
|
|
|
|
2018-04-30 11:50:56 +01:00
|
|
|
def midnight_n_days_ago(number_of_days):
    """
    Return London midnight (as a naive UTC datetime) a number of days ago.
    Takes care of daylight savings etc.
    """
    target_day = datetime.utcnow() - timedelta(days=number_of_days)
    return get_london_midnight_in_utc(target_day)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def last_n_days(limit_days):
    """
    Return the last `limit_days` days as naive datetimes at 00:00, oldest
    first, including today. Daylight saving is accounted for when working out
    "today" (London time), but be careful how you manipulate the results -
    don't compare them directly to UTC datetimes!
    """
    today_in_bst = convert_utc_to_bst(datetime.utcnow())

    days = []
    # Count down from the oldest day (limit_days - 1 ago) to today (0 ago)
    # so the result is ordered oldest first.
    for days_ago in range(limit_days - 1, -1, -1):
        day = today_in_bst - timedelta(days=days_ago)
        days.append(datetime.combine(day, datetime.min.time()))
    return days
|
2018-07-13 15:26:42 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def escape_special_characters(string):
    """
    Backslash-escape the characters \\, _, % and / in `string`.

    Done in a single pass with str.translate; equivalent to chained replaces
    because the escape character itself is in the set being escaped.
    """
    escape_table = str.maketrans({character: '\\' + character for character in '\\_%/'})
    return string.translate(escape_table)
|