import functools
from itertools import groupby
from operator import attrgetter
from datetime import (
    datetime,
    timedelta,
)

from botocore.exceptions import ClientError
from flask import current_app
from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES
from notifications_utils.recipients import (
    validate_and_format_email_address,
    InvalidEmailError,
    try_validate_and_format_phone_number
)
from notifications_utils.statsd_decorators import statsd
from notifications_utils.timezones import convert_bst_to_utc, convert_utc_to_bst
from sqlalchemy import (desc, func, asc, and_, or_)
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import functions
from sqlalchemy.sql.expression import case
from werkzeug.datastructures import MultiDict

from app import db, create_uuid
from app.aws.s3 import remove_s3_object, get_s3_bucket_objects
from app.dao.dao_utils import transactional
from app.errors import InvalidRequest
from app.letters.utils import get_letter_pdf_filename
from app.models import (
    FactNotificationStatus,
    Notification,
    NotificationHistory,
    ProviderDetails,
    ScheduledNotification,
    KEY_TYPE_NORMAL,
    KEY_TYPE_TEST,
    LETTER_TYPE,
    NOTIFICATION_CREATED,
    NOTIFICATION_DELIVERED,
    NOTIFICATION_SENDING,
    NOTIFICATION_PENDING,
    NOTIFICATION_PENDING_VIRUS_CHECK,
    NOTIFICATION_TECHNICAL_FAILURE,
    NOTIFICATION_TEMPORARY_FAILURE,
    NOTIFICATION_PERMANENT_FAILURE,
    NOTIFICATION_SENT,
    SMS_TYPE,
    EMAIL_TYPE,
    ServiceDataRetention,
    Service,
)
from app.utils import get_london_midnight_in_utc
from app.utils import midnight_n_days_ago, escape_special_characters


@statsd(namespace="dao")
def dao_get_last_date_template_was_used(template_id, service_id):
    last_date_from_notifications = db.session.query(
        functions.max(Notification.created_at)
    ).filter(
        Notification.service_id == service_id,
        Notification.template_id == template_id,
        Notification.key_type != KEY_TYPE_TEST
    ).scalar()

    if last_date_from_notifications:
        return last_date_from_notifications

    last_date = db.session.query(
        functions.max(FactNotificationStatus.bst_date)
    ).filter(
        FactNotificationStatus.template_id == template_id,
        FactNotificationStatus.key_type != KEY_TYPE_TEST
    ).scalar()

    return last_date


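# A hypothetical usage sketch (not part of the original module), assuming a Flask
# app context and existing template/service rows. The function falls back to the
# aggregated FactNotificationStatus table once raw notifications have been purged:
#
#     last_used = dao_get_last_date_template_was_used(template.id, service.id)
#     if last_used is None:
#         pass  # the template has never been used with a real (non-test) key

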
@statsd(namespace="dao")
@transactional
def dao_create_notification(notification):
    if not notification.id:
        # need to populate defaulted fields before we create the notification history object
        notification.id = create_uuid()
    if not notification.status:
        notification.status = NOTIFICATION_CREATED

    db.session.add(notification)


def _decide_permanent_temporary_failure(current_status, status):
    # Firetext sends a pending status before either a success or a failure status.
    # If we go from pending straight to a permanent failure, record it as a
    # temporary failure instead.
    if current_status == NOTIFICATION_PENDING and status == NOTIFICATION_PERMANENT_FAILURE:
        status = NOTIFICATION_TEMPORARY_FAILURE
    return status


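# Illustrative behaviour (hypothetical call, using the status constants imported
# above): a permanent failure reported while the notification is still pending is
# downgraded; every other combination passes through unchanged:
#
#     _decide_permanent_temporary_failure(NOTIFICATION_PENDING, NOTIFICATION_PERMANENT_FAILURE)
#     # -> NOTIFICATION_TEMPORARY_FAILURE
#     _decide_permanent_temporary_failure(NOTIFICATION_SENDING, NOTIFICATION_PERMANENT_FAILURE)
#     # -> NOTIFICATION_PERMANENT_FAILURE

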
def country_records_delivery(phone_prefix):
    dlr = INTERNATIONAL_BILLING_RATES[phone_prefix]['attributes']['dlr']
    return dlr and dlr.lower() == 'yes'


def _update_notification_status(notification, status):
    status = _decide_permanent_temporary_failure(current_status=notification.status, status=status)
    notification.status = status
    dao_update_notification(notification)
    return notification


@statsd(namespace="dao")
@transactional
def update_notification_status_by_id(notification_id, status, sent_by=None):
    notification = Notification.query.with_for_update().filter(Notification.id == notification_id).first()

    if not notification:
        current_app.logger.info('notification not found for id {} (update to status {})'.format(
            notification_id,
            status
        ))
        return None

    if notification.status not in {
        NOTIFICATION_CREATED,
        NOTIFICATION_SENDING,
        NOTIFICATION_PENDING,
        NOTIFICATION_SENT,
        NOTIFICATION_PENDING_VIRUS_CHECK
    }:
        _duplicate_update_warning(notification, status)
        return None

    if notification.international and not country_records_delivery(notification.phone_prefix):
        return None
    if not notification.sent_by and sent_by:
        notification.sent_by = sent_by
    return _update_notification_status(
        notification=notification,
        status=status
    )


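# A hypothetical sketch of how a delivery-receipt callback might use this function
# (the callback task itself lives outside this module; the provider name is
# illustrative):
#
#     notification = update_notification_status_by_id(
#         notification_id, NOTIFICATION_DELIVERED, sent_by='firetext'
#     )
#     if notification is None:
#         pass  # unknown id, duplicate callback, or a country without delivery receipts

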
@statsd(namespace="dao")
@transactional
def update_notification_status_by_reference(reference, status):
    # this is used to update letters and emails
    notification = Notification.query.filter(Notification.reference == reference).first()

    if not notification:
        current_app.logger.error('notification not found for reference {} (update to {})'.format(reference, status))
        return None

    if notification.status not in {
        NOTIFICATION_SENDING,
        NOTIFICATION_PENDING
    }:
        _duplicate_update_warning(notification, status)
        return None

    return _update_notification_status(
        notification=notification,
        status=status
    )


@statsd(namespace="dao")
@transactional
def dao_update_notification(notification):
    notification.updated_at = datetime.utcnow()
    db.session.add(notification)


@statsd(namespace="dao")
def get_notification_for_job(service_id, job_id, notification_id):
    return Notification.query.filter_by(service_id=service_id, job_id=job_id, id=notification_id).one()


@statsd(namespace="dao")
def get_notifications_for_job(service_id, job_id, filter_dict=None, page=1, page_size=None):
    if page_size is None:
        page_size = current_app.config['PAGE_SIZE']
    query = Notification.query.filter_by(service_id=service_id, job_id=job_id)
    query = _filter_query(query, filter_dict)
    return query.order_by(asc(Notification.job_row_number)).paginate(
        page=page,
        per_page=page_size
    )


@statsd(namespace="dao")
def dao_get_notification_count_for_job_id(*, job_id):
    return Notification.query.filter_by(job_id=job_id).count()


@statsd(namespace="dao")
def get_notification_with_personalisation(service_id, notification_id, key_type):
    filter_dict = {'service_id': service_id, 'id': notification_id}
    if key_type:
        filter_dict['key_type'] = key_type

    return Notification.query.filter_by(**filter_dict).options(joinedload('template')).one()


@statsd(namespace="dao")
def get_notification_by_id(notification_id, service_id=None, _raise=False):
    filters = [Notification.id == notification_id]

    if service_id:
        filters.append(Notification.service_id == service_id)

    query = Notification.query.filter(*filters)

    return query.one() if _raise else query.first()


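# Usage note with a hypothetical id: with the default _raise=False a missing row
# simply returns None, while _raise=True delegates to query.one(), which raises
# NoResultFound instead:
#
#     notification = get_notification_by_id(notification_id, service_id=service.id)
#     notification = get_notification_by_id(notification_id, _raise=True)  # may raise

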
def get_notifications(filter_dict=None):
    return _filter_query(Notification.query, filter_dict=filter_dict)


@statsd(namespace="dao")
def get_notifications_for_service(
    service_id,
    filter_dict=None,
    page=1,
    page_size=None,
    count_pages=True,
    limit_days=None,
    key_type=None,
    personalisation=False,
    include_jobs=False,
    include_from_test_key=False,
    older_than=None,
    client_reference=None,
    include_one_off=True
):
    if page_size is None:
        page_size = current_app.config['PAGE_SIZE']

    filters = [Notification.service_id == service_id]

    if limit_days is not None:
        filters.append(Notification.created_at >= midnight_n_days_ago(limit_days))

    if older_than is not None:
        older_than_created_at = db.session.query(
            Notification.created_at).filter(Notification.id == older_than).as_scalar()
        filters.append(Notification.created_at < older_than_created_at)

    if not include_jobs:
        filters.append(Notification.job_id == None)  # noqa

    if not include_one_off:
        filters.append(Notification.created_by_id == None)  # noqa

    if key_type is not None:
        filters.append(Notification.key_type == key_type)
    elif not include_from_test_key:
        filters.append(Notification.key_type != KEY_TYPE_TEST)

    if client_reference is not None:
        filters.append(Notification.client_reference == client_reference)

    query = Notification.query.filter(*filters)
    query = _filter_query(query, filter_dict)
    if personalisation:
        query = query.options(
            joinedload('template')
        )

    return query.order_by(desc(Notification.created_at)).paginate(
        page=page,
        per_page=page_size,
        count=count_pages
    )


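# A hypothetical sketch of a typical call: page two of a service's real-key email
# notifications from the last seven days, skipping the expensive total-count query:
#
#     page_of_notifications = get_notifications_for_service(
#         service.id,
#         filter_dict={'template_type': 'email'},
#         page=2,
#         limit_days=7,
#         count_pages=False,
#     )

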
def _filter_query(query, filter_dict=None):
    if filter_dict is None:
        return query

    multidict = MultiDict(filter_dict)

    # filter by status
    statuses = multidict.getlist('status')
    if statuses:
        statuses = Notification.substitute_status(statuses)
        query = query.filter(Notification.status.in_(statuses))

    # filter by template
    template_types = multidict.getlist('template_type')
    if template_types:
        query = query.filter(Notification.notification_type.in_(template_types))

    return query


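# filter_dict mirrors querystring arguments, so a MultiDict with repeated keys is
# accepted as well as a plain dict; a hypothetical example restricting a query to
# failed SMS (status aliases are expanded via Notification.substitute_status):
#
#     query = _filter_query(
#         Notification.query,
#         MultiDict([('status', 'failed'), ('template_type', 'sms')])
#     )

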
@statsd(namespace="dao")
def delete_notifications_older_than_retention_by_type(notification_type, qry_limit=50000):
    current_app.logger.info(
        'Deleting {} notifications for services with flexible data retention'.format(notification_type))

    flexible_data_retention = ServiceDataRetention.query.filter(
        ServiceDataRetention.notification_type == notification_type
    ).all()
    deleted = 0
    for f in flexible_data_retention:
        current_app.logger.info(
            "Deleting {} notifications for service id: {}".format(notification_type, f.service_id))

        day_to_delete_backwards_from = get_london_midnight_in_utc(
            convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=f.days_of_retention)

        deleted += _move_notifications_to_notification_history(
            notification_type, f.service_id, day_to_delete_backwards_from, qry_limit)

    current_app.logger.info(
        'Deleting {} notifications for services without flexible data retention'.format(notification_type))

    seven_days_ago = get_london_midnight_in_utc(convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=7)
    services_with_data_retention = [x.service_id for x in flexible_data_retention]
    service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all()

    for service_id in service_ids_to_purge:
        deleted += _move_notifications_to_notification_history(
            notification_type, service_id, seven_days_ago, qry_limit)

    current_app.logger.info('Finished deleting {} notifications'.format(notification_type))

    return deleted


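# A hypothetical sketch of the scheduled task that would drive this purge, one call
# per notification type (the surrounding task machinery is not part of this module):
#
#     for notification_type in (SMS_TYPE, EMAIL_TYPE, LETTER_TYPE):
#         deleted = delete_notifications_older_than_retention_by_type(notification_type)
#         current_app.logger.info('Purged {} {} notifications'.format(deleted, notification_type))

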
@statsd(namespace="dao")
@transactional
def insert_notification_history_delete_notifications(
    notification_type, service_id, timestamp_to_delete_backwards_from, qry_limit=50000
):
    # The default query limit of 50,000 takes about 48 seconds on the current table
    # size; 10,000 took 11s and 100,000 took 1 min 30 seconds.
    drop_table_if_exists = """
        DROP TABLE IF EXISTS NOTIFICATION_ARCHIVE
        """
    select_into_temp_table = """
        CREATE TEMP TABLE NOTIFICATION_ARCHIVE AS
        SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id,
            key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units,
            client_reference, international, phone_prefix, rate_multiplier, notification_status,
            created_by_id, postage, document_download_count
        FROM notifications
        WHERE service_id = :service_id
          AND notification_type = :notification_type
          AND created_at < :timestamp_to_delete_backwards_from
          AND key_type = 'normal'
          AND notification_status IN ('delivered', 'permanent-failure', 'temporary-failure')
        LIMIT :qry_limit
        """
    # Insert into notification_history; if the row already exists, do nothing.
    insert_query = """
        INSERT INTO notification_history
        SELECT * FROM NOTIFICATION_ARCHIVE
        ON CONFLICT ON CONSTRAINT notification_history_pkey
        DO NOTHING
        """
    delete_query = """
        DELETE FROM notifications
        WHERE id IN (SELECT id FROM NOTIFICATION_ARCHIVE)
        """
    input_params = {
        "service_id": service_id,
        "notification_type": notification_type,
        "timestamp_to_delete_backwards_from": timestamp_to_delete_backwards_from,
        "qry_limit": qry_limit
    }
    current_app.logger.info(
        f"Start executing insert_notification_history_delete_notifications for input params {input_params}"
    )
    db.session.execute(drop_table_if_exists)
    current_app.logger.info('Start executing select into temp table')
    db.session.execute(select_into_temp_table, input_params)

    result = db.session.execute("select * from NOTIFICATION_ARCHIVE")

    current_app.logger.info('Start executing insert into history')
    db.session.execute(insert_query)

    current_app.logger.info('Start executing deleting notifications')
    db.session.execute(delete_query)

    db.session.execute("DROP TABLE NOTIFICATION_ARCHIVE")
    return result.rowcount


def _move_notifications_to_notification_history(notification_type, service_id, day_to_delete_backwards_from, qry_limit):
    deleted = 0
    if notification_type == LETTER_TYPE:
        _delete_letters_from_s3(
            notification_type, service_id, day_to_delete_backwards_from, qry_limit
        )
    delete_count_per_call = 1
    while delete_count_per_call > 0:
        delete_count_per_call = insert_notification_history_delete_notifications(
            notification_type=notification_type,
            service_id=service_id,
            timestamp_to_delete_backwards_from=day_to_delete_backwards_from,
            qry_limit=qry_limit
        )
        deleted += delete_count_per_call

    # Delete test notifications separately: they are not persisted to NotificationHistory.
    Notification.query.filter(
        Notification.notification_type == notification_type,
        Notification.service_id == service_id,
        Notification.created_at < day_to_delete_backwards_from,
        Notification.key_type == KEY_TYPE_TEST
    ).delete(synchronize_session=False)
    db.session.commit()

    return deleted


def _delete_letters_from_s3(
        notification_type, service_id, date_to_delete_from, query_limit
):
    letters_to_delete_from_s3 = db.session.query(
        Notification
    ).filter(
        Notification.notification_type == notification_type,
        Notification.created_at < date_to_delete_from,
        Notification.service_id == service_id
    ).limit(query_limit).all()
    for letter in letters_to_delete_from_s3:
        bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME']
        # I don't think we need this any more; we should update the query to get letters sent 7 days ago
        if letter.sent_at:
            prefix = get_letter_pdf_filename(reference=letter.reference,
                                             crown=letter.service.crown,
                                             sending_date=letter.created_at,
                                             dont_use_sending_date=letter.key_type == KEY_TYPE_TEST,
                                             postage=letter.postage)
            s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix)
            for s3_object in s3_objects:
                try:
                    remove_s3_object(bucket_name, s3_object['Key'])
                except ClientError:
                    current_app.logger.exception(
                        "Could not delete S3 object with filename: {}".format(s3_object['Key']))


@statsd(namespace="dao")
@transactional
def dao_delete_notifications_by_id(notification_id):
    db.session.query(Notification).filter(
        Notification.id == notification_id
    ).delete(synchronize_session='fetch')


def _timeout_notifications(current_statuses, new_status, timeout_start, updated_at):
    # Fetch the affected notifications first so they can be returned to the caller,
    # then apply the status change as a bulk update.
    notifications = Notification.query.filter(
        Notification.created_at < timeout_start,
        Notification.status.in_(current_statuses),
        Notification.notification_type != LETTER_TYPE
    ).all()
    Notification.query.filter(
        Notification.created_at < timeout_start,
        Notification.status.in_(current_statuses),
        Notification.notification_type != LETTER_TYPE
    ).update(
        {'status': new_status, 'updated_at': updated_at},
        synchronize_session=False
    )
    return notifications


def dao_timeout_notifications(timeout_period_in_seconds):
    """
    Time out SMS and email notifications using the following rules:

    the notification was never sent to the provider for some reason
        created -> technical-failure

    the notification was sent to the provider but there was no delivery receipt
        sending -> temporary-failure
        pending -> temporary-failure

    Letter notifications are not timed out.
    """
    timeout_start = datetime.utcnow() - timedelta(seconds=timeout_period_in_seconds)
    updated_at = datetime.utcnow()
    timeout = functools.partial(_timeout_notifications, timeout_start=timeout_start, updated_at=updated_at)

    # Notifications still in created status are marked with a technical-failure:
    technical_failure_notifications = timeout([NOTIFICATION_CREATED], NOTIFICATION_TECHNICAL_FAILURE)

    # Notifications still in sending or pending status are marked with a temporary-failure:
    temporary_failure_notifications = timeout([NOTIFICATION_SENDING, NOTIFICATION_PENDING],
                                              NOTIFICATION_TEMPORARY_FAILURE)

    db.session.commit()

    return technical_failure_notifications, temporary_failure_notifications


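# A hypothetical invocation from a periodic task, timing out anything older than
# three days (259200 seconds is illustrative only):
#
#     technical, temporary = dao_timeout_notifications(259200)
#     if technical:
#         pass  # e.g. alert: these were never handed to a provider

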
def is_delivery_slow_for_providers(
    created_at,
    threshold,
    delivery_time,
):
    """
    Returns a dict of providers and whether they are currently slow or not. eg:
    {
        'mmg': True,
        'firetext': False
    }
    """
    slow_notification_counts = db.session.query(
        ProviderDetails.identifier,
        case(
            [(
                Notification.status == NOTIFICATION_DELIVERED,
                (Notification.updated_at - Notification.sent_at) >= delivery_time
            )],
            else_=(datetime.utcnow() - Notification.sent_at) >= delivery_time
        ).label("slow"),
        func.count().label('count')
    ).select_from(
        ProviderDetails
    ).outerjoin(
        Notification, and_(
            Notification.sent_by == ProviderDetails.identifier,
            Notification.created_at >= created_at,
            Notification.sent_at.isnot(None),
            Notification.status.in_([NOTIFICATION_DELIVERED, NOTIFICATION_PENDING, NOTIFICATION_SENDING]),
            Notification.key_type != KEY_TYPE_TEST
        )
    ).filter(
        ProviderDetails.notification_type == 'sms',
        ProviderDetails.active
    ).order_by(
        ProviderDetails.identifier
    ).group_by(
        ProviderDetails.identifier,
        "slow"
    )

    slow_providers = {}
    for provider, rows in groupby(slow_notification_counts, key=attrgetter('identifier')):
        rows = list(rows)
        total_notifications = sum(row.count for row in rows)
        slow_notifications = sum(row.count for row in rows if row.slow)

        slow_providers[provider] = (slow_notifications / total_notifications >= threshold)

        current_app.logger.info("Slow delivery notifications count for provider {}: {} out of {}. Ratio {}".format(
            provider, slow_notifications, total_notifications, slow_notifications / total_notifications
        ))

    return slow_providers


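# A hypothetical check (threshold and windows are illustrative): of SMS created in
# the last ten minutes, is at least 10% of each provider's traffic still undelivered
# after four minutes?
#
#     slow = is_delivery_slow_for_providers(
#         created_at=datetime.utcnow() - timedelta(minutes=10),
#         threshold=0.1,
#         delivery_time=timedelta(minutes=4),
#     )
#     # e.g. {'mmg': False, 'firetext': True}

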
@statsd(namespace="dao")
@transactional
def dao_update_notifications_by_reference(references, update_dict):
    updated_count = Notification.query.filter(
        Notification.reference.in_(references)
    ).update(
        update_dict,
        synchronize_session=False
    )

    updated_history_count = 0
    if updated_count != len(references):
        updated_history_count = NotificationHistory.query.filter(
            NotificationHistory.reference.in_(references)
        ).update(
            update_dict,
            synchronize_session=False
        )

    return updated_count, updated_history_count


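# A hypothetical sketch (references are illustrative): mark a batch of notifications
# as delivered by reference, with the history table checked automatically for any
# references already purged from the notifications table:
#
#     updated, updated_history = dao_update_notifications_by_reference(
#         ['ref-1', 'ref-2'],
#         {'status': NOTIFICATION_DELIVERED, 'updated_at': datetime.utcnow()}
#     )

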
@statsd(namespace="dao")
def dao_get_notifications_by_recipient_or_reference(service_id, search_term, notification_type=None, statuses=None):
    if notification_type == SMS_TYPE:
        normalised = try_validate_and_format_phone_number(search_term)

        for character in {'(', ')', ' ', '-'}:
            normalised = normalised.replace(character, '')

        normalised = normalised.lstrip('+0')

    elif notification_type == EMAIL_TYPE:
        try:
            normalised = validate_and_format_email_address(search_term)
        except InvalidEmailError:
            normalised = search_term.lower()

    elif notification_type == LETTER_TYPE:
        raise InvalidRequest("Only email and SMS can use search by recipient", 400)

    else:
        normalised = search_term.lower()

    normalised = escape_special_characters(normalised)
    search_term = escape_special_characters(search_term)

    filters = [
        Notification.service_id == service_id,
        or_(
            Notification.normalised_to.like("%{}%".format(normalised)),
            Notification.client_reference.ilike("%{}%".format(search_term)),
        ),
        Notification.key_type != KEY_TYPE_TEST,
    ]

    if statuses:
        filters.append(Notification.status.in_(statuses))
    if notification_type:
        filters.append(Notification.notification_type == notification_type)

    results = db.session.query(Notification).filter(*filters).order_by(desc(Notification.created_at)).all()
    return results


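# A hypothetical search (number is illustrative): the SMS branch strips formatting
# characters and the leading '+0', so differently formatted versions of the same
# number are intended to normalise to the same digits before the LIKE match against
# Notification.normalised_to:
#
#     results = dao_get_notifications_by_recipient_or_reference(
#         service.id, '+44 (0)7700 900123', notification_type=SMS_TYPE
#     )

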
@statsd(namespace="dao")
def dao_get_notification_by_reference(reference):
    return Notification.query.filter(
        Notification.reference == reference
    ).one()


@statsd(namespace="dao")
def dao_get_notification_history_by_reference(reference):
    try:
        # This try/except is necessary because notifications sent with test keys or in
        # research mode do not create a notification history record. Otherwise we could
        # just search for the NotificationHistory object.
        return Notification.query.filter(
            Notification.reference == reference
        ).one()
    except NoResultFound:
        return NotificationHistory.query.filter(
            NotificationHistory.reference == reference
        ).one()


@statsd(namespace="dao")
def dao_get_notifications_by_references(references):
    return Notification.query.filter(
        Notification.reference.in_(references)
    ).all()


@statsd(namespace="dao")
def dao_created_scheduled_notification(scheduled_notification):
    db.session.add(scheduled_notification)
    db.session.commit()


@statsd(namespace="dao")
def dao_get_scheduled_notifications():
    notifications = Notification.query.join(
        ScheduledNotification
    ).filter(
        ScheduledNotification.scheduled_for < datetime.utcnow(),
        ScheduledNotification.pending
    ).all()

    return notifications


def set_scheduled_notification_to_processed(notification_id):
    db.session.query(ScheduledNotification).filter(
        ScheduledNotification.notification_id == notification_id
    ).update(
        {'pending': False}
    )
    db.session.commit()


def dao_get_total_notifications_sent_per_day_for_performance_platform(start_date, end_date):
    """
    SELECT
        count(notification_history),
        coalesce(sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END), 0)
    FROM notification_history
    WHERE
        created_at > 'START DATE' AND
        created_at < 'END DATE' AND
        api_key_id IS NOT NULL AND
        key_type != 'test' AND
        notification_type != 'letter';
    """
    under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10)
    sum_column = functions.coalesce(functions.sum(
        case(
            [
                (under_10_secs, 1)
            ],
            else_=0
        )
    ), 0)

    return db.session.query(
        func.count(Notification.id).label('messages_total'),
        sum_column.label('messages_within_10_secs')
    ).filter(
        Notification.created_at >= start_date,
        Notification.created_at < end_date,
        Notification.api_key_id.isnot(None),
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type != LETTER_TYPE
    ).one()


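# A hypothetical call for one day's performance-platform figures (dates are
# illustrative; the caller is expected to pass UTC datetimes):
#
#     row = dao_get_total_notifications_sent_per_day_for_performance_platform(
#         datetime(2020, 3, 1), datetime(2020, 3, 2)
#     )
#     # row.messages_total, row.messages_within_10_secs

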
@statsd(namespace="dao")
def dao_get_last_notification_added_for_job_id(job_id):
    last_notification_added = Notification.query.filter(
        Notification.job_id == job_id
    ).order_by(
        Notification.job_row_number.desc()
    ).first()

    return last_notification_added


def notifications_not_yet_sent(should_be_sending_after_seconds, notification_type):
    older_than_date = datetime.utcnow() - timedelta(seconds=should_be_sending_after_seconds)

    notifications = Notification.query.filter(
        Notification.created_at <= older_than_date,
        Notification.notification_type == notification_type,
        Notification.status == NOTIFICATION_CREATED
    ).all()
    return notifications


def dao_get_letters_to_be_printed(print_run_deadline):
    """
    Return all letters created before the print run deadline that have not yet been sent.
    """
    notifications = Notification.query.filter(
        Notification.created_at < convert_bst_to_utc(print_run_deadline),
        Notification.notification_type == LETTER_TYPE,
        Notification.status == NOTIFICATION_CREATED,
        Notification.key_type == KEY_TYPE_NORMAL
    ).order_by(
        Notification.created_at
    ).all()
    return notifications


def dao_old_letters_with_created_status():
    yesterday_bst = convert_utc_to_bst(datetime.utcnow()) - timedelta(days=1)
    last_processing_deadline = yesterday_bst.replace(hour=17, minute=30, second=0, microsecond=0)

    notifications = Notification.query.filter(
        Notification.created_at < convert_bst_to_utc(last_processing_deadline),
        Notification.notification_type == LETTER_TYPE,
        Notification.status == NOTIFICATION_CREATED
    ).order_by(
        Notification.created_at
    ).all()
    return notifications


def letters_missing_from_sending_bucket(seconds_to_subtract):
    older_than_date = datetime.utcnow() - timedelta(seconds=seconds_to_subtract)
    # Letters that have been processed have an updated_at timestamp and billable units
    # greater than zero, so this picks out `created` letters that have neither.
    notifications = Notification.query.filter(
        Notification.billable_units == 0,
        Notification.updated_at == None,  # noqa
        Notification.status == NOTIFICATION_CREATED,
        Notification.created_at <= older_than_date,
        Notification.notification_type == LETTER_TYPE,
        Notification.key_type == KEY_TYPE_NORMAL
    ).order_by(
        Notification.created_at
    ).all()

    return notifications


def dao_precompiled_letters_still_pending_virus_check():
    ninety_minutes_ago = datetime.utcnow() - timedelta(seconds=5400)

    notifications = Notification.query.filter(
        Notification.created_at < ninety_minutes_ago,
        Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK
    ).order_by(
        Notification.created_at
    ).all()
    return notifications


def _duplicate_update_warning(notification, status):
    current_app.logger.info(
        (
            'Duplicate callback received. Notification id {id} received a status update to {new_status} '
            '{time_diff} after being set to {old_status}. {type} sent by {sent_by}'
        ).format(
            id=notification.id,
            old_status=notification.status,
            new_status=status,
            time_diff=datetime.utcnow() - (notification.updated_at or notification.created_at),
            type=notification.notification_type,
            sent_by=notification.sent_by
        )
    )