2021-03-10 13:55:06 +00:00
|
|
|
|
from datetime import datetime, timedelta
|
2016-03-31 15:57:50 +01:00
|
|
|
|
|
2016-03-01 13:30:10 +00:00
|
|
|
|
from flask import current_app
|
2021-03-10 13:55:06 +00:00
|
|
|
|
from notifications_utils.international_billing_rates import (
|
|
|
|
|
|
INTERNATIONAL_BILLING_RATES,
|
|
|
|
|
|
)
|
2017-05-24 14:24:57 +01:00
|
|
|
|
from notifications_utils.recipients import (
|
2017-05-30 14:40:27 +01:00
|
|
|
|
InvalidEmailError,
|
2021-03-10 13:55:06 +00:00
|
|
|
|
try_validate_and_format_phone_number,
|
|
|
|
|
|
validate_and_format_email_address,
|
2017-05-24 14:24:57 +01:00
|
|
|
|
)
|
2022-11-30 13:50:49 -05:00
|
|
|
|
from sqlalchemy import asc, desc, func, or_, union
|
2016-08-09 13:07:48 +01:00
|
|
|
|
from sqlalchemy.orm import joinedload
|
2019-05-21 16:08:18 +01:00
|
|
|
|
from sqlalchemy.orm.exc import NoResultFound
|
2017-08-29 16:35:30 +01:00
|
|
|
|
from sqlalchemy.sql import functions
|
2018-12-10 16:27:59 +00:00
|
|
|
|
from sqlalchemy.sql.expression import case
|
|
|
|
|
|
from werkzeug.datastructures import MultiDict
|
2016-03-31 15:57:50 +01:00
|
|
|
|
|
2022-11-30 13:50:49 -05:00
|
|
|
|
from app import create_uuid, db
|
2021-04-14 07:11:01 +01:00
|
|
|
|
from app.dao.dao_utils import autocommit
|
2016-03-21 12:37:34 +00:00
|
|
|
|
from app.models import (
|
2021-03-10 13:55:06 +00:00
|
|
|
|
EMAIL_TYPE,
|
2017-10-05 16:29:11 +01:00
|
|
|
|
KEY_TYPE_TEST,
|
2016-09-13 16:42:53 +01:00
|
|
|
|
NOTIFICATION_CREATED,
|
|
|
|
|
|
NOTIFICATION_PENDING,
|
2018-11-13 14:20:24 +00:00
|
|
|
|
NOTIFICATION_PENDING_VIRUS_CHECK,
|
2017-04-19 11:34:00 +01:00
|
|
|
|
NOTIFICATION_PERMANENT_FAILURE,
|
2021-03-10 13:55:06 +00:00
|
|
|
|
NOTIFICATION_SENDING,
|
2018-03-07 18:13:40 +00:00
|
|
|
|
NOTIFICATION_SENT,
|
2021-03-10 13:55:06 +00:00
|
|
|
|
NOTIFICATION_TEMPORARY_FAILURE,
|
2018-03-07 18:13:40 +00:00
|
|
|
|
SMS_TYPE,
|
2021-03-10 13:55:06 +00:00
|
|
|
|
FactNotificationStatus,
|
|
|
|
|
|
Notification,
|
|
|
|
|
|
NotificationHistory,
|
2017-10-30 14:55:44 +00:00
|
|
|
|
)
|
2022-02-10 10:37:32 +00:00
|
|
|
|
from app.utils import (
|
|
|
|
|
|
escape_special_characters,
|
2023-05-10 08:39:50 -07:00
|
|
|
|
get_midnight_in_utc,
|
2022-02-10 10:37:32 +00:00
|
|
|
|
midnight_n_days_ago,
|
|
|
|
|
|
)
|
2016-04-04 12:21:38 +01:00
|
|
|
|
|
2016-02-09 12:01:17 +00:00
|
|
|
|
|
2020-02-05 16:43:17 +00:00
|
|
|
|
def dao_get_last_date_template_was_used(template_id, service_id):
    """Return the most recent time this template was used, or None.

    Checks the live ``notifications`` table first (max ``created_at``); if no
    row is found there — e.g. because the rows have been archived — falls back
    to the aggregated ``FactNotificationStatus`` table (max ``local_date``).
    Test-key sends are excluded in both queries.

    Note the two branches return different granularities: a datetime from the
    first query, a date from the fallback.
    """
    last_date_from_notifications = db.session.query(
        functions.max(Notification.created_at)
    ).filter(
        Notification.service_id == service_id,
        Notification.template_id == template_id,
        Notification.key_type != KEY_TYPE_TEST
    ).scalar()

    if last_date_from_notifications:
        return last_date_from_notifications

    # NOTE(review): unlike the query above, this fallback does not filter by
    # service_id — presumably template ids are unique across services; confirm.
    last_date = db.session.query(
        functions.max(FactNotificationStatus.local_date)
    ).filter(
        FactNotificationStatus.template_id == template_id,
        FactNotificationStatus.key_type != KEY_TYPE_TEST
    ).scalar()

    return last_date
|
2020-02-05 13:03:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def dao_create_notification(notification):
    """Add a new notification to the session, defaulting ``id`` and ``status``.

    The commit itself is handled by the ``@autocommit`` decorator.
    """
    if not notification.id:
        # need to populate defaulted fields before we create the notification history object
        notification.id = create_uuid()
    if not notification.status:
        notification.status = NOTIFICATION_CREATED

    db.session.add(notification)
|
2016-12-19 13:57:06 +00:00
|
|
|
|
|
|
|
|
|
|
|
2017-05-12 14:59:14 +01:00
|
|
|
|
def country_records_delivery(phone_prefix):
    """Return truthy when the billing data marks this dialling prefix as
    providing delivery receipts (its 'dlr' attribute is 'yes', any case)."""
    attributes = INTERNATIONAL_BILLING_RATES[phone_prefix]['attributes']
    dlr = attributes['dlr']
    return dlr and dlr.lower() == 'yes'
|
2017-05-12 14:59:14 +01:00
|
|
|
|
|
2022-10-14 14:45:27 +00:00
|
|
|
|
|
2022-09-15 14:59:13 -07:00
|
|
|
|
def _decide_permanent_temporary_failure(current_status, status):
    """Return the status that should actually be recorded.

    A notification still in "pending" that receives a permanent-failure
    update is downgraded to temporary-failure; every other combination is
    passed through unchanged.

    (The previous comment here claimed the check was "pending to delivered",
    which contradicted the code — the condition is pending to
    permanent-failure.)
    """
    if current_status == NOTIFICATION_PENDING and status == NOTIFICATION_PERMANENT_FAILURE:
        status = NOTIFICATION_TEMPORARY_FAILURE
    return status
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _update_notification_status(notification, status, provider_response=None):
    """Apply a status change (adjusted for the pending/permanent-failure
    special case) to a notification, persist it, and return it."""
    effective_status = _decide_permanent_temporary_failure(
        current_status=notification.status, status=status
    )
    notification.status = effective_status

    if provider_response:
        notification.provider_response = provider_response

    dao_update_notification(notification)
    return notification
|
2016-03-10 17:29:17 +00:00
|
|
|
|
|
2022-10-14 14:45:27 +00:00
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def update_notification_status_by_id(notification_id, status, sent_by=None, provider_response=None):
    """Update a notification's status from a delivery callback, by id.

    Returns the updated Notification, or None when the update is skipped:
    the row doesn't exist, it has already left an updatable state, or it is
    an international SMS to a country that doesn't send delivery receipts.
    """
    # Lock the row so concurrent callbacks for the same notification serialise.
    notification = Notification.query.with_for_update().filter(Notification.id == notification_id).first()

    if not notification:
        current_app.logger.info('notification not found for id {} (update to status {})'.format(
            notification_id,
            status
        ))
        return None

    # Only notifications still "in flight" may be updated; anything else is
    # treated as a duplicate callback and logged.
    if notification.status not in {
        NOTIFICATION_CREATED,
        NOTIFICATION_SENDING,
        NOTIFICATION_PENDING,
        NOTIFICATION_SENT,
        NOTIFICATION_PENDING_VIRUS_CHECK
    }:
        _duplicate_update_warning(notification, status)
        return None

    # International SMS to countries without delivery receipts: any status we
    # receive is not meaningful, so leave the notification as-is.
    if (
        notification.notification_type == SMS_TYPE
        and notification.international
        and not country_records_delivery(notification.phone_prefix)
    ):
        return None
    if provider_response:
        notification.provider_response = provider_response
    # Record which provider sent it, but never overwrite an existing value.
    if not notification.sent_by and sent_by:
        notification.sent_by = sent_by
    return _update_notification_status(
        notification=notification,
        status=status
    )
|
2016-03-21 13:24:37 +00:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def update_notification_status_by_reference(reference, status):
    """Update a notification's status from a delivery callback, by reference.

    Returns the updated Notification, or None when no row matches or the
    notification has already left the sending/pending states (logged as a
    duplicate callback).
    """
    # this is used to update emails
    notification = Notification.query.filter(Notification.reference == reference).first()

    if not notification:
        current_app.logger.error('notification not found for reference {} (update to {})'.format(reference, status))
        return None

    if notification.status not in {
        NOTIFICATION_SENDING,
        NOTIFICATION_PENDING
    }:
        _duplicate_update_warning(notification, status)
        return None

    return _update_notification_status(
        notification=notification,
        status=status
    )
|
2016-03-11 09:40:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def dao_update_notification(notification):
    """Stamp ``updated_at`` and stage the notification; commit is handled by
    the ``@autocommit`` decorator."""
    notification.updated_at = datetime.utcnow()
    db.session.add(notification)
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-04-19 10:52:52 +01:00
|
|
|
|
def get_notifications_for_job(service_id, job_id, filter_dict=None, page=1, page_size=None):
    """Return one page of a job's notifications, ordered by job row number.

    ``page_size`` defaults to the app's PAGE_SIZE config value; ``filter_dict``
    supports the same 'status'/'template_type' filters as _filter_query.
    """
    if page_size is None:
        page_size = current_app.config['PAGE_SIZE']

    job_notifications = Notification.query.filter_by(service_id=service_id, job_id=job_id)
    job_notifications = _filter_query(job_notifications, filter_dict)

    ordered = job_notifications.order_by(asc(Notification.job_row_number))
    return ordered.paginate(page=page, per_page=page_size)
|
2016-02-16 11:22:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
2019-10-03 14:58:49 +01:00
|
|
|
|
def dao_get_notification_count_for_job_id(*, job_id):
    """Count every notification belonging to the given job."""
    job_query = Notification.query.filter_by(job_id=job_id)
    return job_query.count()
|
2019-09-24 16:52:18 +01:00
|
|
|
|
|
|
|
|
|
|
|
2016-08-09 13:07:48 +01:00
|
|
|
|
def get_notification_with_personalisation(service_id, notification_id, key_type):
    """Fetch exactly one notification for a service, eagerly loading its
    template; optionally narrowed to a key type. Raises if no row matches."""
    criteria = {'service_id': service_id, 'id': notification_id}
    if key_type:
        criteria['key_type'] = key_type

    query = Notification.query.filter_by(**criteria)
    return query.options(joinedload('template')).one()
|
2016-03-01 13:30:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-11-15 10:55:29 +00:00
|
|
|
|
def get_notification_by_id(notification_id, service_id=None, _raise=False):
    """Look up one notification by id, optionally scoped to a service.

    With ``_raise=True`` a missing (or duplicated) row raises via ``.one()``;
    otherwise ``.first()`` returns None when nothing matches.
    """
    conditions = [Notification.id == notification_id]
    if service_id:
        conditions.append(Notification.service_id == service_id)

    query = Notification.query.filter(*conditions)
    if _raise:
        return query.one()
    return query.first()
|
2016-03-10 15:40:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
2016-09-23 09:43:25 +01:00
|
|
|
|
def get_notifications_for_service(
    service_id,
    filter_dict=None,
    page=1,
    page_size=None,
    count_pages=True,
    limit_days=None,
    key_type=None,
    personalisation=False,
    include_jobs=False,
    include_from_test_key=False,
    older_than=None,
    client_reference=None,
    include_one_off=True,
    error_out=True
):
    """Return a paginated, filtered page of a service's notifications.

    Filters applied (all optional):
    - limit_days: only notifications created since midnight N days ago
    - older_than: only notifications created before the given notification id
    - include_jobs / include_one_off: when False, exclude job-based /
      one-off (created_by_id set) notifications respectively
    - key_type: restrict to one key type; otherwise test-key sends are
      excluded unless include_from_test_key is True
    - client_reference: exact match
    - filter_dict: status / template_type filters (see _filter_query)
    - personalisation: eagerly load the template relationship
    Results are ordered newest first.
    """
    if page_size is None:
        page_size = current_app.config['PAGE_SIZE']

    filters = [Notification.service_id == service_id]

    if limit_days is not None:
        filters.append(Notification.created_at >= midnight_n_days_ago(limit_days))

    if older_than is not None:
        # Compare against the creation time of the given notification id,
        # resolved as a correlated scalar subquery.
        older_than_created_at = db.session.query(
            Notification.created_at).filter(Notification.id == older_than).as_scalar()
        filters.append(Notification.created_at < older_than_created_at)

    if not include_jobs:
        filters.append(Notification.job_id == None)  # noqa

    if not include_one_off:
        filters.append(Notification.created_by_id == None)  # noqa

    if key_type is not None:
        filters.append(Notification.key_type == key_type)
    elif not include_from_test_key:
        filters.append(Notification.key_type != KEY_TYPE_TEST)

    if client_reference is not None:
        filters.append(Notification.client_reference == client_reference)

    query = Notification.query.filter(*filters)
    query = _filter_query(query, filter_dict)
    if personalisation:
        query = query.options(
            joinedload('template')
        )

    return query.order_by(desc(Notification.created_at)).paginate(
        page=page,
        per_page=page_size,
        count=count_pages,
        error_out=error_out,
    )
|
2016-03-21 12:37:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
2016-05-24 11:31:44 +01:00
|
|
|
|
def _filter_query(query, filter_dict=None):
    """Narrow a notification query by optional 'status' and 'template_type'
    lists from ``filter_dict``; a missing dict leaves the query untouched."""
    if filter_dict is None:
        return query

    requested = MultiDict(filter_dict)

    # filter by status
    wanted_statuses = requested.getlist('status')
    if wanted_statuses:
        query = query.filter(Notification.status.in_(wanted_statuses))

    # filter by template
    wanted_template_types = requested.getlist('template_type')
    if wanted_template_types:
        query = query.filter(Notification.notification_type.in_(wanted_template_types))

    return query
|
2016-03-09 17:46:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
2023-06-27 10:48:14 -07:00
|
|
|
|
def sanitize_successful_notification_by_id(
    notification_id
):
    """Scrub the recipient from a notification and mark it delivered.

    Overwrites both ``to`` and ``normalised_to`` with the bare phone prefix
    so no personal data is retained, then commits immediately (this function
    manages its own commit rather than using @autocommit).
    """
    # TODO what to do for international?
    phone_prefix = '1'
    Notification.query.filter(
        Notification.id.in_([notification_id]),
    ).update(
        {'to': phone_prefix, 'normalised_to': phone_prefix, 'status': 'delivered'}
    )
    db.session.commit()
|
2023-04-18 12:42:23 -07:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def insert_notification_history_delete_notifications(
    notification_type, service_id, timestamp_to_delete_backwards_from, qry_limit=50000
):
    """
    Delete up to 50,000 notifications that are past retention for a notification type and service.

    Returns the number of notifications moved into history (and deleted)
    in this call — callers loop until this reaches zero.

    Steps are as follows:

    Create a temporary notifications table
    Populate that table with up to 50k notifications that are to be deleted. (Note: no specified order)
    Insert everything in the temp table into notification history
    Delete from notifications if notification id is in the temp table
    Drop the temp table (automatically when the transaction commits)

    Temporary tables are in a separate postgres schema, and only visible to the current session (db connection,
    in a celery task there's one connection per thread.)
    """
    # Setting default query limit to 50,000 which take about 48 seconds on current table size
    # 10, 000 took 11s and 100,000 took 1 min 30 seconds.
    select_into_temp_table = """
        CREATE TEMP TABLE NOTIFICATION_ARCHIVE ON COMMIT DROP AS
        SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id,
            key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units,
            client_reference, international, phone_prefix, rate_multiplier, notification_status,
            created_by_id, document_download_count
        FROM notifications
        WHERE service_id = :service_id
        AND notification_type = :notification_type
        AND created_at < :timestamp_to_delete_backwards_from
        AND key_type in ('normal', 'team')
        limit :qry_limit
        """

    # Insert into NotificationHistory if the row already exists do nothing.
    insert_query = """
        insert into notification_history
        SELECT * from NOTIFICATION_ARCHIVE
        ON CONFLICT ON CONSTRAINT notification_history_pkey
        DO NOTHING
        """

    delete_query = """
        DELETE FROM notifications
        where id in (select id from NOTIFICATION_ARCHIVE)
        """

    input_params = {
        "service_id": service_id,
        "notification_type": notification_type,
        "timestamp_to_delete_backwards_from": timestamp_to_delete_backwards_from,
        "qry_limit": qry_limit
    }

    db.session.execute(select_into_temp_table, input_params)

    # Count before delete so we can report how many rows were archived.
    result = db.session.execute("select count(*) from NOTIFICATION_ARCHIVE").fetchone()[0]

    db.session.execute(insert_query)

    db.session.execute(delete_query)

    return result
|
2020-03-23 15:53:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
2021-12-06 09:30:48 +00:00
|
|
|
|
def move_notifications_to_notification_history(
    notification_type,
    service_id,
    timestamp_to_delete_backwards_from,
    qry_limit=50000
):
    """Archive all of a service's past-retention notifications of one type.

    Repeatedly calls insert_notification_history_delete_notifications (which
    processes up to ``qry_limit`` rows per transaction) until no rows remain,
    then deletes any matching test-key notifications outright — those are
    never persisted to NotificationHistory. Returns the total number of
    notifications moved into history (excluding the test-key deletions).
    """
    deleted = 0
    delete_count_per_call = 1
    while delete_count_per_call > 0:
        delete_count_per_call = insert_notification_history_delete_notifications(
            notification_type=notification_type,
            service_id=service_id,
            timestamp_to_delete_backwards_from=timestamp_to_delete_backwards_from,
            qry_limit=qry_limit
        )
        deleted += delete_count_per_call

    # Deleting test Notifications, test notifications are not persisted to NotificationHistory
    Notification.query.filter(
        Notification.notification_type == notification_type,
        Notification.service_id == service_id,
        Notification.created_at < timestamp_to_delete_backwards_from,
        Notification.key_type == KEY_TYPE_TEST
    ).delete(synchronize_session=False)
    db.session.commit()

    return deleted
|
2019-04-29 15:44:42 +01:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def dao_delete_notifications_by_id(notification_id):
    """Delete the notification with the given id, if it exists."""
    matching = db.session.query(Notification).filter(
        Notification.id == notification_id
    )
    matching.delete(synchronize_session='fetch')
|
2016-09-13 16:42:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
2021-12-13 16:59:25 +00:00
|
|
|
|
def dao_timeout_notifications(cutoff_time, limit=100000):
    """
    Set email and SMS notifications (only) to "temporary-failure" status
    if they're still sending from before the specified cutoff_time.

    At most ``limit`` notifications are timed out per call; returns the list
    of Notification objects that were updated (with their pre-update status).
    Commits directly rather than via @autocommit.
    """
    updated_at = datetime.utcnow()
    current_statuses = [NOTIFICATION_SENDING, NOTIFICATION_PENDING]
    new_status = NOTIFICATION_TEMPORARY_FAILURE

    # Select the stuck notifications first so we can both cap the batch size
    # and return the affected rows to the caller.
    notifications = Notification.query.filter(
        Notification.created_at < cutoff_time,
        Notification.status.in_(current_statuses),
        Notification.notification_type.in_([SMS_TYPE, EMAIL_TYPE])
    ).limit(limit).all()

    # Bulk-update by id; synchronize_session=False skips refreshing the
    # in-session objects selected above.
    Notification.query.filter(
        Notification.id.in_([n.id for n in notifications]),
    ).update(
        {'status': new_status, 'updated_at': updated_at},
        synchronize_session=False
    )

    db.session.commit()
    return notifications
|
2016-09-30 17:17:28 +01:00
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
|
@autocommit
def dao_update_notifications_by_reference(references, update_dict):
    """Bulk-apply ``update_dict`` to notifications with the given references.

    If fewer live notifications matched than references supplied, the
    remainder are assumed to have been archived and the same update is
    applied to NotificationHistory. Returns the pair
    (updated_count, updated_history_count).
    """
    updated_count = Notification.query.filter(
        Notification.reference.in_(references)
    ).update(
        update_dict,
        synchronize_session=False
    )

    updated_history_count = 0
    if updated_count != len(references):
        updated_history_count = NotificationHistory.query.filter(
            NotificationHistory.reference.in_(references)
        ).update(
            update_dict,
            synchronize_session=False
        )

    return updated_count, updated_history_count
|
2017-05-05 14:12:50 +01:00
|
|
|
|
|
|
|
|
|
|
|
2020-05-01 11:18:33 +01:00
|
|
|
|
def dao_get_notifications_by_recipient_or_reference(
    service_id,
    search_term,
    notification_type=None,
    statuses=None,
    page=1,
    page_size=None,
    error_out=True,
):
    """Search a service's notifications by recipient or client reference.

    The search term is normalised according to the notification type (phone
    formatting for SMS, email formatting for email, whitespace-stripped
    lowercase otherwise) and matched as a substring of ``normalised_to``;
    the raw term is matched case-insensitively against ``client_reference``.
    Test-key notifications are always excluded. Raises TypeError for an
    unrecognised notification_type. Returns a (countless) pagination of
    results, newest first.
    """
    if notification_type == SMS_TYPE:
        normalised = try_validate_and_format_phone_number(search_term)

        # Strip common phone-number punctuation so partial numbers match.
        for character in {'(', ')', ' ', '-'}:
            normalised = normalised.replace(character, '')

        normalised = normalised.lstrip('+0')

    elif notification_type == EMAIL_TYPE:
        try:
            normalised = validate_and_format_email_address(search_term)
        except InvalidEmailError:
            # An invalid address can still be searched for verbatim.
            normalised = search_term.lower()

    elif notification_type is None:
        # This happens when a notification type isn’t provided (this will
        # happen if a user doesn’t have permission to see the dashboard)
        # because email addresses and phone numbers will never be stored
        # with spaces either.
        normalised = ''.join(search_term.split()).lower()

    else:
        raise TypeError(
            f'Notification type must be {EMAIL_TYPE}, {SMS_TYPE}, or None'
        )

    # Escape LIKE wildcards in both the normalised and raw terms before
    # interpolating them into the patterns below.
    normalised = escape_special_characters(normalised)
    search_term = escape_special_characters(search_term)

    filters = [
        Notification.service_id == service_id,
        or_(
            Notification.normalised_to.like("%{}%".format(normalised)),
            Notification.client_reference.ilike("%{}%".format(search_term)),
        ),
        Notification.key_type != KEY_TYPE_TEST,
    ]

    if statuses:
        filters.append(Notification.status.in_(statuses))
    if notification_type:
        filters.append(Notification.notification_type == notification_type)

    results = db.session.query(Notification)\
        .filter(*filters)\
        .order_by(desc(Notification.created_at))\
        .paginate(page=page, per_page=page_size, count=False, error_out=error_out)
    return results
|
2017-05-15 17:27:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
2018-01-17 09:52:13 +00:00
|
|
|
|
def dao_get_notification_by_reference(reference):
    """Return exactly one notification with this reference; raises when
    zero or multiple rows match."""
    matching = Notification.query.filter(
        Notification.reference == reference
    )
    return matching.one()
|
|
|
|
|
|
|
|
|
|
|
|
|
2022-09-15 14:59:13 -07:00
|
|
|
|
def dao_get_notification_history_by_reference(reference):
    """Find a notification by reference in the live table, falling back to
    NotificationHistory when no live row exists."""
    try:
        # This try except is necessary because in test keys and research mode does not create notification history.
        # Otherwise we could just search for the NotificationHistory object
        live_match = Notification.query.filter(Notification.reference == reference)
        return live_match.one()
    except NoResultFound:
        archived_match = NotificationHistory.query.filter(NotificationHistory.reference == reference)
        return archived_match.one()
|
|
|
|
|
|
|
2018-03-01 15:39:51 +00:00
|
|
|
|
|
2021-03-11 18:53:43 +00:00
|
|
|
|
def dao_get_notifications_processing_time_stats(start_date, end_date):
    """
    For a given time range, returns the number of notifications sent and the number of
    those notifications that we processed within 10 seconds

    SELECT
    count(notifications),
    coalesce(sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END), 0)
    FROM notifications
    WHERE
    created_at > 'START DATE' AND
    created_at < 'END DATE' AND
    api_key_id IS NOT NULL AND
    key_type != 'test';
    """
    # True when the gap between creation and sending is at most 10 seconds.
    under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10)
    # coalesce(..., 0) so an empty range yields 0 rather than NULL.
    sum_column = functions.coalesce(functions.sum(
        case(
            [
                (under_10_secs, 1)
            ],
            else_=0
        )
    ), 0)

    # Returns a single row: (messages_total, messages_within_10_secs).
    return db.session.query(
        func.count(Notification.id).label('messages_total'),
        sum_column.label('messages_within_10_secs')
    ).filter(
        Notification.created_at >= start_date,
        Notification.created_at < end_date,
        Notification.api_key_id.isnot(None),
        Notification.key_type != KEY_TYPE_TEST,
    ).one()
|
2017-09-15 17:46:08 +01:00
|
|
|
|
|
|
|
|
|
|
|
2017-10-17 11:07:36 +01:00
|
|
|
|
def dao_get_last_notification_added_for_job_id(job_id):
    """Return the job's notification with the highest row number, or None
    when the job has no notifications yet."""
    return Notification.query.filter(
        Notification.job_id == job_id
    ).order_by(
        Notification.job_row_number.desc()
    ).first()
|
2017-12-18 16:12:17 +00:00
|
|
|
|
|
|
|
|
|
|
|
2018-03-23 15:38:35 +00:00
|
|
|
|
def notifications_not_yet_sent(should_be_sending_after_seconds, notification_type):
    """List notifications of the given type still in 'created' that are older
    than the given number of seconds."""
    cutoff = datetime.utcnow() - timedelta(seconds=should_be_sending_after_seconds)

    stuck = Notification.query.filter(
        Notification.created_at <= cutoff,
        Notification.notification_type == notification_type,
        Notification.status == NOTIFICATION_CREATED
    )
    return stuck.all()
|
2018-06-13 16:04:49 +01:00
|
|
|
|
|
|
|
|
|
|
|
2018-12-20 16:01:39 +00:00
|
|
|
|
def _duplicate_update_warning(notification, status):
    """Log (info level) that a status callback arrived for a notification
    that has already left an updatable state, including how long after the
    current status was first set."""
    current_app.logger.info(
        (
            'Duplicate callback received for service {service_id}. '
            'Notification ID {id} with type {type} sent by {sent_by}. '
            'New status was {new_status}, current status is {old_status}. '
            'This happened {time_diff} after being first set.'
        ).format(
            id=notification.id,
            old_status=notification.status,
            new_status=status,
            # Fall back to created_at for rows that were never updated.
            time_diff=datetime.utcnow() - (notification.updated_at or notification.created_at),
            type=notification.notification_type,
            sent_by=notification.sent_by,
            service_id=notification.service_id
        )
    )
|
2021-12-06 09:30:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
2022-01-24 15:54:37 +00:00
|
|
|
|
def get_service_ids_with_notifications_before(notification_type, timestamp):
    """Return the set of service ids that created at least one notification
    of the given type before ``timestamp``."""
    service_rows = db.session.query(
        Notification.service_id
    ).filter(
        Notification.notification_type == notification_type,
        Notification.created_at < timestamp
    ).distinct()

    return {service_row.service_id for service_row in service_rows}
|
2022-01-25 11:29:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_service_ids_with_notifications_on_date(notification_type, date):
    """Return the set of service ids with any notification of the given type
    on the given (local) date.

    Unions the live notifications table with FactNotificationStatus so both
    recent and already-archived notifications are covered.
    """
    start_date = get_midnight_in_utc(date)
    end_date = get_midnight_in_utc(date + timedelta(days=1))

    notification_table_query = db.session.query(
        Notification.service_id.label('service_id')
    ).filter(
        Notification.notification_type == notification_type,
        # using >= + < is much more efficient than date(created_at)
        Notification.created_at >= start_date,
        Notification.created_at < end_date,
    )

    # Looking at this table is more efficient for historical notifications,
    # provided the task to populate it has run before they were archived.
    ft_status_table_query = db.session.query(
        FactNotificationStatus.service_id.label('service_id')
    ).filter(
        FactNotificationStatus.notification_type == notification_type,
        FactNotificationStatus.local_date == date,
    )

    return {
        row.service_id for row in db.session.query(union(
            notification_table_query, ft_status_table_query
        ).subquery()).distinct()
    }
|