2016-04-14 15:09:59 +01:00
|
|
|
import uuid
|
2019-01-17 17:20:21 +00:00
|
|
|
from datetime import date, datetime, timedelta
|
2016-04-14 15:09:59 +01:00
|
|
|
|
2017-05-31 17:31:06 +01:00
|
|
|
from flask import current_app
|
2021-03-10 13:55:06 +00:00
|
|
|
from sqlalchemy import Float, cast
|
|
|
|
|
from sqlalchemy.orm import joinedload
|
|
|
|
|
from sqlalchemy.sql.expression import and_, asc, case, func
|
2016-02-19 15:53:15 +00:00
|
|
|
|
2016-07-18 12:03:44 +01:00
|
|
|
from app import db
|
2021-04-14 07:11:01 +01:00
|
|
|
from app.dao.dao_utils import VersionOptions, autocommit, version_class
|
2019-04-29 15:49:12 +01:00
|
|
|
from app.dao.date_util import get_current_financial_year
|
2019-02-19 12:02:18 +00:00
|
|
|
from app.dao.organisation_dao import dao_get_organisation_by_email_address
|
2017-09-11 17:40:37 +01:00
|
|
|
from app.dao.service_sms_sender_dao import insert_service_sms_sender
|
2019-02-25 15:39:11 +00:00
|
|
|
from app.dao.service_user_dao import dao_get_service_user
|
2019-03-14 16:55:48 +00:00
|
|
|
from app.dao.template_folder_dao import dao_get_valid_template_folders_by_id
|
2016-05-06 11:07:11 +01:00
|
|
|
from app.models import (
|
2021-03-10 13:55:06 +00:00
|
|
|
EMAIL_TYPE,
|
|
|
|
|
INTERNATIONAL_LETTERS,
|
|
|
|
|
INTERNATIONAL_SMS_TYPE,
|
|
|
|
|
KEY_TYPE_TEST,
|
|
|
|
|
LETTER_TYPE,
|
|
|
|
|
NOTIFICATION_PERMANENT_FAILURE,
|
|
|
|
|
SMS_TYPE,
|
|
|
|
|
UPLOAD_LETTERS,
|
2017-11-14 14:32:34 +00:00
|
|
|
AnnualBilling,
|
2016-05-06 11:07:11 +01:00
|
|
|
ApiKey,
|
2019-04-25 18:09:33 +01:00
|
|
|
FactBilling,
|
2017-08-16 12:27:42 +01:00
|
|
|
InboundNumber,
|
2017-11-14 14:32:34 +00:00
|
|
|
InvitedUser,
|
2016-05-06 11:07:11 +01:00
|
|
|
Job,
|
|
|
|
|
Notification,
|
2017-11-14 14:32:34 +00:00
|
|
|
NotificationHistory,
|
2019-04-25 18:09:33 +01:00
|
|
|
Organisation,
|
2016-05-06 11:07:11 +01:00
|
|
|
Permission,
|
2016-09-16 13:47:09 +01:00
|
|
|
Service,
|
2020-03-30 17:42:59 +01:00
|
|
|
ServiceContactList,
|
2021-03-10 13:55:06 +00:00
|
|
|
ServiceEmailReplyTo,
|
2020-03-30 17:42:59 +01:00
|
|
|
ServiceLetterContact,
|
2021-03-10 13:55:06 +00:00
|
|
|
ServicePermission,
|
|
|
|
|
ServiceSmsSender,
|
2017-11-14 14:32:34 +00:00
|
|
|
Template,
|
|
|
|
|
TemplateHistory,
|
|
|
|
|
TemplateRedacted,
|
|
|
|
|
User,
|
|
|
|
|
VerifyCode,
|
2017-10-19 11:06:28 +01:00
|
|
|
)
|
2022-10-21 00:26:37 +00:00
|
|
|
from app.utils import (
|
2020-05-22 09:37:45 +01:00
|
|
|
escape_special_characters,
|
|
|
|
|
get_archived_db_column_value,
|
|
|
|
|
get_london_midnight_in_utc,
|
|
|
|
|
)
|
2016-05-06 11:07:11 +01:00
|
|
|
|
2017-10-19 11:06:28 +01:00
|
|
|
# Permissions granted to every newly-created service (see dao_create_service).
DEFAULT_SERVICE_PERMISSIONS = [
    SMS_TYPE,
    EMAIL_TYPE,
    LETTER_TYPE,
    INTERNATIONAL_SMS_TYPE,
    UPLOAD_LETTERS,
    INTERNATIONAL_LETTERS,
]
|
|
|
|
|
|
2016-02-19 15:53:15 +00:00
|
|
|
|
2016-11-09 11:45:39 +00:00
|
|
|
def dao_fetch_all_services(only_active=False):
    """Return every service, oldest first, with users eagerly loaded.

    :param only_active: when True, restrict the result to active services.
    """
    query = Service.query.options(
        joinedload('users')
    ).order_by(
        asc(Service.created_at)
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()
|
2016-02-19 15:53:15 +00:00
|
|
|
|
|
|
|
|
|
2019-08-13 17:20:37 +01:00
|
|
|
def get_services_by_partial_name(service_name):
    """Case-insensitive substring search on service name.

    Special characters in the search term are escaped before being used in
    the ILIKE pattern.
    """
    escaped = escape_special_characters(service_name)
    return Service.query.filter(Service.name.ilike(f"%{escaped}%")).all()
|
|
|
|
|
|
|
|
|
|
|
2019-04-11 13:38:21 +01:00
|
|
|
def dao_count_live_services():
    """Count services considered live: active, unrestricted and flagged count_as_live."""
    live_criteria = dict(active=True, restricted=False, count_as_live=True)
    return Service.query.filter_by(**live_criteria).count()
|
|
|
|
|
|
|
|
|
|
|
2019-04-25 18:09:33 +01:00
|
|
|
def dao_fetch_live_services_data():
    """Return usage and contact data for every live service.

    One dict per service, combining this financial year's email/sms/letter
    totals (from ft_billing), the service's most recent annual billing row,
    organisation details and the go-live user's contact details.
    """
    year_start_date, year_end_date = get_current_financial_year()

    # Latest financial_year_start per service — used below to join only the
    # most recent AnnualBilling row for each service.
    most_recent_annual_billing = db.session.query(
        AnnualBilling.service_id,
        func.max(AnnualBilling.financial_year_start).label('year')
    ).group_by(
        AnnualBilling.service_id
    ).subquery()

    # Billing facts restricted to the current financial year.
    this_year_ft_billing = FactBilling.query.filter(
        FactBilling.bst_date >= year_start_date,
        FactBilling.bst_date <= year_end_date,
    ).subquery()

    data = db.session.query(
        Service.id.label('service_id'),
        Service.name.label("service_name"),
        Organisation.name.label("organisation_name"),
        Organisation.organisation_type.label('organisation_type'),
        Service.consent_to_research.label('consent_to_research'),
        User.name.label('contact_name'),
        User.email_address.label('contact_email'),
        User.mobile_number.label('contact_mobile'),
        Service.go_live_at.label("live_date"),
        Service.volume_sms.label('sms_volume_intent'),
        Service.volume_email.label('email_volume_intent'),
        Service.volume_letter.label('letter_volume_intent'),
        # Results are grouped by notification_type, so at most one of the
        # three totals below is non-zero per row; the per-type rows are
        # merged per service in the loop at the bottom.
        case([
            (this_year_ft_billing.c.notification_type == 'email', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("email_totals"),
        case([
            (this_year_ft_billing.c.notification_type == 'sms', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("sms_totals"),
        case([
            (this_year_ft_billing.c.notification_type == 'letter', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("letter_totals"),
        AnnualBilling.free_sms_fragment_limit,
    ).join(
        Service.annual_billing
    ).join(
        # Restrict the AnnualBilling join to each service's latest year.
        most_recent_annual_billing,
        and_(
            Service.id == most_recent_annual_billing.c.service_id,
            AnnualBilling.financial_year_start == most_recent_annual_billing.c.year
        )
    ).outerjoin(
        Service.organisation
    ).outerjoin(
        this_year_ft_billing, Service.id == this_year_ft_billing.c.service_id
    ).outerjoin(
        User, Service.go_live_user_id == User.id
    ).filter(
        Service.count_as_live.is_(True),
        Service.active.is_(True),
        Service.restricted.is_(False),
    ).group_by(
        Service.id,
        Organisation.name,
        Organisation.organisation_type,
        Service.name,
        Service.consent_to_research,
        Service.count_as_live,
        Service.go_live_user_id,
        User.name,
        User.email_address,
        User.mobile_number,
        Service.go_live_at,
        Service.volume_sms,
        Service.volume_email,
        Service.volume_letter,
        this_year_ft_billing.c.notification_type,
        AnnualBilling.free_sms_fragment_limit,
    ).order_by(
        asc(Service.go_live_at)
    ).all()

    results = []
    for row in data:
        existing_service = next((x for x in results if x['service_id'] == row.service_id), None)

        # A service can appear up to once per notification_type; fold the
        # per-type totals into the dict created on first sight of the service.
        if existing_service is not None:
            existing_service["email_totals"] += row.email_totals
            existing_service["sms_totals"] += row.sms_totals
            existing_service["letter_totals"] += row.letter_totals
        else:
            results.append(row._asdict())

    return results
|
|
|
|
|
|
|
|
|
|
|
2016-11-09 15:07:23 +00:00
|
|
|
def dao_fetch_service_by_id(service_id, only_active=False):
    """Fetch one service by id, users eagerly loaded.

    Raises if no matching service exists (query.one()).
    """
    query = Service.query.options(
        joinedload('users')
    ).filter_by(
        id=service_id
    )

    if only_active:
        query = query.filter(Service.active)

    return query.one()
|
|
|
|
|
|
|
|
|
|
|
2017-08-23 13:03:52 +01:00
|
|
|
def dao_fetch_service_by_inbound_number(number):
    """Return the service that owns an active inbound number, or None.

    Returns None both when the number is unknown/inactive and when the
    number is not assigned to any service.
    """
    inbound_number = InboundNumber.query.filter(
        InboundNumber.number == number,
        InboundNumber.active
    ).first()

    if inbound_number is None:
        return None

    return Service.query.filter(
        Service.id == inbound_number.service_id
    ).first()
|
2017-05-22 11:26:47 +01:00
|
|
|
|
|
|
|
|
|
2017-05-05 15:22:21 +01:00
|
|
|
def dao_fetch_service_by_id_with_api_keys(service_id, only_active=False):
    """Fetch one service by id with its api_keys eagerly loaded.

    Raises if no matching service exists (query.one()).
    """
    query = Service.query.options(
        joinedload('api_keys')
    ).filter_by(
        id=service_id
    )

    if only_active:
        query = query.filter(Service.active)

    return query.one()
|
|
|
|
|
|
|
|
|
|
|
2016-11-09 15:07:23 +00:00
|
|
|
def dao_fetch_all_services_by_user(user_id, only_active=False):
    """Return every service the given user belongs to, oldest first,
    with users eagerly loaded."""
    query = Service.query.options(
        joinedload('users')
    ).filter(
        Service.users.any(id=user_id)
    ).order_by(
        asc(Service.created_at)
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()
|
2016-02-19 15:53:15 +00:00
|
|
|
|
|
|
|
|
|
2020-05-18 10:30:28 +01:00
|
|
|
def dao_fetch_all_services_created_by_user(user_id):
    """Return every service created by the given user, oldest first."""
    return Service.query.filter_by(
        created_by_id=user_id
    ).order_by(
        asc(Service.created_at)
    ).all()
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
@version_class(
    VersionOptions(ApiKey, must_write_history=False),
    VersionOptions(Service),
    VersionOptions(Template, history_class=TemplateHistory, must_write_history=False),
)
def dao_archive_service(service_id):
    """Deactivate a service, archive its templates and expire its API keys.

    The service name and email_from are rewritten via
    get_archived_db_column_value so the original values can be reused by a
    new service. Committed (and versioned) by the decorators above.
    """
    # have to eager load templates and api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('templates'),
        joinedload('templates.template_redacted'),
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    service.active = False
    service.name = get_archived_db_column_value(service.name)
    service.email_from = get_archived_db_column_value(service.email_from)

    for template in service.templates:
        if not template.archived:
            template.archived = True

    for api_key in service.api_keys:
        # only stamp keys that are not already expired
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()
|
2016-11-10 17:07:02 +00:00
|
|
|
|
|
|
|
|
|
2016-02-19 15:53:15 +00:00
|
|
|
def dao_fetch_service_by_id_and_user(service_id, user_id):
    """Fetch one service the user belongs to; raises if no match (query.one())."""
    return Service.query.options(
        joinedload('users')
    ).filter(
        Service.users.any(id=user_id),
        Service.id == service_id
    ).one()
|
2016-02-19 15:53:15 +00:00
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
@version_class(Service)
def dao_create_service(
    service,
    user,
    service_id=None,
    service_permissions=None,
):
    """Create a new service owned by `user` and add it to the session.

    :param service: an unsaved Service model to populate and persist
    :param user: the creating user; added as a member with default permissions
    :param service_id: optional explicit id (otherwise a new uuid4)
    :param service_permissions: permission names to grant; defaults to
        DEFAULT_SERVICE_PERMISSIONS
    :raises ValueError: if no user is supplied

    Committed (and versioned) by the decorators above.
    """
    if not user:
        raise ValueError("Can't create a service without a user")

    if service_permissions is None:
        service_permissions = DEFAULT_SERVICE_PERMISSIONS

    organisation = dao_get_organisation_by_email_address(user.email_address)

    # local import to avoid a circular dependency with permissions_dao
    from app.dao.permissions_dao import permission_dao
    service.users.append(user)
    permission_dao.add_default_service_permissions_for_user(user, service)
    service.id = service_id or uuid.uuid4()  # must be set now so version history model can use same id
    service.active = True
    service.research_mode = False

    for permission in service_permissions:
        service_permission = ServicePermission(service_id=service.id, permission=permission)
        service.permissions.append(service_permission)

    # do we just add the default - or will we get a value from FE?
    insert_service_sms_sender(service, current_app.config['FROM_NUMBER'])

    # inherit organisation attributes when the user's email domain maps to one
    # (previously two separate `if organisation:` blocks — merged, no behaviour change)
    if organisation:
        service.organisation_id = organisation.id
        service.organisation_type = organisation.organisation_type

        if organisation.email_branding:
            service.email_branding = organisation.email_branding

        if organisation.letter_branding:
            service.letter_branding = organisation.letter_branding

        service.crown = organisation.crown

    # platform admins' services are excluded from live-service counts
    service.count_as_live = not user.platform_admin

    db.session.add(service)
|
2016-01-07 17:31:17 +00:00
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
@version_class(Service)
def dao_update_service(service):
    """Persist changes to a service; the decorators commit and write a
    new row to the service history table."""
    db.session.add(service)
|
2016-01-12 10:39:49 +00:00
|
|
|
|
|
|
|
|
|
2019-03-14 16:55:48 +00:00
|
|
|
def dao_add_user_to_service(service, user, permissions=None, folder_permissions=None):
    """Add a user to a service with the given permissions and template-folder
    access.

    :param permissions: permission objects to set for the user on this
        service (defaults to none)
    :param folder_permissions: template folder ids the user may access;
        invalid ids are filtered out by dao_get_valid_template_folders_by_id

    Commits on success; rolls back and re-raises on any error.
    """
    permissions = permissions or []
    folder_permissions = folder_permissions or []

    try:
        # local import to avoid a circular dependency with permissions_dao
        from app.dao.permissions_dao import permission_dao
        service.users.append(user)
        permission_dao.set_user_service_permission(user, service, permissions, _commit=False)
        db.session.add(service)

        service_user = dao_get_service_user(user.id, service.id)
        valid_template_folders = dao_get_valid_template_folders_by_id(folder_permissions)
        service_user.folders = valid_template_folders
        db.session.add(service_user)
    except Exception:
        db.session.rollback()
        # bare `raise` preserves the original exception and traceback
        raise
    else:
        db.session.commit()
|
2016-02-19 15:53:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def dao_remove_user_from_service(service, user):
    """Remove a user from a service, deleting their service permissions and
    the service_user link row.

    Commits on success; rolls back and re-raises on any error.
    """
    try:
        # local import to avoid a circular dependency with permissions_dao
        from app.dao.permissions_dao import permission_dao
        permission_dao.remove_user_service_permissions(user, service)

        service_user = dao_get_service_user(user.id, service.id)
        db.session.delete(service_user)
    except Exception:
        db.session.rollback()
        # bare `raise` preserves the original exception and traceback
        raise
    else:
        db.session.commit()
|
2016-05-06 11:07:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def delete_service_and_all_associated_db_objects(service):
    """Hard-delete a service and everything that references it.

    Dependent rows are deleted before the rows they reference (templates'
    redactions before templates, history rows, keys, billing, permissions,
    notifications and jobs before the service itself), then the service's
    users and their verify codes. Each step commits separately, so a
    failure part-way leaves earlier deletions committed.
    """

    def _delete_commit(query):
        # synchronize_session=False: delete in the DB without scanning the
        # session for matching in-memory objects
        query.delete(synchronize_session=False)
        db.session.commit()

    # TemplateRedacted has no direct service FK — go via the template ids
    subq = db.session.query(Template.id).filter_by(service=service).subquery()
    _delete_commit(TemplateRedacted.query.filter(TemplateRedacted.template_id.in_(subq)))

    _delete_commit(ServiceSmsSender.query.filter_by(service=service))
    _delete_commit(ServiceEmailReplyTo.query.filter_by(service=service))
    _delete_commit(ServiceLetterContact.query.filter_by(service=service))
    _delete_commit(ServiceContactList.query.filter_by(service=service))
    _delete_commit(InvitedUser.query.filter_by(service=service))
    _delete_commit(Permission.query.filter_by(service=service))
    _delete_commit(NotificationHistory.query.filter_by(service=service))
    _delete_commit(Notification.query.filter_by(service=service))
    _delete_commit(Job.query.filter_by(service=service))
    _delete_commit(Template.query.filter_by(service=service))
    _delete_commit(TemplateHistory.query.filter_by(service_id=service.id))
    _delete_commit(ServicePermission.query.filter_by(service_id=service.id))
    _delete_commit(ApiKey.query.filter_by(service=service))
    _delete_commit(ApiKey.get_history_model().query.filter_by(service_id=service.id))
    _delete_commit(AnnualBilling.query.filter_by(service_id=service.id))

    # verify codes belong to the service's users, not the service itself
    verify_codes = VerifyCode.query.join(User).filter(User.id.in_([x.id for x in service.users]))
    list(map(db.session.delete, verify_codes))
    db.session.commit()

    # detach users (and their organisation links) before deleting the service
    users = [x for x in service.users]
    for user in users:
        user.organisations = []
        service.users.remove(user)
    _delete_commit(Service.get_history_model().query.filter_by(id=service.id))
    db.session.delete(service)
    db.session.commit()
    # finally delete the now-orphaned users
    for user in users:
        db.session.delete(user)
    db.session.commit()
|
2016-07-18 12:03:44 +01:00
|
|
|
|
|
|
|
|
|
2016-07-22 15:16:24 +01:00
|
|
|
def dao_fetch_todays_stats_for_service(service_id):
    """Count today's non-test-key notifications for one service, broken
    down by notification type and status.

    The filter compares created_at directly (rather than date(created_at))
    so Postgres can use the index on (service_id, created_at).
    """
    start_of_today = get_london_midnight_in_utc(date.today())

    return db.session.query(
        Notification.notification_type,
        Notification.status,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.created_at >= start_of_today
    ).group_by(
        Notification.notification_type,
        Notification.status,
    ).all()
|
2016-07-26 11:00:03 +01:00
|
|
|
|
|
|
|
|
|
2017-10-26 12:15:52 +01:00
|
|
|
def dao_fetch_todays_stats_for_all_services(include_from_test_key=True, only_active=True):
    """Today's notification counts, by type and status, for every service.

    Services with no notifications today still appear (outer join) with
    NULL notification_type/status/count. "Today" is the London day,
    converted to a UTC window.
    """
    today = date.today()
    start_date = get_london_midnight_in_utc(today)
    end_date = get_london_midnight_in_utc(today + timedelta(days=1))

    # per-service counts for today, grouped by type and status
    subquery = db.session.query(
        Notification.notification_type,
        Notification.status,
        Notification.service_id,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.created_at >= start_date,
        Notification.created_at < end_date
    ).group_by(
        Notification.notification_type,
        Notification.status,
        Notification.service_id
    )

    if not include_from_test_key:
        subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST)

    subquery = subquery.subquery()

    query = db.session.query(
        Service.id.label('service_id'),
        Service.name,
        Service.restricted,
        Service.research_mode,
        Service.active,
        Service.created_at,
        subquery.c.notification_type,
        subquery.c.status,
        subquery.c.count
    ).outerjoin(
        subquery,
        subquery.c.service_id == Service.id
    ).order_by(Service.id)

    if only_active:
        query = query.filter(Service.active)

    return query.all()
|
2016-12-28 15:39:55 +00:00
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
@version_class(
    VersionOptions(ApiKey, must_write_history=False),
    VersionOptions(Service),
)
def dao_suspend_service(service_id):
    """Deactivate a service and expire all of its API keys.

    Unlike dao_archive_service this does not rename the service or archive
    templates; reversed by dao_resume_service (which does not un-expire
    keys). Committed (and versioned) by the decorators above.
    """
    # have to eager load api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    for api_key in service.api_keys:
        # only stamp keys that are not already expired
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()

    service.active = False
|
|
|
|
|
|
2017-01-30 16:32:44 +00:00
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
@version_class(Service)
def dao_resume_service(service_id):
    """Reactivate a suspended service.

    Note: does not restore API keys expired by dao_suspend_service.
    """
    resumed_service = Service.query.get(service_id)
    resumed_service.active = True
|
2017-05-10 15:58:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def dao_fetch_active_users_for_service(service_id):
    """Return every member of the service whose user state is 'active'."""
    return User.query.filter(
        User.services.any(id=service_id),
        User.state == 'active'
    ).all()
|
2019-12-03 10:26:59 +00:00
|
|
|
|
|
|
|
|
|
2020-01-17 11:30:19 +00:00
|
|
|
def dao_find_services_sending_to_tv_numbers(start_date, end_date, threshold=500):
    """Find live services that sent more than `threshold` real SMS to TV
    numbers within the given window.

    Matching substr(normalised_to, 3, 7) == '7700900' selects numbers whose
    national part starts 7700 900 — presumably normalised_to carries a '44'
    prefix and this is the reserved TV/drama number range (NOTE(review):
    confirm against the number-normalisation rules).

    :returns: list of (service_id, notification_count) rows
    """
    return db.session.query(
        Notification.service_id.label('service_id'),
        func.count(Notification.id).label('notification_count')
    ).filter(
        Notification.service_id == Service.id,
        Notification.created_at >= start_date,
        Notification.created_at <= end_date,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type == SMS_TYPE,
        func.substr(Notification.normalised_to, 3, 7) == '7700900',
        # live services only
        Service.restricted == False,  # noqa
        Service.research_mode == False,  # noqa
        Service.active == True,  # noqa
    ).group_by(
        Notification.service_id,
    ).having(
        func.count(Notification.id) > threshold
    ).all()
|
2019-12-03 16:18:07 +00:00
|
|
|
|
|
|
|
|
|
2022-02-25 10:34:01 +00:00
|
|
|
def dao_find_services_with_high_failure_rates(start_date, end_date, threshold=10000):
    """Find live services whose SMS permanent-failure rate is >= 25% over
    the given window, considering only services that sent at least
    `threshold` real SMS.

    :returns: list of (service_id, permanent_failure_count, total_count,
        permanent_failure_rate) rows
    """
    # total real SMS per live service in the window, restricted to services
    # that hit the volume threshold
    subquery = db.session.query(
        func.count(Notification.id).label('total_count'),
        Notification.service_id.label('service_id')
    ).filter(
        Notification.service_id == Service.id,
        Notification.created_at >= start_date,
        Notification.created_at <= end_date,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type == SMS_TYPE,
        Service.restricted == False,  # noqa
        Service.research_mode == False,  # noqa
        Service.active == True,  # noqa
    ).group_by(
        Notification.service_id,
    ).having(
        func.count(Notification.id) >= threshold
    )

    subquery = subquery.subquery()

    # permanent failures per service, joined against the totals above to
    # compute the failure rate
    query = db.session.query(
        Notification.service_id.label('service_id'),
        func.count(Notification.id).label('permanent_failure_count'),
        subquery.c.total_count.label('total_count'),
        (cast(func.count(Notification.id), Float) / cast(subquery.c.total_count, Float)).label('permanent_failure_rate')
    ).join(
        subquery,
        subquery.c.service_id == Notification.service_id
    ).filter(
        Notification.service_id == Service.id,
        Notification.created_at >= start_date,
        Notification.created_at <= end_date,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type == SMS_TYPE,
        Notification.status == NOTIFICATION_PERMANENT_FAILURE,
        Service.restricted == False,  # noqa
        Service.research_mode == False,  # noqa
        Service.active == True,  # noqa
    ).group_by(
        Notification.service_id,
        subquery.c.total_count
    ).having(
        # failure rate of 25% or more
        cast(func.count(Notification.id), Float) / cast(subquery.c.total_count, Float) >= 0.25
    )

    return query.all()
|
2021-03-04 16:10:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_live_services_with_organisation():
    """List every live service with its organisation (if any), ordered by
    organisation name then service name.

    Services without an organisation are still included (outer join) with
    NULL organisation columns.
    """
    return db.session.query(
        Service.id.label("service_id"),
        Service.name.label("service_name"),
        Organisation.id.label("organisation_id"),
        Organisation.name.label("organisation_name")
    ).outerjoin(
        Service.organisation
    ).filter(
        Service.count_as_live.is_(True),
        Service.active.is_(True),
        Service.restricted.is_(False)
    ).order_by(
        Organisation.name,
        Service.name
    ).all()
|