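"""Data access layer for services.

Fetches services (optionally with users or API keys eager-loaded), creates,
updates, archives, suspends and resumes them, manages the users attached to
a service, and aggregates per-service notification statistics.
"""
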
import uuid
from datetime import date, datetime, timedelta

from notifications_utils.statsd_decorators import statsd
from sqlalchemy.sql.expression import asc, case, and_, func
from sqlalchemy.orm import joinedload
from flask import current_app

from app import db
from app.dao.date_util import get_current_financial_year
from app.dao.dao_utils import (
    transactional,
    version_class,
    VersionOptions,
)
from app.dao.email_branding_dao import dao_get_email_branding_by_name
from app.dao.letter_branding_dao import dao_get_letter_branding_by_name
from app.dao.organisation_dao import dao_get_organisation_by_email_address
from app.dao.service_sms_sender_dao import insert_service_sms_sender
from app.dao.service_user_dao import dao_get_service_user
from app.dao.template_folder_dao import dao_get_valid_template_folders_by_id
from app.models import (
    AnnualBilling,
    ApiKey,
    FactBilling,
    InboundNumber,
    InvitedUser,
    Job,
    Notification,
    NotificationHistory,
    Organisation,
    Permission,
    Service,
    ServicePermission,
    ServiceSmsSender,
    Template,
    TemplateHistory,
    TemplateRedacted,
    User,
    VerifyCode,
    CROWN_ORGANISATION_TYPES,
    EMAIL_TYPE,
    INTERNATIONAL_SMS_TYPE,
    KEY_TYPE_TEST,
    NHS_ORGANISATION_TYPES,
    NON_CROWN_ORGANISATION_TYPES,
    SMS_TYPE,
    LETTER_TYPE,
)
from app.utils import email_address_is_nhs, escape_special_characters, get_london_midnight_in_utc, midnight_n_days_ago


DEFAULT_SERVICE_PERMISSIONS = [
    SMS_TYPE,
    EMAIL_TYPE,
    LETTER_TYPE,
    INTERNATIONAL_SMS_TYPE,
]


def dao_fetch_all_services(only_active=False):
    query = Service.query.order_by(
        asc(Service.created_at)
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()


def get_services_by_partial_name(service_name):
    service_name = escape_special_characters(service_name)
    return Service.query.filter(Service.name.ilike("%{}%".format(service_name))).all()


def dao_count_live_services():
    return Service.query.filter_by(
        active=True,
        restricted=False,
        count_as_live=True,
    ).count()


def dao_fetch_live_services_data():
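    """Return usage data for every live service as a list of dicts.

    Each live (active, unrestricted, counted-as-live) service is joined to
    its most recent annual billing row and to this financial year's
    ft_billing facts. The query yields one row per service per notification
    type, which the loop below collapses into a single dict per service
    with email/sms/letter totals.
    """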
    year_start_date, year_end_date = get_current_financial_year()

    most_recent_annual_billing = db.session.query(
        AnnualBilling.service_id,
        func.max(AnnualBilling.financial_year_start).label('year')
    ).group_by(
        AnnualBilling.service_id
    ).subquery()

    this_year_ft_billing = FactBilling.query.filter(
        FactBilling.bst_date >= year_start_date,
        FactBilling.bst_date <= year_end_date,
    ).subquery()

    data = db.session.query(
        Service.id.label('service_id'),
        Service.name.label("service_name"),
        Organisation.name.label("organisation_name"),
        Organisation.organisation_type.label('organisation_type'),
        Service.consent_to_research.label('consent_to_research'),
        User.name.label('contact_name'),
        User.email_address.label('contact_email'),
        User.mobile_number.label('contact_mobile'),
        Service.go_live_at.label("live_date"),
        Service.volume_sms.label('sms_volume_intent'),
        Service.volume_email.label('email_volume_intent'),
        Service.volume_letter.label('letter_volume_intent'),
        case([
            (this_year_ft_billing.c.notification_type == 'email', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("email_totals"),
        case([
            (this_year_ft_billing.c.notification_type == 'sms', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("sms_totals"),
        case([
            (this_year_ft_billing.c.notification_type == 'letter', func.sum(this_year_ft_billing.c.notifications_sent))
        ], else_=0).label("letter_totals"),
        AnnualBilling.free_sms_fragment_limit,
    ).join(
        Service.annual_billing
    ).join(
        most_recent_annual_billing,
        and_(
            Service.id == most_recent_annual_billing.c.service_id,
            AnnualBilling.financial_year_start == most_recent_annual_billing.c.year
        )
    ).outerjoin(
        Service.organisation
    ).outerjoin(
        this_year_ft_billing, Service.id == this_year_ft_billing.c.service_id
    ).outerjoin(
        User, Service.go_live_user_id == User.id
    ).filter(
        Service.count_as_live.is_(True),
        Service.active.is_(True),
        Service.restricted.is_(False),
    ).group_by(
        Service.id,
        Organisation.name,
        Organisation.organisation_type,
        Service.name,
        Service.consent_to_research,
        Service.count_as_live,
        Service.go_live_user_id,
        User.name,
        User.email_address,
        User.mobile_number,
        Service.go_live_at,
        Service.volume_sms,
        Service.volume_email,
        Service.volume_letter,
        this_year_ft_billing.c.notification_type,
        AnnualBilling.free_sms_fragment_limit,
    ).order_by(
        asc(Service.go_live_at)
    ).all()

    results = []
    for row in data:
        existing_service = next((x for x in results if x['service_id'] == row.service_id), None)

        if existing_service is not None:
            existing_service["email_totals"] += row.email_totals
            existing_service["sms_totals"] += row.sms_totals
            existing_service["letter_totals"] += row.letter_totals
        else:
            results.append(row._asdict())
    return results


def dao_fetch_service_by_id(service_id, only_active=False):
    query = Service.query.filter_by(
        id=service_id
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.one()


def dao_fetch_service_by_inbound_number(number):
    inbound_number = InboundNumber.query.filter(
        InboundNumber.number == number,
        InboundNumber.active
    ).first()

    if not inbound_number:
        return None

    return Service.query.filter(
        Service.id == inbound_number.service_id
    ).first()


def dao_fetch_service_by_id_with_api_keys(service_id, only_active=False):
    query = Service.query.filter_by(
        id=service_id
    ).options(
        joinedload('api_keys')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.one()


def dao_fetch_all_services_by_user(user_id, only_active=False):
    query = Service.query.filter(
        Service.users.any(id=user_id)
    ).order_by(
        asc(Service.created_at)
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()


@transactional
@version_class(
    VersionOptions(ApiKey, must_write_history=False),
    VersionOptions(Service),
    VersionOptions(Template, history_class=TemplateHistory, must_write_history=False),
)
def dao_archive_service(service_id):
    # have to eager load templates and api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('templates'),
        joinedload('templates.template_redacted'),
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    service.active = False
    service.name = '_archived_' + service.name
    service.email_from = '_archived_' + service.email_from

    for template in service.templates:
        if not template.archived:
            template.archived = True

    for api_key in service.api_keys:
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()


def dao_fetch_service_by_id_and_user(service_id, user_id):
    return Service.query.filter(
        Service.users.any(id=user_id),
        Service.id == service_id
    ).options(
        joinedload('users')
    ).one()


@transactional
@version_class(Service)
def dao_create_service(
    service,
    user,
    service_id=None,
    service_permissions=None,
):
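    """Create a new service owned by the given user.

    The user gets the default permissions on the service, the service gets
    the default service permissions and SMS sender, and if the user's email
    address matches a known organisation the service inherits that
    organisation's type, branding and crown status.
    """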
    # the default property does not appear to work when there is a difference between the sqlalchemy schema and the
    # db schema (ie: during a migration), so we have to set sms_sender manually here. After the GOVUK sms_sender
    # migration is completed, this code should be able to be removed.

    if not user:
        raise ValueError("Can't create a service without a user")

    if service_permissions is None:
        service_permissions = DEFAULT_SERVICE_PERMISSIONS

    organisation = dao_get_organisation_by_email_address(user.email_address)

    from app.dao.permissions_dao import permission_dao
    service.users.append(user)
    permission_dao.add_default_service_permissions_for_user(user, service)
    service.id = service_id or uuid.uuid4()  # must be set now so version history model can use same id
    service.active = True
    service.research_mode = False

    for permission in service_permissions:
        service_permission = ServicePermission(service_id=service.id, permission=permission)
        service.permissions.append(service_permission)

    # do we just add the default - or will we get a value from FE?
    insert_service_sms_sender(service, current_app.config['FROM_NUMBER'])

    if organisation:
        service.organisation_id = organisation.id
        service.organisation_type = organisation.organisation_type

        if organisation.email_branding:
            service.email_branding = organisation.email_branding

        if organisation.letter_branding and not service.letter_branding:
            service.letter_branding = organisation.letter_branding

    elif service.organisation_type in NHS_ORGANISATION_TYPES or email_address_is_nhs(user.email_address):
        service.email_branding = dao_get_email_branding_by_name('NHS')
        service.letter_branding = dao_get_letter_branding_by_name('NHS')

    if organisation:
        service.crown = organisation.crown
    elif service.organisation_type in CROWN_ORGANISATION_TYPES:
        service.crown = True
    elif service.organisation_type in NON_CROWN_ORGANISATION_TYPES:
        service.crown = False
    service.count_as_live = not user.platform_admin

    db.session.add(service)


@transactional
@version_class(Service)
def dao_update_service(service):
    db.session.add(service)


def dao_add_user_to_service(service, user, permissions=None, folder_permissions=None):
    permissions = permissions or []
    folder_permissions = folder_permissions or []

    try:
        from app.dao.permissions_dao import permission_dao
        service.users.append(user)
        permission_dao.set_user_service_permission(user, service, permissions, _commit=False)
        db.session.add(service)

        service_user = dao_get_service_user(user.id, service.id)
        valid_template_folders = dao_get_valid_template_folders_by_id(folder_permissions)
        service_user.folders = valid_template_folders
        db.session.add(service_user)

    except Exception as e:
        db.session.rollback()
        raise e
    else:
        db.session.commit()


def dao_remove_user_from_service(service, user):
    try:
        from app.dao.permissions_dao import permission_dao
        permission_dao.remove_user_service_permissions(user, service)

        service_user = dao_get_service_user(user.id, service.id)
        db.session.delete(service_user)
    except Exception as e:
        db.session.rollback()
        raise e
    else:
        db.session.commit()


def delete_service_and_all_associated_db_objects(service):
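    """Hard-delete a service and every row that references it.

    Dependent rows (templates, notifications, jobs, API keys, billing and
    so on) are deleted before the service itself so that foreign key
    constraints are satisfied; the service's users are removed last.
    """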

    def _delete_commit(query):
        query.delete(synchronize_session=False)
        db.session.commit()

    subq = db.session.query(Template.id).filter_by(service=service).subquery()
    _delete_commit(TemplateRedacted.query.filter(TemplateRedacted.template_id.in_(subq)))

    _delete_commit(ServiceSmsSender.query.filter_by(service=service))
    _delete_commit(InvitedUser.query.filter_by(service=service))
    _delete_commit(Permission.query.filter_by(service=service))
    _delete_commit(NotificationHistory.query.filter_by(service=service))
    _delete_commit(Notification.query.filter_by(service=service))
    _delete_commit(Job.query.filter_by(service=service))
    _delete_commit(Template.query.filter_by(service=service))
    _delete_commit(TemplateHistory.query.filter_by(service_id=service.id))
    _delete_commit(ServicePermission.query.filter_by(service_id=service.id))
    _delete_commit(ApiKey.query.filter_by(service=service))
    _delete_commit(ApiKey.get_history_model().query.filter_by(service_id=service.id))
    _delete_commit(AnnualBilling.query.filter_by(service_id=service.id))

    verify_codes = VerifyCode.query.join(User).filter(User.id.in_([x.id for x in service.users]))
    list(map(db.session.delete, verify_codes))
    db.session.commit()
    users = [x for x in service.users]
    # detach the users from the service before deleting the service and the users themselves
    for user in users:
        service.users.remove(user)
    _delete_commit(Service.get_history_model().query.filter_by(id=service.id))
    db.session.delete(service)
    db.session.commit()
    list(map(db.session.delete, users))
    db.session.commit()


@statsd(namespace="dao")
def dao_fetch_stats_for_service(service_id, limit_days):
    # start from local midnight so we get whole days: limit_days=7 gives between seven and eight days of data
    start_date = midnight_n_days_ago(limit_days)
    return _stats_for_service_query(service_id).filter(
        Notification.created_at >= start_date
    ).all()


@statsd(namespace="dao")
def dao_fetch_todays_stats_for_service(service_id):
    return _stats_for_service_query(service_id).filter(
        func.date(Notification.created_at) == date.today()
    ).all()


def fetch_todays_total_message_count(service_id):
    result = db.session.query(
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
        func.date(Notification.created_at) == date.today()
    ).first()
    return 0 if result is None else result.count


def _stats_for_service_query(service_id):
    return db.session.query(
        Notification.notification_type,
        Notification.status,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST
    ).group_by(
        Notification.notification_type,
        Notification.status,
    )


@statsd(namespace='dao')
def dao_fetch_todays_stats_for_all_services(include_from_test_key=True, only_active=True):
    today = date.today()
    start_date = get_london_midnight_in_utc(today)
    end_date = get_london_midnight_in_utc(today + timedelta(days=1))

    subquery = db.session.query(
        Notification.notification_type,
        Notification.status,
        Notification.service_id,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.created_at >= start_date,
        Notification.created_at < end_date
    ).group_by(
        Notification.notification_type,
        Notification.status,
        Notification.service_id
    )

    if not include_from_test_key:
        subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST)

    subquery = subquery.subquery()

    query = db.session.query(
        Service.id.label('service_id'),
        Service.name,
        Service.restricted,
        Service.research_mode,
        Service.active,
        Service.created_at,
        subquery.c.notification_type,
        subquery.c.status,
        subquery.c.count
    ).outerjoin(
        subquery,
        subquery.c.service_id == Service.id
    ).order_by(Service.id)

    if only_active:
        query = query.filter(Service.active)

    return query.all()


@transactional
@version_class(
    VersionOptions(ApiKey, must_write_history=False),
    VersionOptions(Service),
)
def dao_suspend_service(service_id):
    # have to eager load api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    for api_key in service.api_keys:
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()

    service.active = False


@transactional
@version_class(Service)
def dao_resume_service(service_id):
    service = Service.query.get(service_id)
    service.active = True


def dao_fetch_active_users_for_service(service_id):
    query = User.query.filter(
        User.services.any(id=service_id),
        User.state == 'active'
    )

    return query.all()


def dao_find_services_sending_to_tv_numbers(start_date, end_date, threshold=100):
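    """Find live services that sent more than `threshold` real SMS to TV numbers.

    TV numbers are the Ofcom-reserved 07700 900000-900999 range; the substr
    comparison below matches them in the normalised (international) form.
    """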
    return db.session.query(
        Notification.service_id.label('service_id'),
        func.count(Notification.id).label('notification_count')
    ).filter(
        Notification.service_id == Service.id,
        Notification.created_at >= start_date,
        Notification.created_at <= end_date,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type == SMS_TYPE,
        func.substr(Notification.normalised_to, 3, 7) == '7700900',
        Service.restricted == False,  # noqa
        Service.research_mode == False,
        Service.active == True,
    ).group_by(
        Notification.service_id,
    ).having(
        func.count(Notification.id) > threshold
    ).all()


def dao_find_real_sms_notification_count_by_status_for_live_services(start_date, end_date):
    # only works within services' retention period
    return db.session.query(
        Notification.service_id.label('service_id'),
        Notification.status.label('status'),
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == Service.id,
        Notification.created_at >= start_date,
        Notification.created_at <= end_date,
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type == SMS_TYPE,
        Service.restricted == False,  # noqa
        Service.research_mode == False,
        Service.active == True,
    ).group_by(
        Notification.service_id,
        Notification.status
    ).all()