import uuid
from datetime import date, datetime

from sqlalchemy import asc, func
from sqlalchemy.orm import joinedload

from app import db
from app.dao.dao_utils import (
    transactional,
    version_class
)
from app.dao.notifications_dao import get_financial_year
from app.models import (
    NotificationStatistics,
    TemplateStatistics,
    ProviderStatistics,
    VerifyCode,
    ApiKey,
    Template,
    TemplateHistory,
    Job,
    NotificationHistory,
    Notification,
    Permission,
    User,
    InvitedUser,
    Service,
    KEY_TYPE_TEST,
    NOTIFICATION_STATUS_TYPES,
    TEMPLATE_TYPES,
)
from app.service.statistics import format_monthly_template_notification_stats
from app.statsd_decorators import statsd
from app.utils import get_london_month_from_utc_column


def dao_fetch_all_services(only_active=False):
    query = Service.query.order_by(
        asc(Service.created_at)
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()


def dao_fetch_service_by_id(service_id, only_active=False):
    query = Service.query.filter_by(
        id=service_id
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.one()


def dao_fetch_all_services_by_user(user_id, only_active=False):
    query = Service.query.filter(
        Service.users.any(id=user_id)
    ).order_by(
        asc(Service.created_at)
    ).options(
        joinedload('users')
    )

    if only_active:
        query = query.filter(Service.active)

    return query.all()


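# Usage sketch for the three fetch helpers above (the ids are hypothetical and
# shown for illustration only; `query.one()` in dao_fetch_service_by_id raises
# NoResultFound when no matching service exists):
#
#     active_services = dao_fetch_all_services(only_active=True)
#     service = dao_fetch_service_by_id(service_id)
#     users_services = dao_fetch_all_services_by_user(user_id)

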
@transactional
@version_class(Service)
@version_class(Template, TemplateHistory)
@version_class(ApiKey)
def dao_archive_service(service_id):
    # have to eager load templates and api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('templates'),
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    service.active = False
    service.name = '_archived_' + service.name
    service.email_from = '_archived_' + service.email_from

    for template in service.templates:
        if not template.archived:
            template.archived = True

    for api_key in service.api_keys:
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()


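# Note: as the eager-loading comment above suggests, the @version_class
# decorators are what write the history records for the archived service, its
# templates and its api keys; the exact mechanics live in app.dao.dao_utils.

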
def dao_fetch_service_by_id_and_user(service_id, user_id):
    return Service.query.filter(
        Service.users.any(id=user_id),
        Service.id == service_id
    ).options(
        joinedload('users')
    ).one()


@transactional
@version_class(Service)
def dao_create_service(service, user, service_id=None):
    from app.dao.permissions_dao import permission_dao
    service.users.append(user)
    permission_dao.add_default_service_permissions_for_user(user, service)
    service.id = service_id or uuid.uuid4()  # must be set now so version history model can use same id
    service.active = True
    service.research_mode = False
    db.session.add(service)


@transactional
@version_class(Service)
def dao_update_service(service):
    db.session.add(service)


def dao_add_user_to_service(service, user, permissions=[]):
    try:
        from app.dao.permissions_dao import permission_dao
        service.users.append(user)
        permission_dao.set_user_service_permission(user, service, permissions, _commit=False)
        db.session.add(service)
    except Exception as e:
        db.session.rollback()
        raise e
    else:
        db.session.commit()


def dao_remove_user_from_service(service, user):
    try:
        from app.dao.permissions_dao import permission_dao
        permission_dao.remove_user_service_permissions(user, service)
        service.users.remove(user)
        db.session.add(service)
    except Exception as e:
        db.session.rollback()
        raise e
    else:
        db.session.commit()


def delete_service_and_all_associated_db_objects(service):

    def _delete_commit(query):
        query.delete()
        db.session.commit()

    _delete_commit(NotificationStatistics.query.filter_by(service=service))
    _delete_commit(TemplateStatistics.query.filter_by(service=service))
    _delete_commit(ProviderStatistics.query.filter_by(service=service))
    _delete_commit(InvitedUser.query.filter_by(service=service))
    _delete_commit(Permission.query.filter_by(service=service))
    _delete_commit(ApiKey.query.filter_by(service=service))
    _delete_commit(ApiKey.get_history_model().query.filter_by(service_id=service.id))
    _delete_commit(NotificationHistory.query.filter_by(service=service))
    _delete_commit(Notification.query.filter_by(service=service))
    _delete_commit(Job.query.filter_by(service=service))
    _delete_commit(Template.query.filter_by(service=service))
    _delete_commit(TemplateHistory.query.filter_by(service_id=service.id))

    verify_codes = VerifyCode.query.join(User).filter(User.id.in_([x.id for x in service.users]))
    list(map(db.session.delete, verify_codes))
    db.session.commit()

    users = [x for x in service.users]
    # map() is lazy in Python 3 and would not detach anything, so use an explicit loop
    for user in users:
        service.users.remove(user)

    _delete_commit(Service.get_history_model().query.filter_by(id=service.id))
    db.session.delete(service)
    db.session.commit()
    list(map(db.session.delete, users))
    db.session.commit()


@statsd(namespace="dao")
|
2016-07-18 12:03:44 +01:00
|
|
|
def dao_fetch_stats_for_service(service_id):
|
2016-07-22 15:16:24 +01:00
|
|
|
return _stats_for_service_query(service_id).all()
|
|
|
|
|
|
|
|
|
|
|
2016-08-05 10:44:43 +01:00
|
|
|
@statsd(namespace="dao")
|
2016-07-22 15:16:24 +01:00
|
|
|
def dao_fetch_todays_stats_for_service(service_id):
|
|
|
|
|
return _stats_for_service_query(service_id).filter(
|
|
|
|
|
func.date(Notification.created_at) == date.today()
|
|
|
|
|
).all()
|
|
|
|
|
|
|
|
|
|
|
2016-10-03 10:57:10 +01:00
|
|
|
def fetch_todays_total_message_count(service_id):
    # a single aggregate count across all notification types and statuses;
    # notifications sent with a test key are excluded
    result = db.session.query(
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
        func.date(Notification.created_at) == date.today()
    ).first()
    return 0 if result is None else result.count


def _stats_for_service_query(service_id):
    return db.session.query(
        Notification.notification_type,
        Notification.status,
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST
    ).group_by(
        Notification.notification_type,
        Notification.status,
    )


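# Shape note (illustrative values only): _stats_for_service_query() above, and
# therefore dao_fetch_stats_for_service and dao_fetch_todays_stats_for_service,
# yield rows of (notification_type, status, count), e.g. ('sms', 'delivered', 12).

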
@statsd(namespace="dao")
|
|
|
|
|
def dao_fetch_monthly_historical_stats_by_template_for_service(service_id, year):
|
|
|
|
|
month = get_london_month_from_utc_column(NotificationHistory.created_at)
|
|
|
|
|
|
|
|
|
|
sq = db.session.query(
|
|
|
|
|
NotificationHistory.template_id,
|
|
|
|
|
NotificationHistory.status,
|
|
|
|
|
month.label('month'),
|
|
|
|
|
func.count().label('count')
|
|
|
|
|
).filter(
|
|
|
|
|
NotificationHistory.service_id == service_id,
|
|
|
|
|
NotificationHistory.created_at.between(*get_financial_year(year))
|
|
|
|
|
).group_by(
|
|
|
|
|
month,
|
|
|
|
|
NotificationHistory.template_id,
|
|
|
|
|
NotificationHistory.status
|
|
|
|
|
).subquery()
|
|
|
|
|
|
|
|
|
|
rows = db.session.query(
|
|
|
|
|
Template.id.label('template_id'),
|
|
|
|
|
Template.name,
|
|
|
|
|
sq.c.status,
|
|
|
|
|
sq.c.count.label('count'),
|
|
|
|
|
sq.c.month
|
|
|
|
|
).join(
|
|
|
|
|
sq,
|
|
|
|
|
sq.c.template_id == Template.id
|
|
|
|
|
).all()
|
|
|
|
|
|
|
|
|
|
return format_monthly_template_notification_stats(year, rows)
|
|
|
|
|
|
|
|
|
|
|
2017-01-30 15:17:26 +00:00
|
|
|
@statsd(namespace="dao")
|
|
|
|
|
def dao_fetch_monthly_historical_stats_for_service(service_id, year):
|
2017-01-30 16:46:47 +00:00
|
|
|
month = get_london_month_from_utc_column(NotificationHistory.created_at)
|
2017-01-30 15:17:26 +00:00
|
|
|
|
|
|
|
|
rows = db.session.query(
|
|
|
|
|
NotificationHistory.notification_type,
|
|
|
|
|
NotificationHistory.status,
|
|
|
|
|
month,
|
|
|
|
|
func.count(NotificationHistory.id).label('count')
|
|
|
|
|
).filter(
|
|
|
|
|
NotificationHistory.service_id == service_id,
|
|
|
|
|
NotificationHistory.created_at.between(*get_financial_year(year)),
|
|
|
|
|
).group_by(
|
|
|
|
|
NotificationHistory.notification_type,
|
|
|
|
|
NotificationHistory.status,
|
|
|
|
|
month
|
|
|
|
|
).order_by(
|
|
|
|
|
month
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
months = {
|
Make sure status dictionary is assinged each time
The status dictionary was being assigned once, and then subsequent
uses of it were by reference. This meant that each template type was
pointing at the same dictionary, and updating one meant updating all.
This commit adds a dictionary comprehension, which gets evaluated once
for each template type, so each template type has its own `dict` of
statuses.
Before
--
```
Email SMS Letter
| | |
{'sending':, 'failed', …}
```
After
--
```
Email SMS Letter
| | |
{'sending':, {'sending':, {'sending':,
'failed', 'failed', 'failed',
…} …} …}
```
2017-02-07 13:06:32 +00:00
|
|
|
datetime.strftime(date, '%Y-%m'): {
|
|
|
|
|
template_type: dict.fromkeys(
|
2017-01-30 15:17:26 +00:00
|
|
|
NOTIFICATION_STATUS_TYPES,
|
|
|
|
|
0
|
|
|
|
|
)
|
Make sure status dictionary is assinged each time
The status dictionary was being assigned once, and then subsequent
uses of it were by reference. This meant that each template type was
pointing at the same dictionary, and updating one meant updating all.
This commit adds a dictionary comprehension, which gets evaluated once
for each template type, so each template type has its own `dict` of
statuses.
Before
--
```
Email SMS Letter
| | |
{'sending':, 'failed', …}
```
After
--
```
Email SMS Letter
| | |
{'sending':, {'sending':, {'sending':,
'failed', 'failed', 'failed',
…} …} …}
```
2017-02-07 13:06:32 +00:00
|
|
|
for template_type in TEMPLATE_TYPES
|
|
|
|
|
}
|
2017-01-30 15:17:26 +00:00
|
|
|
for date in [
|
|
|
|
|
datetime(year, month, 1) for month in range(4, 13)
|
|
|
|
|
] + [
|
|
|
|
|
datetime(year + 1, month, 1) for month in range(1, 4)
|
|
|
|
|
]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for notification_type, status, date, count in rows:
|
|
|
|
|
months[datetime.strftime(date, "%Y-%m")][notification_type][status] = count
|
|
|
|
|
|
|
|
|
|
return months
|
|
|
|
|
|
|
|
|
|
|
2016-08-11 17:24:44 +01:00
|
|
|
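# Rough sketch of the structure returned by
# dao_fetch_monthly_historical_stats_for_service above; the real keys come from
# TEMPLATE_TYPES and NOTIFICATION_STATUS_TYPES, so the literals below are assumptions:
#
#     {
#         '2016-04': {
#             'sms': {'created': 0, 'delivered': 0, ...},
#             'email': {'created': 0, 'delivered': 0, ...},
#             ...
#         },
#         '2016-05': {...},
#         ...
#     }

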
@statsd(namespace='dao')
def dao_fetch_todays_stats_for_all_services(include_from_test_key=True):
    query = db.session.query(
        Notification.notification_type,
        Notification.status,
        Notification.service_id,
        func.count(Notification.id).label('count')
    ).filter(
        func.date(Notification.created_at) == date.today()
    ).group_by(
        Notification.notification_type,
        Notification.status,
        Notification.service_id
    ).order_by(
        Notification.service_id
    )

    if not include_from_test_key:
        query = query.filter(Notification.key_type != KEY_TYPE_TEST)

    return query.all()


@statsd(namespace='dao')
def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True):
    query = db.session.query(
        NotificationHistory.notification_type,
        NotificationHistory.status,
        NotificationHistory.service_id,
        func.count(NotificationHistory.id).label('count')
    ).filter(
        func.date(NotificationHistory.created_at) >= start_date,
        func.date(NotificationHistory.created_at) <= end_date
    ).group_by(
        NotificationHistory.notification_type,
        NotificationHistory.status,
        NotificationHistory.service_id
    ).order_by(
        NotificationHistory.service_id
    )

    if not include_from_test_key:
        query = query.filter(NotificationHistory.key_type != KEY_TYPE_TEST)

    return query.all()


@transactional
@version_class(Service)
@version_class(ApiKey)
def dao_suspend_service(service_id):
    # have to eager load api keys so that we don't flush when we loop through them
    # to ensure that db.session still contains the models when it comes to creating history objects
    service = Service.query.options(
        joinedload('api_keys'),
    ).filter(Service.id == service_id).one()

    service.active = False

    for api_key in service.api_keys:
        if not api_key.expiry_date:
            api_key.expiry_date = datetime.utcnow()


@transactional
@version_class(Service)
def dao_resume_service(service_id):
    service = Service.query.get(service_id)
    service.active = True