Delete Statistics DAO and some Jobs DAO functions

* Deleted the statistics DAO
  (this was used for the job statistics tasks)
* Deleted the functions in the jobs DAO which are no longer used
  (the functions that were used for the job-stats endpoints)
This commit is contained in:
Katie Smith
2018-03-05 16:56:12 +00:00
parent 44c4026df2
commit b582f9f077
4 changed files with 0 additions and 1264 deletions

View File

@@ -170,56 +170,3 @@ def dao_get_letter_job_ids_by_status(status):
).all()
return [str(job.id) for job in jobs]
@statsd(namespace="dao")
def dao_get_job_statistics_for_job(service_id, job_id):
    """Return one job's details joined with its JobStatistics counters.

    Raises the usual ``Query.one()`` errors (NoResultFound /
    MultipleResultsFound) if (service_id, job_id) does not match exactly
    one job.
    """
    selected_columns = (
        JobStatistics.job_id,
        Job.original_file_name,
        Job.created_at,
        Job.scheduled_for,
        Job.template_id,
        Job.template_version,
        Job.job_status,
        Job.service_id,
        Job.notification_count,
        JobStatistics.sent,
        JobStatistics.delivered,
        JobStatistics.failed,
    )
    return Job.query.join(
        JobStatistics, Job.id == JobStatistics.job_id
    ).filter(
        Job.id == job_id,
        Job.service_id == service_id
    ).add_columns(*selected_columns).one()
@statsd(namespace="dao")
def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=None, statuses=None):
    """Return a paginated query of a service's jobs joined with their statistics.

    Optionally restricted to jobs created within the last ``limit_days``
    days and/or to the given ``statuses`` (a list; ``['']`` is treated the
    same as no filter). Ordered newest-first by creation time.
    """
    selected_columns = (
        JobStatistics.job_id,
        Job.original_file_name,
        Job.created_at,
        Job.scheduled_for,
        Job.template_id,
        Job.template_version,
        Job.job_status,
        Job.service_id,
        Job.notification_count,
        JobStatistics.sent,
        JobStatistics.delivered,
        JobStatistics.failed,
    )
    query = Job.query.join(
        JobStatistics, Job.id == JobStatistics.job_id
    ).filter(
        Job.service_id == service_id
    ).add_columns(*selected_columns)

    if limit_days:
        query = query.filter(Job.created_at >= days_ago(limit_days))
    if statuses is not None and statuses != ['']:
        query = query.filter(Job.job_status.in_(statuses))

    return query.order_by(Job.created_at.desc()).paginate(page=page, per_page=page_size)

View File

@@ -1,158 +0,0 @@
from datetime import datetime, timedelta
from itertools import groupby
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from sqlalchemy import func
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from app import db
from app.dao.dao_utils import transactional
from app.models import (
JobStatistics,
Notification,
EMAIL_TYPE,
SMS_TYPE,
LETTER_TYPE,
NOTIFICATION_STATUS_TYPES_FAILED,
NOTIFICATION_STATUS_SUCCESS,
NOTIFICATION_DELIVERED,
NOTIFICATION_SENT)
@transactional
def timeout_job_counts(notifications_type, timeout_start):
    """Recount and repair stale JobStatistics counters for one notification type.

    Selects JobStatistics rows created before ``timeout_start`` whose
    type-specific counters do not balance (sent != failed + delivered),
    recounts outcomes from the Notification table, and writes the corrected
    totals back. Returns the number of JobStatistics rows updated.
    """
    total_updated = 0
    # Type-specific counter columns for this channel,
    # e.g. sms_sent / sms_delivered / sms_failed.
    sent = columns(notifications_type, 'sent')
    delivered = columns(notifications_type, 'delivered')
    failed = columns(notifications_type, 'failed')
    # One row per (job_id, notification status) with a count of notifications.
    results = db.session.query(
        JobStatistics.job_id.label('job_id'),
        func.count(Notification.status).label('count'),
        Notification.status
    ).filter(
        Notification.notification_type == notifications_type,
        JobStatistics.job_id == Notification.job_id,
        JobStatistics.created_at < timeout_start,
        # Only rows whose counters haven't settled yet.
        sent != failed + delivered
    ).group_by(Notification.status, JobStatistics.job_id).order_by(JobStatistics.job_id).all()
    # itertools.groupby requires its input sorted by the grouping key.
    sort = sorted(results, key=lambda result: result.job_id)
    groups = []
    for k, g in groupby(sort, key=lambda result: result.job_id):
        groups.append(list(g))
    for job in groups:
        sent_count = 0
        delivered_count = 0
        failed_count = 0
        for notification_status in job:
            # Any non-success status counts as failed; every row counts as sent.
            if notification_status.status in NOTIFICATION_STATUS_SUCCESS:
                delivered_count += notification_status.count
            else:
                failed_count += notification_status.count
            sent_count += notification_status.count
        # NOTE(review): relies on the inner loop variable surviving the loop;
        # every row in `job` shares the same job_id, so this is the group's id.
        total_updated += JobStatistics.query.filter_by(
            job_id=notification_status.job_id
        ).update({
            # Writes both the type-specific columns and the generic
            # sent/delivered/failed columns — presumably a migration to the
            # generic columns was in flight; TODO confirm against the model.
            sent: sent_count,
            failed: failed_count,
            delivered: delivered_count,
            'sent': sent_count,
            'delivered': delivered_count,
            'failed': failed_count
        }, synchronize_session=False)
    return total_updated
@statsd(namespace="dao")
def dao_timeout_job_statistics(timeout_period):
    """Repair stale SMS and email job statistics older than ``timeout_period`` seconds.

    Returns the combined number of JobStatistics rows updated.
    """
    cutoff = datetime.utcnow() - timedelta(seconds=timeout_period)
    # SMS first, then email — same order as before.
    return sum(
        timeout_job_counts(notification_type, cutoff)
        for notification_type in (SMS_TYPE, EMAIL_TYPE)
    )
@statsd(namespace="dao")
def create_or_update_job_sending_statistics(notification):
    """Increment the sent counters for the notification's job, creating the
    JobStatistics row on first use.

    Tries an UPDATE first; when no row exists yet, INSERTs one. A concurrent
    insert can make the INSERT raise IntegrityError, in which case the UPDATE
    is retried once before giving up with SQLAlchemyError.
    """
    if __update_job_stats_sent_count(notification) != 0:
        return
    try:
        __insert_job_stats(notification)
        return
    except IntegrityError as e:
        # Another worker won the insert race; fall through and update instead.
        current_app.logger.exception(e)
    if __update_job_stats_sent_count(notification) == 0:
        raise SQLAlchemyError("Failed to create job statistics for {}".format(notification.job_id))
@transactional
def __update_job_stats_sent_count(notification):
    """Bump the sent counters on the notification's JobStatistics row.

    Increments the type-specific sent column (e.g. sms_sent) and mirrors the
    value into the generic 'sent' column. Returns the number of rows matched
    (0 when no JobStatistics row exists for this job yet).
    """
    type_specific_sent = columns(notification.notification_type, 'sent')
    # NOTE(review): the generic 'sent' is derived from the type-specific
    # counter, which assumes a job sends a single notification type — verify.
    increments = {
        type_specific_sent: type_specific_sent + 1,
        'sent': type_specific_sent + 1
    }
    return db.session.query(JobStatistics).filter_by(
        job_id=notification.job_id,
    ).update(increments)
@transactional
def __insert_job_stats(notification):
    """Create the initial JobStatistics row for the notification's job,
    seeded with a count of one for the notification's channel."""
    notification_type = notification.notification_type
    db.session.add(JobStatistics(
        job_id=notification.job_id,
        emails_sent=int(notification_type == EMAIL_TYPE),
        sms_sent=int(notification_type == SMS_TYPE),
        letters_sent=int(notification_type == LETTER_TYPE),
        updated_at=datetime.utcnow(),
        sent=1
    ))
def columns(notification_type, status):
    """Map a (notification_type, status) pair to its type-specific
    JobStatistics counter column.

    Returns None for combinations with no counter (letters have no
    'delivered' column). An unknown notification_type raises AttributeError,
    matching the original chained ``.get(...).get(...)`` behaviour.
    """
    counter_columns = {
        EMAIL_TYPE: {
            'failed': JobStatistics.emails_failed,
            'delivered': JobStatistics.emails_delivered,
            'sent': JobStatistics.emails_sent,
        },
        SMS_TYPE: {
            'failed': JobStatistics.sms_failed,
            'delivered': JobStatistics.sms_delivered,
            'sent': JobStatistics.sms_sent,
        },
        LETTER_TYPE: {
            'failed': JobStatistics.letters_failed,
            'sent': JobStatistics.letters_sent,
        },
    }
    # .get() yields None for an unknown type, so the second lookup raises
    # AttributeError in that case (deliberately unchanged).
    per_type = counter_columns.get(notification_type)
    return per_type.get(status)
@transactional
def update_job_stats_outcome_count(notification):
    """Increment the failed or delivered counters for a finished notification.

    Failed statuses bump the failed counters; delivered/sent statuses bump
    the delivered counters (letters excluded); anything else is a no-op.
    Updates both the type-specific column and the generic column, and
    returns the number of JobStatistics rows updated (0 when nothing matched).
    """
    status = notification.status
    if status in NOTIFICATION_STATUS_TYPES_FAILED:
        outcome = 'failed'
    elif (status in [NOTIFICATION_DELIVERED, NOTIFICATION_SENT]
            and notification.notification_type != LETTER_TYPE):
        outcome = 'delivered'
    else:
        # Status we don't track an outcome for — nothing to update.
        return 0

    type_specific = columns(notification.notification_type, outcome)
    if not type_specific:
        return 0
    return db.session.query(JobStatistics).filter_by(
        job_id=notification.job_id,
    ).update({
        type_specific: type_specific + 1,
        outcome: type_specific + 1
    })