import uuid
from datetime import datetime, timedelta

from flask import current_app
from notifications_utils.statsd_decorators import statsd
from sqlalchemy import (
    Date as sql_date,
    asc,
    cast,
    desc,
    func,
)

from app import db
from app.dao import days_ago
from app.models import (
    Job,
    JobStatistics,
    JOB_STATUS_PENDING,
    JOB_STATUS_SCHEDULED,
    LETTER_TYPE,
    NotificationHistory,
    Template,
)
from app.variables import LETTER_TEST_API_FILENAME


@statsd(namespace="dao")
def dao_get_notification_outcomes_for_job(service_id, job_id):
    query = db.session.query(
        func.count(NotificationHistory.status).label('count'),
        NotificationHistory.status
    )

    return query.filter(
        NotificationHistory.service_id == service_id
    ).filter(
        NotificationHistory.job_id == job_id
    ).group_by(
        NotificationHistory.status
    ).order_by(
        asc(NotificationHistory.status)
    ).all()
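
# Usage sketch (identifiers hypothetical): each result row is a (count, status)
# pair, one per distinct notification status seen for the job, e.g.
#     dao_get_notification_outcomes_for_job(service_id, job_id)
#     # -> [(3, 'delivered'), (1, 'permanent-failure')]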


def dao_get_job_by_service_id_and_job_id(service_id, job_id):
    return Job.query.filter_by(service_id=service_id, id=job_id).one()


def dao_get_jobs_by_service_id(service_id, limit_days=None, page=1, page_size=50, statuses=None):
    query_filter = [
        Job.service_id == service_id,
        Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'],
        Job.original_file_name != current_app.config['ONE_OFF_MESSAGE_FILENAME'],
    ]
    if limit_days is not None:
        query_filter.append(cast(Job.created_at, sql_date) >= days_ago(limit_days))
    if statuses is not None and statuses != ['']:
        query_filter.append(
            Job.job_status.in_(statuses)
        )
    return Job.query \
        .filter(*query_filter) \
        .order_by(Job.processing_started.desc(), Job.created_at.desc()) \
        .paginate(page=page, per_page=page_size)
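
# Note: paginate() is Flask-SQLAlchemy's query helper, so callers get a
# Pagination object rather than a plain list. A sketch (arguments hypothetical):
#     pagination = dao_get_jobs_by_service_id(service_id, limit_days=7)
#     jobs, total = pagination.items, pagination.total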


def dao_get_job_by_id(job_id):
    return Job.query.filter_by(id=job_id).one()


def dao_set_scheduled_jobs_to_pending():
    """
    Sets all past scheduled jobs to pending, and then returns them for further processing.

    This is used in the run_scheduled_jobs task, so we put a FOR UPDATE lock on the job table for the duration of
    the transaction, so that if the task is run more than once concurrently, one task will block the other's select
    from completing until it commits.
    """
    jobs = Job.query \
        .filter(
            Job.job_status == JOB_STATUS_SCHEDULED,
            Job.scheduled_for < datetime.utcnow()
        ) \
        .order_by(asc(Job.scheduled_for)) \
        .with_for_update() \
        .all()

    for job in jobs:
        job.job_status = JOB_STATUS_PENDING

    db.session.add_all(jobs)
    db.session.commit()

    return jobs
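
# For reference, with_for_update() adds row locking to the SELECT, so the query
# above is emitted roughly as (simplified SQL sketch):
#     SELECT ... FROM jobs
#     WHERE jobs.job_status = 'scheduled' AND jobs.scheduled_for < :now
#     ORDER BY jobs.scheduled_for ASC
#     FOR UPDATE
# A concurrent run of the task blocks on this SELECT until the first commits.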


def dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id):
    return Job.query \
        .filter(
            Job.service_id == service_id,
            Job.id == job_id,
            Job.job_status == JOB_STATUS_SCHEDULED,
            Job.scheduled_for > datetime.utcnow()
        ) \
        .one()


def dao_create_job(job):
    if not job.id:
        job.id = uuid.uuid4()
    job_stats = JobStatistics(
        job_id=job.id,
        updated_at=datetime.utcnow()
    )
    db.session.add(job_stats)
    db.session.add(job)
    db.session.commit()


def dao_update_job(job):
    db.session.add(job)
    db.session.commit()


def dao_update_job_status(job_id, status):
    db.session.query(Job).filter_by(id=job_id).update({'job_status': status})
    db.session.commit()


def dao_get_jobs_older_than_limited_by(job_types, older_than=7, limit_days=2):
    end_date = datetime.utcnow() - timedelta(days=older_than)
    start_date = end_date - timedelta(days=limit_days)

    return Job.query.join(Template).filter(
        Job.created_at < end_date,
        Job.created_at >= start_date,
        Template.template_type.in_(job_types)
    ).order_by(desc(Job.created_at)).all()
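
# Worked example with the defaults (older_than=7, limit_days=2): end_date is
# 7 days ago and start_date is 9 days ago, so this returns jobs created in the
# two-day window [9 days ago, 7 days ago).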


def dao_get_all_letter_jobs():
    return db.session.query(
        Job
    ).join(
        Job.template
    ).filter(
        Template.template_type == LETTER_TYPE,
        # Test letter jobs (and jobs from research mode services) are created with a
        # different filename; exclude them so they don't show up in the send-to-CSV list.
        Job.original_file_name != LETTER_TEST_API_FILENAME
    ).order_by(
        desc(Job.created_at)
    ).all()


def dao_get_letter_job_ids_by_status(status):
    jobs = db.session.query(
        Job
    ).join(
        Job.template
    ).filter(
        Job.job_status == status,
        Template.template_type == LETTER_TYPE,
        # Test letter jobs (and jobs from research mode services) are created with a
        # different filename; exclude them so they don't show up in the send-to-CSV list.
        Job.original_file_name != LETTER_TEST_API_FILENAME
    ).order_by(
        desc(Job.created_at)
    ).all()

    return [str(job.id) for job in jobs]


@statsd(namespace="dao")
def dao_get_job_statistics_for_job(service_id, job_id):
    query = Job.query.join(
        JobStatistics, Job.id == JobStatistics.job_id
    ).filter(
        Job.id == job_id,
        Job.service_id == service_id
    ).add_columns(
        JobStatistics.job_id,
        Job.original_file_name,
        Job.created_at,
        Job.scheduled_for,
        Job.template_id,
        Job.template_version,
        Job.job_status,
        Job.service_id,
        Job.notification_count,
        JobStatistics.sent,
        JobStatistics.delivered,
        JobStatistics.failed
    )
    return query.one()
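
# one() returns a single keyed row combining the Job entity with the columns
# added above, and raises sqlalchemy.orm.exc.NoResultFound if no match exists.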


@statsd(namespace="dao")
def dao_get_job_stats_for_service(service_id, page=1, page_size=50, limit_days=None, statuses=None):
    query = Job.query.join(
        JobStatistics, Job.id == JobStatistics.job_id
    ).filter(
        Job.service_id == service_id
    ).add_columns(
        JobStatistics.job_id,
        Job.original_file_name,
        Job.created_at,
        Job.scheduled_for,
        Job.template_id,
        Job.template_version,
        Job.job_status,
        Job.service_id,
        Job.notification_count,
        JobStatistics.sent,
        JobStatistics.delivered,
        JobStatistics.failed
    )

    if limit_days:
        query = query.filter(Job.created_at >= days_ago(limit_days))
    if statuses is not None and statuses != ['']:
        query = query.filter(Job.job_status.in_(statuses))

    query = query.order_by(Job.created_at.desc())
    return query.paginate(page=page, per_page=page_size)