Mirror of https://github.com/GSA/notifications-api.git (synced 2026-02-03 01:41:05 -05:00)
Add scheduled letter jobs
@@ -13,6 +13,7 @@ from app.dao.date_util import get_month_start_and_end_date_in_utc
 from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago
 from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
 from app.dao.jobs_dao import (
+    dao_get_letter_jobs_by_status,
     dao_set_scheduled_jobs_to_pending,
     dao_get_jobs_older_than_limited_by
 )

@@ -32,7 +33,7 @@ from app.dao.provider_details_dao import (
     dao_toggle_sms_provider
 )
 from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
-from app.models import LETTER_TYPE
+from app.models import LETTER_TYPE, JOB_STATUS_READY_TO_SEND
 from app.notifications.process_notifications import send_notification_to_queue
 from app.statsd_decorators import statsd
 from app.celery.tasks import process_job

@@ -302,3 +303,15 @@ def populate_monthly_billing():
     start_date, end_date = get_month_start_and_end_date_in_utc(yesterday_in_bst)
     services = get_service_ids_that_need_billing_populated(start_date=start_date, end_date=end_date)
     [create_or_update_monthly_billing(service_id=s.service_id, billing_month=end_date) for s in services]
+
+
+@notify_celery.task(name="run-letter-jobs")
+@statsd(namespace="tasks")
+def run_letter_jobs():
+    ids = dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)
+    notify_celery.send_task(
+        name=QueueNames.DVLA_FILES,
+        args=(ids,),
+        queue=QueueNames.PROCESS_FTP
+    )
+    current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(ids), QueueNames.PROCESS_FTP))

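The new run-letter-jobs task dispatches by task name rather than importing the function that implements it, which keeps the scheduler decoupled from the FTP worker code. A minimal, self-contained sketch of that send_task pattern (the broker URL is illustrative, not this repo's config; the task and queue strings mirror the constants added below):

    from celery import Celery

    # Stand-in app for this sketch; the commit uses the existing notify_celery app.
    app = Celery('sketch', broker='redis://localhost:6379/0')

    def queue_letter_jobs(job_ids):
        # send_task dispatches by registered task name, so this producer never
        # imports the worker's code. args must be a tuple or list of positional
        # arguments, hence the one-element tuple (job_ids,).
        app.send_task(
            name='send-files-to-dvla',
            args=(job_ids,),
            queue='process-ftp-tasks',
        )
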
@@ -30,6 +30,7 @@ class QueueNames(object):
     RETRY = 'retry-tasks'
     NOTIFY = 'notify-internal-tasks'
     PROCESS_FTP = 'process-ftp-tasks'
+    DVLA_FILES = 'send-files-to-dvla'

     @staticmethod
     def all_queues():

@@ -44,7 +45,8 @@ class QueueNames(object):
         QueueNames.JOBS,
         QueueNames.RETRY,
         QueueNames.NOTIFY,
-        QueueNames.PROCESS_FTP
+        QueueNames.PROCESS_FTP,
+        QueueNames.DVLA_FILES
     ]

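Note that DVLA_FILES holds the string 'send-files-to-dvla', which the hunks above and below use as a Celery task name, while PROCESS_FTP is the queue the message is routed to. A list like all_queues() is typically turned into broker declarations with kombu, Celery's messaging library; a sketch of that wiring (the exchange name is an assumption, not taken from this repo):

    from kombu import Exchange, Queue

    # One Queue per name, bound to a shared direct exchange; using the queue
    # name as the routing key lets tasks be routed simply by queue name.
    CELERY_QUEUES = [
        Queue(name, Exchange('default'), routing_key=name)
        for name in ('process-ftp-tasks', 'send-files-to-dvla')
    ]
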
@@ -220,6 +222,11 @@ class Config(object):
             'task': 'populate_monthly_billing',
             'schedule': crontab(minute=10, hour=5),
             'options': {'queue': QueueNames.PERIODIC}
-        }
+        },
+        'run-letter-jobs': {
+            'task': 'run-letter-jobs',
+            'schedule': crontab(minute=0, hour=16),
+            'options': {'queue': QueueNames.PERIODIC}
+        }
     }
     CELERY_QUEUES = []

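The new beat entry runs the task once a day: crontab(minute=0, hour=16) is the cron expression 0 16 * * *, evaluated in Celery's configured timezone (UTC unless the app sets one). Spelled out:

    from celery.schedules import crontab

    # Fires once a day at 16:00: minute 0, hour 16, every day of every month.
    run_letter_jobs_schedule = crontab(minute=0, hour=16)
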
@@ -157,6 +157,22 @@ def dao_get_all_letter_jobs():
     ).all()


+def dao_get_letter_jobs_by_status(status):
+    return db.session.query(
+        Job
+    ).join(
+        Job.template
+    ).filter(
+        Job.job_status == status,
+        Template.template_type == LETTER_TYPE,
+        # test letter jobs (and those from research mode services) are created with
+        # a different filename; exclude them so they don't show up in the CSV we send
+        Job.original_file_name != LETTER_TEST_API_FILENAME
+    ).order_by(
+        desc(Job.created_at)
+    ).all()
+
+
 @statsd(namespace="dao")
 def dao_get_job_statistics_for_job(service_id, job_id):
     query = Job.query.join(

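dao_get_letter_jobs_by_status joins Job to its Template so only letter jobs are returned, newest first. A self-contained sketch of the same join/filter/order_by shape with toy models (all table and column definitions here are illustrative, not the real app.models; the literal strings stand in for the app's constants):

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, desc
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()

    class Template(Base):
        __tablename__ = 'templates'
        id = Column(Integer, primary_key=True)
        template_type = Column(String)

    class Job(Base):
        __tablename__ = 'jobs'
        id = Column(Integer, primary_key=True)
        job_status = Column(String)
        original_file_name = Column(String)
        created_at = Column(Integer)
        template_id = Column(Integer, ForeignKey('templates.id'))
        template = relationship(Template)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        ready_letter_jobs = session.query(Job).join(
            Job.template
        ).filter(
            Job.job_status == 'ready to send',       # stands in for JOB_STATUS_READY_TO_SEND
            Template.template_type == 'letter',      # stands in for LETTER_TYPE
            Job.original_file_name != 'TEST-FILE',   # stands in for LETTER_TEST_API_FILENAME
        ).order_by(
            desc(Job.created_at)
        ).all()
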
@@ -16,7 +16,7 @@ register_errors(letter_job)
 @letter_job.route('/send-letter-jobs', methods=['POST'])
 def send_letter_jobs():
     job_ids = validate(request.get_json(), letter_job_ids)
-    notify_celery.send_task(name="send-files-to-dvla", args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)
+    notify_celery.send_task(name=QueueNames.DVLA_FILES, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)

     return jsonify(data={"response": "Task created to send files to DVLA"}), 201

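For completeness, a hedged sketch of exercising the endpoint above (host, port, and the UUID are placeholders; the asserted body mirrors the jsonify call in the diff):

    import requests

    resp = requests.post(
        'http://localhost:6011/send-letter-jobs',  # host/port are assumptions
        json={'job_ids': ['00000000-0000-0000-0000-000000000000']},  # placeholder id
    )
    assert resp.status_code == 201
    assert resp.json()['data']['response'] == 'Task created to send files to DVLA'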