diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py
index 8e0beef4b..f95420f33 100644
--- a/app/celery/scheduled_tasks.py
+++ b/app/celery/scheduled_tasks.py
@@ -13,6 +13,7 @@ from app.dao.date_util import get_month_start_and_end_date_in_utc
 from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago
 from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
 from app.dao.jobs_dao import (
+    dao_get_letter_jobs_by_status,
     dao_set_scheduled_jobs_to_pending,
     dao_get_jobs_older_than_limited_by
 )
@@ -32,7 +33,7 @@ from app.dao.provider_details_dao import (
     dao_toggle_sms_provider
 )
 from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
-from app.models import LETTER_TYPE
+from app.models import LETTER_TYPE, JOB_STATUS_READY_TO_SEND
 from app.notifications.process_notifications import send_notification_to_queue
 from app.statsd_decorators import statsd
 from app.celery.tasks import process_job
@@ -302,3 +303,16 @@ def populate_monthly_billing():
     start_date, end_date = get_month_start_and_end_date_in_utc(yesterday_in_bst)
     services = get_service_ids_that_need_billing_populated(start_date=start_date, end_date=end_date)
     [create_or_update_monthly_billing(service_id=s.service_id, billing_month=end_date) for s in services]
+
+
+@notify_celery.task(name="run-letter-jobs")
+@statsd(namespace="tasks")
+def run_letter_jobs():
+    # the dao returns Job models; send plain id strings so the task payload is JSON-serialisable
+    ids = [str(job.id) for job in dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)]
+    notify_celery.send_task(
+        name=QueueNames.DVLA_FILES,
+        args=(ids,),
+        queue=QueueNames.PROCESS_FTP
+    )
+    current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(ids), QueueNames.PROCESS_FTP))
diff --git a/app/config.py b/app/config.py
index 678193dc4..51285050a 100644
--- a/app/config.py
+++ b/app/config.py
@@ -30,6 +30,7 @@ class QueueNames(object):
     RETRY = 'retry-tasks'
     NOTIFY = 'notify-internal-tasks'
     PROCESS_FTP = 'process-ftp-tasks'
+    DVLA_FILES = 'send-files-to-dvla'
     @staticmethod
     def all_queues():
@@ -44,7 +45,8 @@ class QueueNames(object):
             QueueNames.JOBS,
             QueueNames.RETRY,
             QueueNames.NOTIFY,
-            QueueNames.PROCESS_FTP
+            QueueNames.PROCESS_FTP,
+            QueueNames.DVLA_FILES
         ]
@@ -220,6 +222,11 @@ class Config(object):
             'task': 'populate_monthly_billing',
             'schedule': crontab(minute=10, hour=5),
             'options': {'queue': QueueNames.PERIODIC}
+        },
+        'run-letter-jobs': {
+            'task': 'run-letter-jobs',
+            'schedule': crontab(minute=0, hour=16),
+            'options': {'queue': QueueNames.PERIODIC}
         }
     }
     CELERY_QUEUES = []
diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py
index e6a80832e..ee90ffbe8 100644
--- a/app/dao/jobs_dao.py
+++ b/app/dao/jobs_dao.py
@@ -157,6 +157,22 @@ def dao_get_all_letter_jobs():
     ).all()
 
 
+def dao_get_letter_jobs_by_status(status):
+    return db.session.query(
+        Job
+    ).join(
+        Job.template
+    ).filter(
+        Job.job_status == status,
+        Template.template_type == LETTER_TYPE,
+        # test letter jobs (and jobs from research mode services) are created with a reserved
+        # filename; exclude them so they are never included in the files sent to DVLA
+        Job.original_file_name != LETTER_TEST_API_FILENAME
+    ).order_by(
+        desc(Job.created_at)
+    ).all()
+
+
 @statsd(namespace="dao")
 def dao_get_job_statistics_for_job(service_id, job_id):
     query = Job.query.join(
diff --git a/app/letters/rest.py b/app/letters/rest.py
index 91c39615a..ce43ea169 100644
--- a/app/letters/rest.py
+++ b/app/letters/rest.py
@@ -16,7 +16,7 @@ register_errors(letter_job)
 
 @letter_job.route('/send-letter-jobs', methods=['POST'])
 def send_letter_jobs():
     job_ids = validate(request.get_json(), letter_job_ids)
-    notify_celery.send_task(name="send-files-to-dvla", args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)
+    notify_celery.send_task(name=QueueNames.DVLA_FILES, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)
     return jsonify(data={"response": "Task created to send files to DVLA"}), 201
diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py
index 2e20ee138..f2720a2cb 100644
--- a/tests/app/celery/test_scheduled_tasks.py
+++ b/tests/app/celery/test_scheduled_tasks.py
@@ -1,3 +1,5 @@
+import uuid
+
 from datetime import datetime, timedelta
 from functools import partial
 from unittest.mock import call, patch, PropertyMock
@@ -20,6 +22,7 @@ from app.celery.scheduled_tasks import (
     remove_csv_files,
     remove_transformed_dvla_files,
     run_scheduled_jobs,
+    run_letter_jobs,
     s3,
     send_daily_performance_platform_stats,
     send_scheduled_notifications,
@@ -28,6 +31,7 @@ from app.celery.scheduled_tasks import (
     timeout_notifications,
     populate_monthly_billing)
 from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient
+from app.config import QueueNames
 from app.dao.jobs_dao import dao_get_job_by_id
 from app.dao.notifications_dao import dao_get_scheduled_notifications
 from app.dao.provider_details_dao import (
@@ -677,3 +681,20 @@ def test_populate_monthly_billing_updates_correct_month_in_bst(sample_template):
     assert monthly_billing[1].notification_type == 'sms'
     assert monthly_billing[1].monthly_totals[0]['billing_units'] == 1
     assert monthly_billing[1].monthly_totals[0]['total_cost'] == 0.0123
+
+
+def test_run_letter_jobs(client, mocker):
+    # the dao returns Job models - the task is expected to unpack their ids into strings
+    letter_jobs = [mocker.Mock(id=uuid.uuid4()), mocker.Mock(id=uuid.uuid4())]
+    job_ids = [str(job.id) for job in letter_jobs]
+    mocker.patch(
+        "app.celery.scheduled_tasks.dao_get_letter_jobs_by_status",
+        return_value=letter_jobs
+    )
+    mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task")
+
+    run_letter_jobs()
+
+    mock_celery.assert_called_once_with(name=QueueNames.DVLA_FILES,
+                                        args=(job_ids,),
+                                        queue=QueueNames.PROCESS_FTP)
diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py
index d2c00981b..4e2f3062f 100644
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -18,18 +18,24 @@ from app.dao.jobs_dao import (
     dao_get_all_notifications_for_job,
     dao_get_jobs_older_than_limited_by,
     dao_get_job_statistics_for_job,
-    dao_get_job_stats_for_service)
+    dao_get_job_stats_for_service,
+    dao_get_letter_jobs_by_status)
 from app.dao.statistics_dao import create_or_update_job_sending_statistics, update_job_stats_outcome_count
 from app.models import (
     Job,
     JobStatistics,
-    EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
+    EMAIL_TYPE, SMS_TYPE, LETTER_TYPE,
+    JOB_STATUS_READY_TO_SEND, JOB_STATUS_PENDING
 )
 from tests.app.conftest import sample_notification as create_notification
 from tests.app.conftest import sample_job as create_job
 from tests.app.conftest import sample_service as create_service
 from tests.app.conftest import sample_template as create_template
-from tests.app.db import create_user
+from tests.app.db import (
+    create_user,
+    create_job as create_db_job,
+    create_template as create_db_template
+)
 
 
 def test_should_have_decorated_notifications_dao_functions():
@@ -542,3 +548,19 @@ def stats_set_up(notify_db, notify_db_session, service):
     update_job_stats_outcome_count(notification_delivered)
     update_job_stats_outcome_count(notification_failed)
     return job_1, job_2
+
+
+def test_dao_get_letter_jobs_by_status(sample_service):
+    create_db_template(service=sample_service, template_type=SMS_TYPE)
+    create_db_template(service=sample_service, template_type=EMAIL_TYPE)
+    letter_template = create_db_template(service=sample_service, template_type=LETTER_TYPE)
+    jobs = []
+    jobs.append(create_db_job(letter_template, job_status=JOB_STATUS_READY_TO_SEND, original_file_name='1.csv'))
+    jobs.append(create_db_job(letter_template, job_status=JOB_STATUS_READY_TO_SEND, original_file_name='2.csv'))
+    # a letter job in any other status must not be picked up by the query
+    create_db_job(letter_template, job_status=JOB_STATUS_PENDING, original_file_name='3.csv')
+
+    result = dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)
+
+    assert len(result) == 2
+    assert set(result) == set(jobs)