Mirror of https://github.com/GSA/notifications-api.git, last synced 2026-04-22 02:09:55 -04:00.
Commit: "Add scheduled letter jobs"
This commit is contained in:
Add scheduled letter jobs
This commit is contained in:
@@ -13,6 +13,7 @@ from app.dao.date_util import get_month_start_and_end_date_in_utc
|
||||
from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago
|
||||
from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
|
||||
from app.dao.jobs_dao import (
|
||||
dao_get_letter_jobs_by_status,
|
||||
dao_set_scheduled_jobs_to_pending,
|
||||
dao_get_jobs_older_than_limited_by
|
||||
)
|
||||
@@ -32,7 +33,7 @@ from app.dao.provider_details_dao import (
|
||||
dao_toggle_sms_provider
|
||||
)
|
||||
from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
|
||||
from app.models import LETTER_TYPE
|
||||
from app.models import LETTER_TYPE, JOB_STATUS_READY_TO_SEND
|
||||
from app.notifications.process_notifications import send_notification_to_queue
|
||||
from app.statsd_decorators import statsd
|
||||
from app.celery.tasks import process_job
|
||||
@@ -302,3 +303,15 @@ def populate_monthly_billing():
|
||||
start_date, end_date = get_month_start_and_end_date_in_utc(yesterday_in_bst)
|
||||
services = get_service_ids_that_need_billing_populated(start_date=start_date, end_date=end_date)
|
||||
[create_or_update_monthly_billing(service_id=s.service_id, billing_month=end_date) for s in services]
|
||||
|
||||
|
||||
@notify_celery.task(name="run-letter-jobs")
@statsd(namespace="tasks")
def run_letter_jobs():
    """Queue all ready-to-send letter jobs for DVLA FTP processing.

    Fetches letter jobs in the READY_TO_SEND status and sends them in a
    single 'send-files-to-dvla' task on the process-ftp queue, then logs
    how many were queued.
    """
    # NOTE(review): dao_get_letter_jobs_by_status returns Job rows rather
    # than bare ids — confirm the DVLA task expects these objects, or map
    # to job.id here.
    ids = dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)
    notify_celery.send_task(
        name=QueueNames.DVLA_FILES,
        # Bug fix: "(ids)" is not a tuple — it is just "ids", so the list was
        # used as the celery args list and each id became a separate positional
        # argument. "(ids,)" passes the whole list as the task's single
        # argument, matching send_letter_jobs() in the letters REST endpoint.
        args=(ids,),
        queue=QueueNames.PROCESS_FTP
    )
    current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(ids), QueueNames.PROCESS_FTP))
|
||||
|
||||
@@ -30,6 +30,7 @@ class QueueNames(object):
|
||||
RETRY = 'retry-tasks'
|
||||
NOTIFY = 'notify-internal-tasks'
|
||||
PROCESS_FTP = 'process-ftp-tasks'
|
||||
DVLA_FILES = 'send-files-to-dvla'
|
||||
|
||||
@staticmethod
|
||||
def all_queues():
|
||||
@@ -44,7 +45,8 @@ class QueueNames(object):
|
||||
QueueNames.JOBS,
|
||||
QueueNames.RETRY,
|
||||
QueueNames.NOTIFY,
|
||||
QueueNames.PROCESS_FTP
|
||||
QueueNames.PROCESS_FTP,
|
||||
QueueNames.DVLA_FILES
|
||||
]
|
||||
|
||||
|
||||
@@ -220,6 +222,11 @@ class Config(object):
|
||||
'task': 'populate_monthly_billing',
|
||||
'schedule': crontab(minute=10, hour=5),
|
||||
'options': {'queue': QueueNames.PERIODIC}
|
||||
},
|
||||
'run-letter-jobs': {
|
||||
'task': 'run-letter-jobs',
|
||||
'schedule': crontab(minute=0, hour=16),
|
||||
'options': {'queue': QueueNames.PERIODIC}
|
||||
}
|
||||
}
|
||||
CELERY_QUEUES = []
|
||||
|
||||
@@ -157,6 +157,22 @@ def dao_get_all_letter_jobs():
|
||||
).all()
|
||||
|
||||
|
||||
def dao_get_letter_jobs_by_status(status):
    """Return letter-template jobs in the given status, newest first.

    Jobs created from the letter test API (or research-mode services) carry a
    distinctive filename; those are excluded so they never appear in the
    send-to-DVLA CSV.
    """
    letter_jobs = (
        db.session.query(Job)
        .join(Job.template)
        .filter(Job.job_status == status)
        .filter(Template.template_type == LETTER_TYPE)
        # test letter jobs (or from research mode services) are created with a different filename,
        # exclude them so we don't see them on the send to CSV
        .filter(Job.original_file_name != LETTER_TEST_API_FILENAME)
        .order_by(desc(Job.created_at))
    )
    return letter_jobs.all()
|
||||
|
||||
|
||||
@statsd(namespace="dao")
|
||||
def dao_get_job_statistics_for_job(service_id, job_id):
|
||||
query = Job.query.join(
|
||||
|
||||
@@ -16,7 +16,7 @@ register_errors(letter_job)
|
||||
@letter_job.route('/send-letter-jobs', methods=['POST'])
def send_letter_jobs():
    """Accept a POSTed list of letter job ids and queue them for DVLA FTP upload."""
    payload = validate(request.get_json(), letter_job_ids)
    notify_celery.send_task(
        name=QueueNames.DVLA_FILES,
        args=(payload['job_ids'],),
        queue=QueueNames.PROCESS_FTP
    )

    return jsonify(data={"response": "Task created to send files to DVLA"}), 201
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import uuid
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from unittest.mock import call, patch, PropertyMock
|
||||
@@ -20,6 +22,7 @@ from app.celery.scheduled_tasks import (
|
||||
remove_csv_files,
|
||||
remove_transformed_dvla_files,
|
||||
run_scheduled_jobs,
|
||||
run_letter_jobs,
|
||||
s3,
|
||||
send_daily_performance_platform_stats,
|
||||
send_scheduled_notifications,
|
||||
@@ -28,6 +31,7 @@ from app.celery.scheduled_tasks import (
|
||||
timeout_notifications,
|
||||
populate_monthly_billing)
|
||||
from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient
|
||||
from app.config import QueueNames
|
||||
from app.dao.jobs_dao import dao_get_job_by_id
|
||||
from app.dao.notifications_dao import dao_get_scheduled_notifications
|
||||
from app.dao.provider_details_dao import (
|
||||
@@ -677,3 +681,18 @@ def test_populate_monthly_billing_updates_correct_month_in_bst(sample_template):
|
||||
assert monthly_billing[1].notification_type == 'sms'
|
||||
assert monthly_billing[1].monthly_totals[0]['billing_units'] == 1
|
||||
assert monthly_billing[1].monthly_totals[0]['total_cost'] == 0.0123
|
||||
|
||||
|
||||
def test_run_letter_jobs(client, mocker):
    """run_letter_jobs queues the ready job ids as ONE task argument on the FTP queue."""
    job_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
    mocker.patch(
        "app.celery.scheduled_tasks.dao_get_letter_jobs_by_status",
        return_value=job_ids
    )
    # Fix: patch notify_celery where the code under test lives
    # (app.celery.scheduled_tasks), not app.celery.tasks — the old target
    # only worked because both modules share the same notify_celery object.
    mock_celery = mocker.patch("app.celery.scheduled_tasks.notify_celery.send_task")

    run_letter_jobs()

    # Fix: assert the intended contract — "(job_ids)" is not a tuple, it is
    # just the list, which silently matched the production missing-comma bug.
    # The whole id list must be sent as a single task argument.
    mock_celery.assert_called_once_with(name=QueueNames.DVLA_FILES,
                                        args=(job_ids,),
                                        queue=QueueNames.PROCESS_FTP)
|
||||
|
||||
@@ -18,18 +18,24 @@ from app.dao.jobs_dao import (
|
||||
dao_get_all_notifications_for_job,
|
||||
dao_get_jobs_older_than_limited_by,
|
||||
dao_get_job_statistics_for_job,
|
||||
dao_get_job_stats_for_service)
|
||||
dao_get_job_stats_for_service,
|
||||
dao_get_letter_jobs_by_status)
|
||||
from app.dao.statistics_dao import create_or_update_job_sending_statistics, update_job_stats_outcome_count
|
||||
from app.models import (
|
||||
Job, JobStatistics,
|
||||
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
|
||||
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE,
|
||||
JOB_STATUS_READY_TO_SEND
|
||||
)
|
||||
|
||||
from tests.app.conftest import sample_notification as create_notification
|
||||
from tests.app.conftest import sample_job as create_job
|
||||
from tests.app.conftest import sample_service as create_service
|
||||
from tests.app.conftest import sample_template as create_template
|
||||
from tests.app.db import create_user
|
||||
from tests.app.db import (
|
||||
create_user,
|
||||
create_job as create_db_job,
|
||||
create_template as create_db_template
|
||||
)
|
||||
|
||||
|
||||
def test_should_have_decorated_notifications_dao_functions():
|
||||
@@ -542,3 +548,17 @@ def stats_set_up(notify_db, notify_db_session, service):
|
||||
update_job_stats_outcome_count(notification_delivered)
|
||||
update_job_stats_outcome_count(notification_failed)
|
||||
return job_1, job_2
|
||||
|
||||
|
||||
def test_dao_get_letter_jobs_by_status(sample_service):
    """Only jobs with a letter template and the requested status are returned."""
    # Non-letter templates act as decoys — their jobs must not come back.
    create_db_template(service=sample_service, template_type=SMS_TYPE)
    create_db_template(service=sample_service, template_type=EMAIL_TYPE)
    letter_template = create_db_template(service=sample_service, template_type=LETTER_TYPE)
    expected_jobs = [
        create_db_job(letter_template, job_status=JOB_STATUS_READY_TO_SEND, original_file_name=filename)
        for filename in ('1.csv', '2.csv')
    ]

    result = dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)

    assert len(result) == 2
    assert set(result) == set(expected_jobs)
|
||||
|
||||
Reference in New Issue
Block a user