Mirror of https://github.com/GSA/notifications-api.git, synced 2026-05-06 00:59:41 -04:00.
Merge pull request #1196 from alphagov/ken-add-scheduled-letter-job: "Add scheduled letter job".
This commit is contained in:
@@ -100,7 +100,7 @@ def register_blueprint(application):
|
||||
from app.notifications.notifications_sms_callback import sms_callback_blueprint
|
||||
from app.notifications.notifications_letter_callback import letter_callback_blueprint
|
||||
from app.authentication.auth import requires_admin_auth, requires_auth, requires_no_auth, restrict_ip_sms
|
||||
from app.letters.send_letter_jobs import letter_job
|
||||
from app.letters.rest import letter_job
|
||||
from app.billing.rest import billing_blueprint
|
||||
|
||||
service_blueprint.before_request(requires_admin_auth)
|
||||
|
||||
@@ -13,6 +13,7 @@ from app.dao.date_util import get_month_start_and_end_date_in_utc
|
||||
from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago
|
||||
from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
|
||||
from app.dao.jobs_dao import (
|
||||
dao_get_letter_jobs_by_status,
|
||||
dao_set_scheduled_jobs_to_pending,
|
||||
dao_get_jobs_older_than_limited_by
|
||||
)
|
||||
@@ -32,11 +33,11 @@ from app.dao.provider_details_dao import (
|
||||
dao_toggle_sms_provider
|
||||
)
|
||||
from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
|
||||
from app.models import LETTER_TYPE
|
||||
from app.models import LETTER_TYPE, JOB_STATUS_READY_TO_SEND
|
||||
from app.notifications.process_notifications import send_notification_to_queue
|
||||
from app.statsd_decorators import statsd
|
||||
from app.celery.tasks import process_job
|
||||
from app.config import QueueNames
|
||||
from app.config import QueueNames, TaskNames
|
||||
from app.utils import convert_utc_to_bst
|
||||
|
||||
|
||||
@@ -302,3 +303,15 @@ def populate_monthly_billing():
|
||||
start_date, end_date = get_month_start_and_end_date_in_utc(yesterday_in_bst)
|
||||
services = get_service_ids_that_need_billing_populated(start_date=start_date, end_date=end_date)
|
||||
[create_or_update_monthly_billing(service_id=s.service_id, billing_month=end_date) for s in services]
|
||||
|
||||
|
||||
@notify_celery.task(name="run-letter-jobs")
@statsd(namespace="tasks")
def run_letter_jobs():
    """Queue every letter job that is ready to send onto the FTP processing queue.

    Collects the ids of all letter jobs currently in the READY_TO_SEND status
    and sends them, as a single list argument, to the DVLA file task consumed
    by the FTP app.
    """
    job_ids = [job.id for job in dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)]
    notify_celery.send_task(
        name=TaskNames.DVLA_FILES,
        # args must be a one-element tuple containing the list of ids.
        # The original `args=(job_ids)` had no trailing comma, so the list
        # itself was used as args and each id became a separate positional
        # argument — compare send_letter_jobs, which correctly sends
        # (job_ids['job_ids'],).
        args=(job_ids,),
        queue=QueueNames.PROCESS_FTP
    )
    current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(job_ids), QueueNames.PROCESS_FTP))
|
||||
|
||||
@@ -44,10 +44,13 @@ class QueueNames(object):
|
||||
QueueNames.JOBS,
|
||||
QueueNames.RETRY,
|
||||
QueueNames.NOTIFY,
|
||||
QueueNames.PROCESS_FTP
|
||||
]
|
||||
|
||||
|
||||
class TaskNames(object):
    """Well-known names of celery tasks that are dispatched by name
    (via ``send_task``) to other applications, e.g. the FTP app."""

    # Task in the FTP app that transmits letter files to DVLA.
    DVLA_FILES = 'send-files-to-dvla'
|
||||
|
||||
|
||||
class Config(object):
|
||||
# URL of admin app
|
||||
ADMIN_BASE_URL = os.environ['ADMIN_BASE_URL']
|
||||
@@ -220,6 +223,11 @@ class Config(object):
|
||||
'task': 'populate_monthly_billing',
|
||||
'schedule': crontab(minute=10, hour=5),
|
||||
'options': {'queue': QueueNames.PERIODIC}
|
||||
},
|
||||
'run-letter-jobs': {
|
||||
'task': 'run-letter-jobs',
|
||||
'schedule': crontab(minute=30, hour=17),
|
||||
'options': {'queue': QueueNames.PERIODIC}
|
||||
}
|
||||
}
|
||||
CELERY_QUEUES = []
|
||||
|
||||
@@ -157,6 +157,22 @@ def dao_get_all_letter_jobs():
|
||||
).all()
|
||||
|
||||
|
||||
def dao_get_letter_jobs_by_status(status):
    """Return all real (non-test) letter jobs with the given status, newest first."""
    letter_jobs = db.session.query(Job).join(Job.template)
    letter_jobs = letter_jobs.filter(
        Job.job_status == status,
        Template.template_type == LETTER_TYPE,
        # test letter jobs (or from research mode services) are created with a different filename,
        # exclude them so we don't see them on the send to CSV
        Job.original_file_name != LETTER_TEST_API_FILENAME
    )
    return letter_jobs.order_by(desc(Job.created_at)).all()
|
||||
|
||||
|
||||
@statsd(namespace="dao")
|
||||
def dao_get_job_statistics_for_job(service_id, job_id):
|
||||
query = Job.query.join(
|
||||
|
||||
@@ -2,7 +2,7 @@ from flask import Blueprint, jsonify
|
||||
from flask import request
|
||||
|
||||
from app import notify_celery
|
||||
from app.config import QueueNames
|
||||
from app.config import QueueNames, TaskNames
|
||||
from app.dao.jobs_dao import dao_get_all_letter_jobs
|
||||
from app.schemas import job_schema
|
||||
from app.v2.errors import register_errors
|
||||
@@ -16,7 +16,7 @@ register_errors(letter_job)
|
||||
@letter_job.route('/send-letter-jobs', methods=['POST'])
def send_letter_jobs():
    """Validate a posted list of job ids and queue ONE task to send their files to DVLA.

    Returns a 201 with a confirmation payload; validation errors are raised by
    ``validate`` and handled by the blueprint's registered error handlers.
    """
    job_ids = validate(request.get_json(), letter_job_ids)
    # Single dispatch only — the source diff left both the old string-named call
    # and the new TaskNames call in place, which would enqueue the task twice.
    # args is a one-element tuple whose sole element is the list of job ids.
    notify_celery.send_task(name=TaskNames.DVLA_FILES, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)

    return jsonify(data={"response": "Task created to send files to DVLA"}), 201
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import uuid
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from unittest.mock import call, patch, PropertyMock
|
||||
@@ -20,6 +22,7 @@ from app.celery.scheduled_tasks import (
|
||||
remove_csv_files,
|
||||
remove_transformed_dvla_files,
|
||||
run_scheduled_jobs,
|
||||
run_letter_jobs,
|
||||
s3,
|
||||
send_daily_performance_platform_stats,
|
||||
send_scheduled_notifications,
|
||||
@@ -28,6 +31,7 @@ from app.celery.scheduled_tasks import (
|
||||
timeout_notifications,
|
||||
populate_monthly_billing)
|
||||
from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient
|
||||
from app.config import QueueNames, TaskNames
|
||||
from app.dao.jobs_dao import dao_get_job_by_id
|
||||
from app.dao.notifications_dao import dao_get_scheduled_notifications
|
||||
from app.dao.provider_details_dao import (
|
||||
@@ -37,6 +41,7 @@ from app.dao.provider_details_dao import (
|
||||
from app.models import (
|
||||
Service, Template,
|
||||
SMS_TYPE, LETTER_TYPE,
|
||||
JOB_STATUS_READY_TO_SEND,
|
||||
MonthlyBilling)
|
||||
from app.utils import get_london_midnight_in_utc, convert_utc_to_bst
|
||||
from tests.app.db import create_notification, create_service, create_template, create_job, create_rate
|
||||
@@ -677,3 +682,19 @@ def test_populate_monthly_billing_updates_correct_month_in_bst(sample_template):
|
||||
assert monthly_billing[1].notification_type == 'sms'
|
||||
assert monthly_billing[1].monthly_totals[0]['billing_units'] == 1
|
||||
assert monthly_billing[1].monthly_totals[0]['total_cost'] == 0.0123
|
||||
|
||||
|
||||
def test_run_letter_jobs(client, mocker, sample_letter_template):
    """run_letter_jobs should send one DVLA task carrying the list of ready job ids."""
    jobs = [create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND),
            create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND)]
    mocker.patch(
        "app.celery.scheduled_tasks.dao_get_letter_jobs_by_status",
        return_value=jobs
    )
    mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task")

    run_letter_jobs()

    # args is a one-element tuple containing the id list; the original
    # assertion `args=([...])` had no trailing comma and therefore pinned the
    # buggy "list used as args" call shape.
    mock_celery.assert_called_once_with(name=TaskNames.DVLA_FILES,
                                        args=([job.id for job in jobs],),
                                        queue=QueueNames.PROCESS_FTP)
|
||||
|
||||
@@ -18,18 +18,25 @@ from app.dao.jobs_dao import (
|
||||
dao_get_all_notifications_for_job,
|
||||
dao_get_jobs_older_than_limited_by,
|
||||
dao_get_job_statistics_for_job,
|
||||
dao_get_job_stats_for_service)
|
||||
dao_get_job_stats_for_service,
|
||||
dao_get_letter_jobs_by_status)
|
||||
from app.dao.statistics_dao import create_or_update_job_sending_statistics, update_job_stats_outcome_count
|
||||
from app.models import (
|
||||
Job, JobStatistics,
|
||||
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
|
||||
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE,
|
||||
JOB_STATUS_READY_TO_SEND, JOB_STATUS_SENT_TO_DVLA, JOB_STATUS_FINISHED, JOB_STATUS_PENDING
|
||||
)
|
||||
|
||||
from tests.app.conftest import sample_notification as create_notification
|
||||
from tests.app.conftest import sample_job as create_job
|
||||
from tests.app.conftest import sample_service as create_service
|
||||
from tests.app.conftest import sample_template as create_template
|
||||
from tests.app.db import create_user
|
||||
from tests.app.db import (
|
||||
create_user,
|
||||
create_job as create_db_job,
|
||||
create_service as create_db_service,
|
||||
create_template as create_db_template
|
||||
)
|
||||
|
||||
|
||||
def test_should_have_decorated_notifications_dao_functions():
|
||||
@@ -542,3 +549,37 @@ def stats_set_up(notify_db, notify_db_session, service):
|
||||
update_job_stats_outcome_count(notification_delivered)
|
||||
update_job_stats_outcome_count(notification_failed)
|
||||
return job_1, job_2
|
||||
|
||||
|
||||
def test_dao_get_letter_jobs_by_status(sample_service):
    """Only letter jobs in the requested status are returned; other types/statuses are excluded."""
    other_service = create_db_service(service_name="another service")

    sms_template = create_db_template(service=sample_service, template_type=SMS_TYPE)
    email_template = create_db_template(service=sample_service, template_type=EMAIL_TYPE)
    letter_template = create_db_template(service=sample_service, template_type=LETTER_TYPE)
    other_letter_template = create_db_template(service=other_service, template_type=LETTER_TYPE)

    # The two jobs we expect back: ready-to-send letter jobs, across services.
    expected_jobs = [
        create_db_job(
            letter_template,
            job_status=JOB_STATUS_READY_TO_SEND,
            original_file_name='1.csv'
        ),
        create_db_job(
            other_letter_template,
            job_status=JOB_STATUS_READY_TO_SEND,
            original_file_name='2.csv'
        ),
    ]

    # Noise: wrong template type or wrong status — none of these should match.
    create_db_job(sms_template, job_status=JOB_STATUS_FINISHED)
    create_db_job(email_template, job_status=JOB_STATUS_FINISHED)
    create_db_job(letter_template, job_status=JOB_STATUS_SENT_TO_DVLA)
    create_db_job(letter_template, job_status=JOB_STATUS_FINISHED)
    create_db_job(letter_template, job_status=JOB_STATUS_PENDING)

    result = dao_get_letter_jobs_by_status(JOB_STATUS_READY_TO_SEND)

    assert len(result) == 2
    assert set(result) == set(expected_jobs)
|
||||
|
||||
@@ -8,7 +8,7 @@ from tests import create_authorization_header
|
||||
|
||||
|
||||
def test_send_letter_jobs(client, mocker):
|
||||
mock_celery = mocker.patch("app.letters.send_letter_jobs.notify_celery.send_task")
|
||||
mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task")
|
||||
job_ids = {"job_ids": [str(uuid.uuid4()), str(uuid.uuid4()), str(uuid.uuid4())]}
|
||||
|
||||
auth_header = create_authorization_header()
|
||||
@@ -27,7 +27,7 @@ def test_send_letter_jobs(client, mocker):
|
||||
|
||||
|
||||
def test_send_letter_jobs_throws_validation_error(client, mocker):
|
||||
mock_celery = mocker.patch("app.letters.send_letter_jobs.notify_celery.send_task")
|
||||
mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task")
|
||||
|
||||
job_ids = {"job_ids": ["1", "2"]}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from unittest import mock
|
||||
import pytest
|
||||
|
||||
from app import config
|
||||
from app.config import QueueNames
|
||||
|
||||
|
||||
def cf_conf():
|
||||
@@ -57,3 +58,21 @@ def test_load_config_if_cloudfoundry_not_available(monkeypatch, reload_config):
|
||||
def test_cloudfoundry_config_has_different_defaults():
|
||||
# these should always be set on Sandbox
|
||||
assert config.Sandbox.REDIS_ENABLED is False
|
||||
|
||||
|
||||
def test_queue_names_all_queues_correct():
    """all_queues() must return exactly the queue names used by the API."""
    expected = {
        QueueNames.PRIORITY,
        QueueNames.PERIODIC,
        QueueNames.DATABASE,
        QueueNames.SEND_SMS,
        QueueNames.SEND_EMAIL,
        QueueNames.RESEARCH_MODE,
        QueueNames.STATISTICS,
        QueueNames.JOBS,
        QueueNames.RETRY,
        QueueNames.NOTIFY,
    }

    queues = QueueNames.all_queues()

    assert len(queues) == 10
    assert set(queues) == expected
|
||||
|
||||
Reference in New Issue
Block a user