Remove letter-jobs api

When we first built letters, you could only send them via a CSV upload, so initially we needed a way to send those files to DVLA per job.
We have since stopped using this page, so let's delete it!
This commit is contained in:
Rebecca Law
2018-11-15 17:24:37 +00:00
parent 07078e804e
commit 7a16ac35bd
8 changed files with 63 additions and 249 deletions

View File

@@ -22,7 +22,6 @@ from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_
from app.dao.invited_org_user_dao import delete_org_invitations_created_more_than_two_days_ago from app.dao.invited_org_user_dao import delete_org_invitations_created_more_than_two_days_ago
from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
from app.dao.jobs_dao import ( from app.dao.jobs_dao import (
dao_get_letter_job_ids_by_status,
dao_set_scheduled_jobs_to_pending, dao_set_scheduled_jobs_to_pending,
dao_get_jobs_older_than_limited_by dao_get_jobs_older_than_limited_by
) )
@@ -52,7 +51,6 @@ from app.models import (
NOTIFICATION_SENDING, NOTIFICATION_SENDING,
LETTER_TYPE, LETTER_TYPE,
JOB_STATUS_IN_PROGRESS, JOB_STATUS_IN_PROGRESS,
JOB_STATUS_READY_TO_SEND,
JOB_STATUS_ERROR, JOB_STATUS_ERROR,
SMS_TYPE, SMS_TYPE,
EMAIL_TYPE, EMAIL_TYPE,
@@ -336,7 +334,6 @@ def delete_dvla_response_files_older_than_seven_days():
@notify_celery.task(name="raise-alert-if-letter-notifications-still-sending") @notify_celery.task(name="raise-alert-if-letter-notifications-still-sending")
@statsd(namespace="tasks") @statsd(namespace="tasks")
def raise_alert_if_letter_notifications_still_sending(): def raise_alert_if_letter_notifications_still_sending():
today = datetime.utcnow().date() today = datetime.utcnow().date()
# Do nothing on the weekend # Do nothing on the weekend
@@ -370,19 +367,6 @@ def raise_alert_if_letter_notifications_still_sending():
current_app.logger.info(message) current_app.logger.info(message)
@notify_celery.task(name="run-letter-jobs")
@statsd(namespace="tasks")
def run_letter_jobs():
job_ids = dao_get_letter_job_ids_by_status(JOB_STATUS_READY_TO_SEND)
if job_ids:
notify_celery.send_task(
name=TaskNames.DVLA_JOBS,
args=(job_ids,),
queue=QueueNames.PROCESS_FTP
)
current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(job_ids), QueueNames.PROCESS_FTP))
@notify_celery.task(name='check-job-status') @notify_celery.task(name='check-job-status')
@statsd(namespace="tasks") @statsd(namespace="tasks")
def check_job_status(): def check_job_status():
@@ -446,14 +430,13 @@ def letter_raise_alert_if_no_ack_file_for_zip():
for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
subfolder=datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', subfolder=datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent',
suffix='.TXT'): suffix='.TXT'):
subname = key.split('/')[-1] # strip subfolder in name
subname = key.split('/')[-1] # strip subfolder in name
zip_file_set.add(subname.upper().rstrip('.TXT')) zip_file_set.add(subname.upper().rstrip('.TXT'))
# get acknowledgement file # get acknowledgement file
ack_file_set = set() ack_file_set = set()
yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format
for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'],
subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday): subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday):
@@ -465,7 +448,7 @@ def letter_raise_alert_if_no_ack_file_for_zip():
for key in ack_file_set: for key in ack_file_set:
if today_str in key: if today_str in key:
content = s3.get_s3_file(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], key) content = s3.get_s3_file(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], key)
for zip_file in content.split('\n'): # each line for zip_file in content.split('\n'): # each line
s = zip_file.split('|') s = zip_file.split('|')
ack_content_set.add(s[0].upper()) ack_content_set.add(s[0].upper())

View File

@@ -54,7 +54,6 @@ class QueueNames(object):
class TaskNames(object): class TaskNames(object):
DVLA_JOBS = 'send-jobs-to-dvla'
PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs' PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs'
ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs' ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs'
SCAN_FILE = 'scan-file' SCAN_FILE = 'scan-file'

View File

@@ -144,21 +144,3 @@ def dao_get_all_letter_jobs():
).order_by( ).order_by(
desc(Job.created_at) desc(Job.created_at)
).all() ).all()
def dao_get_letter_job_ids_by_status(status):
jobs = db.session.query(
Job
).join(
Job.template
).filter(
Job.job_status == status,
Template.template_type == LETTER_TYPE,
# test letter jobs (or from research mode services) are created with a different filename,
# exclude them so we don't see them on the send to CSV
Job.original_file_name != LETTER_TEST_API_FILENAME
).order_by(
desc(Job.created_at)
).all()
return [str(job.id) for job in jobs]

View File

@@ -1,35 +1,16 @@
from flask import Blueprint, jsonify from flask import Blueprint, jsonify
from flask import request from flask import request
from app import notify_celery
from app.celery.tasks import process_returned_letters_list from app.celery.tasks import process_returned_letters_list
from app.config import QueueNames, TaskNames from app.config import QueueNames
from app.dao.jobs_dao import dao_get_all_letter_jobs from app.letters.letter_schemas import letter_references
from app.schemas import job_schema
from app.v2.errors import register_errors
from app.letters.letter_schemas import letter_job_ids, letter_references
from app.schema_validation import validate from app.schema_validation import validate
from app.v2.errors import register_errors
letter_job = Blueprint("letter-job", __name__) letter_job = Blueprint("letter-job", __name__)
register_errors(letter_job) register_errors(letter_job)
@letter_job.route('/send-letter-jobs', methods=['POST'])
def send_letter_jobs():
job_ids = validate(request.get_json(), letter_job_ids)
notify_celery.send_task(name=TaskNames.DVLA_JOBS, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP)
return jsonify(data={"response": "Task created to send files to DVLA"}), 201
@letter_job.route('/letter-jobs', methods=['GET'])
def get_letter_jobs():
letter_jobs = dao_get_all_letter_jobs()
data = job_schema.dump(letter_jobs, many=True).data
return jsonify(data=data), 200
@letter_job.route('/letters/returned', methods=['POST']) @letter_job.route('/letters/returned', methods=['POST'])
def create_process_returned_letters_job(): def create_process_returned_letters_job():
references = validate(request.get_json(), letter_references) references = validate(request.get_json(), letter_references)

View File

@@ -1,11 +1,11 @@
import functools
from datetime import datetime, timedelta from datetime import datetime, timedelta
from functools import partial from functools import partial
from unittest.mock import call, patch, PropertyMock from unittest.mock import call, patch, PropertyMock
import functools
import pytest
import pytz import pytz
from flask import current_app from flask import current_app
import pytest
from freezegun import freeze_time from freezegun import freeze_time
from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient
@@ -25,7 +25,6 @@ from app.celery.scheduled_tasks import (
remove_csv_files, remove_csv_files,
remove_transformed_dvla_files, remove_transformed_dvla_files,
run_scheduled_jobs, run_scheduled_jobs,
run_letter_jobs,
s3, s3,
send_daily_performance_platform_stats, send_daily_performance_platform_stats,
send_scheduled_notifications, send_scheduled_notifications,
@@ -36,6 +35,7 @@ from app.celery.scheduled_tasks import (
letter_raise_alert_if_no_ack_file_for_zip, letter_raise_alert_if_no_ack_file_for_zip,
replay_created_notifications replay_created_notifications
) )
from app.celery.service_callback_tasks import create_delivery_status_callback_data
from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient
from app.config import QueueNames, TaskNames from app.config import QueueNames, TaskNames
from app.dao.jobs_dao import dao_get_job_by_id from app.dao.jobs_dao import dao_get_job_by_id
@@ -49,20 +49,14 @@ from app.models import (
NotificationHistory, NotificationHistory,
Service, Service,
StatsTemplateUsageByMonth, StatsTemplateUsageByMonth,
JOB_STATUS_READY_TO_SEND,
JOB_STATUS_IN_PROGRESS, JOB_STATUS_IN_PROGRESS,
JOB_STATUS_SENT_TO_DVLA,
JOB_STATUS_ERROR, JOB_STATUS_ERROR,
LETTER_TYPE, LETTER_TYPE,
SMS_TYPE SMS_TYPE
) )
from app.utils import get_london_midnight_in_utc from app.utils import get_london_midnight_in_utc
from app.celery.service_callback_tasks import create_delivery_status_callback_data
from app.v2.errors import JobIncompleteError from app.v2.errors import JobIncompleteError
from tests.app.db import ( from tests.app.aws.test_s3 import single_s3_object_stub
create_notification, create_service, create_template, create_job, create_service_callback_api
)
from tests.app.conftest import ( from tests.app.conftest import (
sample_job as create_sample_job, sample_job as create_sample_job,
sample_notification_history as create_notification_history, sample_notification_history as create_notification_history,
@@ -70,7 +64,9 @@ from tests.app.conftest import (
create_custom_template, create_custom_template,
datetime_in_past datetime_in_past
) )
from tests.app.aws.test_s3 import single_s3_object_stub from tests.app.db import (
create_notification, create_service, create_template, create_job, create_service_callback_api
)
from tests.conftest import set_config_values from tests.conftest import set_config_values
@@ -275,7 +271,7 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_
@freeze_time('2016-10-18T10:00:00') @freeze_time('2016-10-18T10:00:00')
def test_will_remove_csv_files_for_jobs_older_than_seven_days( def test_will_remove_csv_files_for_jobs_older_than_seven_days(
notify_db, notify_db_session, mocker, sample_template notify_db, notify_db_session, mocker, sample_template
): ):
mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3')
""" """
@@ -303,12 +299,13 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(
def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker): def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker):
send_mock = mocker.patch('app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa send_mock = mocker.patch(
'app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa
with patch.object( with patch.object(
PerformancePlatformClient, PerformancePlatformClient,
'active', 'active',
new_callable=PropertyMock new_callable=PropertyMock
) as mock_active: ) as mock_active:
mock_active.return_value = False mock_active.return_value = False
send_daily_performance_platform_stats() send_daily_performance_platform_stats()
@@ -318,12 +315,13 @@ def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mo
@freeze_time("2016-01-11 12:30:00") @freeze_time("2016-01-11 12:30:00")
def test_send_total_sent_notifications_to_performance_platform_calls_with_correct_totals( def test_send_total_sent_notifications_to_performance_platform_calls_with_correct_totals(
notify_db, notify_db,
notify_db_session, notify_db_session,
sample_template, sample_template,
mocker mocker
): ):
perf_mock = mocker.patch('app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa perf_mock = mocker.patch(
'app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa
notification_history = partial( notification_history = partial(
create_notification_history, create_notification_history,
@@ -346,9 +344,9 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc
notification_history(notification_type='email') notification_history(notification_type='email')
with patch.object( with patch.object(
PerformancePlatformClient, PerformancePlatformClient,
'active', 'active',
new_callable=PropertyMock new_callable=PropertyMock
) as mock_active: ) as mock_active:
mock_active.return_value = True mock_active.return_value = True
send_total_sent_notifications_to_performance_platform(yesterday) send_total_sent_notifications_to_performance_platform(yesterday)
@@ -360,8 +358,8 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc
def test_switch_current_sms_provider_on_slow_delivery_does_not_run_if_config_unset( def test_switch_current_sms_provider_on_slow_delivery_does_not_run_if_config_unset(
notify_api, notify_api,
mocker mocker
): ):
get_notifications_mock = mocker.patch( get_notifications_mock = mocker.patch(
'app.celery.scheduled_tasks.is_delivery_slow_for_provider' 'app.celery.scheduled_tasks.is_delivery_slow_for_provider'
@@ -379,9 +377,9 @@ def test_switch_current_sms_provider_on_slow_delivery_does_not_run_if_config_uns
def test_switch_providers_on_slow_delivery_runs_if_config_set( def test_switch_providers_on_slow_delivery_runs_if_config_set(
notify_api, notify_api,
mocker, mocker,
prepare_current_provider prepare_current_provider
): ):
get_notifications_mock = mocker.patch( get_notifications_mock = mocker.patch(
'app.celery.scheduled_tasks.is_delivery_slow_for_provider', 'app.celery.scheduled_tasks.is_delivery_slow_for_provider',
@@ -398,10 +396,10 @@ def test_switch_providers_on_slow_delivery_runs_if_config_set(
def test_switch_providers_triggers_on_slow_notification_delivery( def test_switch_providers_triggers_on_slow_notification_delivery(
notify_api, notify_api,
mocker, mocker,
prepare_current_provider, prepare_current_provider,
sample_user sample_user
): ):
mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user) mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user)
starting_provider = get_current_provider('sms') starting_provider = get_current_provider('sms')
@@ -420,10 +418,10 @@ def test_switch_providers_triggers_on_slow_notification_delivery(
def test_switch_providers_on_slow_delivery_does_not_switch_if_already_switched( def test_switch_providers_on_slow_delivery_does_not_switch_if_already_switched(
notify_api, notify_api,
mocker, mocker,
prepare_current_provider, prepare_current_provider,
sample_user sample_user
): ):
mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user) mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user)
starting_provider = get_current_provider('sms') starting_provider = get_current_provider('sms')
@@ -444,10 +442,10 @@ def test_switch_providers_on_slow_delivery_does_not_switch_if_already_switched(
def test_switch_providers_on_slow_delivery_does_not_switch_based_on_older_notifications( def test_switch_providers_on_slow_delivery_does_not_switch_based_on_older_notifications(
notify_api, notify_api,
mocker, mocker,
prepare_current_provider, prepare_current_provider,
sample_user, sample_user,
): ):
""" """
@@ -745,33 +743,6 @@ def test_tuesday_alert_if_letter_notifications_still_sending_reports_friday_lett
) )
def test_run_letter_jobs(client, mocker, sample_letter_template):
jobs = [create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND),
create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND)]
job_ids = [str(j.id) for j in jobs]
mocker.patch(
"app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status",
return_value=job_ids
)
mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task")
run_letter_jobs()
mock_celery.assert_called_once_with(name=TaskNames.DVLA_JOBS,
args=(job_ids,),
queue=QueueNames.PROCESS_FTP)
def test_run_letter_jobs_does_nothing_if_no_ready_jobs(client, mocker, sample_letter_template):
create_job(sample_letter_template, job_status=JOB_STATUS_IN_PROGRESS)
create_job(sample_letter_template, job_status=JOB_STATUS_SENT_TO_DVLA)
mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task")
run_letter_jobs()
assert not mock_celery.called
def test_check_job_status_task_raises_job_incomplete_error(mocker, sample_template): def test_check_job_status_task_raises_job_incomplete_error(mocker, sample_template):
mock_celery = mocker.patch('app.celery.tasks.notify_celery.send_task') mock_celery = mocker.patch('app.celery.tasks.notify_celery.send_task')
job = create_job(template=sample_template, notification_count=3, job = create_job(template=sample_template, notification_count=3,
@@ -1040,7 +1011,6 @@ def test_dao_fetch_monthly_historical_stats_by_template_null_template_id_not_cou
def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None): def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None):
if subfolder == '2018-01-11/zips_sent': if subfolder == '2018-01-11/zips_sent':
return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT'] return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT']
if subfolder == 'root/dispatch': if subfolder == 'root/dispatch':
@@ -1064,7 +1034,7 @@ def test_letter_not_raise_alert_if_ack_files_match_zip_list(mocker, notify_db):
letter_raise_alert_if_no_ack_file_for_zip() letter_raise_alert_if_no_ack_file_for_zip()
yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # Datatime format on AWS yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # Datatime format on AWS
subfoldername = datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent' subfoldername = datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent'
assert mock_file_list.call_count == 2 assert mock_file_list.call_count == 2
assert mock_file_list.call_args_list == [ assert mock_file_list.call_args_list == [

View File

@@ -1,8 +1,8 @@
import uuid
from datetime import datetime, timedelta from datetime import datetime, timedelta
from functools import partial from functools import partial
import pytest
import uuid
import pytest
from freezegun import freeze_time from freezegun import freeze_time
from app.dao.jobs_dao import ( from app.dao.jobs_dao import (
@@ -14,23 +14,18 @@ from app.dao.jobs_dao import (
dao_get_future_scheduled_job_by_id_and_service_id, dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job, dao_get_notification_outcomes_for_job,
dao_update_job_status, dao_update_job_status,
dao_get_jobs_older_than_limited_by, dao_get_jobs_older_than_limited_by
dao_get_letter_job_ids_by_status) )
from app.models import ( from app.models import (
Job, Job,
EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE
JOB_STATUS_READY_TO_SEND, JOB_STATUS_SENT_TO_DVLA, JOB_STATUS_FINISHED, JOB_STATUS_PENDING
) )
from tests.app.conftest import sample_notification as create_notification
from tests.app.conftest import sample_job as create_job from tests.app.conftest import sample_job as create_job
from tests.app.conftest import sample_notification as create_notification
from tests.app.conftest import sample_service as create_service from tests.app.conftest import sample_service as create_service
from tests.app.conftest import sample_template as create_template from tests.app.conftest import sample_template as create_template
from tests.app.db import ( from tests.app.db import (
create_user, create_user
create_job as create_db_job,
create_service as create_db_service,
create_template as create_db_template
) )
@@ -211,7 +206,6 @@ def test_get_jobs_for_service_with_limit_days_edge_case(notify_db, notify_db_ses
def test_get_jobs_for_service_in_processed_at_then_created_at_order(notify_db, notify_db_session, sample_template): def test_get_jobs_for_service_in_processed_at_then_created_at_order(notify_db, notify_db_session, sample_template):
_create_job = partial(create_job, notify_db, notify_db_session, sample_template.service, sample_template) _create_job = partial(create_job, notify_db, notify_db_session, sample_template.service, sample_template)
from_hour = partial(datetime, 2001, 1, 1) from_hour = partial(datetime, 2001, 1, 1)
@@ -335,11 +329,11 @@ def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_
'Report', 'Report',
]) ])
def test_get_jobs_for_service_doesnt_return_test_messages( def test_get_jobs_for_service_doesnt_return_test_messages(
notify_db, notify_db,
notify_db_session, notify_db_session,
sample_template, sample_template,
sample_job, sample_job,
file_name, file_name,
): ):
create_job( create_job(
notify_db, notify_db,
@@ -394,26 +388,3 @@ def assert_job_stat(job, result, sent, delivered, failed):
assert result.sent == sent assert result.sent == sent
assert result.delivered == delivered assert result.delivered == delivered
assert result.failed == failed assert result.failed == failed
def test_dao_get_letter_job_ids_by_status(sample_service):
another_service = create_db_service(service_name="another service")
sms_template = create_db_template(service=sample_service, template_type=SMS_TYPE)
email_template = create_db_template(service=sample_service, template_type=EMAIL_TYPE)
letter_template_1 = create_db_template(service=sample_service, template_type=LETTER_TYPE)
letter_template_2 = create_db_template(service=another_service, template_type=LETTER_TYPE)
letter_job_1 = create_db_job(letter_template_1, job_status=JOB_STATUS_READY_TO_SEND, original_file_name='1.csv')
letter_job_2 = create_db_job(letter_template_2, job_status=JOB_STATUS_READY_TO_SEND, original_file_name='2.csv')
ready_letter_job_ids = [str(letter_job_1.id), str(letter_job_2.id)]
create_db_job(sms_template, job_status=JOB_STATUS_FINISHED)
create_db_job(email_template, job_status=JOB_STATUS_FINISHED)
create_db_job(letter_template_1, job_status=JOB_STATUS_SENT_TO_DVLA)
create_db_job(letter_template_1, job_status=JOB_STATUS_FINISHED)
create_db_job(letter_template_2, job_status=JOB_STATUS_PENDING)
result = dao_get_letter_job_ids_by_status(JOB_STATUS_READY_TO_SEND)
assert len(result) == 2
assert set(result) == set(ready_letter_job_ids)

View File

@@ -1,67 +0,0 @@
from flask import json
from app.variables import LETTER_TEST_API_FILENAME
from tests import create_authorization_header
from tests.app.db import create_job
def test_send_letter_jobs(client, mocker, sample_letter_template):
mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task")
job_1 = create_job(sample_letter_template)
job_2 = create_job(sample_letter_template)
job_3 = create_job(sample_letter_template)
job_ids = {"job_ids": [str(job_1.id), str(job_2.id), str(job_3.id)]}
auth_header = create_authorization_header()
response = client.post(
path='/send-letter-jobs',
data=json.dumps(job_ids),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
assert json.loads(response.get_data())['data'] == {'response': "Task created to send files to DVLA"}
mock_celery.assert_called_once_with(name="send-jobs-to-dvla",
args=(job_ids['job_ids'],),
queue="process-ftp-tasks")
def test_send_letter_jobs_throws_validation_error(client, mocker):
mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task")
job_ids = {"job_ids": ["1", "2"]}
auth_header = create_authorization_header()
response = client.post(
path='/send-letter-jobs',
data=json.dumps(job_ids),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
assert not mock_celery.called
def test_get_letter_jobs_excludes_non_letter_jobs(client, sample_letter_job, sample_job):
auth_header = create_authorization_header()
response = client.get(
path='/letter-jobs',
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 200
json_resp = json.loads(response.get_data(as_text=True))
assert len(json_resp['data']) == 1
assert json_resp['data'][0]['id'] == str(sample_letter_job.id)
assert json_resp['data'][0]['service_name']['name'] == sample_letter_job.service.name
assert json_resp['data'][0]['job_status'] == 'pending'
def test_get_letter_jobs_excludes_test_jobs(admin_request, sample_letter_job):
sample_letter_job.original_file_name = LETTER_TEST_API_FILENAME
json_resp = admin_request.get('letter-job.get_letter_jobs')
assert len(json_resp['data']) == 0

View File

@@ -5,7 +5,7 @@ from flask import json
from flask import url_for from flask import url_for
import pytest import pytest
from app.config import TaskNames, QueueNames from app.config import QueueNames
from app.models import ( from app.models import (
Job, Job,
Notification, Notification,
@@ -381,8 +381,8 @@ def test_post_letter_notification_is_delivered_and_has_pdf_uploaded_to_test_lett
mocker mocker
): ):
sample_letter_service = create_service(service_permissions=['letter', 'precompiled_letter']) sample_letter_service = create_service(service_permissions=['letter', 'precompiled_letter'])
mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf', return_value='test.pdf') s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf', return_value='test.pdf')
mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task")
data = { data = {
"reference": "letter-reference", "reference": "letter-reference",
"content": "bGV0dGVyLWNvbnRlbnQ=" "content": "bGV0dGVyLWNvbnRlbnQ="
@@ -397,11 +397,6 @@ def test_post_letter_notification_is_delivered_and_has_pdf_uploaded_to_test_lett
notification = Notification.query.one() notification = Notification.query.one()
assert notification.status == NOTIFICATION_PENDING_VIRUS_CHECK assert notification.status == NOTIFICATION_PENDING_VIRUS_CHECK
s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True) s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True)
mock_celery.assert_called_once_with(
name=TaskNames.SCAN_FILE,
kwargs={'filename': 'test.pdf'},
queue=QueueNames.ANTIVIRUS
)
def test_post_letter_notification_persists_notification_reply_to_text( def test_post_letter_notification_persists_notification_reply_to_text(
@@ -466,8 +461,8 @@ def test_post_precompiled_letter_with_invalid_base64(client, notify_user, mocker
def test_post_precompiled_letter_notification_returns_201(client, notify_user, mocker, postage): def test_post_precompiled_letter_notification_returns_201(client, notify_user, mocker, postage):
sample_service = create_service(service_permissions=['letter', 'precompiled_letter']) sample_service = create_service(service_permissions=['letter', 'precompiled_letter'])
sample_service.postage = postage sample_service.postage = postage
s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf') mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf')
mocker.patch("app.letters.rest.notify_celery.send_task") mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
data = { data = {
"reference": "letter-reference", "reference": "letter-reference",
"content": "bGV0dGVyLWNvbnRlbnQ=" "content": "bGV0dGVyLWNvbnRlbnQ="
@@ -480,7 +475,7 @@ def test_post_precompiled_letter_notification_returns_201(client, notify_user, m
assert response.status_code == 201, response.get_data(as_text=True) assert response.status_code == 201, response.get_data(as_text=True)
s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True) # s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True)
notification = Notification.query.one() notification = Notification.query.one()