Merge pull request #887 from alphagov/update-job-status

Update job status for letter jobs.
This commit is contained in:
Rebecca Law
2017-04-06 14:22:09 +01:00
committed by GitHub
4 changed files with 79 additions and 40 deletions

View File

@@ -22,7 +22,8 @@ from app.dao.jobs_dao import (
dao_update_job,
dao_get_job_by_id,
all_notifications_are_created_for_job,
dao_get_all_notifications_for_job)
dao_get_all_notifications_for_job,
dao_update_job_status)
from app.dao.notifications_dao import get_notification_by_id
from app.dao.services_dao import dao_fetch_service_by_id, fetch_todays_total_message_count
from app.dao.templates_dao import dao_get_template_by_id
@@ -31,7 +32,7 @@ from app.models import (
SMS_TYPE,
LETTER_TYPE,
KEY_TYPE_NORMAL,
JOB_STATUS_CANCELLED, JOB_STATUS_PENDING, JOB_STATUS_IN_PROGRESS, JOB_STATUS_FINISHED)
JOB_STATUS_CANCELLED, JOB_STATUS_PENDING, JOB_STATUS_IN_PROGRESS, JOB_STATUS_FINISHED, JOB_STATUS_READY_TO_SEND)
from app.notifications.process_notifications import persist_notification
from app.service.utils import service_allowed_to_send_to
from app.statsd_decorators import statsd
@@ -73,18 +74,20 @@ def process_job(job_id):
).enumerated_recipients_and_personalisation:
process_row(row_number, recipient, personalisation, template, job, service)
if template.template_type == LETTER_TYPE:
build_dvla_file.apply_async([str(job.id)], queue='process-job')
# temporary logging
current_app.logger.info("send job {} to build-dvla-file in the process-job queue".format(job_id))
else:
job.job_status = JOB_STATUS_FINISHED
finished = datetime.utcnow()
job.job_status = JOB_STATUS_FINISHED
job.processing_started = start
job.processing_finished = finished
dao_update_job(job)
current_app.logger.info(
"Job {} created at {} started at {} finished at {}".format(job_id, job.created_at, start, finished)
)
if template.template_type == LETTER_TYPE:
build_dvla_file.apply_async([str(job.id)], queue='process-job')
# temporary logging
current_app.logger.info("send job {} to build-dvla-file in the process-job queue".format(job_id))
def process_row(row_number, recipient, personalisation, template, job, service):
@@ -267,23 +270,14 @@ def persist_letter(
def build_dvla_file(self, job_id):
try:
if all_notifications_are_created_for_job(job_id):
file_contents = '\n'.join(
str(LetterDVLATemplate(
notification.template.__dict__,
notification.personalisation,
# This unique id is a 7 digits requested by DVLA, not known
# if this number needs to be sequential.
numeric_id=random.randint(1, int('9' * 7)),
contact_block=notification.service.letter_contact_block,
))
for notification in dao_get_all_notifications_for_job(job_id)
)
file_contents = create_dvla_file_contents(job_id)
s3upload(
filedata=file_contents + '\n',
region=current_app.config['AWS_REGION'],
bucket_name=current_app.config['DVLA_UPLOAD_BUCKET_NAME'],
file_location="{}-dvla-job.text".format(job_id)
)
dao_update_job_status(job_id, JOB_STATUS_READY_TO_SEND)
else:
current_app.logger.info("All notifications for job {} are not persisted".format(job_id))
self.retry(queue="retry", exc="All notifications for job {} are not persisted".format(job_id))
@@ -292,6 +286,21 @@ def build_dvla_file(self, job_id):
raise e
def create_dvla_file_contents(job_id):
    """Build the DVLA upload payload for a job.

    Renders one DVLA letter line per persisted notification of the job and
    returns them joined with newlines (no trailing newline).
    """
    rendered_lines = []
    for notification in dao_get_all_notifications_for_job(job_id):
        dvla_letter = LetterDVLATemplate(
            notification.template.__dict__,
            notification.personalisation,
            # DVLA asked for a 7-digit id; it is not known whether this
            # number needs to be sequential.
            numeric_id=random.randint(1, int('9' * 7)),
            contact_block=notification.service.letter_contact_block,
        )
        rendered_lines.append(str(dvla_letter))
    return '\n'.join(rendered_lines)
def s3upload(filedata, region, bucket_name, file_location):
# TODO: move this method to utils. Will need to change the filedata from here to send contents in filedata['data']
_s3 = resource('s3')

View File

@@ -116,6 +116,11 @@ def dao_update_job(job):
db.session.commit()
def dao_update_job_status(job_id, status):
    """Set the job_status of the job identified by job_id and commit.

    Issues a bulk UPDATE directly against the Job table rather than loading
    the row into the session first.
    """
    jobs_query = db.session.query(Job).filter_by(id=job_id)
    jobs_query.update({'job_status': status})
    db.session.commit()
def dao_get_jobs_older_than_limited_by(older_than=7, limit_days=2):
return Job.query.filter(
cast(Job.created_at, sql_date) < days_ago(older_than),

View File

@@ -12,7 +12,7 @@ from celery.exceptions import Retry
from app import (encryption, DATETIME_FORMAT)
from app.celery import provider_tasks
from app.celery import tasks
from app.celery.tasks import s3, build_dvla_file
from app.celery.tasks import s3, build_dvla_file, create_dvla_file_contents
from app.celery.tasks import (
process_job,
process_row,
@@ -22,7 +22,15 @@ from app.celery.tasks import (
get_template_class
)
from app.dao import jobs_dao, services_dao
from app.models import Notification, KEY_TYPE_TEAM, KEY_TYPE_TEST, KEY_TYPE_NORMAL, SMS_TYPE, EMAIL_TYPE, LETTER_TYPE
from app.models import (
Notification,
KEY_TYPE_TEAM,
KEY_TYPE_TEST,
KEY_TYPE_NORMAL,
SMS_TYPE,
EMAIL_TYPE,
LETTER_TYPE,
Job)
from tests.app import load_example_csv
from tests.app.conftest import (
@@ -308,12 +316,11 @@ def test_should_process_letter_job(sample_letter_job, mocker):
assert process_row_mock.call_count == 1
assert sample_letter_job.job_status == 'finished'
assert sample_letter_job.job_status == 'in progress'
tasks.build_dvla_file.apply_async.assert_called_once_with([str(sample_letter_job.id)], queue="process-job")
def test_should_process_all_sms_job(sample_job,
sample_job_with_placeholdered_template,
def test_should_process_all_sms_job(sample_job_with_placeholdered_template,
mocker):
mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms'))
mocker.patch('app.celery.tasks.send_sms.apply_async')
@@ -970,8 +977,6 @@ def test_build_dvla_file(sample_letter_template, mocker):
job = create_job(template=sample_letter_template, notification_count=2)
create_notification(template=job.template, job=job)
create_notification(template=job.template, job=job)
mocker.patch("app.celery.tasks.random.randint", return_value=999)
mocked_upload = mocker.patch("app.celery.tasks.s3upload")
mocked_letter_template = mocker.patch("app.celery.tasks.LetterDVLATemplate")
mocked_letter_template_instance = mocked_letter_template.return_value
@@ -984,7 +989,33 @@ def test_build_dvla_file(sample_letter_template, mocker):
bucket_name=current_app.config['DVLA_UPLOAD_BUCKET_NAME'],
file_location="{}-dvla-job.text".format(job.id)
)
assert Job.query.get(job.id).job_status == 'ready to send'
def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_letter_template, mocker):
    """build_dvla_file must retry (not upload) while the job's notifications are incomplete."""
    # Job expects 2 notifications but only 1 is created below, so the
    # all-notifications-created check inside build_dvla_file fails.
    job = create_job(template=sample_letter_template, notification_count=2, job_status='in progress')
    create_notification(template=job.template, job=job)
    mocked = mocker.patch("app.celery.tasks.s3upload")
    # Make retry raise so we can assert the task went down the retry path.
    mocker.patch('app.celery.tasks.build_dvla_file.retry', side_effect=Retry)
    with pytest.raises(Retry):
        build_dvla_file(job.id)
    # Nothing should have been uploaded to S3.
    mocked.assert_not_called()
    tasks.build_dvla_file.retry.assert_called_with(queue='retry',
                                                   exc="All notifications for job {} are not persisted".format(job.id))
    # Status is left untouched on the retry path.
    assert Job.query.get(job.id).job_status == 'in progress'
def test_create_dvla_file_contents(sample_letter_template, mocker):
mocker.patch("app.celery.tasks.random.randint", return_value=999)
job = create_job(template=sample_letter_template, notification_count=2)
create_notification(template=job.template, job=job)
create_notification(template=job.template, job=job)
mocked_letter_template = mocker.patch("app.celery.tasks.LetterDVLATemplate")
mocked_letter_template_instance = mocked_letter_template.return_value
mocked_letter_template_instance.__str__.return_value = "dvla|string"
create_dvla_file_contents(job.id)
# Template
assert mocked_letter_template.call_args[0][0]['subject'] == 'Template subject'
assert mocked_letter_template.call_args[0][0]['content'] == 'Dear Sir/Madam, Hello. Yours Truly, The Government.'
@@ -997,20 +1028,6 @@ def test_build_dvla_file(sample_letter_template, mocker):
assert mocked_letter_template.call_args[1]['contact_block'] == 'London,\nSW1A 1AA'
def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_letter_template, mocker):
    """build_dvla_file must retry (not upload) while the job's notifications are incomplete."""
    # Job expects 2 notifications but only 1 is created below, so the
    # all-notifications-created check inside build_dvla_file fails.
    job = create_job(template=sample_letter_template, notification_count=2)
    create_notification(template=job.template, job=job)
    mocked = mocker.patch("app.celery.tasks.s3upload")
    # Make retry raise so we can assert the task went down the retry path.
    mocker.patch('app.celery.tasks.build_dvla_file.retry', side_effect=Retry)
    with pytest.raises(Retry):
        build_dvla_file(job.id)
    # Nothing should have been uploaded to S3.
    mocked.assert_not_called()
    tasks.build_dvla_file.retry.assert_called_with(queue='retry',
                                                   exc="All notifications for job {} are not persisted".format(job.id))
@freeze_time("2017-03-23 11:09:00.061258")
def test_dvla_letter_template(sample_letter_notification):
t = {"content": sample_letter_notification.template.content,

View File

@@ -13,7 +13,9 @@ from app.dao.jobs_dao import (
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job,
all_notifications_are_created_for_job,
dao_get_all_notifications_for_job, dao_get_jobs_older_than_limited_by)
dao_update_job_status,
dao_get_all_notifications_for_job,
dao_get_jobs_older_than_limited_by)
from app.models import Job
from tests.app.conftest import sample_notification as create_notification
@@ -355,3 +357,9 @@ def test_dao_get_all_notifications_for_job(notify_db, notify_db_session, sample_
create_notification(notify_db=notify_db, notify_db_session=notify_db_session, job=sample_job)
assert len(dao_get_all_notifications_for_job(sample_job.id)) == 3
def test_dao_update_job_status(sample_job):
    """dao_update_job_status should persist the new status on the job row."""
    new_status = 'sent to dvla'
    dao_update_job_status(sample_job.id, new_status)
    refreshed_job = Job.query.get(sample_job.id)
    assert refreshed_job.job_status == new_status