Get and use sender_id from S3 metadata

The `save_email` and `save_sms` jobs were updated previously to take an
optional `sender_id` and to use this if it was available. This commit
now gets the `sender_id` from the S3 metadata if it exists and passes it
through the the tasks which save the job notifications. This means SMS
and emails sent through jobs can use a specified `sender_id` instead of
the default.
This commit is contained in:
Katie Smith
2018-11-05 16:16:48 +00:00
parent 8b5d48b113
commit d20e35d075
4 changed files with 117 additions and 3 deletions

View File

@@ -122,12 +122,30 @@ def test_should_process_sms_job(sample_job, mocker):
(str(sample_job.service_id),
"uuid",
"something_encrypted"),
{},
queue="database-tasks"
)
job = jobs_dao.dao_get_job_by_id(sample_job.id)
assert job.job_status == 'finished'
def test_should_process_sms_job_with_sender_id(sample_job, mocker, fake_uuid):
    """process_job should forward a supplied sender_id to save_sms as a task kwarg."""
    # Stub out external collaborators: S3 job fetch, encryption, uuid generation.
    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
    save_sms_mock = mocker.patch('app.celery.tasks.save_sms.apply_async')

    process_job(sample_job.id, sender_id=fake_uuid)

    # The sender_id must arrive as a kwarg alongside the usual positional args.
    expected_args = (str(sample_job.service_id), "uuid", "something_encrypted")
    save_sms_mock.assert_called_once_with(
        expected_args,
        {'sender_id': fake_uuid},
        queue="database-tasks",
    )
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db,
notify_db_session,
@@ -238,6 +256,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db,
"uuid",
"something_encrypted",
),
{},
queue="database-tasks"
)
@@ -282,12 +301,33 @@ def test_should_process_email_job(email_job_with_placeholders, mocker):
"uuid",
"something_encrypted",
),
{},
queue="database-tasks"
)
job = jobs_dao.dao_get_job_by_id(email_job_with_placeholders.id)
assert job.job_status == 'finished'
def test_should_process_email_job_with_sender_id(email_job_with_placeholders, mocker, fake_uuid):
    """process_job should forward a supplied sender_id to save_email as a task kwarg."""
    email_csv = """email_address,name
test@test.com,foo
"""
    # Stub out external collaborators: S3 job fetch, encryption, uuid generation.
    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=email_csv)
    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
    save_email_mock = mocker.patch('app.celery.tasks.save_email.apply_async')

    process_job(email_job_with_placeholders.id, sender_id=fake_uuid)

    # The sender_id must arrive as a kwarg alongside the usual positional args.
    save_email_mock.assert_called_once_with(
        (str(email_job_with_placeholders.service_id), "uuid", "something_encrypted"),
        {'sender_id': fake_uuid},
        queue="database-tasks",
    )
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_process_letter_job(sample_letter_job, mocker):
csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name
@@ -393,8 +433,44 @@ def test_process_row_sends_letter_task(template_type, research_mode, expected_fu
# encrypted data
encrypt_mock.return_value,
),
{},
queue=expected_queue
)
def test_process_row_when_sender_id_is_provided(mocker, fake_uuid):
    """process_row should pass sender_id through to the save task's kwargs."""
    mocker.patch('app.celery.tasks.create_uuid', return_value='noti_uuid')
    save_sms_task = mocker.patch('app.celery.tasks.save_sms.apply_async')
    encrypt_mock = mocker.patch('app.celery.tasks.encryption.encrypt')

    # Minimal stand-ins for the template/job/service the row belongs to.
    template = Mock(id='template_id', template_type=SMS_TYPE)
    job = Mock(id='job_id', template_version='temp_vers')
    service = Mock(id='service_id', research_mode=False)
    row = Row(
        {'foo': 'bar', 'to': 'recip'},
        index='row_num',
        error_fn=lambda k, v: None,
        recipient_column_headers=['to'],
        placeholders={'foo'},
        template=template,
    )

    process_row(row, template, job, service, sender_id=fake_uuid)

    # sender_id arrives as a task kwarg alongside the usual positional args.
    save_sms_task.assert_called_once_with(
        ('service_id', 'noti_uuid', encrypt_mock.return_value),  # encrypted data
        {'sender_id': fake_uuid},
        queue='database-tasks',
    )
# -------- save_sms and save_email tests -------- #

View File

@@ -98,6 +98,7 @@ def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid):
app.celery.tasks.process_job.apply_async.assert_called_once_with(
([str(fake_uuid)]),
{'sender_id': None},
queue="job-tasks"
)
@@ -113,6 +114,36 @@ def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid):
assert resp_json['data']['notification_count'] == 1
def test_create_unscheduled_job_with_sender_id_in_metadata(client, sample_template, mocker, fake_uuid):
    """POSTing a job whose S3 metadata carries a sender_id should hand it to process_job."""
    mocker.patch('app.celery.tasks.process_job.apply_async')
    # The S3 metadata for the uploaded CSV includes a sender_id this time.
    mocker.patch('app.job.rest.get_job_metadata_from_s3', return_value={
        'template_id': str(sample_template.id),
        'original_file_name': 'thisisatest.csv',
        'notification_count': '1',
        'valid': 'True',
        'sender_id': fake_uuid,
    })

    data = {'id': fake_uuid, 'created_by': str(sample_template.created_by.id)}
    path = '/service/{}/job'.format(sample_template.service.id)
    headers = [('Content-Type', 'application/json'), create_authorization_header()]

    response = client.post(path, data=json.dumps(data), headers=headers)

    assert response.status_code == 201
    # The metadata sender_id is forwarded to the task as a kwarg.
    app.celery.tasks.process_job.apply_async.assert_called_once_with(
        ([str(fake_uuid)]),
        {'sender_id': fake_uuid},
        queue="job-tasks",
    )
@freeze_time("2016-01-01 12:00:00.000000")
def test_create_scheduled_job(client, sample_template, mocker, fake_uuid):
scheduled_date = (datetime.utcnow() + timedelta(hours=95, minutes=59)).isoformat()