2016-06-20 13:33:53 +01:00
|
|
|
from datetime import (datetime)
|
2016-11-25 17:32:01 +00:00
|
|
|
|
2016-03-31 15:57:50 +01:00
|
|
|
from flask import current_app
|
2016-04-13 15:31:08 +01:00
|
|
|
from notifications_utils.recipients import (
|
2016-09-28 17:00:17 +01:00
|
|
|
RecipientCSV
|
2016-03-31 15:57:50 +01:00
|
|
|
)
|
2016-12-22 14:40:33 +00:00
|
|
|
from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
|
2016-06-20 13:33:53 +01:00
|
|
|
from sqlalchemy.exc import SQLAlchemyError
|
2016-03-31 15:57:50 +01:00
|
|
|
from app import (
|
|
|
|
|
create_uuid,
|
|
|
|
|
DATETIME_FORMAT,
|
|
|
|
|
notify_celery,
|
2016-05-10 09:04:22 +01:00
|
|
|
encryption
|
2016-03-31 15:57:50 +01:00
|
|
|
)
|
|
|
|
|
from app.aws import s3
|
2016-09-28 15:05:50 +01:00
|
|
|
from app.celery import provider_tasks
|
2016-06-20 13:33:53 +01:00
|
|
|
from app.dao.jobs_dao import (
|
|
|
|
|
dao_update_job,
|
|
|
|
|
dao_get_job_by_id
|
|
|
|
|
)
|
2016-11-25 17:32:01 +00:00
|
|
|
from app.dao.notifications_dao import get_notification_by_id
|
2016-10-03 10:57:10 +01:00
|
|
|
from app.dao.services_dao import dao_fetch_service_by_id, fetch_todays_total_message_count
|
2016-06-20 13:33:53 +01:00
|
|
|
from app.dao.templates_dao import dao_get_template_by_id
|
2016-03-09 17:46:01 +00:00
|
|
|
from app.models import (
|
2016-06-29 11:50:54 +01:00
|
|
|
EMAIL_TYPE,
|
2016-06-30 17:32:49 +01:00
|
|
|
SMS_TYPE,
|
2017-01-17 12:00:34 +00:00
|
|
|
LETTER_TYPE,
|
2016-10-03 10:57:10 +01:00
|
|
|
KEY_TYPE_NORMAL
|
2016-03-09 17:46:01 +00:00
|
|
|
)
|
2016-11-11 10:41:39 +00:00
|
|
|
from app.notifications.process_notifications import persist_notification
|
2016-09-28 17:00:17 +01:00
|
|
|
from app.service.utils import service_allowed_to_send_to
|
2016-08-05 10:44:43 +01:00
|
|
|
from app.statsd_decorators import statsd
|
2016-03-31 15:57:50 +01:00
|
|
|
|
2016-03-09 14:41:36 +00:00
|
|
|
|
2016-02-24 17:12:30 +00:00
|
|
|
@notify_celery.task(name="process-job")
@statsd(namespace="tasks")
def process_job(job_id):
    """Fetch a job's CSV from S3 and queue one save task per recipient row.

    Only jobs still in the 'pending' state are processed; any other status
    means the job was already picked up (or stopped) and the task returns
    immediately. If the job would push the service over its daily message
    limit, __sending_limits_for_job_exceeded marks the job accordingly and
    this task stops before sending anything.

    :param job_id: id of the job to process (used to load the job row and
        to locate the uploaded CSV in S3)
    """
    start = datetime.utcnow()
    job = dao_get_job_by_id(job_id)

    if job.job_status != 'pending':
        return

    service = job.service

    if __sending_limits_for_job_exceeded(service, job, job_id):
        return

    # Claim the job before enumerating rows so another worker won't pick it up.
    job.job_status = 'in progress'
    dao_update_job(job)

    # Load the exact template version the job was created against.
    db_template = dao_get_template_by_id(job.template_id, job.template_version)

    TemplateClass = get_template_class(db_template.template_type)
    template = TemplateClass(db_template.__dict__)

    for row_number, recipient, personalisation in RecipientCSV(
        s3.get_job_from_s3(str(service.id), str(job_id)),
        template_type=template.template_type,
        placeholders=template.placeholders
    ).enumerated_recipients_and_personalisation:
        process_row(row_number, recipient, personalisation, template, job, service)

    finished = datetime.utcnow()
    job.job_status = 'finished'
    # processing_started/processing_finished are only persisted once the whole
    # CSV has been enumerated; `start` was captured at the top of this task.
    job.processing_started = start
    job.processing_finished = finished
    dao_update_job(job)
    current_app.logger.info(
        "Job {} created at {} started at {} finished at {}".format(job_id, job.created_at, start, finished)
    )
|
2016-02-09 13:31:45 +00:00
|
|
|
|
|
|
|
|
|
2017-01-17 12:00:34 +00:00
|
|
|
def process_row(row_number, recipient, personalisation, template, job, service):
    """Encrypt one CSV row and dispatch the matching save task for it.

    The row payload is encrypted before being queued; the receiving task
    (send_sms / send_email / persist_letter) decrypts it again. Services in
    research mode have their work routed to the 'research-mode' queue
    instead of the per-type database queue.
    """
    payload = encryption.encrypt({
        'template': str(template.id),
        'template_version': job.template_version,
        'job': str(job.id),
        'to': recipient,
        'row_number': row_number,
        'personalisation': dict(personalisation)
    })

    # One (celery task, queue name) pair per notification type.
    dispatch = {
        SMS_TYPE: (send_sms, 'db-sms'),
        EMAIL_TYPE: (send_email, 'db-email'),
        LETTER_TYPE: (persist_letter, 'db-letter'),
    }
    task, queue_name = dispatch[template.template_type]

    if service.research_mode:
        queue_name = 'research-mode'

    task.apply_async(
        (
            str(service.id),
            create_uuid(),
            payload,
            datetime.utcnow().strftime(DATETIME_FORMAT)
        ),
        queue=queue_name
    )
|
|
|
|
|
|
|
|
|
|
|
2016-11-11 10:41:39 +00:00
|
|
|
def __sending_limits_for_job_exceeded(service, job, job_id):
    """Return True — and mark the job as failed — if sending this job would
    take the service past its daily message limit; otherwise return False.
    """
    sent_today = fetch_todays_total_message_count(service.id)
    within_limit = sent_today + job.notification_count <= service.message_limit

    if within_limit:
        return False

    # Record the failure on the job itself so the UI can surface it.
    job.job_status = 'sending limits exceeded'
    job.processing_finished = datetime.utcnow()
    dao_update_job(job)
    current_app.logger.info(
        "Job {} size {} error. Sending limits {} exceeded".format(
            job_id, job.notification_count, service.message_limit)
    )
    return True
|
|
|
|
|
|
|
|
|
|
|
2016-08-10 08:46:37 +01:00
|
|
|
@notify_celery.task(bind=True, name="send-sms", max_retries=5, default_retry_delay=300)
@statsd(namespace="tasks")
def send_sms(self,
             service_id,
             notification_id,
             encrypted_notification,
             created_at,
             api_key_id=None,
             key_type=KEY_TYPE_NORMAL):
    """Decrypt, persist and queue an SMS notification for delivery.

    Skips (with an info log) recipients the service is not allowed to send
    to, e.g. a trial-mode/restricted service. On a database error the
    failure is delegated to handle_exception, which may re-queue this task.

    :param service_id: id of the sending service
    :param notification_id: pre-generated id to save the notification under
    :param encrypted_notification: payload produced by process_row
    :param created_at: creation timestamp string (DATETIME_FORMAT)
    :param api_key_id: id of the API key used, if any
    :param key_type: defaults to KEY_TYPE_NORMAL
    """
    notification = encryption.decrypt(encrypted_notification)
    service = dao_fetch_service_by_id(service_id)

    if not service_allowed_to_send_to(notification['to'], service, key_type):
        current_app.logger.info(
            "SMS {} failed as restricted service".format(notification_id)
        )
        return

    try:
        saved_notification = persist_notification(template_id=notification['template'],
                                                  template_version=notification['template_version'],
                                                  recipient=notification['to'],
                                                  service=service,
                                                  personalisation=notification.get('personalisation'),
                                                  notification_type=SMS_TYPE,
                                                  api_key_id=api_key_id,
                                                  key_type=key_type,
                                                  created_at=created_at,
                                                  job_id=notification.get('job', None),
                                                  job_row_number=notification.get('row_number', None),
                                                  notification_id=notification_id
                                                  )

        # Research-mode services never reach a real provider.
        provider_tasks.deliver_sms.apply_async(
            [str(saved_notification.id)],
            queue='send-sms' if not service.research_mode else 'research-mode'
        )

        current_app.logger.info(
            "SMS {} created at {} for job {}".format(saved_notification.id, created_at, notification.get('job', None))
        )

    except SQLAlchemyError as e:
        handle_exception(self, notification, notification_id, e)
|
2016-02-22 17:17:29 +00:00
|
|
|
|
|
|
|
|
|
2016-08-10 08:46:37 +01:00
|
|
|
@notify_celery.task(bind=True, name="send-email", max_retries=5, default_retry_delay=300)
@statsd(namespace="tasks")
def send_email(self,
               service_id,
               notification_id,
               encrypted_notification,
               created_at,
               api_key_id=None,
               key_type=KEY_TYPE_NORMAL):
    """Decrypt, persist and queue an email notification for delivery.

    Skips (with an info log) recipients the service is not allowed to send
    to, e.g. a trial-mode/restricted service. On a database error the
    failure is delegated to handle_exception, which may re-queue this task.

    :param service_id: id of the sending service
    :param notification_id: pre-generated id to save the notification under
    :param encrypted_notification: payload produced by process_row
    :param created_at: creation timestamp string (DATETIME_FORMAT)
    :param api_key_id: id of the API key used, if any
    :param key_type: defaults to KEY_TYPE_NORMAL
    """
    notification = encryption.decrypt(encrypted_notification)
    service = dao_fetch_service_by_id(service_id)

    if not service_allowed_to_send_to(notification['to'], service, key_type):
        current_app.logger.info("Email {} failed as restricted service".format(notification_id))
        return

    try:
        saved_notification = persist_notification(
            template_id=notification['template'],
            template_version=notification['template_version'],
            recipient=notification['to'],
            service=service,
            personalisation=notification.get('personalisation'),
            notification_type=EMAIL_TYPE,
            api_key_id=api_key_id,
            key_type=key_type,
            created_at=created_at,
            job_id=notification.get('job', None),
            job_row_number=notification.get('row_number', None),
            notification_id=notification_id
        )

        # Research-mode services never reach a real provider.
        provider_tasks.deliver_email.apply_async(
            [str(saved_notification.id)],
            queue='send-email' if not service.research_mode else 'research-mode'
        )

        current_app.logger.info("Email {} created at {}".format(saved_notification.id, created_at))
    except SQLAlchemyError as e:
        handle_exception(self, notification, notification_id, e)
|
|
|
|
|
|
|
|
|
|
|
2017-01-18 11:29:38 +00:00
|
|
|
@notify_celery.task(bind=True, name="persist-letter", max_retries=5, default_retry_delay=300)
@statsd(namespace="tasks")
def persist_letter(
    self,
    service_id,
    notification_id,
    encrypted_notification,
    created_at
):
    """Decrypt and persist a letter notification.

    Letters have no single 'to' value — the address lives in the
    personalisation (address lines plus postcode) — so address_line_1 is
    stored as the recipient, which is what the front-end report pages show.
    Delivery itself is not implemented yet (see TODO below). On a database
    error the failure is delegated to handle_exception, which may re-queue
    this task.

    :param service_id: id of the sending service
    :param notification_id: pre-generated id to save the notification under
    :param encrypted_notification: payload produced by process_row
    :param created_at: creation timestamp string (DATETIME_FORMAT)
    """
    notification = encryption.decrypt(encrypted_notification)

    # we store the recipient as just the first item of the person's address
    recipient = notification['personalisation']['addressline1']

    service = dao_fetch_service_by_id(service_id)
    try:
        saved_notification = persist_notification(
            template_id=notification['template'],
            template_version=notification['template_version'],
            recipient=recipient,
            service=service,
            personalisation=notification['personalisation'],
            notification_type=LETTER_TYPE,
            api_key_id=None,
            key_type=KEY_TYPE_NORMAL,
            created_at=created_at,
            job_id=notification['job'],
            job_row_number=notification['row_number'],
            notification_id=notification_id
        )

        # TODO: deliver letters

        current_app.logger.info("Letter {} created at {}".format(saved_notification.id, created_at))
    except SQLAlchemyError as e:
        handle_exception(self, notification, notification_id, e)
|
|
|
|
|
|
|
|
|
|
|
2017-01-17 16:51:27 +00:00
|
|
|
def handle_exception(task, notification, notification_id, exc):
    """Retry a failed save task, unless the notification is already saved.

    Sometimes, SQS plays the same message twice. We should be able to catch
    an IntegrityError, but it seems SQLAlchemy is throwing a FlushError. So
    if the notification id already exists we do not send to the retry queue.

    :param task: the bound celery task that failed (send_sms / send_email /
        persist_letter)
    :param notification: the decrypted notification payload (dict)
    :param notification_id: id the notification was to be saved under
    :param exc: the SQLAlchemyError that triggered this handler
    """
    if not get_notification_by_id(notification_id):
        retry_msg = '{task} notification for job {job} row number {row} and notification id {noti}'.format(
            task=task.__name__,
            job=notification.get('job', None),
            row=notification.get('row_number', None),
            noti=notification_id
        )
        # Bug fix: 'Retry' was previously concatenated without a separator,
        # producing logs like "Retrysend_sms notification ...".
        current_app.logger.exception('Retry ' + retry_msg)
        try:
            task.retry(queue="retry", exc=exc)
        except task.MaxRetriesExceededError:
            # Distinct message so exhausted retries can be told apart from a
            # retry being scheduled (both paths previously logged the same).
            current_app.logger.exception('Retry failed ' + retry_msg)
|
|
|
|
|
|
|
|
|
|
|
2017-01-24 10:53:41 +00:00
|
|
|
def get_template_class(template_type):
    """Return the notifications-utils template class for ``template_type``.

    Since we don't need rendering capabilities (we only need to extract
    placeholders) both email and letter can use the same base template.
    Unknown types yield None, matching the original fall-through behaviour.
    """
    type_to_class = {
        SMS_TYPE: SMSMessageTemplate,
        EMAIL_TYPE: WithSubjectTemplate,
        LETTER_TYPE: WithSubjectTemplate,
    }
    return type_to_class.get(template_type)
|