Adds another job state to account for when sending limits have been exceeded.

Martyn Inglis
2016-03-09 13:57:53 +00:00
parent 61af70a392
commit b0074449bd
4 changed files with 90 additions and 22 deletions

View File

@@ -31,15 +31,13 @@ def process_job(job_id):
         day=job.created_at.strftime(DATE_FORMAT)
     )
     total_sent = 0
     if stats:
-        sending_limit = service.limit
-        job_size = job.notification_count
         total_sent = stats.emails_requested + stats.sms_requested
-        if total_sent + job_size > sending_limit:
-            finished = datetime.utcnow()
-            job.status = 'finished'
-            job.processing_finished = finished
+    if total_sent + job.notification_count > service.limit:
+        job.status = 'sending limits exceeded'
+        job.processing_finished = datetime.utcnow()
         dao_update_job(job)
         current_app.logger.info(
             "Job {} size {} error. Sending limits {} exceeded".format(job_id, job.notification_count, service.limit)

View File

@@ -157,7 +157,7 @@ class Template(db.Model):
     subject = db.Column(db.Text, index=False, unique=True, nullable=True)
 
-JOB_STATUS_TYPES = ['pending', 'in progress', 'finished']
+JOB_STATUS_TYPES = ['pending', 'in progress', 'finished', 'sending limits exceeded']
 
 class Job(db.Model):
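The Job.status column itself is not shown in this diff, but the migration below names the database enum job_status_types, so the new list value presumably flows into a column along these lines. A sketch in plain SQLAlchemy, not the project's actual flask-sqlalchemy declaration:

import sqlalchemy as sa

JOB_STATUS_TYPES = ['pending', 'in progress', 'finished', 'sending limits exceeded']

# Hypothetical column definition; the real model wires this up via db.Column.
job_status_types = sa.Enum(*JOB_STATUS_TYPES, name='job_status_types')
status = sa.Column('status', job_status_types, nullable=False)

assert 'sending limits exceeded' in job_status_types.enums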

View File

@@ -0,0 +1,30 @@
+"""empty message
+
+Revision ID: 0037_more_job_states
+Revises: 0036_notification_stats
+Create Date: 2016-03-08 11:16:25.659463
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '0037_more_job_states'
+down_revision = '0036_notification_stats'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+def upgrade():
+    op.drop_column('jobs', 'status')
+    op.execute('DROP TYPE job_status_types')
+    job_status_types = sa.Enum('pending', 'in progress', 'finished', 'sending limits exceeded', name='job_status_types')
+    job_status_types.create(op.get_bind())
+    op.add_column('jobs', sa.Column('status', job_status_types, nullable=True))
+    op.get_bind()
+    op.execute("update jobs set status='finished'")
+    op.alter_column('jobs', 'status', nullable=False)
+
+
+def downgrade():
+    op.drop_column('jobs', 'status')
+    op.execute('DROP TYPE job_status_types')
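Because the migration recreates the enum type and the status column rather than altering them in place, existing jobs come back with status reset to 'finished'. Applying or reverting it is the usual Alembic flow; a sketch using Alembic's Python API, assuming an alembic.ini at the project root (the project may instead wrap this in its own management command):

from alembic import command
from alembic.config import Config

config = Config('alembic.ini')  # assumed location of the Alembic config

# Apply this revision (and anything before it).
command.upgrade(config, '0037_more_job_states')

# Roll back to the previous revision if needed.
# command.downgrade(config, '0036_notification_stats')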

View File

@@ -18,7 +18,8 @@ from tests.app.conftest import (
     sample_user,
     sample_template,
     sample_job,
-    sample_email_template
+    sample_email_template,
+    sample_notification
 )
@@ -55,28 +56,67 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notif
     process_job(job.id)

-    s3.get_job_from_s3.assert_called_once_with(job.bucket_name, job.id)
+    s3.get_job_from_s3.assert_not_called()
     job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.status == 'finished'
-    tasks.send_sms.apply_async.assert_not_called
+    assert job.status == 'sending limits exceeded'
+    tasks.send_sms.apply_async.assert_not_called()


 @freeze_time("2016-01-01 11:09:00.061258")
-def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
-    service = sample_service(notify_db, notify_db_session, limit=9)
+def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=1)
     job = sample_job(notify_db, notify_db_session, service=service)
+    sample_notification(notify_db, notify_db_session, service=service, job=job)

     mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
     mocker.patch('app.celery.tasks.send_sms.apply_async')
     mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

     process_job(job.id)

+    job = jobs_dao.dao_get_job_by_id(job.id)
+    assert job.status == 'sending limits exceeded'
+    s3.get_job_from_s3.assert_not_called()
+    tasks.send_sms.apply_async.assert_not_called()
+
+
+def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=1)
     template = sample_email_template(notify_db, notify_db_session, service=service)
     job = sample_job(notify_db, notify_db_session, service=service, template=template)
-    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_email'))
+    sample_notification(notify_db, notify_db_session, service=service, job=job)
+    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
     mocker.patch('app.celery.tasks.send_email.apply_async')
     mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

     process_job(job.id)

-    s3.get_job_from_s3.assert_called_once_with(job.bucket_name, job.id)
     job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.status == 'finished'
+    assert job.status == 'sending limits exceeded'
+    s3.get_job_from_s3.assert_not_called()
+    tasks.send_email.apply_async.assert_not_called()
+
+
+@freeze_time("2016-01-01 11:09:00.061258")
+def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=0)
+    template = sample_email_template(notify_db, notify_db_session, service=service)
+    job = sample_job(notify_db, notify_db_session, service=service, template=template)
+
+    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('email'))
+    mocker.patch('app.celery.tasks.send_email.apply_async')
+    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
+    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
+
+    process_job(job.id)
+
+    s3.get_job_from_s3.assert_not_called
+    job = jobs_dao.dao_get_job_by_id(job.id)
+    assert job.status == 'sending limits exceeded'
+    tasks.send_email.apply_async.assert_not_called
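One detail worth noting in the first test's change: tasks.send_sms.apply_async.assert_not_called was replaced with assert_not_called(). Without the parentheses the line is only an attribute lookup on the mock and never runs the check; with them, unittest.mock actually verifies the mock was never called. A minimal illustration:

from unittest import mock

m = mock.Mock()
m('unexpected call')

m.assert_not_called      # attribute access only; nothing is checked, so nothing fails
try:
    m.assert_not_called()  # the call runs the check and raises because m was called
except AssertionError:
    print('assert_not_called() caught the unexpected call')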