diff --git a/app/celery/tasks.py b/app/celery/tasks.py
index 613e3261c..af3d33f53 100644
--- a/app/celery/tasks.py
+++ b/app/celery/tasks.py
@@ -31,20 +31,18 @@ def process_job(job_id):
         day=job.created_at.strftime(DATE_FORMAT)
     )
 
+    total_sent = 0
     if stats:
-        sending_limit = service.limit
-        job_size = job.notification_count
         total_sent = stats.emails_requested + stats.sms_requested
 
-        if total_sent + job_size > sending_limit:
-            finished = datetime.utcnow()
-            job.status = 'finished'
-            job.processing_finished = finished
-            dao_update_job(job)
-            current_app.logger.info(
-                "Job {} size {} error. Sending limits {} exceeded".format(job_id, job.notification_count, service.limit)
-            )
-            return
+    if total_sent + job.notification_count > service.limit:
+        job.status = 'sending limits exceeded'
+        job.processing_finished = datetime.utcnow()
+        dao_update_job(job)
+        current_app.logger.info(
+            "Job {} size {} error. Sending limits {} exceeded".format(job_id, job.notification_count, service.limit)
+        )
+        return
 
     job.status = 'in progress'
     dao_update_job(job)
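Reassembled without the diff markers, the reworked limit check in process_job reads roughly as below. This is only the patched fragment from the hunk above, shown for readability; the dedent of the limit check out of the "if stats:" block is inferred from the new total_sent = 0 initialiser and from the tests further down, which expect the check to fire even when no statistics row exists yet.

    total_sent = 0
    if stats:
        total_sent = stats.emails_requested + stats.sms_requested

    # Reject the whole job up front if sending it would push the service past
    # its daily limit; the job CSV is never fetched from S3 in that case.
    if total_sent + job.notification_count > service.limit:
        job.status = 'sending limits exceeded'
        job.processing_finished = datetime.utcnow()
        dao_update_job(job)
        current_app.logger.info(
            "Job {} size {} error. Sending limits {} exceeded".format(job_id, job.notification_count, service.limit)
        )
        return

    job.status = 'in progress'
    dao_update_job(job)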
diff --git a/app/models.py b/app/models.py
index 1c2a10d3b..d4784d489 100644
--- a/app/models.py
+++ b/app/models.py
@@ -157,7 +157,7 @@ class Template(db.Model):
     subject = db.Column(db.Text, index=False, unique=True, nullable=True)
 
 
-JOB_STATUS_TYPES = ['pending', 'in progress', 'finished']
+JOB_STATUS_TYPES = ['pending', 'in progress', 'finished', 'sending limits exceeded']
 
 
 class Job(db.Model):
diff --git a/migrations/versions/0037_more_job_states.py b/migrations/versions/0037_more_job_states.py
new file mode 100644
index 000000000..ef6e5671b
--- /dev/null
+++ b/migrations/versions/0037_more_job_states.py
@@ -0,0 +1,30 @@
+"""empty message
+
+Revision ID: 0037_more_job_states
+Revises: 0036_notification_stats
+Create Date: 2016-03-08 11:16:25.659463
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '0037_more_job_states'
+down_revision = '0036_notification_stats'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+def upgrade():
+    op.drop_column('jobs', 'status')
+    op.execute('DROP TYPE job_status_types')
+    job_status_types = sa.Enum('pending', 'in progress', 'finished', 'sending limits exceeded', name='job_status_types')
+    job_status_types.create(op.get_bind())
+    op.add_column('jobs', sa.Column('status', job_status_types, nullable=True))
+    op.get_bind()
+    op.execute("update jobs set status='finished'")
+    op.alter_column('jobs', 'status', nullable=False)
+
+
+def downgrade():
+    op.drop_column('jobs', 'status')
+    op.execute('DROP TYPE job_status_types')
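The migration drops and recreates both the column and the job_status_types enum because the new 'sending limits exceeded' value lives in the Postgres type itself; the drop wipes existing values, which is why every pre-existing job is backfilled to 'finished' before the NOT NULL constraint is restored. The matching column on the Job model is not part of this diff; a minimal sketch of what it presumably looks like, built from the JOB_STATUS_TYPES list above and the enum name used here (the default and the exact keyword arguments are assumptions):

    class Job(db.Model):
        __tablename__ = 'jobs'

        # ... other columns elided ...

        # Assumed definition: the Python-side enum must carry the same values
        # and type name ('job_status_types') as the one the migration creates.
        status = db.Column(
            db.Enum(*JOB_STATUS_TYPES, name='job_status_types'),
            nullable=False,
            default='pending'  # assumption: new jobs start as 'pending'
        )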
diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py
index b16e9fdc4..ee6f2cab2 100644
--- a/tests/app/celery/test_tasks.py
+++ b/tests/app/celery/test_tasks.py
@@ -18,7 +18,8 @@
 from tests.app.conftest import (
     sample_user,
     sample_template,
     sample_job,
-    sample_email_template
+    sample_email_template,
+    sample_notification
 )
 
@@ -55,28 +56,67 @@ def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notif
 
     process_job(job.id)
 
-    s3.get_job_from_s3.assert_called_once_with(job.bucket_name, job.id)
+    s3.get_job_from_s3.assert_not_called()
     job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.status == 'finished'
-    tasks.send_sms.apply_async.assert_not_called
+    assert job.status == 'sending limits exceeded'
+    tasks.send_sms.apply_async.assert_not_called()
 
 
-@freeze_time("2016-01-01 11:09:00.061258")
-def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
-    service = sample_service(notify_db, notify_db_session, limit=9)
+def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=1)
+    job = sample_job(notify_db, notify_db_session, service=service)
+
+    sample_notification(notify_db, notify_db_session, service=service, job=job)
+
+    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
+    mocker.patch('app.celery.tasks.send_sms.apply_async')
+    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
+    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
+
+    process_job(job.id)
+
+    job = jobs_dao.dao_get_job_by_id(job.id)
+    assert job.status == 'sending limits exceeded'
+    s3.get_job_from_s3.assert_not_called()
+    tasks.send_sms.apply_async.assert_not_called()
+
+
+def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=1)
     template = sample_email_template(notify_db, notify_db_session, service=service)
     job = sample_job(notify_db, notify_db_session, service=service, template=template)
 
-    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_email'))
+    sample_notification(notify_db, notify_db_session, service=service, job=job)
+
+    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('sms'))
     mocker.patch('app.celery.tasks.send_email.apply_async')
     mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
 
     process_job(job.id)
 
-    s3.get_job_from_s3.assert_called_once_with(job.bucket_name, job.id)
     job = jobs_dao.dao_get_job_by_id(job.id)
-    assert job.status == 'finished'
+    assert job.status == 'sending limits exceeded'
+    s3.get_job_from_s3.assert_not_called()
+    tasks.send_email.apply_async.assert_not_called()
+
+
+@freeze_time("2016-01-01 11:09:00.061258")
+def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db, notify_db_session, mocker):
+    service = sample_service(notify_db, notify_db_session, limit=0)
+    template = sample_email_template(notify_db, notify_db_session, service=service)
+    job = sample_job(notify_db, notify_db_session, service=service, template=template)
+
+    mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('email'))
+    mocker.patch('app.celery.tasks.send_email.apply_async')
+    mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
+    mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")
+
+    process_job(job.id)
+
+    s3.get_job_from_s3.assert_not_called()
+    job = jobs_dao.dao_get_job_by_id(job.id)
+    assert job.status == 'sending limits exceeded'
     tasks.send_email.apply_async.assert_not_called
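The _inc_today tests above lean on the arithmetic in process_job: notifications already requested today count against the daily limit before the job's own size is added, and landing exactly on the limit is still allowed. A small standalone sketch of that check, assuming each sample job counts as a single notification; the helper name would_exceed_daily_limit is illustrative and not part of the codebase:

    def would_exceed_daily_limit(total_sent_today, job_size, daily_limit):
        # Mirrors the check in process_job: the job is rejected only if sending
        # it would push the service past its daily limit.
        return total_sent_today + job_size > daily_limit


    # limit=1 with one notification already sent today: 1 + 1 > 1, so the job
    # is marked 'sending limits exceeded' and S3 is never read.
    assert would_exceed_daily_limit(total_sent_today=1, job_size=1, daily_limit=1)

    # limit=0 with nothing sent yet: 0 + 1 > 0, so even the first job is rejected.
    assert would_exceed_daily_limit(total_sent_today=0, job_size=1, daily_limit=0)

    # With no prior sends and limit=1, a single-notification job still goes through.
    assert not would_exceed_daily_limit(total_sent_today=0, job_size=1, daily_limit=1)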