Add an endpoint to cancel a job

If you schedule a job you might change your mind or circumstances might
change. So you need to be able to cancel it. This commit adds a `POST`
endpoint for individual jobs which sets their status to `cancelled`.

This also means adding a new status of `cancelled`, so there’s a
migration to add the new status to the `job_status` lookup table.
This commit is contained in:
Chris Hill-Scott
2016-09-01 14:31:01 +01:00
parent aa8ee3a8da
commit 4a7267be8b
7 changed files with 92 additions and 2 deletions

View File

@@ -44,6 +44,13 @@ def dao_get_scheduled_jobs():
.all()
def dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id):
    """Return the single job with this id/service that is still scheduled
    for a future time.

    Raises sqlalchemy.orm.exc.NoResultFound (via ``.one()``) if the job
    does not exist, is not in the 'scheduled' state, or its scheduled
    time has already passed.
    """
    query = Job.query.filter_by(service_id=service_id, id=job_id)
    query = query.filter(
        Job.job_status == 'scheduled',
        Job.scheduled_for > datetime.utcnow(),
    )
    return query.one()
def dao_create_job(job):
    """Persist a new Job row, committing the transaction immediately."""
    session = db.session
    session.add(job)
    session.commit()

View File

@@ -7,8 +7,10 @@ from flask import (
from app.dao.jobs_dao import (
dao_create_job,
dao_update_job,
dao_get_job_by_service_id_and_job_id,
dao_get_jobs_by_service_id,
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job
)
@@ -28,7 +30,7 @@ from app.schemas import (
from app.celery.tasks import process_job
from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING
from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED
from app.utils import pagination_links
@@ -53,6 +55,15 @@ def get_job_by_service_and_job_id(service_id, job_id):
return jsonify(data=data)
@job.route('/<job_id>/cancel', methods=['POST'])
def cancel_job(service_id, job_id):
    """Mark a future scheduled job as cancelled and return its JSON.

    Looks up the job via dao_get_future_scheduled_job_by_id_and_service_id,
    which raises NoResultFound for anything that isn't a future scheduled
    job (presumably mapped to a 404 by the app's error handling — the
    tests expect a 404 in that case).
    """
    # local is named scheduled_job so it doesn't shadow the `job` blueprint
    scheduled_job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id)
    scheduled_job.job_status = JOB_STATUS_CANCELLED
    dao_update_job(scheduled_job)
    return get_job_by_service_and_job_id(service_id, job_id)
@job.route('/<job_id>/notifications', methods=['GET'])
def get_all_notifications_for_service_job(service_id, job_id):
data = notifications_filter_schema.load(request.args).data

View File

@@ -308,6 +308,7 @@ JOB_STATUS_IN_PROGRESS = 'in progress'
JOB_STATUS_FINISHED = 'finished'
JOB_STATUS_SENDING_LIMITS_EXCEEDED = 'sending limits exceeded'
JOB_STATUS_SCHEDULED = 'scheduled'
JOB_STATUS_CANCELLED = 'cancelled'
class JobStatus(db.Model):

View File

@@ -0,0 +1,22 @@
"""empty message
Revision ID: 0051_cancelled_job_status
Revises: 0050_index_for_stats
Create Date: 2016-09-01 14:34:06.839381
"""
# revision identifiers, used by Alembic.
revision = '0051_cancelled_job_status'
down_revision = '0050_index_for_stats'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Add the 'cancelled' value to the job_status lookup table."""
    # Name the target column explicitly so the insert does not depend on
    # the table's column order; downgrade already refers to the column
    # as `name`, so this also keeps the two directions consistent.
    op.execute("INSERT INTO job_status (name) VALUES ('cancelled')")
def downgrade():
    """Remove the 'cancelled' job status, re-pointing affected jobs."""
    # Order matters: move any cancelled jobs to an existing status first —
    # jobs.job_status presumably references job_status.name (confirm in the
    # models), so deleting the row first would be blocked by those jobs.
    op.execute("UPDATE jobs SET job_status = 'finished' WHERE job_status = 'cancelled'")
    op.execute("DELETE FROM job_status WHERE name = 'cancelled';")

View File

@@ -1,7 +1,7 @@
import requests_mock
import pytest
import uuid
from datetime import (datetime, date)
from datetime import (datetime, date, timedelta)
from flask import current_app
@@ -288,6 +288,22 @@ def sample_job_with_placeholdered_template(
)
@pytest.fixture(scope='function')
def sample_scheduled_job(
    notify_db,
    notify_db_session,
    service=None
):
    """A job in the 'scheduled' state, due to send one hour from now."""
    one_hour_ahead = datetime.utcnow() + timedelta(minutes=60)
    placeholder_template = sample_template_with_placeholders(notify_db, notify_db_session)
    return sample_job(
        notify_db,
        notify_db_session,
        service=service,
        template=placeholder_template,
        scheduled_for=one_hour_ahead.isoformat(),
        job_status='scheduled'
    )
@pytest.fixture(scope='function')
def sample_email_job(notify_db,
notify_db_session,

View File

@@ -7,6 +7,7 @@ from app.dao.jobs_dao import (
dao_update_job,
dao_get_jobs_by_service_id,
dao_get_scheduled_jobs,
dao_get_future_scheduled_job_by_id_and_service_id,
dao_get_notification_outcomes_for_job
)
@@ -251,3 +252,10 @@ def test_get_scheduled_jobs_gets_ignores_jobs_scheduled_in_the_future(notify_db,
sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_future, job_status='scheduled')
jobs = dao_get_scheduled_jobs()
assert len(jobs) == 0
def test_get_future_scheduled_job_gets_a_job_yet_to_send(notify_db, notify_db_session):
    """A job scheduled in the future is found by its id and service id."""
    scheduled_for = datetime.utcnow() + timedelta(minutes=60)
    job = sample_job(notify_db, notify_db_session, scheduled_for=scheduled_for, job_status='scheduled')

    found = dao_get_future_scheduled_job_by_id_and_service_id(job.id, job.service_id)

    assert found.id == job.id

View File

@@ -109,6 +109,31 @@ def test_get_job_by_id(notify_api, sample_job):
assert resp_json['data']['created_by']['name'] == 'Test User'
def test_cancel_job(notify_api, sample_scheduled_job):
    """POSTing to the cancel endpoint 200s and returns the job as cancelled."""
    job_id = str(sample_scheduled_job.id)
    service_id = sample_scheduled_job.service.id
    with notify_api.test_request_context(), notify_api.test_client() as client:
        response = client.post(
            '/service/{}/job/{}/cancel'.format(service_id, job_id),
            headers=[create_authorization_header(service_id=service_id)]
        )

        assert response.status_code == 200
        body = json.loads(response.get_data(as_text=True))
        assert body['data']['id'] == job_id
        assert body['data']['job_status'] == 'cancelled'
def test_cant_cancel_normal_job(notify_api, sample_job, mocker):
    """Cancelling a job that isn't a future scheduled one 404s without updating."""
    job_id = str(sample_job.id)
    service_id = sample_job.service.id
    with notify_api.test_request_context(), notify_api.test_client() as client:
        mock_update = mocker.patch('app.dao.jobs_dao.dao_update_job')
        url = '/service/{}/job/{}/cancel'.format(service_id, job_id)
        auth_header = create_authorization_header(service_id=service_id)

        response = client.post(url, headers=[auth_header])

        assert response.status_code == 404
        assert mock_update.call_count == 0
def test_create_unscheduled_job(notify_api, sample_template, mocker, fake_uuid):
with notify_api.test_request_context():
with notify_api.test_client() as client: