Merge pull request #656 from alphagov/add-cancel-job-endpoint
Add an endpoint to cancel a job
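The endpoint added here is POST /service/<service_id>/job/<job_id>/cancel: it cancels a job that is scheduled for the future and returns the updated job, and responds 404 for any other job (see the tests at the end of this diff). A minimal client-side sketch of calling it, assuming a requests-based client and a bearer-token header; the function name, base URL, and auth handling are illustrative and not part of this change:

import requests  # illustrative client; not part of this PR


def cancel_scheduled_job(base_url, service_id, job_id, token):
    # POST /service/<service_id>/job/<job_id>/cancel (route added in this PR)
    response = requests.post(
        '{}/service/{}/job/{}/cancel'.format(base_url, service_id, job_id),
        headers={'Authorization': 'Bearer {}'.format(token)},  # assumed auth scheme
    )
    # 200 with the job JSON (data.job_status == 'cancelled') for a future
    # scheduled job; 404 if the job is not scheduled in the future.
    response.raise_for_status()
    return response.json()['data']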
@@ -2,7 +2,7 @@ from datetime import date, timedelta, datetime
 from sqlalchemy import desc, asc, cast, Date as sql_date
 from app import db
 from app.dao import days_ago
-from app.models import Job, NotificationHistory
+from app.models import Job, NotificationHistory, JOB_STATUS_SCHEDULED
 from app.statsd_decorators import statsd
 from sqlalchemy import func, asc
 
@@ -39,11 +39,25 @@ def dao_get_job_by_id(job_id):
 
 def dao_get_scheduled_jobs():
     return Job.query \
-        .filter(Job.job_status == 'scheduled', Job.scheduled_for < datetime.utcnow()) \
+        .filter(
+            Job.job_status == JOB_STATUS_SCHEDULED,
+            Job.scheduled_for < datetime.utcnow()
+        ) \
         .order_by(asc(Job.scheduled_for)) \
         .all()
 
 
+def dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id):
+    return Job.query \
+        .filter(
+            Job.service_id == service_id,
+            Job.id == job_id,
+            Job.job_status == JOB_STATUS_SCHEDULED,
+            Job.scheduled_for > datetime.utcnow()
+        ) \
+        .one()
+
+
 def dao_create_job(job):
     db.session.add(job)
     db.session.commit()
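The new dao_get_future_scheduled_job_by_id_and_service_id uses .one(), so it raises SQLAlchemy's NoResultFound for any job that is not a future scheduled job of that service; the REST layer below relies on that to return 404 when a job cannot be cancelled (see test_cant_cancel_normal_job). A minimal sketch of the kind of error handler that performs that mapping; the handler itself is an assumption for illustration and is not part of this diff:

from flask import Flask, jsonify
from sqlalchemy.orm.exc import NoResultFound

app = Flask(__name__)  # placeholder app; the real app wires its handlers up elsewhere


@app.errorhandler(NoResultFound)
def handle_no_result_found(error):
    # Translate an empty .one() result into an HTTP 404 response
    return jsonify(result='error', message='No result found'), 404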
@@ -7,8 +7,10 @@ from flask import (
 
 from app.dao.jobs_dao import (
     dao_create_job,
+    dao_update_job,
     dao_get_job_by_service_id_and_job_id,
     dao_get_jobs_by_service_id,
+    dao_get_future_scheduled_job_by_id_and_service_id,
     dao_get_notification_outcomes_for_job
 )
 
@@ -28,7 +30,7 @@ from app.schemas import (
 
 from app.celery.tasks import process_job
 
-from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING
+from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED
 
 from app.utils import pagination_links
 
@@ -53,6 +55,15 @@ def get_job_by_service_and_job_id(service_id, job_id):
     return jsonify(data=data)
 
 
+@job.route('/<job_id>/cancel', methods=['POST'])
+def cancel_job(service_id, job_id):
+    job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id)
+    job.job_status = JOB_STATUS_CANCELLED
+    dao_update_job(job)
+
+    return get_job_by_service_and_job_id(service_id, job_id)
+
+
 @job.route('/<job_id>/notifications', methods=['GET'])
 def get_all_notifications_for_service_job(service_id, job_id):
     data = notifications_filter_schema.load(request.args).data
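The cancel route is declared relative to the existing job blueprint, so the full path the tests POST to is /service/<service_id>/job/<job_id>/cancel. A minimal sketch of that registration pattern, assuming the blueprint is mounted under a service-scoped URL prefix; the actual registration code lives elsewhere in the app and is not shown in this diff:

from flask import Blueprint, Flask

job = Blueprint('job', __name__)


@job.route('/<job_id>/cancel', methods=['POST'])
def cancel_job(service_id, job_id):
    # Placeholder body; the real view is in the diff above
    return 'cancelled', 200


app = Flask(__name__)
# A blueprint url_prefix may contain URL variables; service_id is then passed
# to every view on the blueprint, matching the view signatures above.
app.register_blueprint(job, url_prefix='/service/<service_id>/job')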
@@ -305,6 +305,7 @@ JOB_STATUS_IN_PROGRESS = 'in progress'
 JOB_STATUS_FINISHED = 'finished'
 JOB_STATUS_SENDING_LIMITS_EXCEEDED = 'sending limits exceeded'
 JOB_STATUS_SCHEDULED = 'scheduled'
+JOB_STATUS_CANCELLED = 'cancelled'
 
 
 class JobStatus(db.Model):
migrations/versions/0051_cancelled_job_status.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+"""empty message
+
+Revision ID: 0051_cancelled_job_status
+Revises: 0050_index_for_stats
+Create Date: 2016-09-01 14:34:06.839381
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '0051_cancelled_job_status'
+down_revision = '0050_index_for_stats'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+def upgrade():
+    op.execute("INSERT INTO job_status VALUES ('cancelled')")
+
+def downgrade():
+    op.execute("UPDATE jobs SET job_status = 'finished' WHERE job_status = 'cancelled'")
+    op.execute("DELETE FROM job_status WHERE name = 'cancelled';")
@@ -1,5 +1,5 @@
 import uuid
-from datetime import (datetime, date)
+from datetime import (datetime, date, timedelta)
 
 import requests_mock
 import pytest
@@ -287,6 +287,22 @@ def sample_job_with_placeholdered_template(
     )
 
 
+@pytest.fixture(scope='function')
+def sample_scheduled_job(
+        notify_db,
+        notify_db_session,
+        service=None
+):
+    return sample_job(
+        notify_db,
+        notify_db_session,
+        service=service,
+        template=sample_template_with_placeholders(notify_db, notify_db_session),
+        scheduled_for=(datetime.utcnow() + timedelta(minutes=60)).isoformat(),
+        job_status='scheduled'
+    )
+
+
 @pytest.fixture(scope='function')
 def sample_email_job(notify_db,
                      notify_db_session,
@@ -7,6 +7,7 @@ from app.dao.jobs_dao import (
     dao_update_job,
     dao_get_jobs_by_service_id,
     dao_get_scheduled_jobs,
+    dao_get_future_scheduled_job_by_id_and_service_id,
     dao_get_notification_outcomes_for_job
 )
 
@@ -246,8 +247,11 @@ def test_get_scheduled_jobs_gets_ignores_jobs_not_scheduled(notify_db, notify_db
     assert jobs[0].id == job_scheduled.id
 
 
-def test_get_scheduled_jobs_gets_ignores_jobs_scheduled_in_the_future(notify_db, notify_db_session):
-    one_minute_in_the_future = datetime.utcnow() + timedelta(minutes=1)
-    sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_future, job_status='scheduled')
+def test_get_scheduled_jobs_gets_ignores_jobs_scheduled_in_the_future(sample_scheduled_job):
     jobs = dao_get_scheduled_jobs()
     assert len(jobs) == 0
+
+
+def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job):
+    result = dao_get_future_scheduled_job_by_id_and_service_id(sample_scheduled_job.id, sample_scheduled_job.service_id)
+    assert result.id == sample_scheduled_job.id
@@ -109,6 +109,31 @@ def test_get_job_by_id(notify_api, sample_job):
     assert resp_json['data']['created_by']['name'] == 'Test User'
 
 
+def test_cancel_job(notify_api, sample_scheduled_job):
+    job_id = str(sample_scheduled_job.id)
+    service_id = sample_scheduled_job.service.id
+    with notify_api.test_request_context(), notify_api.test_client() as client:
+        path = '/service/{}/job/{}/cancel'.format(service_id, job_id)
+        auth_header = create_authorization_header(service_id=service_id)
+        response = client.post(path, headers=[auth_header])
+        assert response.status_code == 200
+        resp_json = json.loads(response.get_data(as_text=True))
+        assert resp_json['data']['id'] == job_id
+        assert resp_json['data']['job_status'] == 'cancelled'
+
+
+def test_cant_cancel_normal_job(notify_api, sample_job, mocker):
+    job_id = str(sample_job.id)
+    service_id = sample_job.service.id
+    with notify_api.test_request_context(), notify_api.test_client() as client:
+        mock_update = mocker.patch('app.dao.jobs_dao.dao_update_job')
+        path = '/service/{}/job/{}/cancel'.format(service_id, job_id)
+        auth_header = create_authorization_header(service_id=service_id)
+        response = client.post(path, headers=[auth_header])
+        assert response.status_code == 404
+        assert mock_update.call_count == 0
+
+
 def test_create_unscheduled_job(notify_api, sample_template, mocker, fake_uuid):
     with notify_api.test_request_context():
         with notify_api.test_client() as client: