diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py
index b4dfbd671..b0cb57660 100644
--- a/app/celery/scheduled_tasks.py
+++ b/app/celery/scheduled_tasks.py
@@ -6,10 +6,30 @@ from sqlalchemy.exc import SQLAlchemyError
 from app import notify_celery
 from app.clients import STATISTICS_FAILURE
 from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
+from app.dao.jobs_dao import dao_get_scheduled_jobs, dao_update_job
 from app.dao.notifications_dao import delete_notifications_created_more_than_a_week_ago, get_notifications, \
     update_notification_status_by_id
 from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
 from app.statsd_decorators import statsd
+from app.models import JOB_STATUS_PENDING
+from app.celery.tasks import process_job
+
+
+@notify_celery.task(name="run-scheduled-jobs")
+@statsd(namespace="tasks")
+def run_scheduled_jobs():
+    try:
+        jobs = dao_get_scheduled_jobs()
+        for job in jobs:
+            job.job_status = JOB_STATUS_PENDING
+            dao_update_job(job)
+            process_job.apply_async([str(job.id)], queue="process-job")
+            current_app.logger.info(
+                "Job ID {} added to process job queue".format(job.id)
+            )
+    except SQLAlchemyError:
+        current_app.logger.exception("Failed to run scheduled jobs")
+        raise


 @notify_celery.task(name="delete-verify-codes")
@@ -21,8 +41,8 @@ def delete_verify_codes():
         current_app.logger.info(
             "Delete job started {} finished {} deleted {} verify codes".format(start, datetime.utcnow(), deleted)
         )
     except SQLAlchemyError:
-        current_app.logger.info("Failed to delete verify codes")
+        current_app.logger.exception("Failed to delete verify codes")
         raise


@@ -39,8 +59,8 @@ def delete_successful_notifications():
                 deleted
             )
         )
     except SQLAlchemyError:
-        current_app.logger.info("Failed to delete successful notifications")
+        current_app.logger.exception("Failed to delete successful notifications")
         raise


@@ -60,8 +80,8 @@ def delete_failed_notifications():
                 deleted
             )
         )
     except SQLAlchemyError:
-        current_app.logger.info("Failed to delete failed notifications")
+        current_app.logger.exception("Failed to delete failed notifications")
         raise


@@ -74,8 +94,8 @@ def delete_invitations():
         current_app.logger.info(
             "Delete job started {} finished {} deleted {} invitations".format(start, datetime.utcnow(), deleted)
         )
     except SQLAlchemyError:
-        current_app.logger.info("Failed to delete invitations")
+        current_app.logger.exception("Failed to delete invitations")
         raise


@@ -88,7 +108,7 @@ def timeout_notifications():
     for noti in notifications:
         try:
             if (now - noti.created_at) > timedelta(
-                seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD')
+                    seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD')
             ):
                 # TODO: think about making this a bulk update rather than one at a time.
                 updated = update_notification_status_by_id(noti.id, 'temporary-failure')
@@ -97,6 +117,6 @@ def timeout_notifications():
                 "Timeout period reached for notification ({}), status has been updated.".format(noti.id))
         except Exception as e:
             current_app.logger.exception(e)
-            current_app.logger.error((
-                "Exception raised trying to timeout notification ({})"
-                ", skipping notification update.").format(noti.id))
+            current_app.logger.error(
+                "Exception raised trying to timeout notification ({}), skipping notification update.".format(noti.id)
+            )
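
# Illustrative aside (not part of the patch) on the TODO above about making the
# timeout a bulk update: a hedged sketch of a single UPDATE covering every
# timed-out row, instead of one query per notification. It assumes the
# Notification model and db session from app; timeout_notifications_in_bulk is
# a hypothetical name.
from datetime import datetime, timedelta

from app import db
from app.models import Notification


def timeout_notifications_in_bulk(timeout_seconds):
    # Mark every 'sending' notification older than the timeout as
    # 'temporary-failure' in one statement. synchronize_session=False skips
    # syncing the in-memory session, which is fine in a standalone periodic task.
    cutoff = datetime.utcnow() - timedelta(seconds=timeout_seconds)
    updated = db.session.query(Notification).filter(
        Notification.status == 'sending',
        Notification.created_at < cutoff
    ).update({'status': 'temporary-failure'}, synchronize_session=False)
    db.session.commit()
    return updated
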
diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py
index 1ebba4893..3ad1f84fd 100644
--- a/app/dao/jobs_dao.py
+++ b/app/dao/jobs_dao.py
@@ -1,4 +1,5 @@
-from sqlalchemy import desc, cast, Date as sql_date
+from datetime import datetime
+from sqlalchemy import desc, asc, cast, Date as sql_date
 from app import db
 from app.dao import days_ago
 from app.models import Job, NotificationHistory
@@ -36,6 +37,13 @@ def dao_get_job_by_id(job_id):
     return Job.query.filter_by(id=job_id).one()


+def dao_get_scheduled_jobs():
+    return Job.query \
+        .filter(Job.job_status == 'scheduled', Job.scheduled_for < datetime.utcnow()) \
+        .order_by(asc(Job.scheduled_for)) \
+        .all()
+
+
 def dao_create_job(job):
     db.session.add(job)
     db.session.commit()
diff --git a/app/job/rest.py b/app/job/rest.py
index 8f5be6254..235700918 100644
--- a/app/job/rest.py
+++ b/app/job/rest.py
@@ -28,6 +28,8 @@ from app.schemas import (

 from app.celery.tasks import process_job

+from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING
+
 from app.utils import pagination_links

 job = Blueprint('job', __name__, url_prefix='/service/<uuid:service_id>/job')
@@ -104,6 +106,7 @@ def create_job(service_id):
     dao_fetch_service_by_id(service_id)

     data = request.get_json()
+
     data.update({
         "service": service_id
     })
@@ -115,7 +118,15 @@ def create_job(service_id):
         raise InvalidRequest(errors, status_code=400)

     data.update({"template_version": template.version})
+
     job = job_schema.load(data).data
+
+    if job.scheduled_for:
+        job.job_status = JOB_STATUS_SCHEDULED
+
     dao_create_job(job)
-    process_job.apply_async([str(job.id)], queue="process-job")
+
+    if job.job_status == JOB_STATUS_PENDING:
+        process_job.apply_async([str(job.id)], queue="process-job")
+
     return jsonify(data=job_schema.dump(job).data), 201
diff --git a/app/models.py b/app/models.py
index 17e60f51c..e3bb53f2c 100644
--- a/app/models.py
+++ b/app/models.py
@@ -310,7 +310,7 @@ JOB_STATUS_SENDING_LIMITS_EXCEEDED = 'sending limits exceeded'
 JOB_STATUS_SCHEDULED = 'scheduled'


-class JobStatusTypes(db.Model):
+class JobStatus(db.Model):
     __tablename__ = 'job_status'

     name = db.Column(db.String(255), primary_key=True)
@@ -362,7 +362,8 @@ class Job(db.Model):
         unique=False,
         nullable=True)
     job_status = db.Column(
-        db.String(255), db.ForeignKey('job_status.name'), index=True, nullable=True)
+        db.String(255), db.ForeignKey('job_status.name'), index=True, nullable=True, default=JOB_STATUS_PENDING
+    )


 VERIFY_CODE_TYPES = [EMAIL_TYPE, SMS_TYPE]
diff --git a/app/schemas.py b/app/schemas.py
index 211d1ed4a..a053ab57c 100644
--- a/app/schemas.py
+++ b/app/schemas.py
@@ -1,10 +1,9 @@
 import re
 from datetime import (
     datetime,
-    date
-)
+    date,
+    timedelta)
 from flask_marshmallow.fields import fields
-
 from marshmallow import (
     post_load,
     ValidationError,
@@ -40,11 +39,31 @@ def _validate_positive_number(value, msg="Not a positive integer"):
         raise ValidationError(msg)


+def _validate_datetime_not_more_than_24_hours_in_future(dte, msg="Date cannot be more than 24hrs in the future"):
+    if dte > datetime.utcnow() + timedelta(hours=24):
+        raise ValidationError(msg)
+
+
 def _validate_not_in_future(dte, msg="Date cannot be in the future"):
     if dte > date.today():
         raise ValidationError(msg)


+def _validate_not_in_past(dte, msg="Date cannot be in the past"):
+    if dte < date.today():
+        raise ValidationError(msg)
+
+
+def _validate_datetime_not_in_future(dte, msg="Date cannot be in the future"):
+    if dte > datetime.utcnow():
+        raise ValidationError(msg)
+
+
+def _validate_datetime_not_in_past(dte, msg="Date cannot be in the past"):
+    if dte < datetime.utcnow():
+        raise ValidationError(msg)
+
+
 # TODO I think marshmallow provides a better integration and error handling.
 # Would be better to replace functionality in dao with the marshmallow supported
 # functionality.
@@ -208,6 +227,15 @@ class JobSchema(BaseSchema):
         dump_to="created_by", only=["id", "name"], dump_only=True)
     created_by = field_for(models.Job, 'created_by', required=True, load_only=True)

+    job_status = field_for(models.JobStatus, 'name', required=False)
+
+    scheduled_for = fields.DateTime()
+
+    @validates('scheduled_for')
+    def validate_scheduled_for(self, value):
+        _validate_datetime_not_in_past(value)
+        _validate_datetime_not_more_than_24_hours_in_future(value)
+
     class Meta:
         model = models.Job
         exclude = (
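
# Illustrative aside (not part of the patch): the two validators wired into
# JobSchema.scheduled_for above combine into a closed window,
# now <= scheduled_for <= now + 24h. A minimal standalone sketch of that
# window; is_valid_scheduled_for is a hypothetical helper, not code from
# this change.
from datetime import datetime, timedelta


def is_valid_scheduled_for(dte, now=None):
    now = now or datetime.utcnow()
    return now <= dte <= now + timedelta(hours=24)


now = datetime(2016, 1, 1, 12, 0, 0)
assert is_valid_scheduled_for(now + timedelta(hours=23, minutes=59), now)     # accepted
assert not is_valid_scheduled_for(now + timedelta(hours=24, minutes=1), now)  # "more than 24hrs in the future"
assert not is_valid_scheduled_for(now - timedelta(minutes=1), now)            # "in the past"
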
diff --git a/config.py b/config.py
index d8067dc53..e71a5d51a 100644
--- a/config.py
+++ b/config.py
@@ -50,6 +50,11 @@ class Config(object):
     CELERY_TASK_SERIALIZER = 'json'
     CELERY_IMPORTS = ('app.celery.tasks', 'app.celery.scheduled_tasks')
     CELERYBEAT_SCHEDULE = {
+        'run-scheduled-jobs': {
+            'task': 'run-scheduled-jobs',
+            'schedule': crontab(),
+            'options': {'queue': 'periodic'}
+        },
         'delete-verify-codes': {
             'task': 'delete-verify-codes',
             'schedule': timedelta(minutes=63),
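
# Note on the 'schedule': crontab() entry above: a bare crontab() fires every
# minute, so a due job is picked up with at most roughly a minute of delay
# (plus queue latency). The explicit equivalent, for reference:
from celery.schedules import crontab

every_minute = crontab(minute='*', hour='*', day_of_week='*')
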
diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py
index 0564b1712..92478d7da 100644
--- a/tests/app/celery/test_scheduled_tasks.py
+++ b/tests/app/celery/test_scheduled_tasks.py
@@ -7,8 +7,11 @@ from app.celery.scheduled_tasks import (delete_verify_codes,
                                         delete_successful_notifications,
                                         delete_failed_notifications,
                                         delete_invitations,
-                                        timeout_notifications)
-from tests.app.conftest import sample_notification
+                                        timeout_notifications,
+                                        run_scheduled_jobs)
+from app.dao.jobs_dao import dao_get_job_by_id
+from tests.app.conftest import sample_notification, sample_job
+from mock import call


 def test_should_have_decorated_tasks_functions():
@@ -17,10 +20,11 @@ def test_should_have_decorated_tasks_functions():
     assert delete_failed_notifications.__wrapped__.__name__ == 'delete_failed_notifications'
     assert timeout_notifications.__wrapped__.__name__ == 'timeout_notifications'
     assert delete_invitations.__wrapped__.__name__ == 'delete_invitations'
+    assert run_scheduled_jobs.__wrapped__.__name__ == 'run_scheduled_jobs'


 def test_should_call_delete_notifications_more_than_week_in_task(notify_api, mocker):
-    mocked = mocker.patch('app.celery.scheduled_tasksgit .delete_notifications_created_more_than_a_week_ago')
+    mocked = mocker.patch('app.celery.scheduled_tasks.delete_notifications_created_more_than_a_week_ago')
     delete_successful_notifications()
     assert mocked.assert_called_with('delivered')
     assert scheduled_tasks.delete_notifications_created_more_than_a_week_ago.call_count == 1
@@ -80,3 +84,39 @@ def test_not_update_status_of_notification_before_timeout(notify_api,
             seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') - 10))
     timeout_notifications()
     assert not1.status == 'sending'
+
+
+def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker):
+    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')
+
+    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
+    job = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_past, job_status='scheduled')
+
+    run_scheduled_jobs()
+
+    updated_job = dao_get_job_by_id(job.id)
+    assert updated_job.job_status == 'pending'
+    mocked.assert_called_with([str(job.id)], queue='process-job')
+
+
+def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker):
+    mocked = mocker.patch('app.celery.tasks.process_job.apply_async')
+
+    one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1)
+    ten_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=10)
+    twenty_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=20)
+    job_1 = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_past, job_status='scheduled')
+    job_2 = sample_job(notify_db, notify_db_session, scheduled_for=ten_minutes_in_the_past, job_status='scheduled')
+    job_3 = sample_job(notify_db, notify_db_session, scheduled_for=twenty_minutes_in_the_past, job_status='scheduled')
+
+    run_scheduled_jobs()
+
+    assert dao_get_job_by_id(job_1.id).job_status == 'pending'
+    assert dao_get_job_by_id(job_2.id).job_status == 'pending'
+    assert dao_get_job_by_id(job_3.id).job_status == 'pending'
+
+    mocked.assert_has_calls([
+        call([str(job_3.id)], queue='process-job'),
+        call([str(job_2.id)], queue='process-job'),
+        call([str(job_1.id)], queue='process-job')
+    ])
diff --git a/tests/app/conftest.py b/tests/app/conftest.py
index a14dbb5fc..76aba362a 100644
--- a/tests/app/conftest.py
+++ b/tests/app/conftest.py
@@ -248,7 +248,9 @@ def sample_job(notify_db,
                service=None,
                template=None,
                notification_count=1,
-               created_at=datetime.utcnow()):
+               created_at=datetime.utcnow(),
+               job_status='pending',
+               scheduled_for=None):
     if service is None:
         service = sample_service(notify_db, notify_db_session)
     if template is None:
@@ -263,7 +265,9 @@ def sample_job(notify_db,
         'original_file_name': 'some.csv',
         'notification_count': notification_count,
         'created_at': created_at,
-        'created_by': service.created_by
+        'created_by': service.created_by,
+        'job_status': job_status,
+        'scheduled_for': scheduled_for
     }
     job = Job(**data)
     dao_create_job(job)
diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py
index 0a69ffbc3..3674ef24c 100644
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -6,7 +6,9 @@ from app.dao.jobs_dao import (
     dao_create_job,
     dao_update_job,
     dao_get_jobs_by_service_id,
-    dao_get_notification_outcomes_for_job)
+    dao_get_scheduled_jobs,
+    dao_get_notification_outcomes_for_job
+)
 from app.models import Job

 from tests.app.conftest import sample_notification, sample_job, sample_service
@@ -222,3 +224,30 @@ def test_update_job(sample_job):
     job_from_db = Job.query.get(sample_job.id)

     assert job_from_db.status == 'in progress'
+
+
+def test_get_scheduled_jobs_gets_all_jobs_in_scheduled_state_scheduled_before_now(notify_db, notify_db_session):
+    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
+    one_hour_ago = datetime.utcnow() - timedelta(minutes=60)
+    job_new = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_ago, job_status='scheduled')
+    job_old = sample_job(notify_db, notify_db_session, scheduled_for=one_hour_ago, job_status='scheduled')
+    jobs = dao_get_scheduled_jobs()
+    assert len(jobs) == 2
+    assert jobs[0].id == job_old.id
+    assert jobs[1].id == job_new.id
+
+
+def test_get_scheduled_jobs_ignores_jobs_not_scheduled(notify_db, notify_db_session):
+    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
+    sample_job(notify_db, notify_db_session)
+    job_scheduled = sample_job(notify_db, notify_db_session, scheduled_for=one_minute_ago, job_status='scheduled')
+    jobs = dao_get_scheduled_jobs()
+    assert len(jobs) == 1
+    assert jobs[0].id == job_scheduled.id
+
+
+def test_get_scheduled_jobs_ignores_jobs_scheduled_in_the_future(notify_db, notify_db_session):
+    one_minute_in_the_future = datetime.utcnow() + timedelta(minutes=1)
+    sample_job(notify_db, notify_db_session, scheduled_for=one_minute_in_the_future, job_status='scheduled')
+    jobs = dao_get_scheduled_jobs()
+    assert len(jobs) == 0
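
# Illustrative aside (not part of the patch) on sample_job's
# created_at=datetime.utcnow() default in tests/app/conftest.py above: Python
# evaluates default arguments once, at definition time, so every sample_job in
# a test run shares one created_at unless the caller passes it explicitly.
# A self-contained demonstration of the gotcha; make_record is hypothetical.
import time
from datetime import datetime


def make_record(created_at=datetime.utcnow()):
    return created_at


first = make_record()
time.sleep(0.01)
second = make_record()
assert first == second  # same timestamp: the default was captured at import time
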
diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py
index 69e929876..df901cd1d 100644
--- a/tests/app/job/test_rest.py
+++ b/tests/app/job/test_rest.py
@@ -1,9 +1,10 @@
 import json
 import uuid
 from datetime import datetime, timedelta
+from freezegun import freeze_time

 import pytest
-
+import pytz
 import app.celery.tasks

 from tests import create_authorization_header
@@ -94,7 +95,21 @@ def test_get_job_with_unknown_id_returns404(notify_api, sample_template, fake_uu
     }


-def test_create_job(notify_api, sample_template, mocker, fake_uuid):
+def test_get_job_by_id(notify_api, sample_job):
+    job_id = str(sample_job.id)
+    service_id = sample_job.service.id
+    with notify_api.test_request_context():
+        with notify_api.test_client() as client:
+            path = '/service/{}/job/{}'.format(service_id, job_id)
+            auth_header = create_authorization_header(service_id=sample_job.service.id)
+            response = client.get(path, headers=[auth_header])
+            assert response.status_code == 200
+            resp_json = json.loads(response.get_data(as_text=True))
+            assert resp_json['data']['id'] == job_id
+            assert resp_json['data']['created_by']['name'] == 'Test User'
+
+
+def test_create_unscheduled_job(notify_api, sample_template, mocker, fake_uuid):
     with notify_api.test_request_context():
         with notify_api.test_client() as client:
             mocker.patch('app.celery.tasks.process_job.apply_async')
@@ -124,11 +139,119 @@ def test_create_unscheduled_job(notify_api, sample_template, mocker, fake_uuid):
             resp_json = json.loads(response.get_data(as_text=True))

             assert resp_json['data']['id'] == fake_uuid
-            assert resp_json['data']['service'] == str(sample_template.service.id)
+            assert resp_json['data']['status'] == 'pending'
+            assert not resp_json['data']['scheduled_for']
+            assert resp_json['data']['job_status'] == 'pending'
             assert resp_json['data']['template'] == str(sample_template.id)
             assert resp_json['data']['original_file_name'] == 'thisisatest.csv'


+def test_create_scheduled_job(notify_api, sample_template, mocker, fake_uuid):
+    with notify_api.test_request_context():
+        with notify_api.test_client() as client:
+            with freeze_time("2016-01-01 12:00:00.000000"):
+                scheduled_date = (datetime.utcnow() + timedelta(hours=23, minutes=59)).isoformat()
+                mocker.patch('app.celery.tasks.process_job.apply_async')
+                data = {
+                    'id': fake_uuid,
+                    'service': str(sample_template.service.id),
+                    'template': str(sample_template.id),
+                    'original_file_name': 'thisisatest.csv',
+                    'notification_count': 1,
+                    'created_by': str(sample_template.created_by.id),
+                    'scheduled_for': scheduled_date
+                }
+                path = '/service/{}/job'.format(sample_template.service.id)
+                auth_header = create_authorization_header(service_id=sample_template.service.id)
+                headers = [('Content-Type', 'application/json'), auth_header]
+
+                response = client.post(
+                    path,
+                    data=json.dumps(data),
+                    headers=headers)
+                assert response.status_code == 201
+
+                app.celery.tasks.process_job.apply_async.assert_not_called()
+
+                resp_json = json.loads(response.get_data(as_text=True))
+
+                assert resp_json['data']['id'] == fake_uuid
+                assert resp_json['data']['status'] == 'pending'
+                assert resp_json['data']['scheduled_for'] == datetime(2016, 1, 2, 11, 59, 0,
+                                                                      tzinfo=pytz.UTC).isoformat()
+                assert resp_json['data']['job_status'] == 'scheduled'
+                assert resp_json['data']['template'] == str(sample_template.id)
+                assert resp_json['data']['original_file_name'] == 'thisisatest.csv'
+
+
+def test_should_not_create_scheduled_job_more_than_24_hours_hence(notify_api, sample_template, mocker, fake_uuid):
+    with notify_api.test_request_context():
+        with notify_api.test_client() as client:
+            with freeze_time("2016-01-01 11:09:00.061258"):
+                scheduled_date = (datetime.utcnow() + timedelta(hours=24, minutes=1)).isoformat()
+
+                mocker.patch('app.celery.tasks.process_job.apply_async')
+                data = {
+                    'id': fake_uuid,
+                    'service': str(sample_template.service.id),
+                    'template': str(sample_template.id),
+                    'original_file_name': 'thisisatest.csv',
+                    'notification_count': 1,
+                    'created_by': str(sample_template.created_by.id),
+                    'scheduled_for': scheduled_date
+                }
+                path = '/service/{}/job'.format(sample_template.service.id)
+                auth_header = create_authorization_header(service_id=sample_template.service.id)
+                headers = [('Content-Type', 'application/json'), auth_header]
+
+                response = client.post(
+                    path,
+                    data=json.dumps(data),
+                    headers=headers)
+                assert response.status_code == 400
+
+                app.celery.tasks.process_job.apply_async.assert_not_called()
+
+                resp_json = json.loads(response.get_data(as_text=True))
+                assert resp_json['result'] == 'error'
+                assert 'scheduled_for' in resp_json['message']
+                assert resp_json['message']['scheduled_for'] == ['Date cannot be more than 24hrs in the future']
+
+
+def test_should_not_create_scheduled_job_in_the_past(notify_api, sample_template, mocker, fake_uuid):
+    with notify_api.test_request_context():
+        with notify_api.test_client() as client:
+            with freeze_time("2016-01-01 11:09:00.061258"):
+                scheduled_date = (datetime.utcnow() - timedelta(minutes=1)).isoformat()
+
+                mocker.patch('app.celery.tasks.process_job.apply_async')
+                data = {
+                    'id': fake_uuid,
+                    'service': str(sample_template.service.id),
+                    'template': str(sample_template.id),
+                    'original_file_name': 'thisisatest.csv',
+                    'notification_count': 1,
+                    'created_by': str(sample_template.created_by.id),
+                    'scheduled_for': scheduled_date
+                }
+                path = '/service/{}/job'.format(sample_template.service.id)
+                auth_header = create_authorization_header(service_id=sample_template.service.id)
+                headers = [('Content-Type', 'application/json'), auth_header]
+
+                response = client.post(
+                    path,
+                    data=json.dumps(data),
+                    headers=headers)
+                assert response.status_code == 400
+
+                app.celery.tasks.process_job.apply_async.assert_not_called()
+
+                resp_json = json.loads(response.get_data(as_text=True))
+                assert resp_json['result'] == 'error'
+                assert 'scheduled_for' in resp_json['message']
+                assert resp_json['message']['scheduled_for'] == ['Date cannot be in the past']
+
+
 def test_create_job_returns_400_if_missing_data(notify_api, sample_template, mocker):
     with notify_api.test_request_context():
         with notify_api.test_client() as client:
@@ -287,35 +410,19 @@ def test_get_all_notifications_for_job_in_order_of_job_number(notify_api,

 @pytest.mark.parametrize(
     "expected_notification_count, status_args", [
-    (
-        1,
-        '?status={}'.format(NOTIFICATION_STATUS_TYPES[0])
-    ),
-    (
-        0,
-        '?status={}'.format(NOTIFICATION_STATUS_TYPES[1])
-    ),
-    (
-        1,
-        '?status={}&status={}&status={}'.format(
-            *NOTIFICATION_STATUS_TYPES[0:3]
-        )
-    ),
-    (
-        0,
-        '?status={}&status={}&status={}'.format(
-            *NOTIFICATION_STATUS_TYPES[3:6]
-        )
-    ),
+        (1, '?status={}'.format(NOTIFICATION_STATUS_TYPES[0])),
+        (0, '?status={}'.format(NOTIFICATION_STATUS_TYPES[1])),
+        (1, '?status={}&status={}&status={}'.format(*NOTIFICATION_STATUS_TYPES[0:3])),
+        (0, '?status={}&status={}&status={}'.format(*NOTIFICATION_STATUS_TYPES[3:6])),
     ]
 )
 def test_get_all_notifications_for_job_filtered_by_status(
-    notify_api,
-    notify_db,
-    notify_db_session,
-    sample_service,
-    expected_notification_count,
-    status_args
+        notify_api,
+        notify_db,
+        notify_db_session,
+        sample_service,
+        expected_notification_count,
+        status_args
 ):
     with notify_api.test_request_context(), notify_api.test_client() as client:
         job = create_job(notify_db, notify_db_session, service=sample_service)
diff --git a/tests/app/notifications/rest/test_notification_statistics.py b/tests/app/notifications/rest/test_notification_statistics.py
index 769fd7395..6e9de437a 100644
--- a/tests/app/notifications/rest/test_notification_statistics.py
+++ b/tests/app/notifications/rest/test_notification_statistics.py
@@ -1,7 +1,7 @@
-from datetime import date, timedelta
+from datetime import date, datetime, timedelta

 from flask import json
 from freezegun import freeze_time

 from tests import create_authorization_header
 from tests.app.conftest import (
@@ -196,7 +197,6 @@ def test_get_notification_statistics_returns_both_existing_stats_and_generated_z

     assert response.status_code == 200


-@freeze_time('1955-11-05T12:00:00')
 def test_get_notification_statistics_returns_zeros_when_only_stats_for_different_date(
     notify_api,
     sample_notification_statistics
@@ -208,7 +208,7 @@ def test_get_notification_statistics_returns_zeros_when_only_stats_for_different
             service_id=sample_notification_statistics.service_id
         )
         response = client.get(
-            '/notifications/statistics?day={}'.format(date.today().isoformat()),
+            '/notifications/statistics?day={}'.format(datetime.utcnow().isoformat()),
             headers=[auth_header]
         )
diff --git a/tests/app/notifications/test_rest.py b/tests/app/notifications/test_rest.py
index 778bbe66e..efdd6152c 100644
--- a/tests/app/notifications/test_rest.py
+++ b/tests/app/notifications/test_rest.py
@@ -1,4 +1,3 @@
-from datetime import datetime, timedelta
 import uuid

 import pytest
diff --git a/tests/conftest.py b/tests/conftest.py
index cf1789d6e..ea9fa15ad 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -49,7 +49,7 @@ def notify_db_session(request, notify_db):
     def teardown():
         notify_db.session.remove()
         for tbl in reversed(notify_db.metadata.sorted_tables):
-            if tbl.name not in ["provider_details", "key_types", "branding_type"]:
+            if tbl.name not in ["provider_details", "key_types", "branding_type", "job_status"]:
                 notify_db.engine.execute(tbl.delete())
         notify_db.session.commit()
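
# End-to-end usage sketch of the new behaviour: POSTing a job with
# scheduled_for stores it as 'scheduled' instead of queueing it immediately,
# and the run-scheduled-jobs task later moves it to 'pending'. Everything
# below is a placeholder (base URL, IDs, auth header format) mirroring the
# payload in test_create_scheduled_job — it is not a documented client.
import json
from datetime import datetime, timedelta
from urllib.request import Request, urlopen

BASE_URL = 'http://localhost:6011'                       # assumed local dev API
SERVICE_ID = '11111111-1111-1111-1111-111111111111'      # placeholder UUID

payload = {
    'id': '22222222-2222-2222-2222-222222222222',        # placeholder job id
    'service': SERVICE_ID,
    'template': '33333333-3333-3333-3333-333333333333',  # placeholder template id
    'original_file_name': 'thisisatest.csv',
    'notification_count': 1,
    'created_by': '44444444-4444-4444-4444-444444444444',  # placeholder user id
    'scheduled_for': (datetime.utcnow() + timedelta(hours=1)).isoformat(),  # inside the 24h window
}

request = Request(
    '{}/service/{}/job'.format(BASE_URL, SERVICE_ID),
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json',
             'Authorization': 'Bearer <token>'},         # placeholder; the real API expects a signed JWT
)
response = urlopen(request)
print(response.status)  # expect 201; the response body carries job_status 'scheduled'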