Merge pull request #631 from alphagov/add-new-column-to-jobs-for-delayed-sending

Adds new job_status table and FK to jobs.
This commit is contained in:
minglis
2016-08-25 12:59:16 +01:00
committed by GitHub
4 changed files with 124 additions and 53 deletions

View File

@@ -303,6 +303,19 @@ class ProviderDetails(db.Model):
# Canonical job status values.  Keep these in sync with the rows seeded into
# the job_status lookup table by migration 0048_job_scheduled_time.
JOB_STATUS_PENDING = 'pending'
JOB_STATUS_IN_PROGRESS = 'in progress'
JOB_STATUS_FINISHED = 'finished'
JOB_STATUS_SENDING_LIMITS_EXCEEDED = 'sending limits exceeded'
JOB_STATUS_SCHEDULED = 'scheduled'

# Fix: 'scheduled' was missing from this list even though the migration
# inserts it into the job_status table and JOB_STATUS_SCHEDULED is defined
# above — include every seeded status so validation accepts all of them.
JOB_STATUS_TYPES = [
    JOB_STATUS_PENDING,
    JOB_STATUS_IN_PROGRESS,
    JOB_STATUS_FINISHED,
    JOB_STATUS_SENDING_LIMITS_EXCEEDED,
    JOB_STATUS_SCHEDULED,
]
class JobStatusTypes(db.Model):
    """Lookup table of valid job statuses; jobs.job_status is a FK to name."""
    __tablename__ = 'job_status'

    # The status string itself (e.g. 'pending') doubles as the primary key.
    name = db.Column(db.String(255), primary_key=True)
class Job(db.Model):
__tablename__ = 'jobs'
@@ -343,6 +356,13 @@ class Job(db.Model):
nullable=True)
# User who created the job (required FK to users.id, indexed for lookups).
created_by = db.relationship('User')
created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), index=True, nullable=False)

# When the job should be processed, for delayed sending; NULL means the job
# was not scheduled and should be processed immediately.
scheduled_for = db.Column(
    db.DateTime,
    index=True,
    unique=False,
    nullable=True)

# Current status string; FK into the job_status lookup table (JobStatusTypes).
# Nullable so rows created before this column existed remain valid.
job_status = db.Column(
    db.String(255), db.ForeignKey('job_status.name'), index=True, nullable=True)
VERIFY_CODE_TYPES = [EMAIL_TYPE, SMS_TYPE]

View File

@@ -20,59 +20,68 @@ from app.models import Job, Template, NotificationHistory
def upgrade():
    """Intentional no-op: the original data backfill has already been applied.

    The diff render duplicated the removed live code ahead of its
    commented-out replacement; the intended final state of this migration
    is the historical comment below plus ``pass``.
    """
    #
    # REMOVED
    # This script has been applied and doesn't need to be re-applied
    # note that by referencing the model objects in migration files, any subsequent alteration of the model and thus
    # the database causes all previous migration scripts to fail as the model and DB will be inconsistent in this
    # past state.
    #
    # session = Session(bind=op.get_bind())
    #
    # go_live = datetime.datetime.strptime('2016-05-18', '%Y-%m-%d')
    # notifications_history_start_date = datetime.datetime.strptime('2016-06-26 23:21:55', '%Y-%m-%d %H:%M:%S')
    # jobs = session.query(Job).join(Template).filter(Job.service_id == '95316ff0-e555-462d-a6e7-95d26fbfd091',
    #                                                 Job.created_at >= go_live,
    #                                                 Job.created_at < notifications_history_start_date).all()
    #
    # for job in jobs:
    #     for i in range(0, job.notifications_delivered):
    #         notification = NotificationHistory(id=uuid.uuid4(),
    #                                            job_id=job.id,
    #                                            service_id=job.service_id,
    #                                            template_id=job.template.id,
    #                                            template_version=job.template_version,
    #                                            key_type='normal',
    #                                            content_char_count=len(job.template.content),
    #                                            notification_type=job.template.template_type,
    #                                            created_at=job.created_at,
    #                                            sent_at=job.processing_finished,
    #                                            sent_by='ses' if job.template.template_type == 'email' else 'mmg',
    #                                            status='delivered')
    #
    #         session.add(notification)
    #
    #     for i in range(0, job.notifications_failed):
    #         notification = NotificationHistory(id=uuid.uuid4(),
    #                                            job_id=job.id,
    #                                            service_id=job.service_id,
    #                                            template_id=job.template.id,
    #                                            template_version=job.template_version,
    #                                            key_type='normal',
    #                                            content_char_count=len(job.template.content),
    #                                            notification_type=job.template.template_type,
    #                                            created_at=job.created_at,
    #                                            sent_at=job.processing_finished,
    #                                            sent_by='ses' if job.template.template_type == 'email' else 'mmg',
    #                                            status='permanent-failure')
    #         session.add(notification)
    # session.commit()
    pass
def downgrade():
    """Intentional no-op: the reverse of an already-retired data backfill.

    As with upgrade(), the live deletion code was removed in favour of the
    historical comment below, because migration scripts must not reference
    live model classes (they drift from the schema at this point in history).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # session = Session(bind=op.get_bind())
    #
    # go_live = datetime.datetime.strptime('2016-05-18', '%Y-%m-%d')
    # notifications_history_start_date = datetime.datetime.strptime('2016-06-26 23:21:55', '%Y-%m-%d %H:%M:%S')
    #
    # session.query(NotificationHistory).filter(
    #     NotificationHistory.created_at >= go_live,
    #     NotificationHistory.service_id == '95316ff0-e555-462d-a6e7-95d26fbfd091',
    #     NotificationHistory.created_at < notifications_history_start_date).delete()
    #
    # session.commit()
    # ### end Alembic commands ###
    pass

View File

@@ -0,0 +1,41 @@
"""empty message
Revision ID: 0048_job_scheduled_time
Revises: 0047_ukvi_spelling
Create Date: 2016-08-24 13:21:51.744526
"""
# revision identifiers, used by Alembic.
revision = '0048_job_scheduled_time'
down_revision = '0047_ukvi_spelling'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add delayed-sending support.

    Creates the job_status lookup table, adds jobs.scheduled_for and
    jobs.job_status (FK to job_status.name) with indexes, and seeds the
    lookup table with every status the application recognises.
    """
    op.create_table('job_status',
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.PrimaryKeyConstraint('name')
                    )
    op.add_column('jobs', sa.Column('job_status', sa.String(length=255), nullable=True))
    op.add_column('jobs', sa.Column('scheduled_for', sa.DateTime(), nullable=True))
    op.create_index(op.f('ix_jobs_job_status'), 'jobs', ['job_status'], unique=False)
    op.create_index(op.f('ix_jobs_scheduled_for'), 'jobs', ['scheduled_for'], unique=False)
    # Fix: name the constraint explicitly instead of passing None —
    # downgrade() drops it by the name 'jobs_job_status_fkey', and relying on
    # the backend's implicit naming convention to produce that name is fragile.
    op.create_foreign_key('jobs_job_status_fkey', 'jobs', 'job_status', ['job_status'], ['name'])
    # Seed the lookup table; keep this list in sync with JOB_STATUS_TYPES.
    for status in ('pending', 'in progress', 'finished',
                   'sending limits exceeded', 'scheduled'):
        op.execute("insert into job_status values ('{}')".format(status))
def downgrade():
    """Reverse upgrade(): drop the FK, the indexes, the new columns, and the
    job_status lookup table.

    Order matters: the foreign key must be dropped before the job_status
    column and before the job_status table it references.
    """
    op.drop_constraint('jobs_job_status_fkey', 'jobs', type_='foreignkey')
    op.drop_index(op.f('ix_jobs_scheduled_for'), table_name='jobs')
    op.drop_index(op.f('ix_jobs_job_status'), table_name='jobs')
    op.drop_column('jobs', 'scheduled_for')
    op.drop_column('jobs', 'job_status')
    op.drop_table('job_status')

View File

@@ -109,6 +109,7 @@ def test_create_job(notify_api, sample_template, mocker, fake_uuid):
path = '/service/{}/job'.format(sample_template.service.id)
auth_header = create_authorization_header(service_id=sample_template.service.id)
headers = [('Content-Type', 'application/json'), auth_header]
response = client.post(
path,
data=json.dumps(data),