Merge pull request #2230 from alphagov/delete-run-letter-jobs

Delete run letter jobs
This commit is contained in:
Rebecca Law
2018-11-20 12:04:28 +00:00
committed by GitHub
10 changed files with 61 additions and 294 deletions

View File

@@ -22,7 +22,6 @@ from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_
from app.dao.invited_org_user_dao import delete_org_invitations_created_more_than_two_days_ago
from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
from app.dao.jobs_dao import (
dao_get_letter_job_ids_by_status,
dao_set_scheduled_jobs_to_pending,
dao_get_jobs_older_than_limited_by
)
@@ -52,7 +51,6 @@ from app.models import (
NOTIFICATION_SENDING,
LETTER_TYPE,
JOB_STATUS_IN_PROGRESS,
JOB_STATUS_READY_TO_SEND,
JOB_STATUS_ERROR,
SMS_TYPE,
EMAIL_TYPE,
@@ -336,7 +334,6 @@ def delete_dvla_response_files_older_than_seven_days():
@notify_celery.task(name="raise-alert-if-letter-notifications-still-sending")
@statsd(namespace="tasks")
def raise_alert_if_letter_notifications_still_sending():
today = datetime.utcnow().date()
# Do nothing on the weekend
@@ -370,19 +367,6 @@ def raise_alert_if_letter_notifications_still_sending():
current_app.logger.info(message)
@notify_celery.task(name="run-letter-jobs")
@statsd(namespace="tasks")
def run_letter_jobs():
    """Queue every letter job that is ready to send onto the FTP processing queue.

    Looks up job ids in the READY_TO_SEND state and, when any exist, hands
    the whole batch to the DVLA task on the process-ftp queue.
    """
    ready_job_ids = dao_get_letter_job_ids_by_status(JOB_STATUS_READY_TO_SEND)
    if ready_job_ids:
        notify_celery.send_task(
            name=TaskNames.DVLA_JOBS,
            args=(ready_job_ids,),
            queue=QueueNames.PROCESS_FTP
        )
        # Only log when something was actually queued.
        current_app.logger.info(
            "Queued {} ready letter job ids onto {}".format(len(ready_job_ids), QueueNames.PROCESS_FTP)
        )
@notify_celery.task(name='check-job-status')
@statsd(namespace="tasks")
def check_job_status():
@@ -446,14 +430,13 @@ def letter_raise_alert_if_no_ack_file_for_zip():
for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
subfolder=datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent',
suffix='.TXT'):
subname = key.split('/')[-1] # strip subfolder in name
subname = key.split('/')[-1] # strip subfolder in name
zip_file_set.add(subname.upper().rstrip('.TXT'))
# get acknowledgement file
ack_file_set = set()
yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format
yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format
for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'],
subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday):
@@ -465,7 +448,7 @@ def letter_raise_alert_if_no_ack_file_for_zip():
for key in ack_file_set:
if today_str in key:
content = s3.get_s3_file(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], key)
for zip_file in content.split('\n'): # each line
for zip_file in content.split('\n'): # each line
s = zip_file.split('|')
ack_content_set.add(s[0].upper())

View File

@@ -54,7 +54,6 @@ class QueueNames(object):
class TaskNames(object):
DVLA_JOBS = 'send-jobs-to-dvla'
PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs'
ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs'
SCAN_FILE = 'scan-file'

View File

@@ -144,21 +144,3 @@ def dao_get_all_letter_jobs():
).order_by(
desc(Job.created_at)
).all()
def dao_get_letter_job_ids_by_status(status):
    """Return the ids (as strings) of letter jobs in the given status, newest first.

    Jobs created through the test-letter API filename are excluded, since
    they must never be despatched.
    """
    matching_jobs = (
        db.session.query(Job)
        .join(Job.template)
        .filter(
            Job.job_status == status,
            Template.template_type == LETTER_TYPE,
            # test letter jobs (or from research mode services) are created with a different filename,
            # exclude them so we don't see them on the send to CSV
            Job.original_file_name != LETTER_TEST_API_FILENAME
        )
        .order_by(desc(Job.created_at))
        .all()
    )
    return [str(matching_job.id) for matching_job in matching_jobs]

View File

@@ -1,20 +1,3 @@
from app.schema_validation.definitions import uuid
# JSON schema (draft-04) for a request body carrying a non-empty list of job UUIDs.
letter_job_ids = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "list of job ids",
    "type": "object",
    "title": "job_ids",
    "properties": {
        "job_ids": {
            "type": "array",
            "items": uuid,
            "minItems": 1
        },
    },
    "required": ["job_ids"]
}
letter_references = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "list of letter notification references",

View File

@@ -1,35 +1,16 @@
from flask import Blueprint, jsonify
from flask import request
from app import notify_celery
from app.celery.tasks import process_returned_letters_list
from app.config import QueueNames, TaskNames
from app.dao.jobs_dao import dao_get_all_letter_jobs
from app.schemas import job_schema
from app.v2.errors import register_errors
from app.letters.letter_schemas import letter_job_ids, letter_references
from app.config import QueueNames
from app.letters.letter_schemas import letter_references
from app.schema_validation import validate
from app.v2.errors import register_errors
letter_job = Blueprint("letter-job", __name__)
register_errors(letter_job)
@letter_job.route('/send-letter-jobs', methods=['POST'])
def send_letter_jobs():
    """Validate a posted list of job ids and queue them for despatch to DVLA.

    Returns 201 once the celery task has been placed on the FTP queue.
    """
    validated_body = validate(request.get_json(), letter_job_ids)
    notify_celery.send_task(
        name=TaskNames.DVLA_JOBS,
        args=(validated_body['job_ids'],),
        queue=QueueNames.PROCESS_FTP
    )
    return jsonify(data={"response": "Task created to send files to DVLA"}), 201
@letter_job.route('/letter-jobs', methods=['GET'])
def get_letter_jobs():
    """Return every letter job, serialised through the job schema, as JSON."""
    serialised_jobs = job_schema.dump(dao_get_all_letter_jobs(), many=True).data
    return jsonify(data=serialised_jobs), 200
@letter_job.route('/letters/returned', methods=['POST'])
def create_process_returned_letters_job():
references = validate(request.get_json(), letter_references)