Merge pull request #889 from alphagov/restrict-delete-files-job-query

Updates to the delete CSV files job to reduce the number of jobs included in any one run.
minglis authored on 2017-04-06 10:09:49 +01:00; committed by GitHub
4 changed files with 40 additions and 24 deletions
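
In short, the old query fetched every job older than 7 days on every run, a set that grows without bound; the new one fetches only jobs that fall inside a two-day window. A minimal sketch of the window logic in plain Python (the in_removal_window helper is illustrative, and days_ago is assumed to return the date N days before today, which is how the diff below uses it):

    from datetime import date, timedelta

    def days_ago(n):
        # Assumed behaviour of the repo's days_ago helper: the date n days before today.
        return date.today() - timedelta(days=n)

    def in_removal_window(created_at, older_than=7, limit_days=2):
        # Mirrors the new filter: the creation date must fall inside the two-day
        # window [today - (older_than + limit_days), today - older_than).
        return days_ago(older_than + limit_days) <= created_at < days_ago(older_than)

With the defaults this matches only jobs created 8 or 9 days ago, so each run touches a small, roughly constant number of rows.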


@@ -10,7 +10,7 @@ from app.aws import s3
 from app import notify_celery
 from app import performance_platform_client
 from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
-from app.dao.jobs_dao import dao_set_scheduled_jobs_to_pending, dao_get_jobs_older_than
+from app.dao.jobs_dao import dao_set_scheduled_jobs_to_pending, dao_get_jobs_older_than_limited_by
 from app.dao.notifications_dao import (
     delete_notifications_created_more_than_a_week_ago,
     dao_timeout_notifications,
@@ -28,7 +28,7 @@ from app.celery.tasks import process_job
 @notify_celery.task(name="remove_csv_files")
 @statsd(namespace="tasks")
 def remove_csv_files():
-    jobs = dao_get_jobs_older_than(7)
+    jobs = dao_get_jobs_older_than_limited_by()
     for job in jobs:
         s3.remove_job_from_s3(job.service_id, job.id)
         current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
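
A consequence of the defaults: if this task runs daily, each job is eligible on exactly two consecutive runs (8 and 9 days after creation), so a failed run still leaves one more chance to remove a job's CSV before it slides out of the window. A quick sketch with plain dates (illustrative, not the real scheduler):

    from datetime import date, timedelta

    created = date(2017, 4, 1)
    for offset in range(7, 11):
        run_day = created + timedelta(days=offset)
        window_start = run_day - timedelta(days=9)  # days_ago(older_than + limit_days)
        window_end = run_day - timedelta(days=7)    # days_ago(older_than)
        # Prints True only for the runs 8 and 9 days after creation.
        print(run_day, window_start <= created < window_end)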


@@ -116,7 +116,8 @@ def dao_update_job(job):
     db.session.commit()
 
 
-def dao_get_jobs_older_than(limit_days):
+def dao_get_jobs_older_than_limited_by(older_than=7, limit_days=2):
     return Job.query.filter(
-        cast(Job.created_at, sql_date) < days_ago(limit_days)
+        cast(Job.created_at, sql_date) < days_ago(older_than),
+        cast(Job.created_at, sql_date) >= days_ago(older_than + limit_days)
     ).order_by(desc(Job.created_at)).all()
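
Because both bounds are parameters, the window can be widened without touching the task itself. A hypothetical one-off backfill for a period when the job did not run, using only the names that appear in this diff:

    from app.aws import s3
    from app.dao.jobs_dao import dao_get_jobs_older_than_limited_by

    # Hypothetical backfill: sweep jobs between 7 and 37 days old in one pass.
    for job in dao_get_jobs_older_than_limited_by(older_than=7, limit_days=30):
        s3.remove_job_from_s3(job.service_id, job.id)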