Merge pull request #667 from alphagov/delete-csv-file-after-7-days

Delete CSV files after 7 days
This commit is contained in:
minglis
2016-09-13 09:03:48 +01:00
committed by GitHub
10 changed files with 70 additions and 39 deletions

0
app/aws/__init__.py Normal file
View File

View File

@@ -3,10 +3,10 @@ from datetime import datetime, timedelta
from flask import current_app
from sqlalchemy.exc import SQLAlchemyError
from app.aws import s3
from app import notify_celery
from app.clients import STATISTICS_FAILURE
from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago
from app.dao.jobs_dao import dao_get_scheduled_jobs, dao_update_job
from app.dao.jobs_dao import dao_get_scheduled_jobs, dao_update_job, dao_get_jobs_older_than
from app.dao.notifications_dao import delete_notifications_created_more_than_a_week_ago, get_notifications, \
update_notification_status_by_id
from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago
@@ -15,6 +15,15 @@ from app.models import JOB_STATUS_PENDING
from app.celery.tasks import process_job
@notify_celery.task(name="remove_csv_files")
@statsd(namespace="tasks")
def remove_csv_files(limit_days=7):
    """Delete the uploaded CSV file from S3 for every job older than ``limit_days``.

    :param limit_days: age threshold in days; jobs created more than this many
        days ago have their CSV removed. Defaults to 7 to preserve the original
        hard-coded retention period.

    Each removal is logged so the cleanup is auditable.
    """
    # NOTE(review): dao_get_jobs_older_than is assumed to return Job rows with
    # service_id and id attributes — confirmed by the usage below.
    jobs = dao_get_jobs_older_than(limit_days)
    for job in jobs:
        s3.remove_job_from_s3(job.service_id, job.id)
        current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
@notify_celery.task(name="run-scheduled-jobs")
@statsd(namespace="tasks")
def run_scheduled_jobs():

View File

@@ -101,20 +101,11 @@ def process_job(job_id):
job.processing_started = start
job.processing_finished = finished
dao_update_job(job)
remove_job.apply_async((str(job_id),), queue='remove-job')
current_app.logger.info(
"Job {} created at {} started at {} finished at {}".format(job_id, job.created_at, start, finished)
)
@notify_celery.task(name="remove-job")
@statsd(namespace="tasks")
def remove_job(job_id):
    """Delete the CSV file backing *job_id* from S3, then log the removal.

    Looks the job up to resolve its owning service, since the S3 key is
    scoped by service id.
    """
    target = dao_get_job_by_id(job_id)
    s3.remove_job_from_s3(target.service.id, str(job_id))
    current_app.logger.info("Job {} has been removed from s3.".format(job_id))
@notify_celery.task(bind=True, name="send-sms", max_retries=5, default_retry_delay=300)
@statsd(namespace="tasks")
def send_sms(self,

View File

@@ -66,3 +66,9 @@ def dao_create_job(job):
def dao_update_job(job):
    """Persist any pending changes on *job* to the database.

    Adds the instance to the session (a no-op if already tracked) and
    commits immediately.
    """
    session = db.session
    session.add(job)
    session.commit()
def dao_get_jobs_older_than(limit_days):
    """Return every job created more than *limit_days* days ago, newest first.

    The created_at timestamp is cast to a date so the comparison is done at
    day granularity rather than by exact time.
    """
    age_filter = cast(Job.created_at, sql_date) < days_ago(limit_days)
    stale_jobs = Job.query.filter(age_filter).order_by(desc(Job.created_at))
    return stale_jobs.all()