Fix bug with deleting the S3 file.

Removed the duplicate method.
Rebecca Law
2018-08-13 11:33:19 +01:00
parent ccaa1dfeb2
commit eb2c878edd
3 changed files with 8 additions and 19 deletions
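
A note on the failure mode, as a minimal sketch (assuming standard boto3 paginator semantics; the region, bucket and prefix below are made up): iterating a list_objects_v2 page iterator yields whole response pages, which are plain dicts, so calling .delete() on each item raises AttributeError. The object summaries live under each page's 'Contents' key, and each carries the 'Key' a delete call needs.

from boto3 import client

boto_client = client('s3', 'eu-west-1')                    # hypothetical region
paginator = boto_client.get_paginator('list_objects_v2')
page_iterator = paginator.paginate(
    Bucket='example-letters-bucket',                       # hypothetical bucket
    Prefix='2018-08-13/NOTIFY.REF'                         # hypothetical prefix
)
for page in page_iterator:
    # each page is a dict (one API response), so page.delete(), which is
    # what the old caller effectively ran, raises AttributeError
    for s3_object in page.get('Contents', []):
        print(s3_object['Key'])            # the real object summaries, keyed by 'Key'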

app/aws/s3.py

@@ -66,17 +66,6 @@ def get_s3_bucket_objects(bucket_name, subfolder='', older_than=7, limit_days=2)
     return all_objects_in_bucket
 
 
-def get_s3_object_by_prefix(bucket_name, prefix):
-    boto_client = client('s3', current_app.config['AWS_REGION'])
-    paginator = boto_client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(
-        Bucket=bucket_name,
-        Prefix=prefix
-    )
-    return page_iterator
-
-
 def filter_s3_bucket_objects_within_date_range(bucket_objects, older_than=7, limit_days=2):
     """
     S3 returns the Object['LastModified'] as an 'offset-aware' timestamp so the
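
For contrast, a hedged sketch of the retained get_s3_bucket_objects: its body sits outside this hunk (only the signature and final return are visible), and the date filtering implied by older_than and limit_days is omitted here. The assumption is that it flattens the paginator's pages into one list of object dicts, which is what lets the caller below read s3_object['Key'].

from boto3 import client
from flask import current_app

def get_s3_bucket_objects(bucket_name, subfolder='', older_than=7, limit_days=2):
    boto_client = client('s3', current_app.config['AWS_REGION'])
    paginator = boto_client.get_paginator('list_objects_v2')
    all_objects_in_bucket = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=subfolder):
        # collect the object dicts themselves, not the page wrappers
        all_objects_in_bucket.extend(page.get('Contents', []))
    return all_objects_in_bucket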

app/dao/notifications_dao.py

@@ -22,7 +22,7 @@
 from sqlalchemy.sql import functions
 from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES
 from app import db, create_uuid
-from app.aws.s3 import get_s3_object_by_prefix
+from app.aws.s3 import remove_s3_object, get_s3_bucket_objects
 from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE
 from app.utils import midnight_n_days_ago, escape_special_characters
 from app.errors import InvalidRequest
@@ -352,9 +352,9 @@ def _delete_letters_from_s3(query):
         crown="C" if letter.service.crown else "N",
         date=''
     ).upper()[:-5]
-    s3_objects = get_s3_object_by_prefix(bucket_name=bucket_name, prefix=prefix)
+    s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix)
     for s3_object in s3_objects:
-        s3_object.delete()
+        remove_s3_object(bucket_name, s3_object['Key'])
 
 
 @statsd(namespace="dao")
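
remove_s3_object is imported above but not defined in this diff. A minimal sketch of what it presumably wraps, the boto3 S3 resource's per-object delete (the name and arguments come from the call site; the body is an assumption):

from boto3 import resource

def remove_s3_object(bucket_name, object_key):
    # delete one object by bucket and key, matching the call
    # remove_s3_object(bucket_name, s3_object['Key']) in the loop above
    obj = resource('s3').Object(bucket_name, object_key)
    return obj.delete()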