Remove default creds from s3 module

Ryan Ahearn
2023-03-03 16:01:12 -05:00
parent cb4ab8fb16
commit 22aa7e2787
2 changed files with 11 additions and 97 deletions


@@ -1,25 +1,19 @@
-import os
-
 import botocore
-from boto3 import Session, client
+from boto3 import Session
 from flask import current_app
 
 FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'
 
-default_access_key = os.environ.get('AWS_ACCESS_KEY_ID')
-default_secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
-default_region = os.environ.get('AWS_REGION')
-
 
 def get_s3_file(
-    bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region
+    bucket_name, file_location, access_key, secret_key, region
 ):
     s3_file = get_s3_object(bucket_name, file_location, access_key, secret_key, region)
     return s3_file.get()['Body'].read().decode('utf-8')
 
 
 def get_s3_object(
-    bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region
+    bucket_name, file_location, access_key, secret_key, region
 ):
     session = Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=region)
     s3 = session.resource('s3')
@@ -27,7 +21,7 @@ def get_s3_object(
 
 
 def file_exists(
-    bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region
+    bucket_name, file_location, access_key, secret_key, region
 ):
     try:
         # try and access metadata of object
@@ -85,28 +79,3 @@ def remove_contact_list_from_s3(service_id, contact_list_id):
 def remove_s3_object(bucket_name, object_key, access_key, secret_key, region):
     obj = get_s3_object(bucket_name, object_key, access_key, secret_key, region)
     return obj.delete()
-
-
-def get_list_of_files_by_suffix(
-    bucket_name,
-    subfolder='',
-    suffix='',
-    last_modified=None,
-    access_key=default_access_key,
-    secret_key=default_secret_key,
-    region=default_region
-):
-    s3_client = client('s3', region, aws_access_key_id=access_key, aws_secret_access_key=secret_key)
-    paginator = s3_client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(
-        Bucket=bucket_name,
-        Prefix=subfolder
-    )
-
-    for page in page_iterator:
-        for obj in page.get('Contents', []):
-            key = obj['Key']
-            if key.lower().endswith(suffix.lower()):
-                if not last_modified or obj['LastModified'] >= last_modified:
-                    yield key
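
Usage note (not part of the diff): with the module-level os.environ defaults gone, every caller has to pass access_key, secret_key, and region explicitly. A minimal sketch of what a caller might look like, assuming the credentials are held in Flask config and that the module imports as app.aws.s3 — the config key names, import path, and function name below are illustrative assumptions, not code from this commit.

# Hypothetical caller sketch: credentials come from Flask config instead of
# the removed module-level defaults. Config key names and import path are assumed.
from flask import current_app

from app.aws.s3 import FILE_LOCATION_STRUCTURE, get_s3_file  # assumed module path


def download_service_csv(service_id, upload_id):
    # Build the object key from the module's FILE_LOCATION_STRUCTURE and fetch
    # the CSV body, passing credentials explicitly on every call.
    return get_s3_file(
        current_app.config['CSV_BUCKET_NAME'],                      # assumed config key
        FILE_LOCATION_STRUCTURE.format(service_id, upload_id),
        access_key=current_app.config['AWS_ACCESS_KEY_ID'],         # assumed config key
        secret_key=current_app.config['AWS_SECRET_ACCESS_KEY'],     # assumed config key
        region=current_app.config['AWS_REGION'],                    # assumed config key
    )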