Proactively specify AWS region for S3 operations

This commit is contained in:
Ryan Ahearn
2022-09-26 10:56:59 -04:00
parent df5e8617e5
commit 538d2cbe4c
6 changed files with 41 additions and 15 deletions

View File

@@ -8,22 +8,23 @@ FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'
default_access_key = os.environ.get('AWS_ACCESS_KEY_ID')
default_secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
default_region = os.environ.get('AWS_REGION')
def get_s3_file(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
s3_file = get_s3_object(bucket_name, file_location, access_key, secret_key)
def get_s3_file(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region):
s3_file = get_s3_object(bucket_name, file_location, access_key, secret_key, region)
return s3_file.get()['Body'].read().decode('utf-8')
def get_s3_object(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
session = Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key)
def get_s3_object(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region):
session = Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=region)
s3 = session.resource('s3')
return s3.Object(bucket_name, file_location)
def file_exists(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
def file_exists(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key, region=default_region):
try:
# try and access metadata of object
get_s3_object(bucket_name, file_location, access_key, secret_key).metadata
get_s3_object(bucket_name, file_location, access_key, secret_key, region).metadata
return True
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] == 404:
@@ -37,6 +38,7 @@ def get_job_location(service_id, job_id):
FILE_LOCATION_STRUCTURE.format(service_id, job_id),
current_app.config['CSV_UPLOAD_ACCESS_KEY'],
current_app.config['CSV_UPLOAD_SECRET_KEY'],
current_app.config['CSV_UPLOAD_REGION'],
)
@@ -46,6 +48,7 @@ def get_contact_list_location(service_id, contact_list_id):
FILE_LOCATION_STRUCTURE.format(service_id, contact_list_id),
current_app.config['CONTACT_LIST_ACCESS_KEY'],
current_app.config['CONTACT_LIST_SECRET_KEY'],
current_app.config['CONTACT_LIST_REGION'],
)
@@ -72,13 +75,21 @@ def remove_contact_list_from_s3(service_id, contact_list_id):
return remove_s3_object(*get_contact_list_location(service_id, contact_list_id))
def remove_s3_object(bucket_name, object_key):
obj = get_s3_object(bucket_name, object_key)
def remove_s3_object(bucket_name, object_key, access_key, secret_key, region):
obj = get_s3_object(bucket_name, object_key, access_key, secret_key, region)
return obj.delete()
def get_list_of_files_by_suffix(bucket_name, subfolder='', suffix='', last_modified=None, access_key=default_access_key, secret_key=default_secret_key):
s3_client = client('s3', current_app.config['AWS_REGION'], aws_access_key_id=access_key, aws_secret_access_key=secret_key)
def get_list_of_files_by_suffix(
bucket_name,
subfolder='',
suffix='',
last_modified=None,
access_key=default_access_key,
secret_key=default_secret_key,
region=default_region
):
s3_client = client('s3', region, aws_access_key_id=access_key, aws_secret_access_key=secret_key)
paginator = s3_client.get_paginator('list_objects_v2')
page_iterator = paginator.paginate(

View File

@@ -23,6 +23,7 @@ def extract_cloudfoundry_config():
os.environ['CSV_UPLOAD_BUCKET_NAME'] = bucket_service['credentials']['bucket']
os.environ['CSV_UPLOAD_ACCESS_KEY'] = bucket_service['credentials']['access_key_id']
os.environ['CSV_UPLOAD_SECRET_KEY'] = bucket_service['credentials']['secret_access_key']
os.environ['CSV_UPLOAD_REGION'] = bucket_service['credentials']['region']
# Contact List Bucket Name
bucket_service = find_by_service_name(vcap_services['s3'], f"notifications-api-contact-list-bucket-{os.environ['DEPLOY_ENV']}")
@@ -30,3 +31,4 @@ def extract_cloudfoundry_config():
os.environ['CONTACT_LIST_BUCKET_NAME'] = bucket_service['credentials']['bucket']
os.environ['CONTACT_LIST_ACCESS_KEY'] = bucket_service['credentials']['access_key_id']
os.environ['CONTACT_LIST_SECRET_KEY'] = bucket_service['credentials']['secret_access_key']
os.environ['CONTACT_LIST_REGION'] = bucket_service['credentials']['region']

View File

@@ -417,9 +417,11 @@ class Development(Config):
CSV_UPLOAD_BUCKET_NAME = 'local-notifications-csv-upload'
CSV_UPLOAD_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
CSV_UPLOAD_SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
CSV_UPLOAD_REGION = os.environ.get('AWS_REGION', 'us-west-2')
CONTACT_LIST_BUCKET_NAME = 'local-contact-list'
CONTACT_LIST_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
CONTACT_LIST_SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
CONTACT_LIST_REGION = os.environ.get('AWS_REGION', 'us-west-2')
# TEST_LETTERS_BUCKET_NAME = 'development-test-letters'
# DVLA_RESPONSE_BUCKET_NAME = 'notify.tools-ftp'
# LETTERS_PDF_BUCKET_NAME = 'development-letters-pdf'
@@ -546,9 +548,11 @@ class Live(Config):
CSV_UPLOAD_BUCKET_NAME = os.environ.get('CSV_UPLOAD_BUCKET_NAME', 'notifications-prototype-csv-upload') # created in gsa sandbox
CSV_UPLOAD_ACCESS_KEY = os.environ.get('CSV_UPLOAD_ACCESS_KEY')
CSV_UPLOAD_SECRET_KEY = os.environ.get('CSV_UPLOAD_SECRET_KEY')
CSV_UPLOAD_REGION = os.environ.get('CSV_UPLOAD_REGION')
CONTACT_LIST_BUCKET_NAME = os.environ.get('CONTACT_LIST_BUCKET_NAME', 'notifications-prototype-contact-list-upload') # created in gsa sandbox
CONTACT_LIST_ACCESS_KEY = os.environ.get('CONTACT_LIST_ACCESS_KEY')
CONTACT_LIST_SECRET_KEY = os.environ.get('CONTACT_LIST_SECRET_KEY')
CONTACT_LIST_REGION = os.environ.get('CONTACT_LIST_REGION')
# TODO: verify below buckets only used for letters
# TEST_LETTERS_BUCKET_NAME = 'production-test-letters' # not created in gsa sandbox
# DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp' # not created in gsa sandbox

View File

@@ -1,11 +1,16 @@
import os
from datetime import datetime, timedelta
import pytest
import pytz
from freezegun import freeze_time
from app.aws.s3 import get_list_of_files_by_suffix, get_s3_file
from app.aws.s3 import (
default_access_key,
default_region,
default_secret_key,
get_list_of_files_by_suffix,
get_s3_file,
)
from tests.app.conftest import datetime_in_past
@@ -24,8 +29,9 @@ def test_get_s3_file_makes_correct_call(notify_api, mocker):
get_s3_mock.assert_called_with(
'foo-bucket',
'bar-file.txt',
os.environ['AWS_ACCESS_KEY_ID'],
os.environ['AWS_SECRET_ACCESS_KEY']
default_access_key,
default_secret_key,
default_region,
)

View File

@@ -93,7 +93,8 @@ def test_update_letter_notifications_statuses_calls_with_correct_bucket_location
current_app.config['NOTIFY_EMAIL_DOMAIN']),
'NOTIFY-20170823160812-RSP.TXT',
os.environ['AWS_ACCESS_KEY_ID'],
os.environ['AWS_SECRET_ACCESS_KEY']
os.environ['AWS_SECRET_ACCESS_KEY'],
os.environ['AWS_REGION'],
)

View File

@@ -25,6 +25,7 @@ def vcap_services():
'credentials': {
'access_key_id': 'csv-access',
'bucket': 'csv-upload-bucket',
'region': 'us-gov-west-1',
'secret_access_key': 'csv-secret'
}
},
@@ -33,6 +34,7 @@ def vcap_services():
'credentials': {
'access_key_id': 'contact-access',
'bucket': 'contact-list-bucket',
'region': 'us-gov-west-1',
'secret_access_key': 'contact-secret'
}
}