Use correct access credentials for each bucket

This commit is contained in:
Ryan Ahearn
2022-09-21 15:02:43 -04:00
parent ec6c62739b
commit 8ede076708
7 changed files with 52 additions and 25 deletions

View File

@@ -28,6 +28,8 @@ env:
AWS_REGION: us-west-2
AWS_PINPOINT_REGION: us-west-2
AWS_US_TOLL_FREE_NUMBER: +18446120782
AWS_ACCESS_KEY_ID: not-a-real-key-id
AWS_SECRET_ACCESS_KEY: not-a-real-secret
jobs:
build:

View File

@@ -1,30 +1,29 @@
import os
import botocore
from boto3 import client, resource
from boto3 import Session, client
from flask import current_app
FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'
default_access_key = os.environ.get('AWS_ACCESS_KEY_ID')
default_secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
def get_s3_file(bucket_name, file_location):
s3_file = get_s3_object(bucket_name, file_location)
def get_s3_file(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
    """Download an S3 object and return its contents decoded as UTF-8 text.

    Credentials default to the module-level values read from the
    AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables, so
    callers with bucket-specific keys can override them per call.
    """
    s3_object = get_s3_object(bucket_name, file_location, access_key, secret_key)
    body = s3_object.get()['Body']
    return body.read().decode('utf-8')
def get_s3_object(bucket_name, file_location):
s3 = resource('s3')
def get_s3_object(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
    """Return a boto3 ``s3.Object`` handle for *file_location* in *bucket_name*.

    A fresh boto3 Session is built from the supplied credentials (defaults
    come from the environment at import time), so different buckets can be
    accessed with different access keys.
    """
    aws_session = Session(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
    )
    return aws_session.resource('s3').Object(bucket_name, file_location)
def head_s3_object(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
    """Issue a HEAD request for an S3 object and return its metadata dict.

    Unlike ``get_s3_file`` this never downloads the object body; it raises
    ``botocore.exceptions.ClientError`` (404 inside the response metadata)
    when the key does not exist.

    Consistency fix: every other accessor in this module now takes optional
    per-bucket ``access_key``/``secret_key`` parameters (defaulting to the
    environment-derived module values); this one previously relied on the
    ambient credential chain only. The new parameters are backward
    compatible — omitting them preserves the old behaviour.
    """
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
    boto_client = client(
        's3',
        current_app.config['AWS_REGION'],
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
    )
    return boto_client.head_object(Bucket=bucket_name, Key=file_location)
def file_exists(bucket_name, file_location):
def file_exists(bucket_name, file_location, access_key=default_access_key, secret_key=default_secret_key):
try:
# try and access metadata of object
get_s3_object(bucket_name, file_location).metadata
get_s3_object(bucket_name, file_location, access_key, secret_key).metadata
return True
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] == 404:
@@ -36,6 +35,8 @@ def get_job_location(service_id, job_id):
return (
current_app.config['CSV_UPLOAD_BUCKET_NAME'],
FILE_LOCATION_STRUCTURE.format(service_id, job_id),
current_app.config['CSV_UPLOAD_ACCESS_KEY'],
current_app.config['CSV_UPLOAD_SECRET_KEY'],
)
@@ -43,6 +44,8 @@ def get_contact_list_location(service_id, contact_list_id):
return (
current_app.config['CONTACT_LIST_BUCKET_NAME'],
FILE_LOCATION_STRUCTURE.format(service_id, contact_list_id),
current_app.config['CONTACT_LIST_ACCESS_KEY'],
current_app.config['CONTACT_LIST_SECRET_KEY'],
)
@@ -74,8 +77,8 @@ def remove_s3_object(bucket_name, object_key):
return obj.delete()
def get_list_of_files_by_suffix(bucket_name, subfolder='', suffix='', last_modified=None):
s3_client = client('s3', current_app.config['AWS_REGION'])
def get_list_of_files_by_suffix(bucket_name, subfolder='', suffix='', last_modified=None, access_key=default_access_key, secret_key=default_secret_key):
s3_client = client('s3', current_app.config['AWS_REGION'], aws_access_key_id=access_key, aws_secret_access_key=secret_key)
paginator = s3_client.get_paginator('list_objects_v2')
page_iterator = paginator.paginate(

View File

@@ -18,11 +18,15 @@ def extract_cloudfoundry_config():
os.environ['REDIS_URL'] = vcap_services['aws-elasticache-redis'][0]['credentials']['uri']
# CSV Upload Bucket Name
csv_bucket_service = find_by_service_name(vcap_services['s3'], f"notifications-api-csv-upload-bucket-{os.environ['DEPLOY_ENV']}")
if csv_bucket_service:
os.environ['CSV_UPLOAD_BUCKET_NAME'] = csv_bucket_service['credentials']['bucket']
bucket_service = find_by_service_name(vcap_services['s3'], f"notifications-api-csv-upload-bucket-{os.environ['DEPLOY_ENV']}")
if bucket_service:
os.environ['CSV_UPLOAD_BUCKET_NAME'] = bucket_service['credentials']['bucket']
os.environ['CSV_UPLOAD_ACCESS_KEY'] = bucket_service['credentials']['access_key_id']
os.environ['CSV_UPLOAD_SECRET_KEY'] = bucket_service['credentials']['secret_access_key']
# Contact List Bucket Name
contact_bucket_service = find_by_service_name(vcap_services['s3'], f"notifications-api-contact-list-bucket-{os.environ['DEPLOY_ENV']}")
if contact_bucket_service:
os.environ['CONTACT_LIST_BUCKET_NAME'] = contact_bucket_service['credentials']['bucket']
bucket_service = find_by_service_name(vcap_services['s3'], f"notifications-api-contact-list-bucket-{os.environ['DEPLOY_ENV']}")
if bucket_service:
os.environ['CONTACT_LIST_BUCKET_NAME'] = bucket_service['credentials']['bucket']
os.environ['CONTACT_LIST_ACCESS_KEY'] = bucket_service['credentials']['access_key_id']
os.environ['CONTACT_LIST_SECRET_KEY'] = bucket_service['credentials']['secret_access_key']

View File

@@ -415,7 +415,11 @@ class Development(Config):
REDIS_ENABLED = os.environ.get('REDIS_ENABLED')
CSV_UPLOAD_BUCKET_NAME = 'local-notifications-csv-upload'
CSV_UPLOAD_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
CSV_UPLOAD_SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
CONTACT_LIST_BUCKET_NAME = 'local-contact-list'
CONTACT_LIST_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
CONTACT_LIST_SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
# TEST_LETTERS_BUCKET_NAME = 'development-test-letters'
# DVLA_RESPONSE_BUCKET_NAME = 'notify.tools-ftp'
# LETTERS_PDF_BUCKET_NAME = 'development-letters-pdf'
@@ -540,7 +544,11 @@ class Live(Config):
NOTIFY_ENVIRONMENT = 'live'
# buckets
CSV_UPLOAD_BUCKET_NAME = os.environ.get('CSV_UPLOAD_BUCKET_NAME', 'notifications-prototype-csv-upload') # created in gsa sandbox
CSV_UPLOAD_ACCESS_KEY = os.environ.get('CSV_UPLOAD_ACCESS_KEY')
CSV_UPLOAD_SECRET_KEY = os.environ.get('CSV_UPLOAD_SECRET_KEY')
CONTACT_LIST_BUCKET_NAME = os.environ.get('CONTACT_LIST_BUCKET_NAME', 'notifications-prototype-contact-list-upload') # created in gsa sandbox
CONTACT_LIST_ACCESS_KEY = os.environ.get('CONTACT_LIST_ACCESS_KEY')
CONTACT_LIST_SECRET_KEY = os.environ.get('CONTACT_LIST_SECRET_KEY')
# TODO: verify below buckets only used for letters
# TEST_LETTERS_BUCKET_NAME = 'production-test-letters' # not created in gsa sandbox
# DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp' # not created in gsa sandbox

View File

@@ -1,3 +1,4 @@
import os
from datetime import datetime, timedelta
import pytest
@@ -22,7 +23,9 @@ def test_get_s3_file_makes_correct_call(notify_api, mocker):
get_s3_mock.assert_called_with(
'foo-bucket',
'bar-file.txt'
'bar-file.txt',
os.environ['AWS_ACCESS_KEY_ID'],
os.environ['AWS_SECRET_ACCESS_KEY']
)

View File

@@ -1,3 +1,4 @@
import os
from collections import defaultdict, namedtuple
from datetime import date, datetime
@@ -90,7 +91,9 @@ def test_update_letter_notifications_statuses_calls_with_correct_bucket_location
update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT')
s3_mock.assert_called_with('{}-ftp'.format(
current_app.config['NOTIFY_EMAIL_DOMAIN']),
'NOTIFY-20170823160812-RSP.TXT'
'NOTIFY-20170823160812-RSP.TXT',
os.environ['AWS_ACCESS_KEY_ID'],
os.environ['AWS_SECRET_ACCESS_KEY']
)

View File

@@ -23,13 +23,17 @@ def vcap_services():
{
'name': 'notifications-api-csv-upload-bucket-test',
'credentials': {
'bucket': 'csv-upload-bucket'
'access_key_id': 'csv-access',
'bucket': 'csv-upload-bucket',
'secret_access_key': 'csv-secret'
}
},
{
'name': 'notifications-api-contact-list-bucket-test',
'credentials': {
'bucket': 'contact-list-bucket'
'access_key_id': 'contact-access',
'bucket': 'contact-list-bucket',
'secret_access_key': 'contact-secret'
}
}
],