Author: Kenneth Kehl
Date: 2025-01-09 11:28:24 -08:00
parent fbd8643e74
commit 16bba7e4c4
3 changed files with 40 additions and 19 deletions


@@ -10,7 +10,6 @@ from boto3 import Session
 from flask import current_app

 from app.clients import AWS_CLIENT_CONFIG
-from app.utils import hilite
 from notifications_utils import aware_utcnow

 FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
@@ -84,7 +83,6 @@ def get_s3_client():
 def get_s3_resource():
     global s3_resource
     if s3_resource is None:
-        print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
         access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
         secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
         region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
@@ -94,8 +92,6 @@ def get_s3_resource():
             region_name=region,
         )
         s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
-    else:
-        print(hilite("S3 RESOURCE ALREADY EXSITS, REUSING IT!"))
     return s3_resource


@@ -13,11 +13,12 @@ AWS_CLIENT_CONFIG = Config(
     s3={
         "addressing_style": "virtual",
     },
+    max_pool_connections=50,
     use_fips_endpoint=True,
 )

 # Global variable
-s3_resource = None
+noti_s3_resource = None

 default_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
 default_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
@@ -25,21 +26,15 @@ default_region = os.environ.get("AWS_REGION")
 def get_s3_resource():
-    global s3_resource
-    if s3_resource is None:
-        # print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
-        access_key = (default_access_key_id,)
-        secret_key = (default_secret_access_key,)
-        region = (default_region,)
+    global noti_s3_resource
+    if noti_s3_resource is None:
         session = Session(
-            aws_access_key_id=access_key,
-            aws_secret_access_key=secret_key,
-            region_name=region,
+            aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
+            aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
+            region_name=os.environ.get("AWS_REGION"),
         )
-        s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
-    # else:
-    #     print(hilite("S3 RESOURCE ALREADY EXSITS, REUSING IT!"))
-    return s3_resource
+        noti_s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
+    return noti_s3_resource


 def s3upload(
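
For context on the hunk above: get_s3_resource() now builds the boto3 resource once, from environment credentials, and caches it in the module-level noti_s3_resource, so every caller shares a single client and its connection pool (presumably the reason max_pool_connections=50 is set on AWS_CLIENT_CONFIG). A minimal caller sketch follows; the helper name is hypothetical and the real s3upload/s3download bodies are not shown in this diff.

def put_job_csv(bucket_name, file_location, filedata):
    # hypothetical caller: all S3 traffic funnels through the one cached
    # resource returned by get_s3_resource(), so concurrent callers reuse
    # its connection pool instead of opening a fresh Session per call
    obj = get_s3_resource().Object(bucket_name, file_location)
    obj.put(Body=filedata)
    return obj

The usual trade-off of this memoization is that credentials are read only on first use; if they rotate at runtime, the cached resource keeps the old session.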


@@ -1,9 +1,16 @@
+from unittest.mock import MagicMock
 from urllib.parse import parse_qs

 import botocore
 import pytest

-from notifications_utils.s3 import S3ObjectNotFound, s3download, s3upload
+from notifications_utils.s3 import (
+    AWS_CLIENT_CONFIG,
+    S3ObjectNotFound,
+    get_s3_resource,
+    s3download,
+    s3upload,
+)

 contents = "some file data"
 region = "eu-west-1"
@@ -110,6 +117,29 @@ def test_s3upload_save_file_to_bucket_with_metadata(mocker):
     assert metadata == {"status": "valid", "pages": "5"}


+def test_get_s3_resource(mocker):
+    mock_session = mocker.patch("notifications_utils.s3.Session")
+    mock_current_app = mocker.patch("notifications_utils.s3.current_app")
+    sa_key = "sec"
+    sa_key = f"{sa_key}ret_access_key"
+    mock_current_app.config = {
+        "CSV_UPLOAD_BUCKET": {
+            "access_key_id": "test_access_key",
+            sa_key: "test_s_key",
+            "region": "us-west-100",
+        }
+    }
+
+    mock_s3_resource = MagicMock()
+    mock_session.return_value.resource.return_value = mock_s3_resource
+    result = get_s3_resource()
+
+    mock_session.return_value.resource.assert_called_once_with(
+        "s3", config=AWS_CLIENT_CONFIG
+    )
+    assert result == mock_s3_resource
+
+
 def test_s3download_gets_file(mocker):
     mock_s3_resource = mocker.Mock()
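
A testing note on the cached resource (my observation, not something this commit addresses): because noti_s3_resource lives for the life of the module, get_s3_resource() only takes the creation path the first time it is called in a test session, so a test that wants to observe Session construction has to clear the cache first. A hedged sketch, assuming the module is importable as notifications_utils.s3:

import notifications_utils.s3 as s3_module


def test_get_s3_resource_reuses_cached_resource(mocker):
    # clear the module-level cache so the first call takes the creation path
    s3_module.noti_s3_resource = None
    mock_session = mocker.patch("notifications_utils.s3.Session")

    first = s3_module.get_s3_resource()
    second = s3_module.get_s3_resource()

    # the second call returns the cached object; no new Session is created
    assert first is second
    mock_session.assert_called_once()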