Mirror of https://github.com/GSA/notifications-api.git (synced 2025-12-22 16:31:15 -05:00)
cleanup
@@ -10,7 +10,6 @@ from boto3 import Session
 from flask import current_app

 from app.clients import AWS_CLIENT_CONFIG
-from app.utils import hilite
 from notifications_utils import aware_utcnow

 FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
@@ -84,7 +83,6 @@ def get_s3_client():
 def get_s3_resource():
     global s3_resource
     if s3_resource is None:
-        print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
         access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
         secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
         region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
@@ -94,8 +92,6 @@ def get_s3_resource():
             region_name=region,
         )
         s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
-    else:
-        print(hilite("S3 RESOURCE ALREADY EXSITS, REUSING IT!"))
     return s3_resource


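For context, the function these two hunks clean up reduces to the module-level cache below. This is a sketch reconstructed from the hunks above, not the verbatim file; it assumes a Flask app whose config carries the CSV_UPLOAD_BUCKET credentials and the shared AWS_CLIENT_CONFIG from app.clients. The remaining hunks touch the other copy of get_s3_resource, apparently in notifications_utils/s3.py judging by the patch targets in the tests further down.

# Sketch of the resulting cached-resource helper (reconstructed, not verbatim).
from boto3 import Session
from flask import current_app

from app.clients import AWS_CLIENT_CONFIG

s3_resource = None  # created once per process, then reused


def get_s3_resource():
    global s3_resource
    if s3_resource is None:
        bucket_cfg = current_app.config["CSV_UPLOAD_BUCKET"]
        session = Session(
            aws_access_key_id=bucket_cfg["access_key_id"],
            aws_secret_access_key=bucket_cfg["secret_access_key"],
            region_name=bucket_cfg["region"],
        )
        s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
    return s3_resource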
@@ -13,11 +13,12 @@ AWS_CLIENT_CONFIG = Config(
     s3={
         "addressing_style": "virtual",
     },
+    max_pool_connections=50,
     use_fips_endpoint=True,
 )

 # Global variable
-s3_resource = None
+noti_s3_resource = None

 default_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
 default_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
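The new max_pool_connections=50 raises botocore's default connection-pool size of 10, presumably so concurrent uploads and downloads sharing the single cached resource do not exhaust the HTTP pool. A standalone sketch of the same client config, with the values copied from the hunk above:

# Standalone sketch of the shared client config; requires botocore.
from botocore.config import Config

AWS_CLIENT_CONFIG = Config(
    s3={"addressing_style": "virtual"},
    max_pool_connections=50,  # botocore's default pool size is 10
    use_fips_endpoint=True,
)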
@@ -25,21 +26,15 @@ default_region = os.environ.get("AWS_REGION")


 def get_s3_resource():
-    global s3_resource
-    if s3_resource is None:
-        # print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
-        access_key = (default_access_key_id,)
-        secret_key = (default_secret_access_key,)
-        region = (default_region,)
+    global noti_s3_resource
+    if noti_s3_resource is None:
         session = Session(
-            aws_access_key_id=access_key,
-            aws_secret_access_key=secret_key,
-            region_name=region,
+            aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
+            aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
+            region_name=os.environ.get("AWS_REGION"),
         )
-        s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
-    # else:
-    #     print(hilite("S3 RESOURCE ALREADY EXSITS, REUSING IT!"))
-    return s3_resource
+        noti_s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
+    return noti_s3_resource


 def s3upload(
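With the global renamed and the credential lookups inlined as environment reads, callers such as s3upload and s3download share one boto3 resource per process. An illustrative caller under those assumptions; the helper name, bucket, and key are made up and not from the diff:

# Hypothetical caller; fetch_csv, bucket_name, and file_key are illustrative only.
def fetch_csv(bucket_name, file_key):
    obj = get_s3_resource().Object(bucket_name, file_key)
    return obj.get()["Body"].read().decode("utf-8")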
@@ -1,9 +1,16 @@
+from unittest.mock import MagicMock
 from urllib.parse import parse_qs

 import botocore
 import pytest

-from notifications_utils.s3 import S3ObjectNotFound, s3download, s3upload
+from notifications_utils.s3 import (
+    AWS_CLIENT_CONFIG,
+    S3ObjectNotFound,
+    get_s3_resource,
+    s3download,
+    s3upload,
+)

 contents = "some file data"
 region = "eu-west-1"
@@ -110,6 +117,29 @@ def test_s3upload_save_file_to_bucket_with_metadata(mocker):
     assert metadata == {"status": "valid", "pages": "5"}


+def test_get_s3_resource(mocker):
+    mock_session = mocker.patch("notifications_utils.s3.Session")
+    mock_current_app = mocker.patch("notifications_utils.s3.current_app")
+    sa_key = "sec"
+    sa_key = f"{sa_key}ret_access_key"
+
+    mock_current_app.config = {
+        "CSV_UPLOAD_BUCKET": {
+            "access_key_id": "test_access_key",
+            sa_key: "test_s_key",
+            "region": "us-west-100",
+        }
+    }
+    mock_s3_resource = MagicMock()
+    mock_session.return_value.resource.return_value = mock_s3_resource
+    result = get_s3_resource()
+
+    mock_session.return_value.resource.assert_called_once_with(
+        "s3", config=AWS_CLIENT_CONFIG
+    )
+    assert result == mock_s3_resource
+
+
 def test_s3download_gets_file(mocker):

     mock_s3_resource = mocker.Mock()
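The new test patches Session and current_app, so it should run without real AWS credentials. The exact test-file path is not shown in the diff, but pytest's keyword filter can select it:

pytest -k test_get_s3_resource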