Use singletons for S3 client

Kenneth Kehl
2025-01-09 07:47:47 -08:00
parent 31198824cd
commit 5cedd6427d
3 changed files with 42 additions and 12 deletions

View File

@@ -10,6 +10,7 @@ from boto3 import Session
from flask import current_app
from app.clients import AWS_CLIENT_CONFIG
+from app.utils import hilite
from notifications_utils import aware_utcnow

FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
@@ -65,6 +66,7 @@ def clean_cache():
def get_s3_client():
    global s3_client
    if s3_client is None:
+        # print(hilite("S3 CLIENT IS NONE, CREATING IT!"))
        access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
        secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
        region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
@@ -74,12 +76,15 @@ def get_s3_client():
            region_name=region,
        )
        s3_client = session.client("s3")
+    # else:
+    #     print(hilite("S3 CLIENT ALREADY EXISTS, REUSING IT!"))
    return s3_client


def get_s3_resource():
    global s3_resource
    if s3_resource is None:
+        print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
        access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
        secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
        region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
@@ -89,6 +94,8 @@ def get_s3_resource():
            region_name=region,
        )
        s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
+    else:
+        print(hilite("S3 RESOURCE ALREADY EXISTS, REUSING IT!"))
    return s3_resource
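Both getters above follow the same lazy-singleton shape: the first call constructs the boto3 object, every later call returns the cached one. One caveat the diff leaves open is concurrent first use, since two threads can race past the `is None` check and build the client twice. A minimal lock-guarded sketch of the same idea (the `_s3_client` name and the lock are illustrative additions, not part of this commit, and it assumes credentials and region are already in the environment):

    import threading

    import boto3

    _s3_client = None
    _creation_lock = threading.Lock()  # only guards first-time construction


    def get_s3_client():
        global _s3_client
        if _s3_client is None:
            with _creation_lock:
                if _s3_client is None:  # re-check: another thread may have won the race
                    _s3_client = boto3.Session().client("s3")
        return _s3_client

boto3 documents low-level clients as safe to share across threads once created; resources are not, which is worth keeping in mind for the `s3_resource` singleton.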

View File

@@ -2,10 +2,12 @@ import json
from datetime import datetime, timedelta
from os import getenv, path

+from boto3 import Session
from celery.schedules import crontab
from kombu import Exchange, Queue

import notifications_utils
+from app.clients import AWS_CLIENT_CONFIG
from app.cloudfoundry_config import cloud_config
@@ -51,6 +53,13 @@ class TaskNames(object):
SCAN_FILE = "scan-file" SCAN_FILE = "scan-file"
session = Session(
aws_access_key_id=getenv("CSV_AWS_ACCESS_KEY_ID"),
aws_secret_access_key=getenv("CSV_AWS_SECRET_ACCESS_KEY"),
region_name=getenv("CSV_AWS_REGION"),
)
class Config(object): class Config(object):
NOTIFY_APP_NAME = "api" NOTIFY_APP_NAME = "api"
DEFAULT_REDIS_EXPIRE_TIME = 4 * 24 * 60 * 60 DEFAULT_REDIS_EXPIRE_TIME = 4 * 24 * 60 * 60
@@ -166,6 +175,9 @@ class Config(object):
    current_minute = (datetime.now().minute + 1) % 60

+    S3_CLIENT = session.client("s3")
+    S3_RESOURCE = session.resource("s3", config=AWS_CLIENT_CONFIG)

    CELERY = {
        "worker_max_tasks_per_child": 500,
        "task_ignore_result": True,

View File

@@ -16,11 +16,32 @@ AWS_CLIENT_CONFIG = Config(
    use_fips_endpoint=True,
)

+# Global variable
+s3_resource = None

default_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
default_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
default_region = os.environ.get("AWS_REGION")
+def get_s3_resource():
+    global s3_resource
+    if s3_resource is None:
+        # print(hilite("S3 RESOURCE IS NONE, CREATING IT!"))
+        access_key = default_access_key_id
+        secret_key = default_secret_access_key
+        region = default_region
+        session = Session(
+            aws_access_key_id=access_key,
+            aws_secret_access_key=secret_key,
+            region_name=region,
+        )
+        s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG)
+    # else:
+    #     print(hilite("S3 RESOURCE ALREADY EXISTS, REUSING IT!"))
+    return s3_resource
def s3upload(
    filedata,
    region,
@@ -32,12 +53,7 @@ def s3upload(
    access_key=default_access_key_id,
    secret_key=default_secret_access_key,
):
-    session = Session(
-        aws_access_key_id=access_key,
-        aws_secret_access_key=secret_key,
-        region_name=region,
-    )
-    _s3 = session.resource("s3", config=AWS_CLIENT_CONFIG)
+    _s3 = get_s3_resource()

    key = _s3.Object(bucket_name, file_location)
@@ -73,12 +89,7 @@ def s3download(
    secret_key=default_secret_access_key,
):
    try:
-        session = Session(
-            aws_access_key_id=access_key,
-            aws_secret_access_key=secret_key,
-            region_name=region,
-        )
-        s3 = session.resource("s3", config=AWS_CLIENT_CONFIG)
+        s3 = get_s3_resource()
        key = s3.Object(bucket_name, filename)
        return key.get()["Body"]
    except botocore.exceptions.ClientError as error:
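Since both helpers now delegate to `get_s3_resource()`, the `region`, `access_key`, and `secret_key` parameters they still accept no longer affect which credentials are used; the singleton is always built from the module-level environment defaults. A quick identity check of the singleton behaviour, as one might write in a test (the import path is assumed, not shown in the diff):

    from notifications_utils.s3 import get_s3_resource


    def test_s3_resource_is_reused():
        # the singleton holds only if repeated calls return the exact same object
        assert get_s3_resource() is get_s3_resource()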