Mirror of https://github.com/GSA/notifications-admin.git (synced 2026-02-06 03:13:42 -05:00)

Merge pull request #420 from GSA/dev-infra-setup
Add scripts for provisioning admin development credentials
.github/workflows/checks.yml (vendored, 1 line changed)

@@ -13,7 +13,6 @@ env:
  WERKZEUG_DEBUG_PIN: off
  REDIS_ENABLED: 0
  NODE_VERSION: 16.15.1
-  AWS_REGION: us-west-2

jobs:
  build:
.github/workflows/daily_checks.yml (vendored, 1 line changed)

@@ -17,7 +17,6 @@ env:
  WERKZEUG_DEBUG_PIN: off
  REDIS_ENABLED: 0
  NODE_VERSION: 16.15.1
-  AWS_REGION: us-west-2

jobs:
  dependency-audits:
README.md (32 lines changed)

@@ -13,7 +13,26 @@ The [Notify API](https://github.com/GSA/notifications-api) provides the UI's bac

## Local setup

If you are using VS Code, there are also instructions for [running inside Docker](./docs/docker-remote-containers.md)

+### Common steps
+
+1. Install pre-requisites for setup:
+    * [jq](https://stedolan.github.io/jq/): `brew install jq`
+    * [terraform](https://www.terraform.io/): `brew install terraform` or `brew install tfenv` and use `tfenv` to install `terraform ~> 1.4.0`
+    * [cf-cli@8](https://docs.cloudfoundry.org/cf-cli/install-go-cli.html): `brew install cloudfoundry/tap/cf-cli@8`
+1. [Log into cloud.gov](https://cloud.gov/docs/getting-started/setup/#set-up-the-command-line): `cf login -a api.fr.cloud.gov --sso`
+1. Ensure you have access to the `notify-local-dev` and `notify-staging` spaces in cloud.gov
+1. Run the API setup steps
+1. Run the development terraform with:
+
+    ```
+    $ cd terraform/development
+    $ ./run.sh
+    ```
+
+1. If you want to send data to New Relic from your local development environment, set `NEW_RELIC_LICENSE_KEY` within `.env`
+1. Follow the instructions for either `Direct installation` or `Docker installation` below

### Direct installation

1. Get the API running
@@ -23,19 +42,16 @@ If you are using VS Code, there are also instructions for [running inside Docker

   `make bootstrap`

1. Create the .env file

   ```
   cp sample.env .env
   # follow the instructions in .env
   ```

1. Run the Flask server

   `make run-flask`

1. Go to http://localhost:6012

### Docker installation

If you are using VS Code, there are also instructions for [running inside Docker](./docs/docker-remote-containers.md)

## To test the application

From a terminal within the running devcontainer:

@@ -74,12 +74,12 @@ class Config(object):
    }


-def _default_s3_credentials(bucket_name):
+def _s3_credentials_from_env(bucket_prefix):
    return {
-        'bucket': bucket_name,
-        'access_key_id': getenv('AWS_ACCESS_KEY_ID'),
-        'secret_access_key': getenv('AWS_SECRET_ACCESS_KEY'),
-        'region': getenv('AWS_REGION')
+        'bucket': getenv(f"{bucket_prefix}_BUCKET_NAME", f"{bucket_prefix}-test-bucket-name"),
+        'access_key_id': getenv(f"{bucket_prefix}_AWS_ACCESS_KEY_ID"),
+        'secret_access_key': getenv(f"{bucket_prefix}_AWS_SECRET_ACCESS_KEY"),
+        'region': getenv(f"{bucket_prefix}_AWS_REGION")
    }

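For reference, a standalone sketch of the new prefixed lookup — this is just the helper above lifted out with its import so it can be run on its own; the printed values are whatever happens to be in your environment:

```python
from os import getenv


def _s3_credentials_from_env(bucket_prefix):
    # Each bucket reads its own prefixed variables, e.g. CSV_BUCKET_NAME,
    # CSV_AWS_ACCESS_KEY_ID, CSV_AWS_SECRET_ACCESS_KEY and CSV_AWS_REGION.
    # Only the bucket name has a fallback, so with nothing exported you get a
    # stable placeholder like "CSV-test-bucket-name".
    return {
        'bucket': getenv(f"{bucket_prefix}_BUCKET_NAME", f"{bucket_prefix}-test-bucket-name"),
        'access_key_id': getenv(f"{bucket_prefix}_AWS_ACCESS_KEY_ID"),
        'secret_access_key': getenv(f"{bucket_prefix}_AWS_SECRET_ACCESS_KEY"),
        'region': getenv(f"{bucket_prefix}_AWS_REGION"),
    }


if __name__ == '__main__':
    # With no CSV_* variables set this prints:
    # {'bucket': 'CSV-test-bucket-name', 'access_key_id': None,
    #  'secret_access_key': None, 'region': None}
    print(_s3_credentials_from_env('CSV'))
```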
@@ -93,9 +93,9 @@ class Development(Config):
    ASSET_PATH = '/static/'

    # Buckets
-    CSV_UPLOAD_BUCKET = _default_s3_credentials('local-notifications-csv-upload')
-    CONTACT_LIST_BUCKET = _default_s3_credentials('local-contact-list')
-    LOGO_UPLOAD_BUCKET = _default_s3_credentials('local-public-logos-tools')
+    CSV_UPLOAD_BUCKET = _s3_credentials_from_env('CSV')
+    CONTACT_LIST_BUCKET = _s3_credentials_from_env('CONTACT')
+    LOGO_UPLOAD_BUCKET = _s3_credentials_from_env('LOGO')

    # credential overrides
    DANGEROUS_SALT = 'development-notify-salt'
@@ -115,11 +115,6 @@ class Test(Development):
    REDIS_URL = 'redis://you-forgot-to-mock-a-redis-call-to'
    LOGO_CDN_DOMAIN = 'static-logos.test.com'

-    # Buckets
-    CSV_UPLOAD_BUCKET = _default_s3_credentials('test-csv-upload')
-    CONTACT_LIST_BUCKET = _default_s3_credentials('test-contact-list')
-    LOGO_UPLOAD_BUCKET = _default_s3_credentials('test-logo-upload')
-

class Production(Config):
    HEADER_COLOUR = '#005EA5' # $govuk-blue
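The deletion works because `Test` inherits from `Development`, whose bucket settings now come from `_s3_credentials_from_env`; when no `CSV_*`/`CONTACT_*`/`LOGO_*` variables are exported, the helper's fallback already yields deterministic test bucket names. A minimal sketch of that inheritance, using simplified stand-in classes rather than the real config module:

```python
from os import getenv


def _s3_credentials_from_env(bucket_prefix):
    # Reduced to the bucket-name lookup, which is the only field with a fallback.
    return {'bucket': getenv(f"{bucket_prefix}_BUCKET_NAME", f"{bucket_prefix}-test-bucket-name")}


class Development:
    CSV_UPLOAD_BUCKET = _s3_credentials_from_env('CSV')


class Test(Development):
    pass  # inherits CSV_UPLOAD_BUCKET; no explicit bucket override required


# Holds whether or not CSV_BUCKET_NAME is set, since both sides resolve the same way.
assert Test.CSV_UPLOAD_BUCKET['bucket'] == getenv('CSV_BUCKET_NAME', 'CSV-test-bucket-name')
```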
@@ -4,12 +4,7 @@ If you're working in VS Code, you can also leverage Docker for a containerized d

1. Get the API running, including the Docker network

1. Create the .env file

   ```
   cp sample.env .env
   # follow the instructions in .env
   ```

1. Uncomment the `Local Docker setup` lines in `.env` and comment out the `Local direct setup` lines.

1. Install the Remote-Containers plug-in in VS Code

@@ -23,4 +18,4 @@

1. Go to http://localhost:6012

NOTE: when you change .env in the future, you'll need to rebuild the devcontainer for the change to take effect. VS Code _should_ detect the change and prompt you with a toast notification during a cached build. If not, you can find a manual rebuild in the command palette or just `docker rm` the notifications-api container.

sample.env (19 lines changed)

@@ -1,26 +1,9 @@
-# STEPS TO SET UP
-#
-# 1. Pull down AWS creds from cloud.gov using `cf env`, then update AWS section
-#
-# 2. If trying to send data to New Relic in development (monitor_mode: true),
-#    pull down NEW_RELIC_LICENSE_KEY from cloud.gov using `cf env`, then update New Relic section
-#
-# 3. Uncomment either the Docker setup or the direct setup
-#
-# 4. Comment out the other setup
-#
+# See README.md for local setup instructions

# ## REBUILD THE DEVCONTAINER WHEN YOU MODIFY .ENV ###

#############################################################

-# AWS
-AWS_REGION=us-west-2
-AWS_ACCESS_KEY_ID="don't write secrets to the sample file"
-AWS_SECRET_ACCESS_KEY="don't write secrets to the sample file"
-
-#############################################################
-
# Application
NOTIFY_ENVIRONMENT=development
FLASK_APP=application.py

@@ -19,14 +19,15 @@ Options:

Notes:
* OrgManager is required for terraform to create <env>-egress spaces
-* Requires cf-cli@8
+* Requires cf-cli@8 & jq
"

cf_version=`cf --version | cut -d " " -f 3`
if [[ $cf_version != 8.* ]]; then
-  echo "$usage"
+  echo "$usage" >&2
  exit 1
fi
+command -v jq >/dev/null || { echo "$usage" >&2; exit 1; }

set -e
set -o pipefail

@@ -61,17 +62,17 @@ while getopts ":hms:u:r:o:" opt; do
done

if [[ $space = "" || $service = "" ]]; then
-  echo "$usage"
+  echo "$usage" >&2
  exit 1
fi

-cf target -o $org -s $space 1>&2
+cf target -o $org -s $space >&2

# create user account service
-cf create-service cloud-gov-service-account $role $service 1>&2
+cf create-service cloud-gov-service-account $role $service >&2

# create service key
-cf create-service-key $service service-account-key 1>&2
+cf create-service-key $service service-account-key >&2

# output service key to stdout in secrets.auto.tfvars format
creds=`cf service-key $service service-account-key | tail -n +2 | jq '.credentials'`

@@ -79,7 +80,7 @@ username=`echo $creds | jq -r '.username'`
password=`echo $creds | jq -r '.password'`

if [[ $org_manager = "true" ]]; then
-  cf set-org-role $username $org OrgManager 1>&2
+  cf set-org-role $username $org OrgManager >&2
fi

cat << EOF

terraform/development/main.tf (new file, 74 lines)

@@ -0,0 +1,74 @@
locals {
  cf_org_name      = "gsa-tts-benefits-studio-prototyping"
  cf_space_name    = "notify-local-dev"
  recursive_delete = true
  key_name         = "${var.username}-admin-dev-key"
}

data "cloudfoundry_space" "dev" {
  org_name = local.cf_org_name
  name     = local.cf_space_name
}

module "logo_upload_bucket" {
  source = "github.com/18f/terraform-cloudgov//s3?ref=v0.2.0"

  cf_org_name      = local.cf_org_name
  cf_space_name    = local.cf_space_name
  recursive_delete = local.recursive_delete
  name             = "${var.username}-logo-upload-bucket"
}
resource "cloudfoundry_service_key" "logo_key" {
  name             = local.key_name
  service_instance = module.logo_upload_bucket.bucket_id
}

data "cloudfoundry_service_instance" "csv_bucket" {
  name_or_id = "${var.username}-csv-upload-bucket"
  space      = data.cloudfoundry_space.dev.id
}
resource "cloudfoundry_service_key" "csv_key" {
  name             = local.key_name
  service_instance = data.cloudfoundry_service_instance.csv_bucket.id
}

data "cloudfoundry_service_instance" "contact_list_bucket" {
  name_or_id = "${var.username}-contact-list-bucket"
  space      = data.cloudfoundry_space.dev.id
}
resource "cloudfoundry_service_key" "contact_list_key" {
  name             = local.key_name
  service_instance = data.cloudfoundry_service_instance.contact_list_bucket.id
}

locals {
  credentials = <<EOM

#############################################################
# CSV_UPLOAD_BUCKET
CSV_BUCKET_NAME=${cloudfoundry_service_key.csv_key.credentials.bucket}
CSV_AWS_ACCESS_KEY_ID=${cloudfoundry_service_key.csv_key.credentials.access_key_id}
CSV_AWS_SECRET_ACCESS_KEY=${cloudfoundry_service_key.csv_key.credentials.secret_access_key}
CSV_AWS_REGION=${cloudfoundry_service_key.csv_key.credentials.region}
# CONTACT_LIST_BUCKET
CONTACT_BUCKET_NAME=${cloudfoundry_service_key.contact_list_key.credentials.bucket}
CONTACT_AWS_ACCESS_KEY_ID=${cloudfoundry_service_key.contact_list_key.credentials.access_key_id}
CONTACT_AWS_SECRET_ACCESS_KEY=${cloudfoundry_service_key.contact_list_key.credentials.secret_access_key}
CONTACT_AWS_REGION=${cloudfoundry_service_key.contact_list_key.credentials.region}
# LOGO_UPLOAD_BUCKET
LOGO_BUCKET_NAME=${cloudfoundry_service_key.logo_key.credentials.bucket}
LOGO_AWS_ACCESS_KEY_ID=${cloudfoundry_service_key.logo_key.credentials.access_key_id}
LOGO_AWS_SECRET_ACCESS_KEY=${cloudfoundry_service_key.logo_key.credentials.secret_access_key}
LOGO_AWS_REGION=${cloudfoundry_service_key.logo_key.credentials.region}
EOM
}

resource "null_resource" "output_creds_to_env" {
  triggers = {
    always_run = timestamp()
  }
  provisioner "local-exec" {
    working_dir = "../.."
    command     = "echo \"${local.credentials}\" >> .env"
  }
}
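The heredoc above is what gets appended to `.env`, and its key names are exactly what the admin app's `_s3_credentials_from_env` prefixes resolve. A small standalone sketch of that contract follows; the values are made-up placeholders, not real credentials or the app's actual config code:

```python
from os import environ, getenv

# A fragment shaped like the block terraform's local-exec appends to .env
# (placeholder values only).
env_block = """\
CSV_BUCKET_NAME=alice-csv-upload-bucket
CSV_AWS_ACCESS_KEY_ID=AKIAEXAMPLEKEY
CSV_AWS_SECRET_ACCESS_KEY=example-secret
CSV_AWS_REGION=us-gov-west-1
"""

# Load the fragment into the process environment, as dotenv would.
for line in env_block.splitlines():
    key, _, value = line.partition('=')
    environ[key] = value

# The same lookups the Development config performs for CSV_UPLOAD_BUCKET:
csv_bucket = {
    'bucket': getenv('CSV_BUCKET_NAME'),
    'access_key_id': getenv('CSV_AWS_ACCESS_KEY_ID'),
    'secret_access_key': getenv('CSV_AWS_SECRET_ACCESS_KEY'),
    'region': getenv('CSV_AWS_REGION'),
}
assert csv_bucket['bucket'] == 'alice-csv-upload-bucket'
```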
terraform/development/providers.tf (new file, 16 lines)

@@ -0,0 +1,16 @@
terraform {
  required_version = "~> 1.0"
  required_providers {
    cloudfoundry = {
      source  = "cloudfoundry-community/cloudfoundry"
      version = "0.50.5"
    }
  }
}

provider "cloudfoundry" {
  api_url      = "https://api.fr.cloud.gov"
  user         = var.cf_user
  password     = var.cf_password
  app_logs_max = 30
}
terraform/development/run.sh (new executable file, 73 lines)

@@ -0,0 +1,73 @@
#!/usr/bin/env bash

username=`whoami`
org="gsa-tts-benefits-studio-prototyping"

usage="
$0: Create development infrastructure

Usage:
  $0 -h
  $0 [-u <USER NAME>] [-k]

Options:
  -h: show help and exit
  -u <USER NAME>: your username. Default: $username
  -k: keep service user. Default is to remove them after run
  -d: Destroy development resources. Default is to create them

Notes:
* Requires cf-cli@8
* Requires terraform/development to be run on API app first, with the same [-u <USER NAME>]
"

action="apply"
creds="remove"

while getopts ":hkdu:" opt; do
  case "$opt" in
    u)
      username=${OPTARG}
      ;;
    k)
      creds="keep"
      ;;
    d)
      action="destroy"
      ;;
    h)
      echo "$usage"
      exit 0
      ;;
  esac
done

set -e

service_account="$username-terraform"

# ensure we're in the correct directory
cd $(dirname $0)

if [[ ! -s "secrets.auto.tfvars" ]]; then
  # create user in notify-local-dev space to create s3 buckets
  ../create_service_account.sh -s notify-local-dev -u $service_account > secrets.auto.tfvars
fi

if [[ ! -f "../../.env" ]]; then
  cp ../../sample.env ../../.env
fi

set +e

terraform init
terraform $action -var="username=$username"

set -e

if [[ $creds = "remove" ]]; then
  ../destroy_service_account.sh -s notify-local-dev -u $service_account
  rm secrets.auto.tfvars
fi

exit 0
terraform/development/variables.tf (new file, 5 lines)

@@ -0,0 +1,5 @@
variable "cf_password" {
  sensitive = true
}
variable "cf_user" {}
variable "username" {}
@@ -1,6 +1,5 @@
import uuid
from io import BytesIO
-from os import getenv
from unittest.mock import ANY

import pytest

@@ -178,6 +177,7 @@ def test_upload_contact_list_page(client_request):
def test_upload_csv_file_shows_error_banner(
    client_request,
    mocker,
+    notify_admin,
    mock_s3_upload,
    mock_get_job_doesnt_exist,
    mock_get_users_by_service,

@@ -205,13 +205,14 @@ def test_upload_csv_file_shows_error_banner(
        _data={'file': (BytesIO(''.encode('utf-8')), 'invalid.csv')},
        _follow_redirects=True,
    )
+    bucket_creds = notify_admin.config['CONTACT_LIST_BUCKET']
    mock_upload.assert_called_once_with(
        filedata='',
-        region='us-west-2',
-        bucket_name='test-contact-list',
+        region=bucket_creds['region'],
+        bucket_name=bucket_creds['bucket'],
        file_location=f"service-{SERVICE_ONE_ID}-notify/{fake_uuid}.csv",
-        access_key=getenv('AWS_ACCESS_KEY_ID'),
-        secret_key=getenv('AWS_SECRET_ACCESS_KEY'),
+        access_key=bucket_creds['access_key_id'],
+        secret_key=bucket_creds['secret_access_key'],
    )
    mock_set_metadata.assert_called_once_with(
        ANY,

@@ -1,5 +1,4 @@
from collections import namedtuple
-from os import getenv
from unittest.mock import call

import pytest

@@ -14,21 +13,10 @@ from app.s3_client.s3_logo_client import (
    upload_email_logo,
)

-default_access_key = getenv('AWS_ACCESS_KEY_ID')
-default_secret_key = getenv('AWS_SECRET_ACCESS_KEY')
-default_region = getenv('AWS_REGION')
-bucket = 'test_bucket'
-bucket_credentials = {
-    'bucket': bucket,
-    'access_key_id': default_access_key,
-    'secret_access_key': default_secret_key,
-    'region': default_region
-}
data = {'data': 'some_data'}
filename = 'test.png'
svg_filename = 'test.svg'
upload_id = 'test_uuid'
-region = 'us-west-2'


@pytest.fixture

@@ -37,26 +25,29 @@ def upload_filename(fake_uuid):
        temp=TEMP_TAG.format(user_id=fake_uuid), unique_id=upload_id, filename=filename)


-def test_upload_email_logo_calls_correct_args(client_request, mocker, fake_uuid, upload_filename):
+@pytest.fixture
+def bucket_credentials(notify_admin):
+    return notify_admin.config['LOGO_UPLOAD_BUCKET']
+
+
+def test_upload_email_logo_calls_correct_args(client_request, mocker, bucket_credentials, fake_uuid, upload_filename):
    mocker.patch('uuid.uuid4', return_value=upload_id)
-    mocker.patch.dict('flask.current_app.config', {'LOGO_UPLOAD_BUCKET': bucket_credentials})
    mocked_s3_upload = mocker.patch('app.s3_client.s3_logo_client.utils_s3upload')

    upload_email_logo(filename=filename, user_id=fake_uuid, filedata=data)

    mocked_s3_upload.assert_called_once_with(
        filedata=data,
-        region=region,
+        region=bucket_credentials['region'],
        file_location=upload_filename,
-        bucket_name=bucket,
+        bucket_name=bucket_credentials['bucket'],
        content_type='image/png',
-        access_key=default_access_key,
-        secret_key=default_secret_key,
+        access_key=bucket_credentials['access_key_id'],
+        secret_key=bucket_credentials['secret_access_key'],
    )


-def test_persist_logo(client_request, mocker, fake_uuid, upload_filename):
-    mocker.patch.dict('flask.current_app.config', {'LOGO_UPLOAD_BUCKET': bucket_credentials})
+def test_persist_logo(client_request, bucket_credentials, mocker, fake_uuid, upload_filename):
    mocked_get_s3_object = mocker.patch('app.s3_client.s3_logo_client.get_s3_object')
    mocked_delete_s3_object = mocker.patch('app.s3_client.s3_logo_client.delete_s3_object')

@@ -65,7 +56,11 @@ def test_persist_logo(client_request, mocker, fake_uuid, upload_filename):
    persist_logo(upload_filename, new_filename)

    mocked_get_s3_object.assert_called_once_with(
-        bucket, new_filename, default_access_key, default_secret_key, default_region)
+        bucket_credentials['bucket'],
+        new_filename,
+        bucket_credentials['access_key_id'],
+        bucket_credentials['secret_access_key'],
+        bucket_credentials['region'])
    mocked_delete_s3_object.assert_called_once_with(upload_filename)

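The pattern in these tests — a fixture that hands back the app's bucket config instead of module-level `getenv` calls — reduces to something like the following self-contained sketch. The `FakeApp` and `upload_logo` names are hypothetical stand-ins, not the project's real fixtures or client code:

```python
import pytest


class FakeApp:
    # Stand-in for the Flask app behind the notify_admin fixture.
    config = {
        'LOGO_UPLOAD_BUCKET': {
            'bucket': 'LOGO-test-bucket-name',
            'access_key_id': None,
            'secret_access_key': None,
            'region': None,
        }
    }


@pytest.fixture
def bucket_credentials():
    return FakeApp.config['LOGO_UPLOAD_BUCKET']


def upload_logo(creds):
    # Hypothetical code under test: returns the arguments it would pass to S3.
    return {'bucket_name': creds['bucket'], 'region': creds['region']}


def test_upload_uses_config_credentials(bucket_credentials):
    call_args = upload_logo(bucket_credentials)
    assert call_args['bucket_name'] == bucket_credentials['bucket']
```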