Mirror of https://github.com/GSA/notifications-api.git (synced 2025-12-24 09:21:39 -05:00)

Merge pull request #20 from 18F/jim/071422/updatereadme

update readme with SES/SNS instructions
Makefile: 10 changed lines
@@ -146,16 +146,16 @@ cf-deploy: ## Deploys the app to Cloud Foundry
 cf-deploy-api-db-migration:
 	$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
 	cf target -o ${CF_ORG} -s ${CF_SPACE}
-	make -s CF_APP=notify-api-db-migration generate-manifest > ${CF_MANIFEST_PATH}
+	make -s CF_APP=notifications-api generate-manifest > ${CF_MANIFEST_PATH}

-	cf push notify-api-db-migration --no-route -f ${CF_MANIFEST_PATH}
+	cf push notifications-api --no-route -f ${CF_MANIFEST_PATH}
 	rm ${CF_MANIFEST_PATH}

-	cf run-task notify-api-db-migration --command="flask db upgrade" --name api_db_migration
+	cf run-task notifications-api --command="flask db upgrade" --name api_db_migration

 .PHONY: cf-check-api-db-migration-task
-cf-check-api-db-migration-task: ## Get the status for the last notify-api-db-migration task
-	@cf curl /v3/apps/`cf app --guid notify-api-db-migration`/tasks?order_by=-created_at | jq -r ".resources[0].state"
+cf-check-api-db-migration-task: ## Get the status for the last notifications-api task
+	@cf curl /v3/apps/`cf app --guid notifications-api`/tasks?order_by=-created_at | jq -r ".resources[0].state"

 .PHONY: cf-rollback
 cf-rollback: ## Rollbacks the app to the previous release
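As a quick sketch of how the renamed targets fit together after this change (assuming you are already logged in to Cloud Foundry and the Makefile's `CF_ORG` and `CF_MANIFEST_PATH` variables are set; the space name is a placeholder):

```
# run the schema migration as a one-off task against the notifications-api app
make CF_SPACE=<your-space> cf-deploy-api-db-migration

# poll the state of the most recent task (e.g. RUNNING, SUCCEEDED, FAILED)
make cf-check-api-db-migration-task
```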
README.md: 92 changed lines
@@ -1,6 +1,7 @@
 # GOV.UK Notify API

 Contains:

 - the public-facing REST API for GOV.UK Notify, which teams can integrate with using [our clients](https://www.notifications.service.gov.uk/documentation)
 - an internal-only REST API built using Flask to manage services, users, templates, etc (this is what the [admin app](http://github.com/alphagov/notifications-admin) talks to)
 - asynchronous workers built using Celery to put things on queues and read them off to be processed, sent to providers, updated, etc
@@ -19,33 +20,37 @@ We run python 3.9 both locally and in production.

 To run the API you will need appropriate AWS credentials. See the [Wiki](https://github.com/alphagov/notifications-manuals/wiki/aws-accounts#how-to-set-up-local-development) for more details.

-### `environment.sh`
+### `.env` file

-Creating and edit an environment.sh file.
-
-```
-echo "
-export NOTIFY_ENVIRONMENT='development'
-
-export MMG_API_KEY='MMG_API_KEY'
-export FIRETEXT_API_KEY='FIRETEXT_ACTUAL_KEY'
-export NOTIFICATION_QUEUE_PREFIX='YOUR_OWN_PREFIX'
-
-export FLASK_APP=application.py
-export FLASK_ENV=development
-export WERKZEUG_DEBUG_PIN=off
-"> environment.sh
-```
+Create and edit a .env file, based on sample.env

 Things to change:

-* Replace `YOUR_OWN_PREFIX` with `local_dev_<first name>`.
-* Run the following in the credentials repo to get the API keys.
+- Replace `YOUR_OWN_PREFIX` with `local_dev_<first name>`
+- Replace `NOTIFY_EMAIL_DOMAIN` with the domain your emails will come from (i.e. the "origination email" in your SES project)
+- Replace `SECRET_KEY` and `DANGEROUS_SALT` with high-entropy secret values
+- Set up AWS SES and SNS as indicated in next section (AWS Setup), fill in missing AWS env vars

-```
-notify-pass credentials/firetext
-notify-pass credentials/mmg
-```
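As a sketch of what the resulting `.env` edits might look like (the variable names come from sample.env further down this diff; every value below is an illustrative placeholder, not a real secret):

```
# one way to generate high-entropy values to paste into .env
python -c "import secrets; print(secrets.token_urlsafe(32))"   # use the output for SECRET_KEY
python -c "import secrets; print(secrets.token_urlsafe(32))"   # use the output for DANGEROUS_SALT

# illustrative .env lines (placeholder values)
NOTIFICATION_QUEUE_PREFIX=local_dev_jane
NOTIFY_EMAIL_DOMAIN=example.com
```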
+### AWS Setup
+
+**Steps to prepare SES**
+
+1. Go to SES console for \$AWS_REGION and create new origin and destination emails. AWS will send a verification via email which you'll need to complete.
+2. Find and replace instances in the repo of "testsender", "testreceiver" and "dispostable.com", with your origin and destination email addresses, which you verified in step 1 above.
+
+TODO: create env vars for these origin and destination email addresses for the root service, and create new migrations to update postgres seed fixtures
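If you prefer the command line to the console, the same SES verification can usually be started with the AWS CLI (a sketch; both addresses are placeholders, and the region should match your `$AWS_REGION`, shown here as us-west-2 to match sample.env):

```
# request verification emails for the origin and destination addresses
aws ses verify-email-identity --email-address sender@example.com --region us-west-2
aws ses verify-email-identity --email-address receiver@example.com --region us-west-2

# check which identities have completed verification
aws ses get-identity-verification-attributes \
  --identities sender@example.com receiver@example.com --region us-west-2
```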
+
+**Steps to prepare SNS**
+
+1. Go to Pinpoint console for \$AWS_PINPOINT_REGION and choose "create new project", then "configure for sms"
+2. Tick the box at the top to enable SMS, choose "transactional" as the default type and save
+3. In the lefthand sidebar, go to "SMS and Voice" (bottom) and choose "Phone Numbers"
+4. Under "Number Settings" choose "Request Phone Number"
+5. Choose Toll-free number, tick SMS, untick Voice, choose "transactional", hit next and then "request"
+6. Go to SNS console for \$AWS_PINPOINT_REGION, look at lefthand sidebar under "Mobile" and go to "Text Messaging (SMS)"
+7. Scroll down to "Sandbox destination phone numbers" and tap "Add phone number" then follow the steps to verify (you'll need to be able to retrieve a code sent to each number)
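The sandbox phone-number verification in step 7 can likewise be scripted. A sketch using the AWS CLI's SNS sandbox commands (the phone number and one-time code are placeholders; use your `$AWS_PINPOINT_REGION` as the region):

```
# add a destination number to the SMS sandbox
aws sns create-sms-sandbox-phone-number --phone-number +15555550123 --region us-west-2

# once the code arrives by SMS, confirm it
aws sns verify-sms-sandbox-phone-number --phone-number +15555550123 \
  --one-time-password 123456 --region us-west-2

# list sandbox numbers and their verification status
aws sns list-sms-sandbox-phone-numbers --region us-west-2
```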
+
+At this point, you _should_ be able to complete both the email and phone verification steps of the Notify user sign up process! 🎉

 ### Secrets Detection

@@ -59,7 +64,7 @@ detect-secrets scan > .secrets.baseline

 Ideally, you'll install `detect-secrets` so that it's accessible from any environment from which you _might_ commit. You can use `brew install` to make it available globally. You could also install via `pip install` inside a virtual environment, if you're sure you'll _only_ commit from that environment.

 If you open .git/hooks/pre-commit you should see a simple bash script that runs the command below, reads the output and aborts before committing if detect-secrets finds a secret. You should be able to test it by staging a file with any high-entropy string like `"bblfwk3u4bt484+afw4avev5ae+afr4?/fa"` (it also has other ways to detect secrets, this is just the most straightforward to test).

 You can permit exceptions by adding an inline comment containing `pragma: allowlist secret`
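For example, a line that intentionally carries a dev-only value can be marked so the hook skips it (the vars file at the bottom of this diff uses the same pattern):

```
SECRET_KEY: "dev-notify-secret-key" # pragma: allowlist secret
```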
@@ -67,7 +72,7 @@ The command that is actually run by the pre-commit hook is: `git diff --staged -

 You can also run against all tracked files staged or not: `git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline`

-### Postgres
+### Postgres [DEPRECATED]

 Install [Postgres.app](http://postgresapp.com/).

@@ -79,7 +84,7 @@ Currently the API works with PostgreSQL 11. After installation, open the Postgre
 export PATH=${PATH}:/Applications/Postgres.app/Contents/Versions/11/bin/
 ```

-### Redis
+### Redis [DEPRECATED]

 To switch redis on you'll need to install it locally. On a Mac you can do:

@@ -95,36 +100,31 @@ To use redis caching you need to switch it on with an environment variable:
 export REDIS_ENABLED=1
 ```

 ## To run the application

 ```
 # install dependencies, etc.
 make bootstrap
+# set up AWS SES/SNS as instructed above

-# run the web app
+# create .env file as instructed above
+
+# download vscode and install the Remote-Containers plug-in from Microsoft
+
+# create the external docker network
+docker network create notify-network
+
+# Using the command palette, search "Remote Containers: Open folder in project" and choose devcontainer-api, then wait for docker to build
+
+# Open a terminal in vscode and run the web server
 make run-flask

-# run the background tasks
+# Open another terminal in vscode and run the background tasks
 make run-celery

-# run scheduled tasks (optional)
+# Open a third terminal in vscode and run scheduled tasks (optional)
 make run-celery-beat
 ```

 We've had problems running Celery locally due to one of its dependencies: pycurl. Due to the complexity of the issue, we also support running Celery via Docker:

 ```
 # install dependencies, etc.
 make bootstrap-with-docker

 # run the background tasks
 make run-celery-with-docker

 # run scheduled tasks
 make run-celery-beat-with-docker
 ```

 ## To test the application

 ```
 # install dependencies, etc.

@@ -140,11 +140,13 @@ care about: `flask db` contains alembic migration commands, and `flask command`
 example, to purge all dynamically generated functional test data, do the following:

+Locally

 ```
 flask command purge_functional_test_data -u <functional tests user name prefix>
 ```

+On the server

 ```
 cf run-task notify-api "flask command purge_functional_test_data -u <functional tests user name prefix>"
 ```

@@ -82,44 +82,44 @@ class TaskNames(object):

 class Config(object):
     # URL of admin app
-    ADMIN_BASE_URL = os.getenv('ADMIN_BASE_URL')
+    ADMIN_BASE_URL = os.environ.get('ADMIN_BASE_URL')

     # URL of api app (on AWS this is the internal api endpoint)
-    API_HOST_NAME = os.getenv('API_HOST_NAME')
+    API_HOST_NAME = os.environ.get('API_HOST_NAME')

     # secrets that internal apps, such as the admin app or document download, must use to authenticate with the API
     ADMIN_CLIENT_ID = 'notify-admin'
-    GOVUK_ALERTS_CLIENT_ID = 'govuk-alerts'
+    GOVUK_ALERTS_CLIENT_ID = 'govuk-alerts' # TODO: can remove?

     INTERNAL_CLIENT_API_KEYS = json.loads(
         os.environ.get('INTERNAL_CLIENT_API_KEYS', '{"notify-admin":["dev-notify-secret-key"]}')
-    )
+    ) # TODO: handled by varsfile?

     # encyption secret/salt
-    ADMIN_CLIENT_SECRET = os.getenv('ADMIN_CLIENT_SECRET', 'dev-notify-secret-key')
-    SECRET_KEY = os.getenv('SECRET_KEY', 'dev-notify-secret-key')
-    DANGEROUS_SALT = os.getenv('DANGEROUS_SALT', 'dev-notify-salt ')
+    ADMIN_CLIENT_SECRET = os.environ.get('ADMIN_CLIENT_SECRET')
+    SECRET_KEY = os.environ.get('SECRET_KEY')
+    DANGEROUS_SALT = os.environ.get('DANGEROUS_SALT')

     # DB conection string
-    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')
+    SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI')

     # AWS SMS
-    AWS_PINPOINT_REGION = os.getenv("AWS_PINPOINT_REGION", "us-west-2")
-    AWS_US_TOLL_FREE_NUMBER = os.getenv("AWS_US_TOLL_FREE_NUMBER", "+18446120782")
+    AWS_PINPOINT_REGION = os.environ.get("AWS_PINPOINT_REGION")
+    AWS_US_TOLL_FREE_NUMBER = os.environ.get("AWS_US_TOLL_FREE_NUMBER")

     # MMG API Key
-    MMG_API_KEY = os.getenv('MMG_API_KEY')
+    MMG_API_KEY = os.environ.get('MMG_API_KEY', 'placeholder')

     # Firetext API Key
-    FIRETEXT_API_KEY = os.getenv("FIRETEXT_API_KEY")
-    FIRETEXT_INTERNATIONAL_API_KEY = os.getenv("FIRETEXT_INTERNATIONAL_API_KEY", "placeholder")
+    FIRETEXT_API_KEY = os.environ.get("FIRETEXT_API_KEY", "placeholder")
+    FIRETEXT_INTERNATIONAL_API_KEY = os.environ.get("FIRETEXT_INTERNATIONAL_API_KEY", "placeholder")

     # Prefix to identify queues in SQS
-    NOTIFICATION_QUEUE_PREFIX = os.getenv('NOTIFICATION_QUEUE_PREFIX')
+    NOTIFICATION_QUEUE_PREFIX = os.environ.get('NOTIFICATION_QUEUE_PREFIX')

     # URL of redis instance
-    REDIS_URL = os.getenv('REDIS_URL')
-    REDIS_ENABLED = 1
+    REDIS_URL = os.environ.get('REDIS_URL')
+    REDIS_ENABLED = True
     EXPIRE_CACHE_TEN_MINUTES = 600
     EXPIRE_CACHE_EIGHT_DAYS = 8 * 24 * 60 * 60

@@ -128,7 +128,7 @@ class Config(object):

     # Logging
     DEBUG = False
-    NOTIFY_LOG_PATH = os.getenv('NOTIFY_LOG_PATH')
+    NOTIFY_LOG_PATH = os.environ.get('NOTIFY_LOG_PATH')

     # Cronitor
     CRONITOR_ENABLED = False

@@ -349,12 +349,12 @@ class Config(object):
     }

     # we can set celeryd_prefetch_multiplier to be 1 for celery apps which handle only long running tasks
-    if os.getenv('CELERYD_PREFETCH_MULTIPLIER'):
-        CELERY['worker_prefetch_multiplier'] = os.getenv('CELERYD_PREFETCH_MULTIPLIER')
+    if os.environ.get('CELERYD_PREFETCH_MULTIPLIER'):
+        CELERY['worker_prefetch_multiplier'] = os.environ.get('CELERYD_PREFETCH_MULTIPLIER')

     FROM_NUMBER = 'development'

-    STATSD_HOST = os.getenv('STATSD_HOST')
+    STATSD_HOST = os.environ.get('STATSD_HOST')
     STATSD_PORT = 8125
     STATSD_ENABLED = bool(STATSD_HOST)

@@ -409,7 +409,7 @@ class Development(Config):
     DEBUG = True
     SQLALCHEMY_ECHO = False

-    REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'
+    REDIS_ENABLED = True

     CSV_UPLOAD_BUCKET_NAME = 'local-notifications-csv-upload'
     CONTACT_LIST_BUCKET_NAME = 'local-contact-list'

@@ -436,12 +436,12 @@ class Development(Config):
     NOTIFY_LOG_PATH = 'application.log'
     NOTIFY_EMAIL_DOMAIN = "dispostable.com"

-    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://postgres:chummy@db:5432/notification_api')
-    REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
+    SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', 'postgresql://postgres:chummy@db:5432/notification_api')
+    REDIS_URL = os.environ.get('REDIS_URL')

-    ANTIVIRUS_ENABLED = os.getenv('ANTIVIRUS_ENABLED') == '1'
+    ANTIVIRUS_ENABLED = os.environ.get('ANTIVIRUS_ENABLED') == '1'

-    API_HOST_NAME = os.getenv('API_HOST_NAME', 'http://localhost:6011')
+    API_HOST_NAME = os.environ.get('API_HOST_NAME', 'http://localhost:6011')
     API_RATE_LIMIT_ENABLED = True
     DVLA_EMAIL_ADDRESSES = ['success@simulator.amazonses.com']

@@ -472,7 +472,7 @@ class Test(Development):
     LETTER_SANITISE_BUCKET_NAME = 'test-letters-sanitise'

     # this is overriden in jenkins and on cloudfoundry
-    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://postgres:chummy@db:5432/notification_api')
+    SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', 'postgresql://postgres:chummy@db:5432/notification_api')

     CELERY = {
         **Config.CELERY,

@@ -532,26 +532,29 @@ class Staging(Config):

 class Live(Config):
     NOTIFY_EMAIL_DOMAIN = os.environ.get('NOTIFY_EMAIL_DOMAIN')
     NOTIFY_ENVIRONMENT = 'live'
-    CSV_UPLOAD_BUCKET_NAME = 'live-notifications-csv-upload'
-    CONTACT_LIST_BUCKET_NAME = 'production-contact-list'
-    TEST_LETTERS_BUCKET_NAME = 'production-test-letters'
-    DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp'
-    LETTERS_PDF_BUCKET_NAME = 'production-letters-pdf'
-    LETTERS_SCAN_BUCKET_NAME = 'production-letters-scan'
-    INVALID_PDF_BUCKET_NAME = 'production-letters-invalid-pdf'
-    TRANSIENT_UPLOADED_LETTERS = 'production-transient-uploaded-letters'
-    LETTER_SANITISE_BUCKET_NAME = 'production-letters-sanitise'
-    FROM_NUMBER = 'GOVUK'
+    # buckets
+    CSV_UPLOAD_BUCKET_NAME = 'notifications-prototype-csv-upload' # created in gsa sandbox
+    CONTACT_LIST_BUCKET_NAME = 'notifications-prototype-contact-list-upload' # created in gsa sandbox
+    # TODO: verify below buckets only used for letters
+    TEST_LETTERS_BUCKET_NAME = 'production-test-letters' # not created in gsa sandbox
+    DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp' # not created in gsa sandbox
+    LETTERS_PDF_BUCKET_NAME = 'production-letters-pdf' # not created in gsa sandbox
+    LETTERS_SCAN_BUCKET_NAME = 'production-letters-scan' # not created in gsa sandbox
+    INVALID_PDF_BUCKET_NAME = 'production-letters-invalid-pdf' # not created in gsa sandbox
+    TRANSIENT_UPLOADED_LETTERS = 'production-transient-uploaded-letters' # not created in gsa sandbox
+    LETTER_SANITISE_BUCKET_NAME = 'production-letters-sanitise' # not created in gsa sandbox
+
+    FROM_NUMBER = 'US Notify'
     API_RATE_LIMIT_ENABLED = True
     CHECK_PROXY_HEADER = True
     SES_STUB_URL = None
     CRONITOR_ENABLED = True

+    # DEBUG = True
-    REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'
+    REDIS_ENABLED = True

-    NOTIFY_LOG_PATH = os.getenv('NOTIFY_LOG_PATH', 'application.log')
-    REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
+    NOTIFY_LOG_PATH = os.environ.get('NOTIFY_LOG_PATH', 'application.log')
+    REDIS_URL = os.environ.get('REDIS_URL')


 class CloudFoundryConfig(Config):
@@ -23,9 +23,9 @@
         }
     },
     "extensions": [
-        "ms-python.python",
+        // "ms-python.python",
         "donjayamanne.python-extension-pack",
-        "ms-azuretools.vscode-docker",
+        // "ms-azuretools.vscode-docker",
         "ms-python.vscode-pylance",
         "eamodio.gitlens",
         "wholroyd.jinja",
@@ -33,10 +33,13 @@
         "visualstudioexptteam.vscodeintellicode",
         "yzhang.markdown-all-in-one",
         "ms-ossdata.vscode-postgresql",
-        "GitHub.copilot",
-        "ms-vsliveshare.vsliveshare",
-        "mtxr.sqltools",
-        "mtxr.sqltools-driver-pg"
+        "GitHub.copilot"
+        // "ms-vsliveshare.vsliveshare",
+        // "mtxr.sqltools",
+        // "mtxr.sqltools-driver-pg"
     ],
     "forwardPorts": [
         6011
     ],
     "postCreateCommand": "notify-dev-entrypoint.sh",
     "remoteUser": "vscode"

@@ -34,8 +34,6 @@ applications:
 SECRET_KEY: ((SECRET_KEY))
 AWS_REGION: us-west-2
 AWS_PINPOINT_REGION: us-west-2
 AWS_ACCESS_KEY_ID: ((AWS_ACCESS_KEY_ID))
 AWS_SECRET_ACCESS_KEY: ((AWS_SECRET_ACCESS_KEY))
 AWS_US_TOLL_FREE_NUMBER: +18446120782

 DVLA_EMAIL_ADDRESSES: []
sample.env: 10 changed lines
@@ -1,4 +1,4 @@
-### REBUILD THE DEVCONTAINER WHEN YOU MODIFY .ENV ###
+# ## REBUILD THE DEVCONTAINER WHEN YOU MODIFY .ENV ###

 # Debug
 DEBUG=True

@@ -38,7 +38,7 @@ SQLALCHEMY_DATABASE_TEST_URI=postgresql://postgres:chummy@db:5432/test_notificat

 # AWS
 AWS_REGION=us-west-2
-AWS_ACCESS_KEY_ID=
-AWS_SECRET_ACCESS_KEY=
-AWS_PINPOINT_REGION=
-AWS_US_TOLL_FREE_NUMBER=
+AWS_ACCESS_KEY_ID=DO_NOT_ADD_SECRETS_TO_THIS_SAMPLE_FILE
+AWS_SECRET_ACCESS_KEY=DO_NOT_ADD_SECRETS_TO_THIS_SAMPLE_FILE
+AWS_PINPOINT_REGION=us-west-2
+AWS_US_TOLL_FREE_NUMBER=+18001111111
@@ -1,16 +1,12 @@
-SECRET_KEY: "dev-notify-secret-key" # pragma: allowlist secret
-DANGEROUS_SALT: "dev-notify-salt"
-AWS_ACCESS_KEY_ID: <replace me>
-AWS_SECRET_ACCESS_KEY: <replace me>

 ADMIN_BASE_URL: https://notifications-admin.app.cloud.gov
 ADMIN_CLIENT_ID: notify-admin
 ADMIN_CLIENT_SECRET: dev-notify-secret-key
 API_HOST_NAME: https://notifications-api.app.cloud.gov
 AWS_ACCESS_KEY_ID: placeholder
 AWS_PINPOINT_REGION: us-west-2
 AWS_REGION: us-west-2
 AWS_SECRET_ACCESS_KEY: placeholder
 AWS_US_TOLL_FREE_NUMBER: 18446120782
 DANGEROUS_SALT: dev-notify-salt
 DVLA_EMAIL_ADDRESSES: []