Merge pull request #96 from GSA/stvnrlly-doc-update

Documentation update
This commit is contained in:
Steven Reilly
2022-10-28 13:34:27 -04:00
committed by GitHub
25 changed files with 2696 additions and 689 deletions

View File

@@ -1 +0,0 @@
.gitignore

109
.cfignore Normal file
View File

@@ -0,0 +1,109 @@
# from deploy-exclude.lst
*__pycache__*
.git/*
app/assets/*
bower_components/*
cache/*
.cache/*
node_modules/*
target/*
venv/*
build/*
.envrc
tests/.cache/*
.cf/*
# from .gitignore
queues.csv
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
.venv/
venv/
venv-freeze/
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
/cache
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
.pytest_cache
coverage.xml
test_results.xml
*,cover
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
.idea/
.vscode
# Mac
*.DS_Store
environment.sh
.envrc
.env
.env*
varsfile
celerybeat-schedule
# CloudFoundry
.cf
varsfile*
.secret*
/scripts/run_my_tests.sh
# Terraform
.terraform.lock.hcl
**/.terraform/*
secrets.auto.tfvars
terraform.tfstate
terraform.tfstate.backup

View File

@@ -13,3 +13,6 @@ runs:
uses: actions/setup-python@v3
with:
python-version: "3.9"
- name: Install pipenv
shell: bash
run: pip install --upgrade pipenv

View File

@@ -57,11 +57,11 @@ jobs:
env:
SQLALCHEMY_DATABASE_TEST_URI: postgresql://user:password@localhost:5432/test_notification_api
- name: Run style checks
run: flake8 .
run: pipenv run flake8 .
- name: Check imports alphabetized
run: isort --check-only ./app ./tests
run: pipenv run isort --check-only ./app ./tests
- name: Run tests
run: pytest -n4 --maxfail=10
run: pipenv run pytest -n4 --maxfail=10
env:
SQLALCHEMY_DATABASE_TEST_URI: postgresql://user:password@localhost:5432/test_notification_api
@@ -70,6 +70,8 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-project
- name: Create requirements.txt
run: pipenv requirements > requirements.txt
- uses: trailofbits/gh-action-pip-audit@v1.0.0
with:
inputs: requirements.txt

View File

@@ -37,6 +37,8 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-project
- name: Create requirements.txt
run: pipenv requirements > requirements.txt
- uses: trailofbits/gh-action-pip-audit@v1.0.0
with:
inputs: requirements.txt

View File

@@ -1,8 +0,0 @@
# see https://pyup.io/docs/configuration/ for all available options
schedule: "every week on wednesday"
search: False
requirements:
- requirements.in
- requirements_for_test.txt

152
Makefile
View File

@@ -7,20 +7,14 @@ APP_VERSION_FILE = app/version.py
GIT_BRANCH ?= $(shell git symbolic-ref --short HEAD 2> /dev/null || echo "detached")
GIT_COMMIT ?= $(shell git rev-parse HEAD)
CF_API ?= api.cloud.service.gov.uk
CF_ORG ?= govuk-notify
CF_SPACE ?= ${DEPLOY_ENV}
CF_HOME ?= ${HOME}
$(eval export CF_HOME)
## DEVELOPMENT
.PHONY: bootstrap
bootstrap: generate-version-file ## Set up everything to run the app
pip3 install -r requirements_for_test.txt
bootstrap: ## Set up everything to run the app
make generate-version-file
pipenv install --dev
createdb notification_api || true
(flask db upgrade) || true
(pipenv run flask db upgrade) || true
.PHONY: bootstrap-with-docker
bootstrap-with-docker: ## Build the image to run the app in Docker
@@ -28,31 +22,23 @@ bootstrap-with-docker: ## Build the image to run the app in Docker
.PHONY: run-flask
run-flask: ## Run flask
flask run -p 6011 --host=0.0.0.0
pipenv run flask run -p 6011 --host=0.0.0.0
.PHONY: run-celery
run-celery: ## Run celery, TODO remove purge for staging/prod
celery -A run_celery.notify_celery purge -f
celery \
pipenv run celery -A run_celery.notify_celery purge -f
pipenv run celery \
-A run_celery.notify_celery worker \
--pidfile="/tmp/celery.pid" \
--loglevel=INFO \
--concurrency=4
.PHONY: run-celery-with-docker
run-celery-with-docker: ## Run celery in Docker container (useful if you can't install pycurl locally)
./scripts/run_with_docker.sh make run-celery
.PHONY: run-celery-beat
run-celery-beat: ## Run celery beat
celery \
pipenv run celery \
-A run_celery.notify_celery beat \
--loglevel=INFO
.PHONY: run-celery-beat-with-docker
run-celery-beat-with-docker: ## Run celery beat in Docker container (useful if you can't install pycurl locally)
./scripts/run_with_docker.sh make run-celery-beat
.PHONY: help
help:
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@@ -63,25 +49,25 @@ generate-version-file: ## Generates the app version file
.PHONY: test
test: ## Run tests
flake8 .
isort --check-only ./app ./tests
pytest -n4 --maxfail=10
pipenv run flake8 .
pipenv run isort --check-only ./app ./tests
pipenv run pytest -n4 --maxfail=10
.PHONY: freeze-requirements
freeze-requirements: ## Pin all requirements including sub dependencies into requirements.txt
pip install --upgrade pip-tools
pip-compile requirements.in
pipenv lock
pipenv requirements
.PHONY: audit
audit:
pip install --upgrade pip-audit
pip-audit -r requirements.txt -l --ignore-vuln PYSEC-2022-237
-pip-audit -r requirements_for_test.txt -l
pipenv requirements > requirements.txt
pipenv requirements --dev > requirements_for_test.txt
pipenv run pip-audit -r requirements.txt -l --ignore-vuln PYSEC-2022-237
-pipenv run pip-audit -r requirements_for_test.txt -l
.PHONY: static-scan
static-scan:
pip install bandit
bandit -r app/
pipenv run bandit -r app/
.PHONY: clean
clean:
@@ -90,90 +76,26 @@ clean:
## DEPLOYMENT
.PHONY: preview
preview: ## Set environment to preview
$(eval export DEPLOY_ENV=preview)
$(eval export DNS_NAME="notify.works")
@true
# .PHONY: cf-deploy-failwhale
# cf-deploy-failwhale:
# $(if ${CF_SPACE},,$(error Must target space, eg `make preview cf-deploy-failwhale`))
# cd ./paas-failwhale; cf push notify-api-failwhale -f manifest.yml
.PHONY: staging
staging: ## Set environment to staging
$(eval export DEPLOY_ENV=staging)
$(eval export DNS_NAME="staging-notify.works")
@true
# .PHONY: enable-failwhale
# enable-failwhale: ## Enable the failwhale app and disable api
# $(if ${DNS_NAME},,$(error Must target space, eg `make preview enable-failwhale`))
# # make sure failwhale is running first
# cf start notify-api-failwhale
.PHONY: production
production: ## Set environment to production
$(eval export DEPLOY_ENV=production)
$(eval export DNS_NAME="notifications.service.gov.uk")
@true
# cf map-route notify-api-failwhale ${DNS_NAME} --hostname api
# cf unmap-route notify-api ${DNS_NAME} --hostname api
# @echo "Failwhale is enabled"
.PHONY: cf-login
cf-login: ## Log in to Cloud Foundry
$(if ${CF_USERNAME},,$(error Must specify CF_USERNAME))
$(if ${CF_PASSWORD},,$(error Must specify CF_PASSWORD))
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
@echo "Logging in to Cloud Foundry on ${CF_API}"
@cf login -a "${CF_API}" -u ${CF_USERNAME} -p "${CF_PASSWORD}" -o "${CF_ORG}" -s "${CF_SPACE}"
# .PHONY: disable-failwhale
# disable-failwhale: ## Disable the failwhale app and enable api
# $(if ${DNS_NAME},,$(error Must target space, eg `make preview disable-failwhale`))
.PHONY: cf-deploy
cf-deploy: ## Deploys the app to Cloud Foundry
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
$(if ${CF_APP},,$(error Must specify CF_APP))
cf target -o ${CF_ORG} -s ${CF_SPACE}
@cf app --guid ${CF_APP} || exit 1
# cancel any existing deploys to ensure we can apply manifest (if a deploy is in progress you'll see ScaleDisabledDuringDeployment)
cf cancel-deployment ${CF_APP} || true
# fails after 15 mins if deploy doesn't work
CF_STARTUP_TIMEOUT=15 cf push ${CF_APP} --strategy=rolling
.PHONY: cf-deploy-api-db-migration
cf-deploy-api-db-migration:
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
cf target -o ${CF_ORG} -s ${CF_SPACE}
make -s CF_APP=notifications-api generate-manifest > ${CF_MANIFEST_PATH}
cf push notifications-api --no-route -f ${CF_MANIFEST_PATH}
rm ${CF_MANIFEST_PATH}
cf run-task notifications-api --command="flask db upgrade" --name api_db_migration
.PHONY: cf-check-api-db-migration-task
cf-check-api-db-migration-task: ## Get the status for the last notifications-api task
@cf curl /v3/apps/`cf app --guid notifications-api`/tasks?order_by=-created_at | jq -r ".resources[0].state"
.PHONY: cf-rollback
cf-rollback: ## Rollbacks the app to the previous release
$(if ${CF_APP},,$(error Must specify CF_APP))
rm ${CF_MANIFEST_PATH}
cf cancel-deployment ${CF_APP}
.PHONY: check-if-migrations-to-run
check-if-migrations-to-run:
@echo $(shell python3 scripts/check_if_new_migration.py)
.PHONY: cf-deploy-failwhale
cf-deploy-failwhale:
$(if ${CF_SPACE},,$(error Must target space, eg `make preview cf-deploy-failwhale`))
cd ./paas-failwhale; cf push notify-api-failwhale -f manifest.yml
.PHONY: enable-failwhale
enable-failwhale: ## Enable the failwhale app and disable api
$(if ${DNS_NAME},,$(error Must target space, eg `make preview enable-failwhale`))
# make sure failwhale is running first
cf start notify-api-failwhale
cf map-route notify-api-failwhale ${DNS_NAME} --hostname api
cf unmap-route notify-api ${DNS_NAME} --hostname api
@echo "Failwhale is enabled"
.PHONY: disable-failwhale
disable-failwhale: ## Disable the failwhale app and enable api
$(if ${DNS_NAME},,$(error Must target space, eg `make preview disable-failwhale`))
cf map-route notify-api ${DNS_NAME} --hostname api
cf unmap-route notify-api-failwhale ${DNS_NAME} --hostname api
cf stop notify-api-failwhale
@echo "Failwhale is disabled"
# cf map-route notify-api ${DNS_NAME} --hostname api
# cf unmap-route notify-api-failwhale ${DNS_NAME} --hostname api
# cf stop notify-api-failwhale
# @echo "Failwhale is disabled"

82
Pipfile Normal file
View File

@@ -0,0 +1,82 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
alembic = "==1.7.7"
amqp = "==5.1.1"
arrow = "==1.2.2"
asn1crypto = "==1.5.1"
async-timeout = "==4.0.2"
attrs = "==21.4.0"
awscli = "==1.24.8"
bcrypt = "==3.2.2"
beautifulsoup4 = "==4.11.1"
billiard = "==3.6.4.0"
bleach = "==4.1.0"
blinker = "==1.4"
boto3 = "==1.23.8"
botocore = "==1.26.8"
cachetools = "==5.1.0"
celery = {version = "==5.2.7", extras = ["redis"]}
certifi = "==2022.5.18.1"
cffi = "==1.15.0"
charset-normalizer = "==2.0.12"
click = "==8.1.3"
click-datetime = "==0.2"
click-didyoumean = "==0.3.0"
click-plugins = "==1.1.1"
click-repl = "==0.2.0"
colorama = "==0.4.4"
defusedxml = "==0.7.1"
deprecated = "==1.2.13"
dnspython = "==2.2.1"
docopt = "==0.6.2"
docutils = "==0.16"
eventlet = "==0.33.1"
flask = "~=2.1.2"
flask-bcrypt = "==1.0.1"
flask-marshmallow = "==0.14.0"
flask-migrate = "==3.1.0"
flask-redis = "==0.4.0"
flask-sqlalchemy = {version = "==2.5.1", ref = "aa7a61a5357cf6f5dcc135d98c781192457aa6fa", git = "https://github.com/pallets-eco/flask-sqlalchemy.git"}
gunicorn = {version = "==20.1.0", extras = ["eventlet"], ref = "1299ea9e967a61ae2edebe191082fd169b864c64", git = "https://github.com/benoitc/gunicorn.git"}
iso8601 = "==1.0.2"
itsdangerous = "==2.1.2"
jsonschema = {version = "==4.5.1", extras = ["format"]}
lxml = "==4.9.1"
marshmallow = "==3.15.0"
marshmallow-sqlalchemy = "==0.28.1"
notifications-python-client = "==6.3.0"
notifications-utils = {git = "https://github.com/GSA/notifications-utils.git"}
oscrypto = "==1.3.0"
psycopg2-binary = "==2.9.3"
pyjwt = "==2.4.0"
python-dotenv = "==0.20.0"
sqlalchemy = "==1.4.40"
werkzeug = "~=2.1.1"
# PaaS packages
awscli-cwlogs = "==1.4.6"
# gds metrics packages
prometheus-client = "==0.14.1"
gds-metrics = {ref = "6f1840a57b6fb1ee40b7e84f2f18ec229de8aa72", git = "https://github.com/alphagov/gds_metrics_python.git"}
[dev-packages]
flake8 = "==4.0.1"
flake8-bugbear = "==22.4.25"
isort = "==5.10.1"
moto = "==3.1.9"
pytest = "==7.1.2"
pytest-env = "==0.6.2"
pytest-mock = "==3.7.0"
pytest-cov = "==3.0.0"
pytest-xdist = "==2.5.0"
freezegun = "==1.2.1"
requests-mock = "==1.9.3"
jinja2-cli = {version = "==0.8.2", extras = ["yaml"]}
pip-audit = "*"
bandit = "*"
[requires]
python_version = "3.9"

2123
Pipfile.lock generated Normal file

File diff suppressed because it is too large Load Diff

219
README.md
View File

@@ -1,153 +1,100 @@
# US Notify API
Cloned from the brilliant work of the team at [GOV.UK Notify](https://github.com/alphagov/notifications-api), cheers!
This project is the core of [Notify](https://notifications-admin.app.cloud.gov/). It's cloned from the brilliant work of the team at [GOV.UK Notify](https://github.com/alphagov/notifications-api), cheers!
Contains:
This repo contains:
- the public-facing REST API for US Notify, which teams can integrate with using [our clients](https://www.notifications.service.gov.uk/documentation) [DOCS ARE STILL UK]
- an internal-only REST API built using Flask to manage services, users, templates, etc (this is what the [admin app](http://github.com/18F/notifications-admin) talks to)
- asynchronous workers built using Celery to put things on queues and read them off to be processed, sent to providers, updated, etc
- A public-facing REST API for Notify, which teams can integrate with using [API clients built by UK](https://www.notifications.service.gov.uk/documentation)
- An internal-only REST API built using Flask to manage services, users, templates, etc., which the [admin UI](http://github.com/18F/notifications-admin) talks to
- Asynchronous workers built using Celery to put things on queues and read them off to be processed, sent to providers, updated, etc.
## QUICKSTART
---
If you are the first on your team to deploy, set up AWS SES/SNS as instructed in the AWS setup section below.
Our other repositories are:
Create .env file as described in the .env section below.
- [notifications-admin](https://github.com/GSA/notifications-admin)
- [notifications-utils](https://github.com/GSA/notifications-utils)
- [us-notify-compliance](https://github.com/GSA/us-notify-compliance/)
- [notify-python-demo](https://github.com/GSA/notify-python-demo)
Install VS Code
Open VS Code and install the Remote-Containers plug-in from Microsoft.
## Documentation, here and elsewhere
Make sure your docker daemon is running (on OS X, this is typically accomplished by opening the Docker Desktop app)
Also make sure there is NOT a Postgres daemon running on port 5432.
### About Notify
Create the external docker network:
- [Roadmap](https://notifications-admin.app.cloud.gov/features/roadmap)
- [Using the API](./docs/api-usage.md)
`docker network create notify-network`
### Infrastructure
Using the command palette (shift+cmd+p), search and select “Remote Containers: Open Folder in Container...”
When prompted, choose **devcontainer-api** folder (note: this is a *subfolder* of notification-api). This will startup the container in a new window (replacing the current one).
- [Overview, setup, and onboarding](./docs/infra-overview.md)
- [Database management](./docs/database-management.md)
After this page loads, hit "show logs” in bottom-right. The first time this runs it will need to build the Docker image, which will likely take several minutes.
### Common dev work
Select View->Open View..., then search/select “ports”. Await a green dot on the port view, then open a new terminal and run the web server:
`make run-flask`
- [Local setup](#local-setup)
- [Testing](./docs/testing.md)
- [Deploying](./docs/deploying.md)
- [Running one-off tasks](./docs/one-off-tasks.md)
Open another terminal and run the background tasks:
`make run-celery`
Confirm that everything is working by hitting localhost:6011 and it responds with a 200 OK.
---
## Setting Up
### `.env` file
Create and edit a .env file, based on sample.env.
NOTE: when you change .env in the future, you'll need to rebuild the devcontainer for the change to take effect. VS Code _should_ detect the change and prompt you with a toast notification during a cached build. If not, you can find a manual rebuild in the command palette or just `docker rm` the notifications-api container.
Things to change:
- If you're not the first to deploy, only replace the aws creds, get these from team lead
- Replace `NOTIFY_EMAIL_DOMAIN` with the domain your emails will come from (i.e. the "origination email" in your SES project)
- Replace `SECRET_KEY` and `DANGEROUS_SALT` with high-entropy secret values
- Set up AWS SES and SNS as indicated in next section (AWS Setup), fill in missing AWS env vars
### AWS Setup
**Steps to prepare SES**
1. Go to SES console for \$AWS_REGION and create new origin and destination emails. AWS will send a verification via email which you'll need to complete.
2. Find and replace instances in the repo of "testsender", "testreceiver" and "dispostable.com", with your origin and destination email addresses, which you verified in step 1 above.
TODO: create env vars for these origin and destination email addresses for the root service, and create new migrations to update postgres seed fixtures
**Steps to prepare SNS**
1. Go to Pinpoints console for \$AWS_PINPOINT_REGION and choose "create new project", then "configure for sms"
2. Tick the box at the top to enable SMS, choose "transactional" as the default type and save
3. In the lefthand sidebar, go to the "SMS and Voice" (bottom) and choose "Phone Numbers"
4. Under "Number Settings" choose "Request Phone Number"
5. Choose Toll-free number, tick SMS, untick Voice, choose "transactional", hit next and then "request"
6. Go to SNS console for \$AWS_PINPOINT_REGION, look at lefthand sidebar under "Mobile" and go to "Text Messaging (SMS)"
7. Scroll down to "Sandbox destination phone numbers" and tap "Add phone number" then follow the steps to verify (you'll need to be able to retrieve a code sent to each number)
At this point, you _should_ be able to complete both the email and phone verification steps of the Notify user sign up process! 🎉
### Secrets Detection
```
brew install detect-secrets # or pip install detect-secrets
detect-secrets scan
#review output of above, make sure none of the baseline entries are sensitive
detect-secrets scan > .secrets.baseline
#creates the baseline file
```
Ideally, you'll install `detect-secrets` so that it's accessible from any environment from which you _might_ commit. You can use `brew install` to make it available globally. You could also install via `pip install` inside a virtual environment, if you're sure you'll _only_ commit from that environment.
If you open .git/hooks/pre-commit you should see a simple bash script that runs the command below, reads the output and aborts before committing if detect-secrets finds a secret. You should be able to test it by staging a file with any high-entropy string like `"bblfwk3u4bt484+afw4avev5ae+afr4?/fa"` (it also has other ways to detect secrets, this is just the most straightforward to test).
You can permit exceptions by adding an inline comment containing `pragma: allowlist secret`
The command that is actually run by the pre-commit hook is: `git diff --staged --name-only -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline`
You can also run against all tracked files staged or not: `git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline`
### Postgres
Local postgres implementation is handled by [docker compose](https://github.com/18F/notifications-api/blob/main/docker-compose.devcontainer.yml)
### Redis
Local redis implementation is handled by [docker compose](https://github.com/18F/notifications-api/blob/main/docker-compose.devcontainer.yml)
## To test the application
```
# install dependencies, etc.
make bootstrap
make test
```
## To run a local OWASP scan
1. Run `make run-flask` from within the dev container.
2. On your host machine run:
```
docker run -v $(pwd):/zap/wrk/:rw --network="notify-network" -t owasp/zap2docker-weekly zap-api-scan.py -t http://dev:6011/_status -f openapi -c zap.conf
```
## To run scheduled tasks
```
# After scheduling some tasks, open a third terminal in your running devcontainer and run celery beat
make run-celery-beat
```
## To run one off tasks (Ignore for Quick Start)
Tasks are run through the `flask` command - run `flask --help` for more information. There are two sections we need to
care about: `flask db` contains alembic migration commands, and `flask command` contains all of our custom commands. For
example, to purge all dynamically generated functional test data, do the following:
Local (from inside the devcontainer)
```
flask command purge_functional_test_data -u <functional tests user name prefix>
```
Remote
```
cf run-task notify-api "flask command purge_functional_test_data -u <functional tests user name prefix>"
```
All commands and command options have a --help command if you need more information.
## Further documentation [DEPRECATED]
## UK docs that may still be helpful
- [Writing public APIs](docs/writing-public-apis.md)
- [Updating dependencies](https://github.com/alphagov/notifications-manuals/wiki/Dependencies)
## Local setup
### Direct installation
1. Set up Postgres && Redis on your machine
1. Install [pipenv](https://pipenv.pypa.io/en/latest/)
1. Install dependencies into a virtual environment
`make bootstrap`
1. Create the .env file
```
cp sample.env .env
# follow the instructions in .env
```
1. Run Flask
`make run-flask`
1. Run Celery
`make run-celery`
### VS Code && Docker installation
If you're working in VS Code, you can also leverage Docker for a containerized dev environment
1. Create the .env file
```
cp sample.env .env
# follow the instructions in .env
```
1. Install the Remote-Containers plug-in in VS Code
1. With Docker running, create the network:
`docker network create notify-network`
1. Using the command palette (shift+cmd+p) or green button thingy in the bottom left, search and select “Remote Containers: Open Folder in Container...” When prompted, choose the **devcontainer-api** folder (note: this is a *subfolder* of notification-api). This will start up the container in a new window, replacing the current one.
1. Wait a few minutes while things happen 🍵
1. Open a VS Code terminal and run the Flask application:
`make run-flask`
1. Open another VS Code terminal and run Celery:
`make run-celery`
NOTE: when you change .env in the future, you'll need to rebuild the devcontainer for the change to take effect. VS Code _should_ detect the change and prompt you with a toast notification during a cached build. If not, you can find a manual rebuild in the command palette or just `docker rm` the notifications-api container.

View File

@@ -111,6 +111,10 @@ def purge_functional_test_data(user_email_prefix):
users, services, etc. Give an email prefix. Probably "notify-tests-preview".
"""
if os.getenv('NOTIFY_ENVIRONMENT', '') not in ['development', 'test']:
current_app.logger.error('Can only be run in development')
return
users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all()
for usr in users:
# Make sure the full email includes a uuid in it

View File

@@ -1,13 +0,0 @@
*__pycache__*
.git/*
app/assets/*
bower_components/*
cache/*
.cache/*
node_modules/*
target/*
venv/*
build/*
.envrc
tests/.cache/*
.cf/*

View File

@@ -30,14 +30,13 @@ cd /workspace
git status
make generate-version-file
pip3 install -r requirements.txt
pip3 install -r requirements_for_test.txt
pipenv install --dev
# Install virtualenv to support running the isolated make freeze-requirements from within the devcontainer
pip3 install virtualenv
# Upgrade schema of the notification_api database
flask db upgrade
pipenv run flask db upgrade
# Run flask server
# make run-flask

View File

@@ -30,8 +30,7 @@ cd /workspace
git status
make generate-version-file
pip3 install -r requirements.txt
pip3 install -r requirements_for_test.txt
pipenv install --dev
# Install virtualenv to support running the isolated make freeze-requirements from within the devcontainer
pip3 install virtualenv

10
docs/api-usage.md Normal file
View File

@@ -0,0 +1,10 @@
# API Usage
## Connecting to the API
To make life easier, the [UK API client libraries](https://www.notifications.service.gov.uk/documentation) are compatible with Notify.
For a usage example, see [our Python demo](https://github.com/GSA/notify-python-demo).
An API key can be created at https://notifications-admin.app.cloud.gov/services/YOUR_SERVICE_ID/api/keys. However, in order to successfully send messages, you will need to receive a secret header token from the Notify team.

View File

@@ -0,0 +1,59 @@
# Database management
## Initial state
In Notify, several aspects of the system are loaded into the database via migration. This means that
application setup requires loading and overwriting historical data in order to arrive at the current
configuration.
[Here are notes](https://docs.google.com/document/d/1ZgiUtJFvRBKBxB1ehiry2Dup0Q5iIwbdCU5spuqUFTo/edit#)
about what is loaded into which tables, and some plans for how we might manage that in the future.
Flask does not seem to have a great way to squash migrations, but rather wants you to recreate them
from the DB structure. This means it's easy to recreate the tables, but hard to recreate the initial data.
## Data Model Diagram
A diagram of Notify's data model is available [in our compliance repo](https://github.com/GSA/us-notify-compliance/blob/main/diagrams/rendered/apps/data.logical.pdf).
## Migrations
Create a migration:
```
flask db migrate
```
Trim any auto-generated stuff down to what you want, and manually rename it to be in numerical order.
We should only have one migration branch.
Running migrations locally:
```
flask db upgrade
```
This should happen automatically on cloud.gov, but if you need to run a one-off migration for some reason:
```
cf run-task notifications-api-staging --command "flask db upgrade" --name db-upgrade
```
## Purging user data
There is a Flask command to wipe user-created data (users, services, etc.).
The command should stop itself if it's run in a production environment, but, you know, please don't run it
in a production environment.
Running locally:
```
flask command purge_functional_test_data -u <functional tests user name prefix>
```
Running on cloud.gov:
```
cf run-task notify-api "flask command purge_functional_test_data -u <functional tests user name prefix>"
```

18
docs/deploying.md Normal file
View File

@@ -0,0 +1,18 @@
# Deploying
We deploy automatically to cloud.gov for production and staging environments.
Deployment runs via the [deployment action](../.github/workflows/deploy.yml) on GitHub, which pulls credentials from GitHub's secrets store.
The [action that we use](https://github.com/18F/cg-deploy-action) deploys using [a rolling strategy](https://docs.cloudfoundry.org/devguide/deploy-apps/rolling-deploy.html), so all deployments should have zero downtime.
The API has 2 deployment environments:
- Production, which deploys from `main`
- Staging, which does not, in fact, exist
Configurations for these are located in [the `deploy-config` folder](../deploy-config/).
In the event that a deployment includes a Terraform change, that change will run before any code is deployed to the environment. Each environment has its own Terraform GitHub Action to handle that change.
Failures in any of these GitHub workflows will be surfaced in the Pull Request related to the code change, and in the case of `checks.yml` actively prevent the PR from being merged. Failure in the Terraform workflow will not actively prevent the PR from being merged, but reviewers should not approve a PR with a failing terraform plan.

57
docs/infra-overview.md Normal file
View File

@@ -0,0 +1,57 @@
# Infrastructure overview
A diagram of the system is available [in our compliance repo](https://github.com/GSA/us-notify-compliance/blob/main/diagrams/rendered/apps/application.boundary.png).
Notify is a Flask application running on [cloud.gov](https://cloud.gov), which also brokers access to a PostgreSQL database and Redis store.
In addition to the Flask app, Notify uses Celery to manage the task queue. Celery stores tasks in Redis.
## Terraform
The cloud.gov environment is configured with Terraform. See [the `terraform` folder](../terraform/) to learn about that.
## AWS
In addition to services provisioned through cloud.gov, we have several services provisioned directly in AWS. Our AWS services are currently located in the us-west-2 region using the tts-sandbox account. We plan to move to GovCloud shortly.
To send messages, we use Amazon Web Services SNS and SES. In addition, we use AWS Pinpoint to provision and manage phone numbers, short codes, and long codes for sending SMS.
In SES, we are currently using the "sandbox" mode. This requires email addresses to be pre-registered in the AWS console in order to receive emails. The DKIM settings live under the verified domain entry.
In SNS, we have 3 topics for SMS receipts. These are not currently functional, so senders won't know the status of messages.
Through Pinpoint, the API needs at least one number so that the application itself can send SMS for authentication codes.
The API also has access to AWS S3 buckets for storing CSVs of messages and contact lists. It does not access a third S3 bucket that stores agency logos.
We may be able to provision these services through cloud.gov, as well. In addition to [s3 support](https://cloud.gov/docs/services/s3/), there is [an SES brokerpak](https://github.com/GSA-TTS/datagov-brokerpak-smtp) and work on an SNS brokerpak.
## Onboarding
- [ ] Join [the GSA GitHub org](https://github.com/GSA/GitHub-Administration#join-the-gsa-organization)
- [ ] Get permissions for the repos
- [ ] Get access to the cloud.gov org && space
- [ ] Get [access to AWS](https://handbook.tts.gsa.gov/launching-software/infrastructure/#cloud-service-provider-csp-sandbox-accounts), if necessary
- [ ] Pull down creds from cloud.gov and create the local .env file
- [ ] Do stuff!
## Setting up the infrastructure
### Steps to prepare SES
1. Go to SES console for \$AWS_REGION and create new origin and destination emails. AWS will send a verification via email which you'll need to complete.
2. Find and replace instances in the repo of "testsender", "testreceiver" and "dispostable.com", with your origin and destination email addresses, which you verified in step 1 above.
TODO: create env vars for these origin and destination email addresses for the root service, and create new migrations to update postgres seed fixtures
### Steps to prepare SNS
1. Go to Pinpoints console for \$AWS_PINPOINT_REGION and choose "create new project", then "configure for sms"
2. Tick the box at the top to enable SMS, choose "transactional" as the default type and save
3. In the lefthand sidebar, go to the "SMS and Voice" (bottom) and choose "Phone Numbers"
4. Under "Number Settings" choose "Request Phone Number"
5. Choose Toll-free number, tick SMS, untick Voice, choose "transactional", hit next and then "request"
6. Go to SNS console for \$AWS_PINPOINT_REGION, look at lefthand sidebar under "Mobile" and go to "Text Messaging (SMS)"
7. Scroll down to "Sandbox destination phone numbers" and tap "Add phone number" then follow the steps to verify (you'll need to be able to retrieve a code sent to each number)
At this point, you _should_ be able to complete both the email and phone verification steps of the Notify user sign up process! 🎉

22
docs/one-off-tasks.md Normal file
View File

@@ -0,0 +1,22 @@
# One-off tasks
For these, we're using Flask commands, which live in [`/app/commands.py`](../app/commands.py).
This includes things that might be one-time operations! Using a command allows the operation to be tested,
both with `pytest` and with trial runs.
To run a command on cloud.gov, use this format:
```
cf run-task CLOUD-GOV-APP --command "YOUR COMMAND HERE" --name YOUR-COMMAND
```
[Here's more documentation](https://docs.cloudfoundry.org/devguide/using-tasks.html) about Cloud Foundry tasks.
## Celery scheduled tasks
After scheduling some tasks, run celery beat to get them moving:
```
make run-celery-beat
```

33
docs/testing.md Normal file
View File

@@ -0,0 +1,33 @@
# Testing
```
# install dependencies, etc.
make bootstrap
make test
```
This will run:
- flake8 for code styling
- isort for import styling
- pytest for the test suite
On GitHub, in addition to these tests, we run:
- bandit for code security
- pip-audit for dependency vulnerabilities
- OWASP for dynamic scanning
## CI testing
We're using GitHub Actions. See [/.github](../.github/) for the configuration.
In addition to commit-triggered scans, the `daily_checks.yml` workflow runs the relevant dependency audits, static scan, and/or dynamic scans at 10am UTC each day. Developers will be notified of failures in daily scans by GitHub notifications.
## To run a local OWASP scan
1. Run `make run-flask` from within the dev container.
2. On your host machine run:
```
docker run -v $(pwd):/zap/wrk/:rw --network="notify-network" -t owasp/zap2docker-weekly zap-api-scan.py -t http://dev:6011/_status -f openapi -c zap.conf
```

BIN
dump.rdb

Binary file not shown.

View File

@@ -1,39 +0,0 @@
# Run `make freeze-requirements` to update requirements.txt
# with package version changes made in requirements.in
cffi==1.15.0
celery[redis]==5.2.7
Flask-Bcrypt==1.0.1
flask-marshmallow==0.14.0
Flask-Migrate==3.1.0
git+https://github.com/pallets-eco/flask-sqlalchemy.git@aa7a61a5357cf6f5dcc135d98c781192457aa6fa#egg=Flask-SQLAlchemy==2.5.1
Flask==2.1.2
click-datetime==0.2
# Should be pinned until a new gunicorn release greater than 20.1.0 comes out. (Due to eventlet v0.33 compatibility issues)
git+https://github.com/benoitc/gunicorn.git@1299ea9e967a61ae2edebe191082fd169b864c64#egg=gunicorn[eventlet]==20.1.0
iso8601==1.0.2
itsdangerous==2.1.2
jsonschema[format]==4.5.1
marshmallow-sqlalchemy==0.28.1
marshmallow==3.15.0
psycopg2-binary==2.9.3
PyJWT==2.4.0
SQLAlchemy==1.4.40
cachetools==5.1.0
beautifulsoup4==4.11.1
lxml==4.9.1
defusedxml==0.7.1
Werkzeug==2.1.1
python-dotenv==0.20.0
oscrypto==1.3.0
notifications-python-client==6.3.0
# PaaS
awscli-cwlogs==1.4.6
notifications-utils @ git+https://github.com/GSA/notifications-utils.git
# gds-metrics requires prometheseus 0.2.0, override that requirement as 0.7.1 brings significant performance gains
prometheus-client==0.14.1
git+https://github.com/alphagov/gds_metrics_python.git@6f1840a57b6fb1ee40b7e84f2f18ec229de8aa72

View File

@@ -1,297 +0,0 @@
#
# This file is autogenerated by pip-compile with python 3.9
# To update, run:
#
# pip-compile requirements.in
#
alembic==1.7.7
# via flask-migrate
amqp==5.1.1
# via kombu
arrow==1.2.2
# via isoduration
asn1crypto==1.5.1
# via oscrypto
async-timeout==4.0.2
# via redis
attrs==21.4.0
# via jsonschema
awscli==1.24.8
# via awscli-cwlogs
awscli-cwlogs==1.4.6
# via -r requirements.in
bcrypt==3.2.2
# via flask-bcrypt
beautifulsoup4==4.11.1
# via -r requirements.in
billiard==3.6.4.0
# via celery
bleach==4.1.0
# via notifications-utils
blinker==1.4
# via gds-metrics
boto3==1.23.8
# via notifications-utils
botocore==1.26.8
# via
# awscli
# boto3
# s3transfer
cachetools==5.1.0
# via
# -r requirements.in
# notifications-utils
celery[redis]==5.2.7
# via -r requirements.in
certifi==2022.5.18.1
# via
# pyproj
# requests
cffi==1.15.0
# via
# -r requirements.in
# bcrypt
charset-normalizer==2.0.12
# via requests
click==8.1.3
# via
# celery
# click-datetime
# click-didyoumean
# click-plugins
# click-repl
# flask
click-datetime==0.2
# via -r requirements.in
click-didyoumean==0.3.0
# via celery
click-plugins==1.1.1
# via celery
click-repl==0.2.0
# via celery
colorama==0.4.4
# via awscli
defusedxml==0.7.1
# via -r requirements.in
deprecated==1.2.13
# via redis
dnspython==2.2.1
# via eventlet
docopt==0.6.2
# via notifications-python-client
docutils==0.16
# via awscli
eventlet==0.33.1
# via gunicorn
flask==2.1.2
# via
# -r requirements.in
# flask-bcrypt
# flask-marshmallow
# flask-migrate
# flask-redis
# flask-sqlalchemy
# gds-metrics
# notifications-utils
flask-bcrypt==1.0.1
# via -r requirements.in
flask-marshmallow==0.14.0
# via -r requirements.in
flask-migrate==3.1.0
# via -r requirements.in
flask-redis==0.4.0
# via notifications-utils
flask-sqlalchemy @ git+https://github.com/pallets-eco/flask-sqlalchemy.git@aa7a61a5357cf6f5dcc135d98c781192457aa6fa
# via
# -r requirements.in
# flask-migrate
fqdn==1.5.1
# via jsonschema
gds-metrics @ git+https://github.com/alphagov/gds_metrics_python.git@6f1840a57b6fb1ee40b7e84f2f18ec229de8aa72
# via -r requirements.in
geojson==2.5.0
# via notifications-utils
govuk-bank-holidays==0.11
# via notifications-utils
greenlet==1.1.2
# via
# eventlet
# sqlalchemy
gunicorn @ git+https://github.com/benoitc/gunicorn.git@1299ea9e967a61ae2edebe191082fd169b864c64
# via -r requirements.in
idna==3.3
# via
# jsonschema
# requests
importlib-metadata==4.12.0
# via flask
iso8601==1.0.2
# via -r requirements.in
isoduration==20.11.0
# via jsonschema
itsdangerous==2.1.2
# via
# -r requirements.in
# flask
# notifications-utils
jinja2==3.1.2
# via
# flask
# notifications-utils
jmespath==1.0.0
# via
# boto3
# botocore
jsonpointer==2.3
# via jsonschema
jsonschema[format]==4.5.1
# via -r requirements.in
kombu==5.2.4
# via celery
lxml==4.9.1
# via -r requirements.in
mako==1.2.2
# via alembic
markupsafe==2.1.1
# via
# jinja2
# mako
marshmallow==3.15.0
# via
# -r requirements.in
# flask-marshmallow
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==0.28.1
# via -r requirements.in
mistune==0.8.4
# via notifications-utils
notifications-python-client==6.3.0
# via -r requirements.in
notifications-utils @ git+https://github.com/GSA/notifications-utils.git
# via -r requirements.in
orderedset==2.0.3
# via notifications-utils
oscrypto==1.3.0
# via -r requirements.in
packaging==21.3
# via
# bleach
# marshmallow
# marshmallow-sqlalchemy
# redis
phonenumbers==8.12.48
# via notifications-utils
prometheus-client==0.14.1
# via
# -r requirements.in
# gds-metrics
prompt-toolkit==3.0.29
# via click-repl
psycopg2-binary==2.9.3
# via -r requirements.in
pyasn1==0.4.8
# via rsa
pycparser==2.21
# via cffi
pyjwt==2.4.0
# via
# -r requirements.in
# notifications-python-client
pyparsing==3.0.9
# via packaging
pypdf2==2.0.0
# via notifications-utils
pyproj==3.3.1
# via notifications-utils
pyrsistent==0.18.1
# via jsonschema
python-dateutil==2.8.2
# via
# arrow
# awscli-cwlogs
# botocore
python-dotenv==0.20.0
# via -r requirements.in
python-json-logger==2.0.2
# via notifications-utils
pytz==2022.1
# via
# celery
# notifications-utils
pyyaml==5.4.1
# via
# awscli
# notifications-utils
redis==4.3.1
# via
# celery
# flask-redis
requests==2.27.1
# via
# awscli-cwlogs
# govuk-bank-holidays
# notifications-python-client
# notifications-utils
rfc3339-validator==0.1.4
# via jsonschema
rfc3987==1.3.8
# via jsonschema
rsa==4.7.2
# via awscli
s3transfer==0.5.2
# via
# awscli
# boto3
shapely==1.8.2
# via notifications-utils
six==1.16.0
# via
# awscli-cwlogs
# bleach
# click-repl
# eventlet
# flask-marshmallow
# python-dateutil
# rfc3339-validator
smartypants==2.0.1
# via notifications-utils
soupsieve==2.3.2.post1
# via beautifulsoup4
sqlalchemy==1.4.40
# via
# -r requirements.in
# alembic
# flask-sqlalchemy
# marshmallow-sqlalchemy
statsd==3.3.0
# via notifications-utils
typing-extensions==4.3.0
# via pypdf2
uri-template==1.2.0
# via jsonschema
urllib3==1.26.9
# via
# botocore
# requests
vine==5.0.0
# via
# amqp
# celery
# kombu
wcwidth==0.2.5
# via prompt-toolkit
webcolors==1.12
# via jsonschema
webencodings==0.5.1
# via bleach
werkzeug==2.1.1
# via
# -r requirements.in
# flask
wrapt==1.14.1
# via deprecated
zipp==3.8.1
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View File

@@ -1,14 +0,0 @@
--requirement requirements.txt
flake8==5.0.4
flake8-bugbear==22.9.23
isort==5.10.1
moto==3.1.9
pytest==7.1.2
pytest-env==0.6.2
pytest-mock==3.7.0
pytest-cov==3.0.0
pytest-xdist==2.5.0
freezegun==1.2.1
requests-mock==1.9.3
# used for creating manifest file locally
jinja2-cli[yaml]==0.8.2

View File

@@ -1,5 +1,47 @@
# STEPS TO SET UP
#
# 1. Pull down AWS creds from cloud.gov using `cf env`, then update AWS section
#
# 2. Uncomment either the Docker setup or the direct setup
#
# 3. Comment out the other setup
#
# 4. Replace `NOTIFY_EMAIL_DOMAIN` with the domain your emails will come from (i.e. the "origination email" in your SES project)
#
# 5. Replace `SECRET_KEY` and `DANGEROUS_SALT` with high-entropy secret values
#
### REBUILD THE DEVCONTAINER WHEN YOU MODIFY .ENV ###
#############################################################
# AWS
AWS_REGION=us-west-2
AWS_ACCESS_KEY_ID="don't write secrets to the sample file"
AWS_SECRET_ACCESS_KEY="don't write secrets to the sample file"
AWS_PINPOINT_REGION=us-west-2
AWS_US_TOLL_FREE_NUMBER=+18446120782
#############################################################
# Local Docker setup, all overwritten in cloud.gov
ADMIN_BASE_URL=http://admin:6012
API_HOST_NAME=http://dev:6011
REDIS_URL=redis://redis:6380
REDIS_ENABLED=1
SQLALCHEMY_DATABASE_URI=postgresql://postgres:chummy@db:5432/notification_api
SQLALCHEMY_DATABASE_TEST_URI=postgresql://postgres:chummy@db:5432/test_notification_api
# Local direct setup, all overwritten in cloud.gov
# ADMIN_BASE_URL=http://localhost:6012
# API_HOST_NAME=http://localhost:6011
# REDIS_URL=redis://localhost:6379
# REDIS_ENABLED=1
# SQLALCHEMY_DATABASE_URI=postgresql://localhost:5432/notification_api
# SQLALCHEMY_DATABASE_TEST_URI=postgresql://localhost:5432/test_notification_api
#############################################################
# Debug
DEBUG=True
ANTIVIRUS_ENABLED=0
@@ -10,9 +52,7 @@ NOTIFY_APP_NAME=api
NOTIFY_EMAIL_DOMAIN=dispostable.com
NOTIFY_LOG_PATH=/workspace/logs/app.log
# secrets that internal apps, such as the admin app or document download, must use to authenticate with the API
ADMIN_CLIENT_ID=notify-admin
ADMIN_CLIENT_SECRET=dev-notify-secret-key
#############################################################
# Flask
FLASK_APP=application.py
@@ -21,28 +61,6 @@ WERKZEUG_DEBUG_PIN=off
SECRET_KEY=dev-notify-secret-key
DANGEROUS_SALT=dev-notify-salt
# URL of admin app, this is overriden on cloudfoundry
ADMIN_BASE_URL=http://admin:6012
# URL of api app, this is overriden on cloudfoundry
API_HOST_NAME=http://dev:6011
# URL of redis instance, this is overriden on cloudfoundry
REDIS_URL=redis://redis:6380
REDIS_ENABLED=1
# DB connection string for local docker, overriden on remote with vcap env vars
SQLALCHEMY_DATABASE_URI=postgresql://postgres:chummy@db:5432/notification_api
# For testing in local docker
SQLALCHEMY_DATABASE_TEST_URI=postgresql://postgres:chummy@db:5432/test_notification_api
# DB connection string for local non-docker connection
# SQLALCHEMY_DATABASE_URI=postgresql://user:password@localhost:5432/notification_api
# AWS
AWS_REGION=us-west-2
AWS_ACCESS_KEY_ID="don't write secrets to the sample file"
AWS_SECRET_ACCESS_KEY="don't write secrets to the sample file"
AWS_PINPOINT_REGION=us-west-2
AWS_US_TOLL_FREE_NUMBER=+18446120782
# secrets that internal apps, such as the admin app or document download, must use to authenticate with the API
ADMIN_CLIENT_ID=notify-admin
ADMIN_CLIENT_SECRET=dev-notify-secret-key

View File

@@ -1,30 +0,0 @@
SECRET_KEY: "dev-notify-secret-key" # pragma: allowlist secret
DANGEROUS_SALT: "dev-notify-salt"
ADMIN_BASE_URL: https://notifications-admin.app.cloud.gov
ADMIN_CLIENT_ID: notify-admin
ADMIN_CLIENT_SECRET: dev-notify-secret-key
API_HOST_NAME: https://notifications-api.app.cloud.gov
AWS_PINPOINT_REGION: us-west-2
AWS_REGION: us-west-2
AWS_US_TOLL_FREE_NUMBER: 18446120782
DANGEROUS_SALT: dev-notify-salt
DVLA_EMAIL_ADDRESSES: []
FIRETEXT_API_KEY: placeholder
FIRETEXT_INBOUND_SMS_AUTH: {}
FIRETEXT_INTERNATIONAL_API_KEY: placeholder
FLASK_APP: application.py
FLASK_ENV: production
INTERNAL_CLIENT_API_KEYS: '{"notify-admin":["dev-notify-secret-key"]}'
MMG_API_KEY: placeholder
MMG_INBOUND_SMS_AUTH: {}
MMG_INBOUND_SMS_USERNAME: {}
NOTIFICATION_QUEUE_PREFIX: prototype_10x
NOTIFY_APP_NAME: api
NOTIFY_EMAIL_DOMAIN: dispostable.com
NOTIFY_ENVIRONMENT: live
NOTIFY_LOG_PATH: /home/vcap/logs/app.log
ROUTE_SECRET_KEY_1: dev-route-secret-key-1
ROUTE_SECRET_KEY_2: dev-route-secret-key-2
SECRET_KEY: dev-notify-secret-key
STATSD_HOST: localhost