Merge pull request #763 from alphagov/cloudfoundry

Run on PaaS

Authored by Andras Ferencz-Szabo on 2017-01-17 11:28:38 +00:00; committed by GitHub.
33 changed files with 1064 additions and 85 deletions.

.cfignore (symbolic link, 1 line)

@@ -0,0 +1 @@
.gitignore

.gitignore (vendored, 2 lines changed)

@@ -67,6 +67,4 @@ environment.sh
celerybeat-schedule
app/version.py
wheelhouse/

Jenkinsfile (vendored, new file, 248 lines)

@@ -0,0 +1,248 @@
#!groovy
def deployDatabaseMigrations(cfEnv) {
waitUntil {
try {
lock(cfEnv) {
withCredentials([
string(credentialsId: 'paas_username', variable: 'CF_USERNAME'),
string(credentialsId: 'paas_password', variable: 'CF_PASSWORD')
]) {
withEnv(["CF_SPACE=${cfEnv}"]) {
sh 'make cf-deploy-api-db-migration-with-docker'
}
}
}
true
} catch(err) {
echo "Deployment to ${cfEnv} failed: ${err}"
try {
slackSend channel: '#govuk-notify', message: "Deployment to ${cfEnv} failed. Please retry or abort: <${env.BUILD_URL}|${env.JOB_NAME} - #${env.BUILD_NUMBER}>", color: 'danger'
} catch(err2) {
echo "Sending Slack message failed: ${err2}"
}
input "Stage failed. Retry?"
false
}
}
}
def deploy(cfEnv) {
waitUntil {
try {
lock(cfEnv) {
withCredentials([
string(credentialsId: 'paas_username', variable: 'CF_USERNAME'),
string(credentialsId: 'paas_password', variable: 'CF_PASSWORD')
]) {
withEnv(["CF_SPACE=${cfEnv}"]) {
parallel deployApi: {
retry(3) {
sh 'make cf-deploy-api-with-docker'
}
}, deployDeliveryCeleryBeat: {
sleep(10)
withEnv(["CF_APP=notify-delivery-celery-beat"]) {
retry(3) {
sh 'make cf-deploy-delivery-with-docker'
}
}
}, deployDeliveryWorker: {
sleep(20)
withEnv(["CF_APP=notify-delivery-worker"]) {
retry(3) {
sh 'make cf-deploy-delivery-with-docker'
}
}
}, deployDeliveryWorkerSender: {
sleep(30)
withEnv(["CF_APP=notify-delivery-worker-sender"]) {
retry(3) {
sh 'make cf-deploy-delivery-with-docker'
}
}
}, deployDeliveryWorkerDatabase: {
sleep(40)
withEnv(["CF_APP=notify-delivery-worker-database"]) {
retry(3) {
sh 'make cf-deploy-delivery-with-docker'
}
}
}, deployDeliveryWorkerResearch: {
sleep(50)
withEnv(["CF_APP=notify-delivery-worker-research"]) {
retry(3) {
sh 'make cf-deploy-delivery-with-docker'
}
}
}
}
}
gitCommit = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
sh("git tag -f deployed-to-cf-${cfEnv} ${gitCommit}")
sh("git push -f origin deployed-to-cf-${cfEnv}")
}
true
} catch(err) {
echo "Deployment to ${cfEnv} failed: ${err}"
try {
slackSend channel: '#govuk-notify', message: "Deployment to ${cfEnv} failed. Please retry or abort: <${env.BUILD_URL}|${env.JOB_NAME} - #${env.BUILD_NUMBER}>", color: 'danger'
} catch(err2) {
echo "Sending Slack message failed: ${err2}"
}
input "Stage failed. Retry?"
false
}
}
}
def buildJobWithRetry(jobName) {
waitUntil {
try {
build job: jobName
true
} catch(err) {
echo "${jobName} failed: ${err}"
try {
slackSend channel: '#govuk-notify', message: "${jobName} failed. Please retry or abort: <${env.BUILD_URL}|${env.JOB_NAME} - #${env.BUILD_NUMBER}>", color: 'danger'
} catch(err2) {
echo "Sending Slack message failed: ${err2}"
}
input "${jobName} failed. Retry?"
false
}
}
}
try {
node {
stage('Build') {
git url: 'git@github.com:alphagov/notifications-api.git', branch: 'cloudfoundry', credentialsId: 'github_com_and_gds'
checkout scm
milestone 10
withEnv(["PIP_ACCEL_CACHE=${env.JENKINS_HOME}/cache/pip-accel"]) {
sh 'make cf-build-with-docker'
}
stash name: 'source', excludes: 'venv/**,wheelhouse/**', useDefaultExcludes: false
}
stage('Test') {
milestone 20
sh 'make test-with-docker'
try {
junit 'test_results.xml'
} catch(err) {
echo "Collecting jUnit results failed: ${err}"
}
try {
withCredentials([string(credentialsId: 'coveralls_repo_token_api', variable: 'COVERALLS_REPO_TOKEN')]) {
sh 'make coverage-with-docker'
}
} catch(err) {
echo "Coverage failed: ${err}"
}
}
stage('Preview') {
if (deployToPreview == "true") {
milestone 30
deployDatabaseMigrations 'preview'
buildJobWithRetry 'notify-functional-tests-preview'
deploy 'preview'
} else {
echo 'Preview skipped.'
}
}
stage('Preview tests') {
if (deployToPreview == "true") {
buildJobWithRetry 'notify-functional-tests-preview'
buildJobWithRetry 'run-ruby-client-integration-tests'
buildJobWithRetry 'run-python-client-integration-tests'
buildJobWithRetry 'run-net-client-integration-tests'
buildJobWithRetry 'run-node-client-integration-tests'
buildJobWithRetry 'run-java-client-integration-tests'
buildJobWithRetry 'run-php-client-integration-tests'
} else {
echo 'Preview tests skipped.'
}
}
}
stage('Staging') {
if (deployToStaging == "true") {
input 'Approve?'
milestone 40
node {
unstash 'source'
deployDatabaseMigrations 'staging'
buildJobWithRetry 'notify-functional-tests-staging'
deploy 'staging'
}
} else {
echo 'Staging skipped.'
}
}
stage('Staging tests') {
if (deployToStaging == "true") {
buildJobWithRetry 'notify-functional-tests-staging'
buildJobWithRetry 'notify-functional-provider-tests-staging'
} else {
echo 'Staging tests skipped'
}
}
stage('Prod') {
if (deployToProduction == "true") {
input 'Approve?'
milestone 50
node {
unstash 'source'
deployDatabaseMigrations 'production'
buildJobWithRetry 'notify-functional-admin-tests-production'
buildJobWithRetry 'notify-functional-api-email-test-production'
buildJobWithRetry 'notify-functional-api-sms-test-production'
deploy 'production'
}
} else {
echo 'Production skipped.'
}
}
stage('Prod tests') {
if (deployToProduction == "true") {
buildJobWithRetry 'notify-functional-admin-tests-production'
buildJobWithRetry 'notify-functional-api-email-test-production'
buildJobWithRetry 'notify-functional-api-sms-test-production'
buildJobWithRetry 'notify-functional-provider-email-test-production'
buildJobWithRetry 'notify-functional-provider-sms-test-production'
} else {
echo 'Production tests skipped.'
}
}
} catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException fie) {
currentBuild.result = 'ABORTED'
} catch (err) {
currentBuild.result = 'FAILURE'
echo "Pipeline failed: ${err}"
slackSend channel: '#govuk-notify', message: "${env.JOB_NAME} - #${env.BUILD_NUMBER} failed (<${env.BUILD_URL}|Open>)", color: 'danger'
} finally {
node {
try {
step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: 'notify-support+jenkins@digital.cabinet-office.gov.uk', sendToIndividuals: false])
} catch(err) {
echo "Sending email failed: ${err}"
}
try {
sh 'make clean-docker-containers'
} catch(err) {
echo "Cleaning up Docker containers failed: ${err}"
}
}
}

Makefile (152 lines changed)

@@ -8,7 +8,9 @@ APP_VERSION_FILE = app/version.py
GIT_BRANCH ?= $(shell git symbolic-ref --short HEAD 2> /dev/null || echo "detached")
GIT_COMMIT ?= $(shell git rev-parse HEAD)
DOCKER_BUILDER_IMAGE_NAME = govuk/notify-api-builder
DOCKER_IMAGE_TAG := $(shell cat docker/VERSION)
DOCKER_BUILDER_IMAGE_NAME = govuk/notify-api-builder:${DOCKER_IMAGE_TAG}
DOCKER_TTY ?= $(if ${JENKINS_HOME},,t)
BUILD_TAG ?= notifications-api-manual
BUILD_NUMBER ?= 0
@@ -17,6 +19,10 @@ BUILD_URL ?=
DOCKER_CONTAINER_PREFIX = ${USER}-${BUILD_TAG}
CF_API ?= api.cloud.service.gov.uk
CF_ORG ?= govuk-notify
CF_SPACE ?= ${DEPLOY_ENV}
.PHONY: help
help:
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@@ -25,8 +31,8 @@ help:
venv: venv/bin/activate ## Create virtualenv if it does not exist
venv/bin/activate:
test -d venv || virtualenv venv
./venv/bin/pip install pip-accel
test -d venv || virtualenv venv -p python3
. venv/bin/activate && pip install pip-accel
.PHONY: check-env-vars
check-env-vars: ## Check mandatory environment variables
@@ -35,6 +41,12 @@ check-env-vars: ## Check mandatory environment variables
$(if ${AWS_ACCESS_KEY_ID},,$(error Must specify AWS_ACCESS_KEY_ID))
$(if ${AWS_SECRET_ACCESS_KEY},,$(error Must specify AWS_SECRET_ACCESS_KEY))
.PHONY: sandbox
sandbox: ## Set environment to sandbox
$(eval export DEPLOY_ENV=sandbox)
$(eval export DNS_NAME="cloudapps.digital")
@true
.PHONY: preview
preview: ## Set environment to preview
$(eval export DEPLOY_ENV=preview)
@@ -56,7 +68,7 @@ production: ## Set environment to production
.PHONY: dependencies
dependencies: venv ## Install build dependencies
mkdir -p ${PIP_ACCEL_CACHE}
PIP_ACCEL_CACHE=${PIP_ACCEL_CACHE} ./venv/bin/pip-accel install -r requirements_for_test.txt
. venv/bin/activate && PIP_ACCEL_CACHE=${PIP_ACCEL_CACHE} pip-accel install -r requirements_for_test.txt
.PHONY: generate-version-file
generate-version-file: ## Generates the app version file
@@ -64,7 +76,10 @@ generate-version-file: ## Generates the app version file
.PHONY: build
build: dependencies generate-version-file ## Build project
./venv/bin/pip-accel wheel --wheel-dir=wheelhouse -r requirements.txt
. venv/bin/activate && PIP_ACCEL_CACHE=${PIP_ACCEL_CACHE} pip-accel wheel --wheel-dir=wheelhouse -r requirements.txt
.PHONY: cf-build
cf-build: dependencies generate-version-file ## Build project for PAAS
.PHONY: build-codedeploy-artifact
build-codedeploy-artifact: ## Build the deploy artifact for CodeDeploy
@@ -125,19 +140,21 @@ deploy-check-autoscaling-processes: check-aws-vars ## Returns with the number of
.PHONY: coverage
coverage: venv ## Create coverage report
./venv/bin/coveralls
. venv/bin/activate && coveralls
.PHONY: prepare-docker-build-image
prepare-docker-build-image: ## Prepare the Docker builder image
mkdir -p ${PIP_ACCEL_CACHE}
make -C docker build-build-image
make -C docker build
.PHONY: build-with-docker
build-with-docker: prepare-docker-build-image ## Build inside a Docker container
@docker run -i --rm \
@docker run -i${DOCKER_TTY} --rm \
--name "${DOCKER_CONTAINER_PREFIX}-build" \
-v `pwd`:/var/project \
-v ${PIP_ACCEL_CACHE}:/var/project/cache/pip-accel \
-v "`pwd`:/var/project" \
-v "${PIP_ACCEL_CACHE}:/var/project/cache/pip-accel" \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
-e GIT_COMMIT=${GIT_COMMIT} \
-e BUILD_NUMBER=${BUILD_NUMBER} \
-e BUILD_URL=${BUILD_URL} \
@@ -147,13 +164,34 @@ build-with-docker: prepare-docker-build-image ## Build inside a Docker container
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
${DOCKER_BUILDER_IMAGE_NAME} \
make build
gosu hostuser make build
.PHONY: cf-build-with-docker
cf-build-with-docker: prepare-docker-build-image ## Build inside a Docker container
@docker run -i${DOCKER_TTY} --rm \
--name "${DOCKER_CONTAINER_PREFIX}-build" \
-v "`pwd`:/var/project" \
-v "${PIP_ACCEL_CACHE}:/var/project/cache/pip-accel" \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
-e GIT_COMMIT=${GIT_COMMIT} \
-e BUILD_NUMBER=${BUILD_NUMBER} \
-e BUILD_URL=${BUILD_URL} \
-e http_proxy="${HTTP_PROXY}" \
-e HTTP_PROXY="${HTTP_PROXY}" \
-e https_proxy="${HTTPS_PROXY}" \
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
${DOCKER_BUILDER_IMAGE_NAME} \
gosu hostuser make cf-build
.PHONY: test-with-docker
test-with-docker: prepare-docker-build-image create-docker-test-db ## Run tests inside a Docker container
@docker run -i --rm \
@docker run -i${DOCKER_TTY} --rm \
--name "${DOCKER_CONTAINER_PREFIX}-test" \
--link "${DOCKER_CONTAINER_PREFIX}-db:postgres" \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
-e TEST_DATABASE=postgresql://postgres:postgres@postgres/test_notification_api \
-e GIT_COMMIT=${GIT_COMMIT} \
-e BUILD_NUMBER=${BUILD_NUMBER} \
@@ -163,9 +201,9 @@ test-with-docker: prepare-docker-build-image create-docker-test-db ## Run tests
-e https_proxy="${HTTPS_PROXY}" \
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
-v `pwd`:/var/project \
-v "`pwd`:/var/project" \
${DOCKER_BUILDER_IMAGE_NAME} \
make test
gosu hostuser make test
.PHONY: test-with-docker
create-docker-test-db: ## Start the test database in a Docker container
@@ -180,9 +218,11 @@ create-docker-test-db: ## Start the test database in a Docker container
# FIXME: CIRCLECI=1 is an ugly hack because the coveralls-python library sends the PR link only this way
.PHONY: coverage-with-docker
coverage-with-docker: prepare-docker-build-image ## Generates coverage report inside a Docker container
@docker run -i --rm \
@docker run -i${DOCKER_TTY} --rm \
--name "${DOCKER_CONTAINER_PREFIX}-coverage" \
-v `pwd`:/var/project \
-v "`pwd`:/var/project" \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
-e COVERALLS_REPO_TOKEN=${COVERALLS_REPO_TOKEN} \
-e CIRCLECI=1 \
-e CI_NAME=${CI_NAME} \
@@ -196,11 +236,87 @@ coverage-with-docker: prepare-docker-build-image ## Generates coverage report in
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
${DOCKER_BUILDER_IMAGE_NAME} \
make coverage
gosu hostuser make coverage
.PHONY: clean-docker-containers
clean-docker-containers: ## Clean up any remaining docker containers
docker rm -f $(shell docker ps -q -f "name=${DOCKER_CONTAINER_PREFIX}") 2> /dev/null || true
.PHONY: clean
clean:
rm -rf node_modules cache target venv .coverage build tests/.cache
rm -rf node_modules cache target venv .coverage build tests/.cache wheelhouse
.PHONY: cf-login
cf-login: ## Log in to Cloud Foundry
$(if ${CF_USERNAME},,$(error Must specify CF_USERNAME))
$(if ${CF_PASSWORD},,$(error Must specify CF_PASSWORD))
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
@echo "Logging in to Cloud Foundry on ${CF_API}"
@cf login -a "${CF_API}" -u ${CF_USERNAME} -p "${CF_PASSWORD}" -o "${CF_ORG}" -s "${CF_SPACE}"
.PHONY: cf-deploy-api
cf-deploy-api: ## Deploys the API to Cloud Foundry
$(eval export ORIG_INSTANCES=$(shell cf curl /v2/apps/$(shell cf app --guid notify-api) | jq -r ".entity.instances"))
@echo "Original instance count: ${ORIG_INSTANCES}"
cf check-manifest notify-api -f manifest-api-${CF_SPACE}.yml
cf zero-downtime-push notify-api -f manifest-api-${CF_SPACE}.yml
cf scale -i ${ORIG_INSTANCES} notify-api
.PHONY: cf-push-api
cf-push-api: ##
cf push notify-api -f manifest-api-${CF_SPACE}.yml
.PHONY: cf-deploy-api-db-migration
cf-deploy-api-db-migration: ## Deploys the API db migration to Cloud Foundry
cf check-manifest notify-api-db-migration -f manifest-api-db-migration.yml
cf push notify-api-db-migration -f manifest-api-db-migration.yml
cf-push-api-db-migration: cf-deploy-api-db-migration ## Deploys the API db migration to Cloud Foundry
.PHONY: cf-deploy-delivery
cf-deploy-delivery: ## Deploys a delivery app to Cloud Foundry
$(if ${CF_APP},,$(error Must specify CF_APP))
$(eval export ORIG_INSTANCES=$(shell cf curl /v2/apps/$(shell cf app --guid ${CF_APP}) | jq -r ".entity.instances"))
@echo "Original instance count: ${ORIG_INSTANCES}"
cf check-manifest ${CF_APP} -f manifest-$(subst notify-,,${CF_APP}).yml
cf zero-downtime-push ${CF_APP} -f manifest-$(subst notify-,,${CF_APP}).yml
cf scale -i ${ORIG_INSTANCES} ${CF_APP}
.PHONY: cf-push-delivery
cf-push-delivery: ## Deploys a delivery app to Cloud Foundry
$(if ${CF_APP},,$(error Must specify CF_APP))
cf push ${CF_APP} -f manifest-$(subst notify-,,${CF_APP}).yml
define cf_deploy_with_docker
@docker run -i${DOCKER_TTY} --rm \
--name "${DOCKER_CONTAINER_PREFIX}-${1}" \
-v "`pwd`:/var/project" \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
-e http_proxy="${HTTP_PROXY}" \
-e HTTP_PROXY="${HTTP_PROXY}" \
-e https_proxy="${HTTPS_PROXY}" \
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
-e CF_API="${CF_API}" \
-e CF_USERNAME="${CF_USERNAME}" \
-e CF_PASSWORD="${CF_PASSWORD}" \
-e CF_ORG="${CF_ORG}" \
-e CF_SPACE="${CF_SPACE}" \
-e CF_APP="${CF_APP}" \
${DOCKER_BUILDER_IMAGE_NAME} \
${2}
endef
.PHONY: cf-deploy-api-with-docker
cf-deploy-api-with-docker: prepare-docker-build-image ## Deploys the API to Cloud Foundry from a Docker container
$(call cf_deploy_with_docker,cf-deploy-api,make cf-login cf-deploy-api)
.PHONY: cf-deploy-api-db-migration-with-docker
cf-deploy-api-db-migration-with-docker: prepare-docker-build-image ## Deploys the API db migration to Cloud Foundry from a Docker container
$(call cf_deploy_with_docker,cf-deploy-api-db-migration,make cf-login cf-deploy-api-db-migration)
.PHONY: cf-deploy-delivery-with-docker
cf-deploy-delivery-with-docker: prepare-docker-build-image ## Deploys a delivery app to Cloud Foundry from a Docker container
$(if ${CF_APP},,$(error Must specify CF_APP))
$(call cf_deploy_with_docker,cf-deploy-delivery-${CF_APP},make cf-login cf-deploy-delivery)

app/.gitignore (vendored, new file, 1 line)

@@ -0,0 +1 @@
version.py


@@ -1,5 +1,6 @@
import os
import uuid
import json
from flask import Flask, _request_ctx_stack
from flask import request, url_for, g, jsonify
@@ -42,8 +43,12 @@ api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user)
def create_app(app_name=None):
    application = Flask(__name__)

    from config import configs
    application.config.from_object(configs[os.environ['NOTIFY_ENVIRONMENT']])
    from app.config import configs

    notify_environment = os.environ['NOTIFY_ENVIRONMENT']
    application.config.from_object(configs[notify_environment])

    if app_name:
        application.config['NOTIFY_APP_NAME'] = app_name


@@ -0,0 +1,62 @@
"""
Extracts cloudfoundry config from its json and populates the environment variables that we would expect to be populated
on local/aws boxes
"""
import os
import json
def extract_cloudfoundry_config():
vcap_services = json.loads(os.environ['VCAP_SERVICES'])
set_config_env_vars(vcap_services)
def set_config_env_vars(vcap_services):
# Postgres config
os.environ['SQLALCHEMY_DATABASE_URI'] = vcap_services['postgres'][0]['credentials']['uri']
vcap_application = json.loads(os.environ['VCAP_APPLICATION'])
os.environ['NOTIFY_ENVIRONMENT'] = vcap_application['space_name']
os.environ['LOGGING_STDOUT_JSON'] = '1'
# Notify common config
for s in vcap_services['user-provided']:
if s['name'] == 'notify-config':
extract_notify_config(s)
elif s['name'] == 'notify-aws':
extract_notify_aws_config(s)
elif s['name'] == 'hosted-graphite':
extract_hosted_graphite_config(s)
elif s['name'] == 'mmg':
extract_mmg_config(s)
elif s['name'] == 'firetext':
extract_firetext_config(s)
def extract_notify_config(notify_config):
os.environ['ADMIN_BASE_URL'] = notify_config['credentials']['admin_base_url']
os.environ['API_HOST_NAME'] = notify_config['credentials']['api_host_name']
os.environ['ADMIN_CLIENT_SECRET'] = notify_config['credentials']['admin_client_secret']
os.environ['SECRET_KEY'] = notify_config['credentials']['secret_key']
os.environ['DANGEROUS_SALT'] = notify_config['credentials']['dangerous_salt']
def extract_notify_aws_config(aws_config):
os.environ['NOTIFICATION_QUEUE_PREFIX'] = aws_config['credentials']['sqs_queue_prefix']
os.environ['AWS_ACCESS_KEY_ID'] = aws_config['credentials']['aws_access_key_id']
os.environ['AWS_SECRET_ACCESS_KEY'] = aws_config['credentials']['aws_secret_access_key']
def extract_hosted_graphite_config(hosted_graphite_config):
os.environ['STATSD_PREFIX'] = hosted_graphite_config['credentials']['statsd_prefix']
def extract_mmg_config(mmg_config):
os.environ['MMG_URL'] = mmg_config['credentials']['api_url']
os.environ['MMG_API_KEY'] = mmg_config['credentials']['api_key']
def extract_firetext_config(firetext_config):
os.environ['FIRETEXT_API_KEY'] = firetext_config['credentials']['api_key']
os.environ['LOADTESTING_API_KEY'] = firetext_config['credentials']['loadtesting_api_key']
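
For reference, a minimal sketch of how the extraction above gets exercised. The VCAP_SERVICES and VCAP_APPLICATION values below are illustrative placeholders (only the notify-config user-provided service is shown); on PaaS the platform injects the real JSON:

import json
import os

from app.cloudfoundry_config import extract_cloudfoundry_config

# Illustrative values only; Cloud Foundry sets these variables for a running app.
os.environ['VCAP_APPLICATION'] = json.dumps({'space_name': 'sandbox'})
os.environ['VCAP_SERVICES'] = json.dumps({
    'postgres': [{'credentials': {'uri': 'postgresql://user:pass@host/notify'}}],
    'user-provided': [
        {
            'name': 'notify-config',
            'credentials': {
                'admin_base_url': 'https://admin.example',
                'api_host_name': 'https://api.example',
                'admin_client_secret': 'placeholder',
                'secret_key': 'placeholder',
                'dangerous_salt': 'placeholder',
            }
        },
    ]
})

extract_cloudfoundry_config()

assert os.environ['NOTIFY_ENVIRONMENT'] == 'sandbox'
assert os.environ['SQLALCHEMY_DATABASE_URI'].startswith('postgresql://')
assert os.environ['LOGGING_STDOUT_JSON'] == '1'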


@@ -4,14 +4,20 @@ from kombu import Exchange, Queue
import os
class Config(object):
########################################
# Secrets that are held in credstash ###
########################################
if os.environ.get('VCAP_SERVICES'):
    # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate
    # standard environment variables from it
    from app.cloudfoundry_config import extract_cloudfoundry_config
    extract_cloudfoundry_config()
class Config(object):
# URL of admin app
ADMIN_BASE_URL = os.environ['ADMIN_BASE_URL']
# URL of api app (on AWS this is the internal api endpoint)
API_HOST_NAME = os.getenv('API_HOST_NAME')
# admin app api key
ADMIN_CLIENT_SECRET = os.environ['ADMIN_CLIENT_SECRET']
@@ -42,12 +48,16 @@ class Config(object):
# URL of redis instance
REDIS_URL = os.getenv('REDIS_URL')
REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'
# Logging
DEBUG = False
LOGGING_STDOUT_JSON = os.getenv('LOGGING_STDOUT_JSON') == '1'
###########################
# Default config values ###
###########################
DEBUG = False
NOTIFY_ENVIRONMENT = 'development'
ADMIN_CLIENT_USER_NAME = 'notify-admin'
AWS_REGION = 'eu-west-1'
@@ -126,8 +136,6 @@ class Config(object):
Queue('notify', Exchange('default'), routing_key='notify')
]
API_HOST_NAME = "http://localhost:6011"
NOTIFICATIONS_ALERT = 5 # five mins
FROM_NUMBER = 'development'
@@ -135,8 +143,6 @@ class Config(object):
STATSD_HOST = "statsd.hostedgraphite.com"
STATSD_PORT = 8125
REDIS_ENABLED = False
SENDING_NOTIFICATIONS_TIMEOUT_PERIOD = 259200 # 3 days
SIMULATED_EMAIL_ADDRESSES = ('simulate-delivered@notifications.service.gov.uk',
@@ -165,6 +171,7 @@ class Development(Config):
Queue('send-email', Exchange('default'), routing_key='send-email'),
Queue('research-mode', Exchange('default'), routing_key='research-mode')
]
API_HOST_NAME = "http://localhost:6011"
class Test(Config):
@@ -172,7 +179,6 @@ class Test(Config):
FROM_NUMBER = 'testing'
NOTIFY_ENVIRONMENT = 'test'
DEBUG = True
REDIS_ENABLED = True
CSV_UPLOAD_BUCKET_NAME = 'test-notifications-csv-upload'
STATSD_ENABLED = True
STATSD_HOST = "localhost"
@@ -184,6 +190,8 @@ class Test(Config):
Queue('send-email', Exchange('default'), routing_key='send-email'),
Queue('research-mode', Exchange('default'), routing_key='research-mode')
]
REDIS_ENABLED = True
API_HOST_NAME = "http://localhost:6011"
class Preview(Config):
@@ -192,7 +200,6 @@ class Preview(Config):
CSV_UPLOAD_BUCKET_NAME = 'preview-notifications-csv-upload'
API_HOST_NAME = 'http://admin-api.internal'
FROM_NUMBER = 'preview'
REDIS_ENABLED = True
class Staging(Config):
@@ -202,7 +209,6 @@ class Staging(Config):
STATSD_ENABLED = True
API_HOST_NAME = 'http://admin-api.internal'
FROM_NUMBER = 'stage'
REDIS_ENABLED = True
class Live(Config):
@@ -212,7 +218,18 @@ class Live(Config):
STATSD_ENABLED = True
API_HOST_NAME = 'http://admin-api.internal'
FROM_NUMBER = '40604'
REDIS_ENABLED = True
class CloudFoundryConfig(Config):
pass
# CloudFoundry sandbox
class Sandbox(CloudFoundryConfig):
NOTIFY_EMAIL_DOMAIN = 'notify.works'
NOTIFY_ENVIRONMENT = 'sandbox'
CSV_UPLOAD_BUCKET_NAME = 'cf-sandbox-notifications-csv-upload'
FROM_NUMBER = 'sandbox'
configs = {
@@ -220,5 +237,6 @@ configs = {
'test': Test,
'live': Live,
'staging': Staging,
'preview': Preview
'preview': Preview,
'sandbox': Sandbox
}
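
The configs mapping above is what create_app uses (see the app/__init__.py hunk earlier in this diff): NOTIFY_ENVIRONMENT selects the config class, and on PaaS that value is derived from the Cloud Foundry space name by the VCAP_SERVICES block at the top of this file. A minimal sketch of the lookup, assuming NOTIFY_ENVIRONMENT is already set:

import os

from flask import Flask

from app.config import configs

application = Flask(__name__)

# 'sandbox' resolves to the Sandbox class, which inherits the Config defaults above
notify_environment = os.environ['NOTIFY_ENVIRONMENT']
application.config.from_object(configs[notify_environment])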


@@ -3,8 +3,9 @@ from app import notify_celery, create_app
from credstash import getAllSecrets
import os
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
# On AWS get secrets and export to env, skip this on Cloud Foundry
if os.getenv('VCAP_SERVICES') is None:
    os.environ.update(getAllSecrets(region="eu-west-1"))
application = create_app("delivery")
application.app_context().push()

db.py (5 lines changed)

@@ -4,8 +4,9 @@ from app import create_app, db
from credstash import getAllSecrets
import os
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
# On AWS get secrets and export to env, skip this on Cloud Foundry
if os.getenv('VCAP_SERVICES') is None:
    os.environ.update(getAllSecrets(region="eu-west-1"))
application = create_app()

docker/Dockerfile (new file, 58 lines)

@@ -0,0 +1,58 @@
FROM python:3.4-slim
ARG HTTP_PROXY
ARG HTTPS_PROXY
ARG NO_PROXY
ENV PYTHONUNBUFFERED=1 \
DEBIAN_FRONTEND=noninteractive \
GOSU_VERSION=1.10
RUN \
echo "Install base packages" \
&& ([ -z "$HTTP_PROXY" ] || echo "Acquire::http::Proxy \"${HTTP_PROXY}\";" > /etc/apt/apt.conf.d/99HttpProxy) \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
make \
curl \
git \
build-essential \
zip \
libpq-dev \
jq \
&& echo "Clean up" \
&& rm -rf /var/lib/apt/lists/* /tmp/*
RUN \
echo "Install global pip packages" \
&& pip install \
virtualenv \
awscli \
wheel
RUN \
echo "Install Cloud Foundry CLI" \
&& curl -sSL "https://cli.run.pivotal.io/stable?release=debian64&source=github" -o /tmp/cloudfoundry-cli.deb \
&& dpkg -i /tmp/cloudfoundry-cli.deb \
&& cf install-plugin -r CF-Community -f "autopilot" \
&& cf install-plugin -r CF-Community -f "blue-green-deploy" \
&& cf install-plugin -r CF-Community -f "antifreeze"
COPY tianon.gpg /tmp/tianon.gpg
RUN \
echo "Install gosu" \
&& curl -sSL -o /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture)" \
&& curl -sSL -o /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture).asc" \
&& export GNUPGHOME="$(mktemp -d)" \
&& gpg --import /tmp/tianon.gpg \
&& gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu \
&& rm -r "$GNUPGHOME" /usr/local/bin/gosu.asc \
&& chmod +x /usr/local/bin/gosu \
&& gosu nobody true
WORKDIR /var/project
COPY entrypoint.sh /usr/local/bin/docker-entrypoint
ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]


@@ -1,31 +0,0 @@
FROM python:3.4-slim
ARG HTTP_PROXY
ARG HTTPS_PROXY
ARG NO_PROXY
ENV PYTHONUNBUFFERED=1 \
DEBIAN_FRONTEND=noninteractive
RUN \
echo "Install base packages" \
&& ([ -z "$HTTP_PROXY" ] || echo "Acquire::http::Proxy \"${HTTP_PROXY}\";" > /etc/apt/apt.conf.d/99HttpProxy) \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
make \
git \
build-essential \
zip \
libpq-dev \
&& echo "Clean up" \
&& rm -rf /var/lib/apt/lists/* /tmp/*
RUN \
echo "Install global pip packages" \
&& pip install \
virtualenv \
awscli \
wheel
WORKDIR /var/project


@@ -1,17 +1,33 @@
.DEFAULT_GOAL := help
SHELL := /bin/bash
DOCKER_IMAGE_TAG := $(shell cat VERSION)
.PHONY: help
help:
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
.PHONY: build-build-image
build-build-image:
.PHONY: build
build:
docker build \
--pull \
--build-arg HTTP_PROXY="${HTTP_PROXY}" \
--build-arg HTTPS_PROXY="${HTTP_PROXY}" \
--build-arg NO_PROXY="${NO_PROXY}" \
-f Dockerfile-build \
-t govuk/notify-api-builder \
-t govuk/notify-api-builder:${DOCKER_IMAGE_TAG} \
.
.PHONY: bash
bash:
docker run -it --rm \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
govuk/notify-api-builder:${DOCKER_IMAGE_TAG} \
bash
.PHONY: bash
bash-hostuser:
docker run -it --rm \
-e UID=$(shell id -u) \
-e GID=$(shell id -g) \
govuk/notify-api-builder:${DOCKER_IMAGE_TAG} \
gosu hostuser bash

docker/VERSION (new file, 1 line)

@@ -0,0 +1 @@
2

docker/entrypoint.sh (new executable file, 33 lines)

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -eo pipefail; [[ "$TRACE" ]] && set -x

if [[ "$(id -u)" -ne 0 ]]; then
  echo 'docker-entrypoint requires root' >&2
  exit 1
fi

if [ -z "$UID" ] || [ "$UID" = "0" ]; then
  echo "UID must be specified as a positive integer"
  exit 1
fi

if [ -z "$GID" ] || [ "$GID" = "0" ]; then
  echo "GID must be specified as positive integer"
  exit 1
fi

USER=$(id -un $UID 2>/dev/null || echo "hostuser")
GROUP=$(getent group $GID | cut -d: -f1 || echo "hostgroup")

if [ "$USER" = "hostuser" ]; then
  useradd -u $UID -s /bin/bash -m $USER
fi

if [ "$GROUP" = "hostgroup" ]; then
  groupadd -g $GID $GROUP
fi

usermod -g $GROUP $USER

exec "$@"

docker/tianon.gpg (new binary file, not shown)


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-api-db-migration
    buildpack: python_buildpack
    command: python db.py db upgrade && sleep infinity
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    env:
      NOTIFY_APP_NAME: public-api
    no-route: true
    health-check-type: none
    instances: 1
    memory: 128M

manifest-api-preview.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
---
applications:
  - name: notify-api
    buildpack: python_buildpack
    command: gunicorn -w 5 -b 0.0.0.0:$PORT wsgi
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    env:
      NOTIFY_APP_NAME: public-api
    routes:
      - route: notify-api-preview.cloudapps.digital
      - route: api-paas.notify.works
    instances: 1
    memory: 512M


@@ -0,0 +1,20 @@
---
applications:
  - name: notify-api
    buildpack: python_buildpack
    command: gunicorn -w 5 -b 0.0.0.0:$PORT wsgi
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    env:
      NOTIFY_APP_NAME: public-api
    routes:
      - route: notify-api-production.cloudapps.digital
      - route: api-paas.notifications.service.gov.uk
    instances: 2
    memory: 2048M

manifest-api-sandbox.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
---
applications:
  - name: notify-api
    buildpack: python_buildpack
    command: gunicorn -w 5 -b 0.0.0.0:$PORT wsgi
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    env:
      NOTIFY_APP_NAME: public-api
    routes:
      - route: notify-api-sandbox.cloudapps.digital
    instances: 1
    memory: 512M

manifest-api-staging.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
---
applications:
  - name: notify-api
    buildpack: python_buildpack
    command: gunicorn -w 5 -b 0.0.0.0:$PORT wsgi
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    env:
      NOTIFY_APP_NAME: public-api
    routes:
      - route: notify-api-staging.cloudapps.digital
      - route: api-paas.staging-notify.works
    instances: 2
    memory: 2048M


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-delivery-celery-beat
    buildpack: python_buildpack
    health-check-type: none
    no-route: true
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    instances: 2
    memory: 128M
    command: celery -A aws_run_celery.notify_celery beat --loglevel=INFO
    env:
      NOTIFY_APP_NAME: delivery-celery-beat


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-delivery-worker-database
    buildpack: python_buildpack
    health-check-type: none
    no-route: true
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    instances: 2
    memory: 256M
    command: celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email
    env:
      NOTIFY_APP_NAME: delivery-worker-database


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-delivery-worker-research
    buildpack: python_buildpack
    health-check-type: none
    no-route: true
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    instances: 2
    memory: 256M
    command: celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q research-mode
    env:
      NOTIFY_APP_NAME: delivery-worker-research


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-delivery-worker-sender
    buildpack: python_buildpack
    health-check-type: none
    no-route: true
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    instances: 2
    memory: 256M
    command: celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email
    env:
      NOTIFY_APP_NAME: delivery-worker-sender


@@ -0,0 +1,19 @@
---
applications:
  - name: notify-delivery-worker
    buildpack: python_buildpack
    health-check-type: none
    no-route: true
    services:
      - notify-aws
      - notify-config
      - notify-db
      - mmg
      - firetext
      - hosted-graphite
    instances: 2
    memory: 256M
    command: celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11
    env:
      NOTIFY_APP_NAME: delivery-worker


@@ -19,10 +19,11 @@ monotonic==1.2
statsd==3.2.1
jsonschema==2.5.1
Flask-Redis==0.1.0
gunicorn==19.6.0
# pin to minor version 3.1.x
notifications-python-client>=3.1,<3.2
git+https://github.com/alphagov/notifications-utils.git@13.0.1#egg=notifications-utils==13.0.1
git+https://github.com/alphagov/notifications-utils.git@13.1.0#egg=notifications-utils==13.1.0
git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3

runtime.txt (new file, 1 line)

@@ -0,0 +1 @@
python-3.5.2


@@ -11,10 +11,11 @@ if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
# On AWS get secrets and export to env, skip this on Cloud Foundry
if os.getenv('VCAP_SERVICES') is None:
    os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
from app.config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]


@@ -0,0 +1,167 @@
import os
import json

import pytest

from app.cloudfoundry_config import extract_cloudfoundry_config, set_config_env_vars


@pytest.fixture
def notify_config():
    return {
        'name': 'notify-config',
        'credentials': {
            'admin_base_url': 'admin base url',
            'api_host_name': 'api host name',
            'admin_client_secret': 'admin client secret',
            'secret_key': 'secret key',
            'dangerous_salt': 'dangerous salt',
        }
    }


@pytest.fixture
def aws_config():
    return {
        'name': 'notify-aws',
        'credentials': {
            'sqs_queue_prefix': 'sqs queue prefix',
            'aws_access_key_id': 'aws access key id',
            'aws_secret_access_key': 'aws secret access key',
        }
    }


@pytest.fixture
def hosted_graphite_config():
    return {
        'name': 'hosted-graphite',
        'credentials': {
            'statsd_prefix': 'statsd prefix'
        }
    }


@pytest.fixture
def mmg_config():
    return {
        'name': 'mmg',
        'credentials': {
            'api_url': 'mmg api url',
            'api_key': 'mmg api key'
        }
    }


@pytest.fixture
def firetext_config():
    return {
        'name': 'firetext',
        'credentials': {
            'api_key': 'firetext api key',
            'loadtesting_api_key': 'loadtesting api key'
        }
    }


@pytest.fixture
def postgres_config():
    return [
        {
            'credentials': {
                'uri': 'postgres uri'
            }
        }
    ]


@pytest.fixture
def cloudfoundry_config(
    postgres_config,
    notify_config,
    aws_config,
    hosted_graphite_config,
    mmg_config,
    firetext_config
):
    return {
        'postgres': postgres_config,
        'user-provided': [
            notify_config,
            aws_config,
            hosted_graphite_config,
            mmg_config,
            firetext_config
        ]
    }


@pytest.fixture
def cloudfoundry_environ(monkeypatch, cloudfoundry_config):
    monkeypatch.setenv('VCAP_SERVICES', json.dumps(cloudfoundry_config))
    monkeypatch.setenv('VCAP_APPLICATION', '{"space_name": "🚀🌌"}')


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_extract_cloudfoundry_config_populates_other_vars():
    extract_cloudfoundry_config()

    assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgres uri'
    assert os.environ['LOGGING_STDOUT_JSON'] == '1'
    assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config):
    cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
    cloudfoundry_config['user-provided'].append({
        'name': 'bar', 'credentials': {'bar': 'bar'}
    })

    set_config_env_vars(cloudfoundry_config)

    assert 'foo' not in os.environ
    assert 'bar' not in os.environ


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_notify_config():
    extract_cloudfoundry_config()

    assert os.environ['ADMIN_BASE_URL'] == 'admin base url'
    assert os.environ['API_HOST_NAME'] == 'api host name'
    assert os.environ['ADMIN_CLIENT_SECRET'] == 'admin client secret'
    assert os.environ['SECRET_KEY'] == 'secret key'
    assert os.environ['DANGEROUS_SALT'] == 'dangerous salt'


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_aws_config():
    extract_cloudfoundry_config()

    assert os.environ['NOTIFICATION_QUEUE_PREFIX'] == 'sqs queue prefix'
    assert os.environ['AWS_ACCESS_KEY_ID'] == 'aws access key id'
    assert os.environ['AWS_SECRET_ACCESS_KEY'] == 'aws secret access key'


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_hosted_graphite_config():
    extract_cloudfoundry_config()

    assert os.environ['STATSD_PREFIX'] == 'statsd prefix'


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_mmg_config():
    extract_cloudfoundry_config()

    assert os.environ['MMG_URL'] == 'mmg api url'
    assert os.environ['MMG_API_KEY'] == 'mmg api key'


@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
def test_firetext_config():
    extract_cloudfoundry_config()

    assert os.environ['FIRETEXT_API_KEY'] == 'firetext api key'
    assert os.environ['LOADTESTING_API_KEY'] == 'loadtesting api key'

tests/app/test_config.py (new file, 80 lines)

@@ -0,0 +1,80 @@
import os
import importlib
from unittest import mock

import pytest

from app import config


def cf_conf():
    os.environ['ADMIN_BASE_URL'] = 'cf'


@pytest.fixture
def reload_config():
    """
    Reset config, by simply re-running config.py from a fresh environment
    """
    old_env = os.environ.copy()

    yield

    os.environ = old_env
    importlib.reload(config)


def test_load_cloudfoundry_config_if_available(monkeypatch, reload_config):
    os.environ['ADMIN_BASE_URL'] = 'env'
    monkeypatch.setenv('VCAP_SERVICES', 'some json blob')
    monkeypatch.setenv('VCAP_APPLICATION', 'some json blob')

    with mock.patch('app.cloudfoundry_config.extract_cloudfoundry_config', side_effect=cf_conf) as cf_config:
        # reload config so that its module level code (ie: all of it) is re-instantiated
        importlib.reload(config)

    assert cf_config.called
    assert os.environ['ADMIN_BASE_URL'] == 'cf'
    assert config.Config.ADMIN_BASE_URL == 'cf'


def test_load_config_if_cloudfoundry_not_available(monkeypatch, reload_config):
    os.environ['ADMIN_BASE_URL'] = 'env'
    monkeypatch.delenv('VCAP_SERVICES', raising=False)

    with mock.patch('app.cloudfoundry_config.extract_cloudfoundry_config') as cf_config:
        # reload config so that its module level code (ie: all of it) is re-instantiated
        importlib.reload(config)

    assert not cf_config.called
    assert os.environ['ADMIN_BASE_URL'] == 'env'
    assert config.Config.ADMIN_BASE_URL == 'env'


def test_cloudfoundry_config_has_different_defaults():
    # these should always be set on Sandbox
    assert config.Sandbox.REDIS_ENABLED is False


def test_logging_stdout_json_defaults_to_off(reload_config):
    os.environ.pop('LOGGING_STDOUT_JSON', None)

    assert config.Config.LOGGING_STDOUT_JSON is False


def test_logging_stdout_json_sets_to_off_if_not_recognised(reload_config):
    os.environ['LOGGING_STDOUT_JSON'] = 'foo'

    importlib.reload(config)

    assert config.Config.LOGGING_STDOUT_JSON is False


def test_logging_stdout_json_sets_to_on_if_set_to_1(reload_config):
    os.environ['LOGGING_STDOUT_JSON'] = '1'

    importlib.reload(config)

    assert config.Config.LOGGING_STDOUT_JSON is True


@@ -80,9 +80,16 @@ def notify_db_session(notify_db):
    notify_db.session.commit()


@pytest.fixture(scope='function')
def os_environ(mocker):
    mocker.patch('os.environ', {})


@pytest.fixture
def os_environ():
    """
    clear os.environ, and restore it after the test runs
    """
    # for use whenever you expect code to edit environment variables
    old_env = os.environ.copy()
    os.environ = {}

    yield

    os.environ = old_env


@pytest.fixture(scope='function')


@@ -4,8 +4,9 @@ from app import create_app
from credstash import getAllSecrets
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
# On AWS get secrets and export to env, skip this on Cloud Foundry
if os.getenv('VCAP_SERVICES') is None:
    os.environ.update(getAllSecrets(region="eu-west-1"))
application = create_app()