notifications-api/Makefile

.DEFAULT_GOAL := help
SHELL := /bin/bash
DATE = $(shell date +%Y-%m-%d:%H:%M:%S)
APP_VERSION_FILE = app/version.py
GIT_BRANCH ?= $(shell git symbolic-ref --short HEAD 2> /dev/null || echo "detached")
GIT_COMMIT ?= $(shell git rev-parse HEAD)
DOCKER_BUILDER_IMAGE_NAME = govuk/notify-api-builder:master
BUILD_TAG ?= notifications-api-manual
BUILD_NUMBER ?= 0
DEPLOY_BUILD_NUMBER ?= ${BUILD_NUMBER}
BUILD_URL ?=
DOCKER_CONTAINER_PREFIX = ${USER}-${BUILD_TAG}
CF_API ?= api.cloud.service.gov.uk
CF_ORG ?= govuk-notify
CF_SPACE ?= ${DEPLOY_ENV}
CF_HOME ?= ${HOME}
$(eval export CF_HOME)
CF_MANIFEST_FILE = manifest-$(firstword $(subst -, ,$(subst notify-,,${CF_APP})))-${CF_SPACE}.yml
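# For example (hypothetical app name): CF_APP=notify-delivery-worker in the preview
# space resolves to manifest-delivery-preview.yml.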
NOTIFY_CREDENTIALS ?= ~/.notify-credentials
## DEVELOPMENT
.PHONY: help
help:
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
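# Note: only targets annotated with a trailing '## description' comment show up in `make help`.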
.PHONY: generate-version-file
generate-version-file: ## Generates the app version file
@echo -e "__travis_commit__ = \"${GIT_COMMIT}\"\n__time__ = \"${DATE}\"\n__travis_job_number__ = \"${BUILD_NUMBER}\"\n__travis_job_url__ = \"${BUILD_URL}\"" > ${APP_VERSION_FILE}
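# The generated app/version.py looks like this (values below are examples only):
#   __travis_commit__ = "0123abcd..."
#   __time__ = "2020-01-01:12:00:00"
#   __travis_job_number__ = "42"
#   __travis_job_url__ = "https://example.invalid/build/42"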
.PHONY: test
test: generate-version-file ## Run tests
./scripts/run_tests.sh
# Top-level dependencies live in requirements-app.txt; `make freeze-requirements`
# uses it to generate the fully pinned requirements.txt. This follows
# alphagov/digitalmarketplace-api#615, and was introduced after unpinned packages
# on new deployments led to failed tests (e.g. alphagov/notifications-admin#2144).
# pipenv was re-evaluated and still looks more disruptive than this approach:
#  * pyup.io has only experimental Pipfile support and doesn't respect version
#    ranges or update hashes in the lock file
#  * the Cloud Foundry buildpack supports Pipfiles, but the support is relatively
#    new (until recently it installed dev packages during deployment) and is based
#    on generating a requirements file, which doesn't properly pin VCS dependencies
#    (no #egg= version, so pip won't upgrade a package that is already installed)
#  * pipenv's strict dependency resolution is poorly documented and can fail
#    unexpectedly, e.g. it refuses to install awscli-cwlogs, reporting a botocore
#    conflict that neither pip nor pip-tools sees
#  * pipenv regularly failed to install utils with a "Will try again." message;
#    the retry succeeds, but this doesn't inspire confidence
#  * switching to Pipfile would mean changing make targets and scripts across
#    Jenkins, PaaS, docker containers and dev machines
.PHONY: freeze-requirements
freeze-requirements: ## Pin all requirements including sub dependencies into requirements.txt
rm -rf venv-freeze
virtualenv -p python3 venv-freeze
$$(pwd)/venv-freeze/bin/pip install -r requirements-app.txt
echo '# pyup: ignore file' > requirements.txt
echo '# This file is autogenerated. Do not edit it manually.' >> requirements.txt
cat requirements-app.txt >> requirements.txt
echo '' >> requirements.txt
$$(pwd)/venv-freeze/bin/pip freeze -r <(sed '/^--/d' requirements-app.txt) | sed -n '/The following requirements were added by pip freeze/,$$p' >> requirements.txt
rm -rf venv-freeze
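# test-requirements fails the build if any top-level requirement in
# requirements-app.txt is missing from requirements.txt (the '<' side of the diff).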
.PHONY: test-requirements
test-requirements:
@diff requirements-app.txt requirements.txt | grep '<' \
&& { echo "requirements.txt doesn't match requirements-app.txt."; \
echo "Run 'make freeze-requirements' to update."; exit 1; } \
|| { echo "requirements.txt is up to date"; exit 0; }
.PHONY: prepare-docker-build-image
prepare-docker-build-image: generate-version-file ## Prepare the Docker builder image
docker build -f docker/Dockerfile \
--build-arg HTTP_PROXY="${HTTP_PROXY}" \
--build-arg HTTPS_PROXY="${HTTPS_PROXY}" \
--build-arg NO_PROXY="${NO_PROXY}" \
-t ${DOCKER_BUILDER_IMAGE_NAME} \
.
.PHONY: test-with-docker
test-with-docker: prepare-docker-build-image create-docker-test-db ## Run tests inside a Docker container
@docker run -it --rm \
--name "${DOCKER_CONTAINER_PREFIX}-test" \
--link "${DOCKER_CONTAINER_PREFIX}-db:postgres" \
-e SQLALCHEMY_DATABASE_URI=postgresql://postgres:postgres@postgres/test_notification_api \
-e GIT_COMMIT=${GIT_COMMIT} \
-e BUILD_NUMBER=${BUILD_NUMBER} \
-e BUILD_URL=${BUILD_URL} \
-e http_proxy="${HTTP_PROXY}" \
-e HTTP_PROXY="${HTTP_PROXY}" \
-e https_proxy="${HTTPS_PROXY}" \
-e HTTPS_PROXY="${HTTPS_PROXY}" \
-e NO_PROXY="${NO_PROXY}" \
${DOCKER_BUILDER_IMAGE_NAME} \
make test
.PHONY: create-docker-test-db
create-docker-test-db: ## Start the test database in a Docker container
docker rm -f ${DOCKER_CONTAINER_PREFIX}-db 2> /dev/null || true
@docker run -d \
--name "${DOCKER_CONTAINER_PREFIX}-db" \
-e POSTGRES_PASSWORD="postgres" \
-e POSTGRES_DB=test_notification_api \
postgres:9.5
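# Give Postgres a few seconds to start accepting connections before tests use it.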
sleep 3
.PHONY: clean-docker-containers
clean-docker-containers: ## Clean up any remaining Docker containers
docker rm -f $(shell docker ps -q -f "name=${DOCKER_CONTAINER_PREFIX}") 2> /dev/null || true
.PHONY: clean
clean:
rm -rf node_modules cache target venv .coverage build tests/.cache
## DEPLOYMENT
.PHONY: preview
preview: ## Set environment to preview
$(eval export DEPLOY_ENV=preview)
@true
.PHONY: staging
staging: ## Set environment to staging
$(eval export DEPLOY_ENV=staging)
@true
.PHONY: production
production: ## Set environment to production
$(eval export DEPLOY_ENV=production)
@true
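# These environment targets only set DEPLOY_ENV (and therefore CF_SPACE); chain them
# with a deployment target, for example (hypothetical app name):
#   CF_USERNAME=... CF_PASSWORD=... make preview cf-login
#   make preview cf-deploy CF_APP=notify-api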
.PHONY: cf-login
cf-login: ## Log in to Cloud Foundry
$(if ${CF_USERNAME},,$(error Must specify CF_USERNAME))
$(if ${CF_PASSWORD},,$(error Must specify CF_PASSWORD))
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
@echo "Logging in to Cloud Foundry on ${CF_API}"
@cf login -a "${CF_API}" -u ${CF_USERNAME} -p "${CF_PASSWORD}" -o "${CF_ORG}" -s "${CF_SPACE}"
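# generate-manifest renders manifest.yml.j2 for the target space, injecting environment
# variables decrypted (with gpg2 where available) from the NOTIFY_CREDENTIALS checkout.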
.PHONY: generate-manifest
generate-manifest:
$(if ${CF_APP},,$(error Must specify CF_APP))
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
$(if $(shell which gpg2), $(eval export GPG=gpg2), $(eval export GPG=gpg))
$(if ${GPG_PASSPHRASE_TXT}, $(eval export DECRYPT_CMD=echo -n $$$${GPG_PASSPHRASE_TXT} | ${GPG} --quiet --batch --passphrase-fd 0 --pinentry-mode loopback -d), $(eval export DECRYPT_CMD=${GPG} --quiet --batch -d))
@jinja2 --strict manifest.yml.j2 \
-D environment=${CF_SPACE} \
-D CF_APP=${CF_APP} \
--format=yaml \
<(${DECRYPT_CMD} ${NOTIFY_CREDENTIALS}/credentials/${CF_SPACE}/paas/environment-variables.gpg) 2>&1
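# Typical deploy, assuming you are already logged in (hypothetical app name):
#   make production cf-deploy CF_APP=notify-api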
.PHONY: cf-deploy
cf-deploy: ## Deploys the app to Cloud Foundry
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
$(if ${CF_APP},,$(error Must specify CF_APP))
cf target -o ${CF_ORG} -s ${CF_SPACE}
@cf app --guid ${CF_APP} || exit 1
# cancel any existing deploys to ensure we can apply manifest (if a deploy is in progress you'll see ScaleDisabledDuringDeployment)
cf v3-cancel-zdt-push ${CF_APP} || true
cf v3-apply-manifest ${CF_APP} -f <(make -s generate-manifest)
CF_STARTUP_TIMEOUT=15 cf v3-zdt-push ${CF_APP} --wait-for-deploy-complete # fails after 15 mins if deploy doesn't work
.PHONY: cf-deploy-api-db-migration
cf-deploy-api-db-migration:
$(if ${CF_SPACE},,$(error Must specify CF_SPACE))
cf target -o ${CF_ORG} -s ${CF_SPACE}
cf push notify-api-db-migration --no-route -f <(make -s CF_APP=notify-api-db-migration generate-manifest)
cf run-task notify-api-db-migration "flask db upgrade" --name api_db_migration
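# The migration runs as a one-off Cloud Foundry task; use
# `make cf-check-api-db-migration-task` to see the state of the most recent run.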
.PHONY: cf-check-api-db-migration-task
cf-check-api-db-migration-task: ## Get the status for the last notify-api-db-migration task
@cf curl /v3/apps/`cf app --guid notify-api-db-migration`/tasks?order_by=-created_at | jq -r ".resources[0].state"
.PHONY: cf-rollback
cf-rollback: ## Rolls back the app to the previous release
$(if ${CF_APP},,$(error Must specify CF_APP))
cf v3-cancel-zdt-push ${CF_APP}
.PHONY: check-if-migrations-to-run
check-if-migrations-to-run:
@echo $(shell python3 scripts/check_if_new_migration.py)
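# Prints the output of scripts/check_if_new_migration.py, e.g. so a pipeline can decide
# whether cf-deploy-api-db-migration needs to run (the exact output depends on the script).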