diff --git a/.github/actions/deploy-proxy/action.yml b/.github/actions/deploy-proxy/action.yml index ff9d1cbe8..13bdc494f 100644 --- a/.github/actions/deploy-proxy/action.yml +++ b/.github/actions/deploy-proxy/action.yml @@ -36,7 +36,7 @@ runs: - name: Copy config files shell: bash run: cp ./deploy-config/egress_proxy/${{ inputs.app }}.*.acl ${{ steps.create-temp-dir.outputs.path }} - - name: Build and deploy proxy + - name: Deploy proxy shell: bash working-directory: ${{ steps.create-temp-dir.outputs.path }} - run: make && ./bin/cf-deployproxy -a ${{ inputs.app }} -p egress-proxy -e egress_proxy + run: ./bin/cf-deployproxy -a ${{ inputs.app }} -p egress-proxy -e egress_proxy diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 0e9535f73..c6aef0e85 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -46,10 +46,12 @@ jobs: run: pipenv run flake8 . - name: Check imports alphabetized run: pipenv run isort --check-only ./app ./tests - - name: Run tests - run: pipenv run pytest -n4 --maxfail=10 + - name: Run tests with coverage + run: pipenv run coverage run --omit=*/notifications_utils/* -m pytest -n4 --maxfail=10 env: SQLALCHEMY_DATABASE_TEST_URI: postgresql://user:password@localhost:5432/test_notification_api + - name: Check coverage threshold + run: pipenv run coverage report --fail-under=50 validate-new-relic-config: runs-on: ubuntu-latest @@ -73,7 +75,7 @@ jobs: - uses: ./.github/actions/setup-project - name: Create requirements.txt run: pipenv requirements > requirements.txt - - uses: pypa/gh-action-pip-audit@v1.0.4 + - uses: pypa/gh-action-pip-audit@v1.0.6 with: inputs: requirements.txt diff --git a/.github/workflows/daily_checks.yml b/.github/workflows/daily_checks.yml index beac8fbb7..51e1a381f 100644 --- a/.github/workflows/daily_checks.yml +++ b/.github/workflows/daily_checks.yml @@ -27,7 +27,7 @@ jobs: - uses: ./.github/actions/setup-project - name: Create requirements.txt run: pipenv requirements > requirements.txt - - uses: pypa/gh-action-pip-audit@v1.0.4 + - uses: pypa/gh-action-pip-audit@v1.0.6 with: inputs: requirements.txt diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml new file mode 100644 index 000000000..288064a77 --- /dev/null +++ b/.github/workflows/deploy-prod.yml @@ -0,0 +1,87 @@ +name: Deploy to production environment + +on: + push: + branches: [ production ] + +permissions: + contents: read + +# deploy-prod and deploy-demo will run in parallel now. 
+# TODO: Research if we want to serialize them +# by moving the jobs into a single file similar to +# https://github.com/GSA/usnotify-ssb/blob/main/.github/workflows/apply.yml +jobs: + deploy: + runs-on: ubuntu-latest + environment: production + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 2 + + - name: Check for changes to Terraform + id: changed-terraform-files + uses: tj-actions/changed-files@v34 + with: + files: | + terraform/production + terraform/shared + .github/workflows/deploy-prod.yml + - name: Terraform init + if: steps.changed-terraform-files.outputs.any_changed == 'true' + working-directory: terraform/production + env: + AWS_ACCESS_KEY_ID: ${{ secrets.TERRAFORM_STATE_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.TERRAFORM_STATE_SECRET_ACCESS_KEY }} + run: terraform init + - name: Terraform apply + if: steps.changed-terraform-files.outputs.any_changed == 'true' + working-directory: terraform/production + env: + AWS_ACCESS_KEY_ID: ${{ secrets.TERRAFORM_STATE_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.TERRAFORM_STATE_SECRET_ACCESS_KEY }} + TF_VAR_cf_user: ${{ secrets.CLOUDGOV_USERNAME }} + TF_VAR_cf_password: ${{ secrets.CLOUDGOV_PASSWORD }} + run: terraform apply -auto-approve -input=false + + - uses: ./.github/actions/setup-project + - name: Install application dependencies + run: make bootstrap + + - name: Create requirements.txt because Cloud Foundry does a weird pipenv thing + run: pipenv requirements > requirements.txt + + - name: Deploy to cloud.gov + uses: 18f/cg-deploy-action@main + env: + DANGEROUS_SALT: ${{ secrets.DANGEROUS_SALT }} + SECRET_KEY: ${{ secrets.SECRET_KEY }} + ADMIN_CLIENT_SECRET: ${{ secrets.ADMIN_CLIENT_SECRET }} + NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY }} + with: + cf_username: ${{ secrets.CLOUDGOV_USERNAME }} + cf_password: ${{ secrets.CLOUDGOV_PASSWORD }} + cf_org: gsa-tts-benefits-studio-prototyping + cf_space: notify-production + push_arguments: >- + --vars-file deploy-config/production.yml + --var DANGEROUS_SALT="$DANGEROUS_SALT" + --var SECRET_KEY="$SECRET_KEY" + --var ADMIN_CLIENT_SECRET="$ADMIN_CLIENT_SECRET" + --var NEW_RELIC_LICENSE_KEY="$NEW_RELIC_LICENSE_KEY" + + - name: Check for changes to egress config + id: changed-egress-config + uses: tj-actions/changed-files@v34 + with: + files: | + deploy-config/egress_proxy/notify-api-production.*.acl + .github/actions/deploy-proxy/action.yml + .github/workflows/deploy-prod.yml + - name: Deploy egress proxy + if: steps.changed-egress-config.outputs.any_changed == 'true' + uses: ./.github/actions/deploy-proxy + with: + cf_space: notify-production + app: notify-api-production diff --git a/.github/workflows/drift.yml b/.github/workflows/drift.yml index 412290a49..616e72689 100644 --- a/.github/workflows/drift.yml +++ b/.github/workflows/drift.yml @@ -45,22 +45,22 @@ jobs: with: path: terraform/demo - # check_prod_drift: - # runs-on: ubuntu-latest - # name: Check for drift of production terraform configuration - # environment: production - # steps: - # - name: Checkout - # uses: actions/checkout@v3 - # with: - # ref: 'production' + check_prod_drift: + runs-on: ubuntu-latest + name: Check for drift of production terraform configuration + environment: production + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + ref: 'production' - # - name: Check for drift - # uses: dflook/terraform-check@v1 - # env: - # AWS_ACCESS_KEY_ID: ${{ secrets.TERRAFORM_STATE_ACCESS_KEY }} - # AWS_SECRET_ACCESS_KEY: ${{ secrets.TERRAFORM_STATE_SECRET_ACCESS_KEY }} - # 
TF_VAR_cf_user: ${{ secrets.CLOUDGOV_USERNAME }} - # TF_VAR_cf_password: ${{ secrets.CLOUDGOV_PASSWORD }} - # with: - # path: terraform/production + - name: Check for drift + uses: dflook/terraform-check@v1 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.TERRAFORM_STATE_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.TERRAFORM_STATE_SECRET_ACCESS_KEY }} + TF_VAR_cf_user: ${{ secrets.CLOUDGOV_USERNAME }} + TF_VAR_cf_password: ${{ secrets.CLOUDGOV_PASSWORD }} + with: + path: terraform/production diff --git a/.github/workflows/terraform-production.yml b/.github/workflows/terraform-production.yml index e48000438..afb10dcfb 100644 --- a/.github/workflows/terraform-production.yml +++ b/.github/workflows/terraform-production.yml @@ -2,7 +2,7 @@ name: Run Terraform plan in production on: pull_request: - branches: [ production-disabled-for-now ] + branches: [ production ] paths: [ 'terraform/**' ] defaults: diff --git a/.gitignore b/.gitignore index ede52acef..08b0bf758 100644 --- a/.gitignore +++ b/.gitignore @@ -44,6 +44,7 @@ pip-delete-this-directory.txt htmlcov/ .tox/ .coverage +.coverage_cache .coverage.* .cache .pytest_cache @@ -76,6 +77,7 @@ environment.sh varsfile celerybeat-schedule +celerybeat-schedule.db # CloudFoundry .cf diff --git a/Makefile b/Makefile index e6c70bc05..c2c83ca1f 100644 --- a/Makefile +++ b/Makefile @@ -43,6 +43,10 @@ run-celery-beat: ## Run celery beat -A run_celery.notify_celery beat \ --loglevel=INFO +.PHONY: cloudgov-user-report +cloudgov-user-report: + @pipenv run python -m terraform.ops.cloudgov_user_report + .PHONY: help help: @cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' @@ -53,10 +57,12 @@ generate-version-file: ## Generates the app version file .PHONY: test test: export NEW_RELIC_ENVIRONMENT=test -test: ## Run tests +test: ## Run tests and create coverage report pipenv run flake8 . 
pipenv run isort --check-only ./app ./tests - pipenv run pytest -n4 --maxfail=10 + pipenv run coverage run --omit=*/notifications_utils/* -m pytest --maxfail=10 + pipenv run coverage report --fail-under=50 + pipenv run coverage html -d .coverage_cache .PHONY: freeze-requirements freeze-requirements: ## Pin all requirements including sub dependencies into requirements.txt diff --git a/Pipfile b/Pipfile index b896c7240..d77b53d58 100644 --- a/Pipfile +++ b/Pipfile @@ -15,7 +15,7 @@ bcrypt = "==3.2.2" beautifulsoup4 = "==4.11.1" billiard = "==3.6.4.0" bleach = "==4.1.0" -blinker = "==1.4" +blinker = "~=1.4" boto3 = "==1.23.8" botocore = "==1.26.8" cachetools = "==5.1.0" @@ -35,7 +35,7 @@ dnspython = "==2.2.1" docopt = "==0.6.2" docutils = "==0.16" eventlet = "==0.33.1" -flask = "~=2.2" +flask = "~=2.3" flask-bcrypt = "==1.0.1" flask-marshmallow = "==0.14.0" flask-migrate = "==3.1.0" @@ -54,7 +54,7 @@ psycopg2-binary = "==2.9.3" pyjwt = "==2.4.0" python-dotenv = "==0.20.0" sqlalchemy = "==1.4.40" -werkzeug = "~=2.2" +werkzeug = "~=2.3" # gds metrics packages prometheus-client = "==0.14.1" gds-metrics = {version = "==0.2.4", ref = "6f1840a57b6fb1ee40b7e84f2f18ec229de8aa72", git = "https://github.com/alphagov/gds_metrics_python.git"} @@ -78,6 +78,7 @@ jinja2-cli = {version = "==0.8.2", extras = ["yaml"]} pip-audit = "*" bandit = "*" honcho = "*" +cloudfoundry-client = "*" [requires] python_version = "3.9" diff --git a/Pipfile.lock b/Pipfile.lock index 9b8b628f9..64f59ed65 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "56817ca82b5c531caab713deb89868f0fc1330f0b2f3f7bd123a47963388f5cc" + "sha256": "143054bb5631e9862e5343e2a7336fa2f97f1adb8ee1b7dc0a849eaca8b6c369" }, "pipfile-spec": 6, "requires": { @@ -115,10 +115,11 @@ }, "blinker": { "hashes": [ - "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" + "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213", + "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0" ], "index": "pypi", - "version": "==1.4" + "version": "==1.6.2" }, "boto3": { "hashes": [ @@ -157,11 +158,11 @@ }, "certifi": { "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", + "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" ], "index": "pypi", - "version": "==2022.12.7" + "version": "==2023.5.7" }, "cffi": { "hashes": [ @@ -277,28 +278,28 @@ }, "cryptography": { "hashes": [ - "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405", - "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356", - "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472", - "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41", - "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a", - "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88", - "sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554", - "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db", - "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778", - "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c", - "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917", - 
"sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797", - "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160", - "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c", - "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c", - "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2", - "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4", - "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122", - "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94" + "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440", + "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288", + "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b", + "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958", + "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b", + "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d", + "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a", + "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404", + "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b", + "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e", + "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2", + "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c", + "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b", + "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9", + "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b", + "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636", + "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99", + "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e", + "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9" ], "markers": "python_version >= '3.6'", - "version": "==40.0.1" + "version": "==40.0.2" }, "defusedxml": { "hashes": [ @@ -349,11 +350,11 @@ }, "flask": { "hashes": [ - "sha256:7eb373984bf1c770023fce9db164ed0c3353cd0b53f130f4693da0ca756a2e6d", - "sha256:c0bec9477df1cb867e5a67c9e1ab758de9cb4a3e52dd70681f59fa40a62b3f2d" + "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0", + "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef" ], "index": "pypi", - "version": "==2.2.3" + "version": "==2.3.2" }, "flask-bcrypt": { "hashes": [ @@ -499,15 +500,16 @@ "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" ], + "markers": "python_full_version >= '3.5.0'", "version": "==3.4" }, "importlib-metadata": { "hashes": [ - "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20", - "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09" + "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", + "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" ], "markers": "python_version < '3.10'", - "version": "==6.1.0" + "version": "==6.6.0" }, "iso8601": { "hashes": [ @@ -739,24 +741,24 @@ }, "newrelic": { "hashes": [ - "sha256:15d3088d9ab4d708e7b3826e651c2402f2cb9c72689e47a0badc8281ab20bfe9", - "sha256:2249a25b1ce967267604cb0ce3268256fb25da481312f3c7b04df90245708131", - 
"sha256:2fc9807a1e3277e1dbddb7cd84e00b1f70faea602f0bbe53109e0e68b0c20e3c", - "sha256:432a6fa9c0051154f4110f4203831f464c8ba5bc842e709639391175d4ba50e6", - "sha256:4f48e481ebb7d873fd16a7fe0df30383c834e75daa6b0b514e147b8d683d922d", - "sha256:69a7ed5788fb6347e96f7df18a641ea242d5dac7ccb76bbaa869851cade335c4", - "sha256:6fc4169f66e80f4b497d16be3759feaf5be08ec389b8f42b8b62ce632de9eb3f", - "sha256:95484f2ca36952831b47e3b054e808317a0a12597ca9ef5166789a425545de44", - "sha256:b5c53a5922c92d742eff37da87c113a18762cd06e310bfc430df02603298def9", - "sha256:d02acde140c6d4f549f36a2bda64025a18efc5b6457c8fa505146c7bcaaac23b", - "sha256:d39f6f3ffc458337e22ef4d4e7bacf5b0b2712feb7668538299d029bc87f9b7a", - "sha256:d7b510f3889fe57330bb85de78abcac2d0711b49e9da74fcd1b936a57b139238", - "sha256:eb3a27fd7b9d51941fb20c452aa3b3b2dd52fe652cda2d5d269dcc14f64ade6e", - "sha256:ec8d38d9f7c30c464cddd594cb390cf66a2b573d08e09ac231d3d349a26e0f96", - "sha256:f79a599b53894870dfdfcd88fa7ca7e81cac77f4a253ca0c08c58f400bb0a5ab" + "sha256:1bc307d06e2033637e7b484af22f540ca041fb23a54b311bcd5968ca1a64e4ef", + "sha256:435ac9e3791f78e05c9da8107a6ef49c13e62ac302696858fa2411198fe201ff", + "sha256:6662ec79493f23f9d0995a015177c87508bea4c541f7c9f17a61b503b82e1367", + "sha256:67902b3c53fa497dba887068166261d114ac2347c8a4908d735d7594cca163dc", + "sha256:6b4db0e7544232d4e6e835a02ee28637970576f8dce82ffcaa3d675246e822d5", + "sha256:796ed5ff44b04b41e051dc0112e5016e53a37e39e95023c45ff7ecd34c254a7d", + "sha256:84d1f71284efa5f1cae696161e0c3cb65eaa2f53116fe5e7c5a62be7d15d9536", + "sha256:9355f209ba8d82fd0f9d78d7cc1d9bef0ae4677b3cfed7b7aaec521adbe87559", + "sha256:9c0d5153b7363d5cb5cac7f8d1a4e03669b074afee2dda201851a67c7bed1e32", + "sha256:bcd3219e1e816a0fdb51ac993cac6744e6a835c13ee72e21d86bcbc2d16628ce", + "sha256:c4a0556c6ece49132ab1c32bfe398047a8311f9a8b6862b482495d132fcb0ad4", + "sha256:caccdf201735df80b470ddf772f60a154f2c07c0c1b2b3f6e999d55e79ce601e", + "sha256:d21af16cee1e0caf4c73c4c1b2d7ba9f33fe6a870d93135dc8b23ac592f49b38", + "sha256:da8f2dc31e182768fe314d8ceb6f42acd09956708846f8ae71f07f044a3aa05e", + "sha256:ef9c178329f8c04f0574908c1f04ff1f18b9eba55b869744583fee3eac48e571" ], "index": "pypi", - "version": "==8.7.1" + "version": "==8.8.0" }, "notifications-python-client": { "hashes": [ @@ -768,41 +770,41 @@ "notifications-utils": { "editable": true, "git": "https://github.com/GSA/notifications-utils.git", - "ref": "44127eac47d0825d083e51d5a2580a520ea2ee49" + "ref": "1492349e323df749771494f13e3eb268ee402a65" }, "numpy": { "hashes": [ - "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22", - "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f", - "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9", - "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96", - "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0", - "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a", - "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281", - "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04", - "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468", - "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253", - "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756", - "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a", - "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb", - 
"sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d", - "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0", - "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910", - "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978", - "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5", - "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f", - "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a", - "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5", - "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2", - "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d", - "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95", - "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5", - "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d", - "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780", - "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa" + "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187", + "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812", + "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7", + "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4", + "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6", + "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0", + "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4", + "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570", + "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4", + "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f", + "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80", + "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289", + "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385", + "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078", + "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c", + "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463", + "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3", + "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950", + "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155", + "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7", + "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c", + "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096", + "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17", + "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf", + "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4", + "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02", + "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c", + "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b" ], "markers": "python_version >= '3.8'", - "version": "==1.24.2" + "version": "==1.24.3" }, "orderedset": { "hashes": [ @@ -828,10 +830,10 @@ }, "phonenumbers": { "hashes": [ - 
"sha256:1b7c75c4a5e62885a0be0a6b073b7e3f4ce6672fd42ab7b4a6b46359c40591d0", - "sha256:4ba92a917fb8f6e3fe33fd04b9d84299852ce237289129ee4eda47a043a5c6c4" + "sha256:107469114fd297258a485bdf8238d0522cb392db1257faf2bf23384ecbdb0e8a", + "sha256:3e3274d88cab3609b55ff5b93417075dbca2d13064f103fbf562e0ea1dda0f9a" ], - "version": "==8.13.8" + "version": "==8.13.11" }, "prometheus-client": { "hashes": [ @@ -846,7 +848,7 @@ "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b", "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f" ], - "markers": "python_full_version >= '3.7.0'", + "markers": "python_version >= '3.7'", "version": "==3.0.38" }, "psycopg2-binary": { @@ -916,21 +918,11 @@ }, "pyasn1": { "hashes": [ - "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", - "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", - "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", - "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", - "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", - "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", - "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", - "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", - "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", - "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", - "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3" + "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", + "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" ], - "version": "==0.4.8" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==0.5.0" }, "pycparser": { "hashes": [ @@ -1064,18 +1056,18 @@ }, "redis": { "hashes": [ - "sha256:2c19e6767c474f2e85167909061d525ed65bea9301c0770bb151e041b7ac89a2", - "sha256:73ec35da4da267d6847e47f68730fdd5f62e2ca69e3ef5885c6a78a9374c3893" + "sha256:77929bc7f5dab9adf3acba2d3bb7d7658f1e0c2f1cafe7eb36434e751c471119", + "sha256:dc87a0bdef6c8bfe1ef1e1c40be7034390c2ae02d92dcd0c7ca1729443899880" ], - "version": "==4.5.4" + "version": "==4.5.5" }, "requests": { "hashes": [ - "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", - "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" + "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", + "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], - "markers": "python_version >= '3.7' and python_version < '4'", - "version": "==2.28.2" + "index": "pypi", + "version": "==2.31.0" }, "rfc3339-validator": { "hashes": [ @@ -1096,7 +1088,7 @@ "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2", "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9" ], - "markers": "python_version >= '3.5' and python_version < '4'", + "markers": "python_version < '4' and python_full_version >= '3.5.0'", "version": "==4.7.2" }, "s3transfer": { @@ -1109,11 +1101,11 @@ }, "setuptools": { "hashes": [ - "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a", - "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078" + 
"sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f", + "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102" ], "markers": "python_version >= '3.7'", - "version": "==67.6.1" + "version": "==67.8.0" }, "shapely": { "hashes": [ @@ -1175,11 +1167,11 @@ }, "soupsieve": { "hashes": [ - "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955", - "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a" + "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8", + "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea" ], "markers": "python_version >= '3.7'", - "version": "==2.4" + "version": "==2.4.1" }, "sqlalchemy": { "hashes": [ @@ -1223,20 +1215,13 @@ "index": "pypi", "version": "==1.4.40" }, - "statsd": { - "hashes": [ - "sha256:99763da81bfea8daf6b3d22d11aaccb01a8d0f52ea521daab37e758a4ca7d128", - "sha256:c2676519927f7afade3723aca9ca8ea986ef5b059556a980a867721ca69df093" - ], - "version": "==4.0.1" - }, "typing-extensions": { "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" + "sha256:6ad00b63f849b7dcc313b70b6b304ed67b2b2963b3098a33efe18056b1a9a223", + "sha256:ff6b238610c747e44c268aa4bb23c8c735d665a63726df3f9431ce707f2aa768" ], "markers": "python_version < '3.10'", - "version": "==4.5.0" + "version": "==4.6.0" }, "uri-template": { "hashes": [ @@ -1284,11 +1269,11 @@ }, "werkzeug": { "hashes": [ - "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe", - "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612" + "sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76", + "sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f" ], "index": "pypi", - "version": "==2.2.3" + "version": "==2.3.4" }, "wrapt": { "hashes": [ @@ -1381,6 +1366,115 @@ } }, "develop": { + "aiohttp": { + "hashes": [ + "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14", + "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391", + "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2", + "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e", + "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9", + "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd", + "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4", + "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b", + "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41", + "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567", + "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275", + "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54", + "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a", + "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef", + "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99", + "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da", + "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4", + "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e", + "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699", + "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04", + 
"sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719", + "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131", + "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e", + "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f", + "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd", + "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f", + "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e", + "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1", + "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed", + "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4", + "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1", + "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777", + "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531", + "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b", + "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab", + "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8", + "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074", + "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc", + "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643", + "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01", + "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36", + "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24", + "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654", + "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d", + "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241", + "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51", + "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f", + "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2", + "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15", + "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf", + "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b", + "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71", + "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05", + "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52", + "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3", + "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6", + "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a", + "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519", + "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a", + "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333", + "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6", + "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d", + "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57", + "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c", + "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9", + "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea", + 
"sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332", + "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5", + "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622", + "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71", + "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb", + "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a", + "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff", + "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945", + "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480", + "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6", + "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9", + "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd", + "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f", + "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a", + "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a", + "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949", + "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc", + "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75", + "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f", + "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10", + "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f" + ], + "markers": "python_version >= '3.6'", + "version": "==3.8.4" + }, + "aiosignal": { + "hashes": [ + "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", + "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.1" + }, + "async-timeout": { + "hashes": [ + "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15", + "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c" + ], + "index": "pypi", + "version": "==4.0.2" + }, "attrs": { "hashes": [ "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", @@ -1426,11 +1520,11 @@ }, "certifi": { "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", + "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" ], "index": "pypi", - "version": "==2022.12.7" + "version": "==2023.5.7" }, "cffi": { "hashes": [ @@ -1496,90 +1590,98 @@ "index": "pypi", "version": "==2.0.12" }, + "cloudfoundry-client": { + "hashes": [ + "sha256:1261ff57c7309406b8e8720991d861dcede23c8ee612c80f87330815623c8753", + "sha256:8293d8027e5ad5a902806603286cbab78f9639b92229fc216f798a15023c484a" + ], + "index": "pypi", + "version": "==1.34.2" + }, "coverage": { "extras": [ "toml" ], "hashes": [ - "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", - "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", - "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", - "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", - "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", - 
"sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", - "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", - "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", - "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", - "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", - "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", - "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", - "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", - "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", - "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", - "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", - "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", - "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", - "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", - "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", - "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", - "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", - "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", - "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", - "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", - "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", - "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", - "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", - "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", - "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", - "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", - "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", - "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", - "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", - "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", - "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", - "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", - "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", - "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", - "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", - "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", - "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", - "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", - "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", - "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", - "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", - "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", - "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", - "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", - "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", - "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" + 
"sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3", + "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a", + "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813", + "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0", + "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a", + "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd", + "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139", + "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b", + "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252", + "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790", + "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045", + "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce", + "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200", + "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718", + "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b", + "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f", + "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5", + "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade", + "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5", + "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a", + "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8", + "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33", + "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e", + "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c", + "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3", + "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969", + "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068", + "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2", + "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771", + "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed", + "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212", + "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614", + "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88", + "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3", + "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c", + "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84", + "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11", + "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1", + "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1", + "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e", + "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1", + "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd", + "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47", + "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a", + "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c", + "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31", + 
"sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5", + "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6", + "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303", + "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5", + "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47" ], "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.2.5" }, "cryptography": { "hashes": [ - "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405", - "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356", - "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472", - "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41", - "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a", - "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88", - "sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554", - "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db", - "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778", - "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c", - "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917", - "sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797", - "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160", - "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c", - "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c", - "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2", - "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4", - "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122", - "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94" + "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440", + "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288", + "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b", + "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958", + "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b", + "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d", + "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a", + "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404", + "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b", + "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e", + "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2", + "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c", + "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b", + "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9", + "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b", + "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636", + "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99", + "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e", + "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9" ], "markers": "python_version >= '3.6'", - "version": "==40.0.1" + "version": "==40.0.2" }, "cyclonedx-python-lib": { "hashes": 
[ @@ -1629,6 +1731,86 @@ "index": "pypi", "version": "==1.2.1" }, + "frozenlist": { + "hashes": [ + "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c", + "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f", + "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a", + "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784", + "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27", + "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d", + "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3", + "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678", + "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a", + "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483", + "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8", + "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf", + "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99", + "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c", + "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48", + "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5", + "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56", + "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e", + "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1", + "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401", + "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4", + "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e", + "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649", + "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a", + "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d", + "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0", + "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6", + "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d", + "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b", + "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6", + "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf", + "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef", + "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7", + "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842", + "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba", + "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420", + "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b", + "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d", + "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332", + "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936", + "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816", + "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91", + "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420", + "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448", + 
"sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411", + "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4", + "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32", + "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b", + "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0", + "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530", + "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669", + "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7", + "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1", + "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5", + "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce", + "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4", + "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e", + "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2", + "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d", + "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9", + "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642", + "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0", + "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703", + "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb", + "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1", + "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13", + "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab", + "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38", + "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb", + "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb", + "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81", + "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8", + "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd", + "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.3" + }, "gitdb": { "hashes": [ "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a", @@ -1666,6 +1848,7 @@ "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" ], + "markers": "python_full_version >= '3.5.0'", "version": "==3.4" }, "iniconfig": { @@ -1873,6 +2056,93 @@ ], "version": "==1.0.5" }, + "multidict": { + "hashes": [ + "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9", + "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8", + "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03", + "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710", + "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161", + "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664", + "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569", + "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067", + "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313", + 
"sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706", + "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2", + "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636", + "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49", + "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93", + "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603", + "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0", + "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60", + "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4", + "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e", + "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1", + "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60", + "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951", + "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc", + "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe", + "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95", + "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d", + "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8", + "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed", + "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2", + "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775", + "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87", + "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c", + "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2", + "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98", + "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3", + "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe", + "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78", + "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660", + "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176", + "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e", + "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988", + "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c", + "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c", + "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0", + "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449", + "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f", + "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde", + "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5", + "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d", + "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac", + "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a", + "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9", + "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca", + "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11", + "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35", + 
"sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063", + "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b", + "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982", + "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258", + "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1", + "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52", + "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480", + "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7", + "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461", + "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d", + "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc", + "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779", + "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a", + "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547", + "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0", + "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171", + "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf", + "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d", + "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba" + ], + "markers": "python_version >= '3.7'", + "version": "==6.0.4" + }, + "oauth2-client": { + "hashes": [ + "sha256:5381900448ff1ae762eb7c65c501002eac46bb5ca2f49477fdfeaf9e9969f284", + "sha256:7b938ba8166128a3c4c15ad23ca0c95a2468f8e8b6069d019ebc73360c15c7ca" + ], + "version": "==1.4.2" + }, "packageurl-python": { "hashes": [ "sha256:4bad1d3ea4feb5e7a1db5ca8fb690ac9c82ab18e08d500755947b853df68817d", @@ -1899,11 +2169,11 @@ }, "pip": { "hashes": [ - "sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f", - "sha256:cd015ea1bfb0fcef59d8a286c1f8bebcb983f6317719d415dc5351efb7cd7024" + "sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba", + "sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18" ], "markers": "python_version >= '3.7'", - "version": "==23.0.1" + "version": "==23.1.2" }, "pip-api": { "hashes": [ @@ -1926,7 +2196,7 @@ "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526", "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3" ], - "markers": "python_full_version >= '3.6.0'", + "markers": "python_version >= '3.6'", "version": "==32.0.1" }, "pluggy": { @@ -1937,6 +2207,32 @@ "markers": "python_version >= '3.6'", "version": "==1.0.0" }, + "polling2": { + "hashes": [ + "sha256:90b7da82cf7adbb48029724d3546af93f21ab6e592ec37c8c4619aedd010e342", + "sha256:ad86d56fbd7502f0856cac2d0109d595c18fa6c7fb12c88cee5e5d16c17286c1" + ], + "version": "==0.5.0" + }, + "protobuf": { + "hashes": [ + "sha256:2036a3a1e7fc27f973fa0a7888dce712393af644f4695385f117886abc792e39", + "sha256:32e78beda26d7a101fecf15d7a4a792278a0d26a31bc327ff05564a9d68ab8ee", + "sha256:346990f634272caac1f09efbcfbbacb23098b1f606d172534c6fa2d9758bb436", + "sha256:3b8905eafe4439076e1f58e9d1fa327025fd2777cf90f14083092ae47f77b0aa", + "sha256:3ce113b3f3362493bddc9069c2163a38f240a9ed685ff83e7bcb756b05e1deb0", + "sha256:410bcc0a5b279f634d3e16082ce221dfef7c3392fac723500e2e64d1806dd2be", + "sha256:5b9cd6097e6acae48a68cb29b56bc79339be84eca65b486910bb1e7a30e2b7c1", + 
"sha256:65f0ac96ef67d7dd09b19a46aad81a851b6f85f89725577f16de38f2d68ad477", + "sha256:91fac0753c3c4951fbb98a93271c43cc7cf3b93cf67747b3e600bb1e5cc14d61", + "sha256:95789b569418a3e32a53f43d7763be3d490a831e9c08042539462b6d972c2d7e", + "sha256:ac50be82491369a9ec3710565777e4da87c6d2e20404e0abb1f3a8f10ffd20f0", + "sha256:decf119d54e820f298ee6d89c72d6b289ea240c32c521f00433f9dc420595f38", + "sha256:f9510cac91e764e86acd74e2b7f7bc5e6127a7f3fb646d7c8033cfb84fd1176a" + ], + "markers": "python_version >= '3.7'", + "version": "==4.23.1" + }, "py": { "hashes": [ "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", @@ -1978,11 +2274,11 @@ }, "pygments": { "hashes": [ - "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297", - "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717" + "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c", + "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1" ], - "markers": "python_version >= '3.6'", - "version": "==2.14.0" + "markers": "python_version >= '3.7'", + "version": "==2.15.1" }, "pyparsing": { "hashes": [ @@ -2091,11 +2387,11 @@ }, "requests": { "hashes": [ - "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", - "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" + "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", + "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], - "markers": "python_version >= '3.7' and python_version < '4'", - "version": "==2.28.2" + "index": "pypi", + "version": "==2.31.0" }, "requests-mock": { "hashes": [ @@ -2122,11 +2418,11 @@ }, "rich": { "hashes": [ - "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333", - "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15" + "sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c", + "sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704" ], - "markers": "python_full_version >= '3.7.0'", - "version": "==13.3.3" + "markers": "python_version >= '3.7'", + "version": "==13.3.5" }, "s3transfer": { "hashes": [ @@ -2161,11 +2457,11 @@ }, "stevedore": { "hashes": [ - "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021", - "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771" + "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d", + "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c" ], "markers": "python_version >= '3.8'", - "version": "==5.0.0" + "version": "==5.1.0" }, "toml": { "hashes": [ @@ -2185,10 +2481,10 @@ }, "types-pyyaml": { "hashes": [ - "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8", - "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6" + "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f", + "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97" ], - "version": "==6.0.12.9" + "version": "==6.0.12.10" }, "urllib3": { "hashes": [ @@ -2205,13 +2501,21 @@ ], "version": "==0.5.1" }, + "websocket-client": { + "hashes": [ + "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b", + "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1" + ], + "markers": "python_version >= '3.7'", + "version": "==1.5.2" + }, "werkzeug": { "hashes": [ - "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe", - 
"sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612" + "sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76", + "sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f" ], "index": "pypi", - "version": "==2.2.3" + "version": "==2.3.4" }, "xmltodict": { "hashes": [ @@ -2220,6 +2524,86 @@ ], "markers": "python_version >= '3.4'", "version": "==0.13.0" + }, + "yarl": { + "hashes": [ + "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571", + "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3", + "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3", + "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c", + "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7", + "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04", + "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191", + "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea", + "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4", + "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4", + "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095", + "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e", + "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74", + "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef", + "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33", + "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde", + "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45", + "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf", + "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b", + "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac", + "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0", + "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528", + "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716", + "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb", + "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18", + "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72", + "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6", + "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582", + "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5", + "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368", + "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc", + "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9", + "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be", + "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a", + "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80", + "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8", + "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6", + "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417", + "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574", + "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59", + 
"sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608", + "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82", + "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1", + "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3", + "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d", + "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8", + "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc", + "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac", + "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8", + "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955", + "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0", + "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367", + "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb", + "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a", + "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623", + "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2", + "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6", + "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7", + "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4", + "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051", + "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938", + "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8", + "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9", + "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3", + "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5", + "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9", + "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333", + "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185", + "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3", + "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560", + "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b", + "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7", + "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78", + "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7" + ], + "markers": "python_version >= '3.7'", + "version": "==1.9.2" } } } diff --git a/README.md b/README.md index 34e367374..9ccbf6f8f 100644 --- a/README.md +++ b/README.md @@ -44,10 +44,15 @@ Our other repositories are: ### Common steps +On MacOS, using [Homebrew](https://brew.sh/) for package management is highly recommended. This helps avoid some known installation issues. + 1. 
Install prerequisites for setup:
    * [jq](https://stedolan.github.io/jq/): `brew install jq`
    * [terraform](https://www.terraform.io/): `brew install terraform` or `brew install tfenv` and use `tfenv` to install `terraform ~> 1.4.0`
    * [cf-cli@8](https://docs.cloudfoundry.org/cf-cli/install-go-cli.html): `brew install cloudfoundry/tap/cf-cli@8`
+   * [postgresql](https://www.postgresql.org/): `brew install postgresql@15` (Homebrew requires a version pin, but any recent version will work)
+   * [redis](https://redis.io/): `brew install redis`
+   * [pyenv](https://github.com/pyenv/pyenv): `brew install pyenv`
 1. [Log into cloud.gov](https://cloud.gov/docs/getting-started/setup/#set-up-the-command-line): `cf login -a api.fr.cloud.gov --sso`
 1. Ensure you have access to the `notify-local-dev` and `notify-staging` spaces in cloud.gov
 1. Run the development terraform with:
@@ -111,6 +116,12 @@ If you're working in VS Code, you can also leverage Docker for a containerized d

 NOTE: when you change .env in the future, you'll need to rebuild the devcontainer for the change to take effect. VS Code _should_ detect the change and prompt you with a toast notification during a cached build. If not, you can find a manual rebuild in the command palette or just `docker rm` the notifications-api container.

+### Known installation issues
+
+On M1 Macs, if you get a `fatal error: 'Python.h' file not found` message, try a different method of installing Python. Installation via `pyenv` is known to work.
+
+A direct installation of PostgreSQL will not put the `createdb` command on your `$PATH`. It can be added there in your shell startup script, or a Homebrew-managed installation of PostgreSQL will take care of it.
+
 ## License && public domain

 Work through [commit `e604385`](https://github.com/GSA/notifications-api/commit/e604385e0cf4c2ab8c6451b7120ceb196cce21b5) is licensed by the UK government under the MIT license. Work after that commit is in the worldwide public domain. See [LICENSE.md](./LICENSE.md) for more information.
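After finishing the common steps above, a quick way to confirm the locally installed services are reachable is a small Python check. This is a hypothetical sketch, not part of the repository: it assumes Postgres and Redis are running on their default local ports, that your local user can connect to Postgres, and that `psycopg2` and `redis` (both project dependencies) are importable.

```python
# Hypothetical local sanity check -- not part of the repository.
# Assumes the Postgres and Redis services installed above are running locally
# on their default ports, and that your local user can connect to Postgres.
import psycopg2
import redis

conn = psycopg2.connect("postgresql://localhost:5432/postgres")
print("postgres reachable, server version:", conn.server_version)
conn.close()

r = redis.Redis(host="localhost", port=6379)
print("redis reachable:", r.ping())
```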
diff --git a/app/__init__.py b/app/__init__.py index 20d93c37c..81e5c055a 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -23,13 +23,13 @@ from notifications_utils import logging, request_helper from notifications_utils.celery import NotifyCelery from notifications_utils.clients.encryption.encryption_client import Encryption from notifications_utils.clients.redis.redis_client import RedisClient -from notifications_utils.clients.statsd.statsd_client import StatsdClient from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient from sqlalchemy import event from werkzeug.exceptions import HTTPException as WerkzeugHTTPException from werkzeug.local import LocalProxy from app.clients import NotificationProviderClients +from app.clients.cloudwatch.aws_cloudwatch import AwsCloudwatchClient from app.clients.document_download import DocumentDownloadClient from app.clients.email.aws_ses import AwsSesClient from app.clients.email.aws_ses_stub import AwsSesStubClient @@ -56,9 +56,9 @@ notify_celery = NotifyCelery() aws_ses_client = AwsSesClient() aws_ses_stub_client = AwsSesStubClient() aws_sns_client = AwsSnsClient() +aws_cloudwatch_client = AwsCloudwatchClient() encryption = Encryption() zendesk_client = ZendeskClient() -statsd_client = StatsdClient() redis_store = RedisClient() document_download_client = DocumentDownloadClient() metrics = GDSMetrics() @@ -91,15 +91,14 @@ def create_app(application): migrate.init_app(application, db=db) ma.init_app(application) zendesk_client.init_app(application) - statsd_client.init_app(application) logging.init_app(application) - aws_sns_client.init_app(application, statsd_client=statsd_client) + aws_sns_client.init_app(application) - aws_ses_client.init_app(statsd_client=statsd_client) + aws_ses_client.init_app() aws_ses_stub_client.init_app( - statsd_client=statsd_client, stub_url=application.config['SES_STUB_URL'] ) + aws_cloudwatch_client.init_app(application) # If a stub url is provided for SES, then use the stub client rather than the real SES boto client email_clients = [aws_ses_stub_client] if application.config['SES_STUB_URL'] else [aws_ses_client] notification_provider_clients.init_app( @@ -290,9 +289,7 @@ def init_app(app): def after_request(response): CONCURRENT_REQUESTS.dec() - response.headers.add('Access-Control-Allow-Origin', '*') - response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization') - response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE') + response.headers.add('X-Content-Type-Options', 'nosniff') return response @app.errorhandler(Exception) @@ -301,20 +298,34 @@ def init_app(app): # error.code is set for our exception types. 
            msg = getattr(error, 'message', str(error))
            code = getattr(error, 'code', 500)
-           return jsonify(result='error', message=msg), code
+           response = make_response(
+               jsonify(result='error', message=msg),
+               code,
+               error.get_headers()
+           )
+           response.content_type = "application/json"
+           return response

     @app.errorhandler(WerkzeugHTTPException)
     def werkzeug_exception(e):
-        return make_response(
+        response = make_response(
             jsonify(result='error', message=e.description),
             e.code,
             e.get_headers()
         )
+        response.content_type = 'application/json'
+        return response

     @app.errorhandler(404)
     def page_not_found(e):
         msg = e.description or "Not found"
-        return jsonify(result='error', message=msg), 404
+        response = make_response(
+            jsonify(result='error', message=msg),
+            404,
+            e.get_headers()
+        )
+        response.content_type = 'application/json'
+        return response


 def create_uuid():
diff --git a/app/aws/s3.py b/app/aws/s3.py
index d7d7da139..d48cbd083 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -65,3 +65,14 @@ def remove_job_from_s3(service_id, job_id):
 def remove_s3_object(bucket_name, object_key, access_key, secret_key, region):
     obj = get_s3_object(bucket_name, object_key, access_key, secret_key, region)
     return obj.delete()
+
+
+def remove_csv_object(object_key):
+    obj = get_s3_object(
+        current_app.config['CSV_UPLOAD_BUCKET']['bucket'],
+        object_key,
+        current_app.config['CSV_UPLOAD_BUCKET']['access_key_id'],
+        current_app.config['CSV_UPLOAD_BUCKET']['secret_access_key'],
+        current_app.config['CSV_UPLOAD_BUCKET']['region']
+    )
+    return obj.delete()
diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py
index b812b2915..253291fe2 100644
--- a/app/celery/nightly_tasks.py
+++ b/app/celery/nightly_tasks.py
@@ -4,8 +4,9 @@
 from flask import current_app
 from notifications_utils.timezones import convert_utc_to_local_timezone
 from sqlalchemy.exc import SQLAlchemyError

-from app import notify_celery, statsd_client
+from app import notify_celery
 from app.aws import s3
+from app.aws.s3 import remove_csv_object
 from app.celery.process_ses_receipts_tasks import check_and_queue_callback_task
 from app.config import QueueNames
 from app.cronitor import cronitor
@@ -14,6 +15,7 @@
 from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention
 from app.dao.jobs_dao import (
     dao_archive_job,
     dao_get_jobs_older_than_data_retention,
+    dao_get_unfinished_jobs,
 )
 from app.dao.notifications_dao import (
     dao_get_notifications_processing_time_stats,
@@ -42,6 +44,19 @@ def _remove_csv_files(job_types):
         current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))


+@notify_celery.task(name="cleanup-unfinished-jobs")
+def cleanup_unfinished_jobs():
+    now = datetime.utcnow()
+    jobs = dao_get_unfinished_jobs()
+    for job in jobs:
+        # The query already checks that the processing_finished time is null, so here we are saying
+        # that if the job started more than five minutes ago and still hasn't finished, it is stuck
+        acceptable_finish_time = job.processing_started + timedelta(minutes=5)
+        if now > acceptable_finish_time:
+            remove_csv_object(job.original_file_name)
+            dao_archive_job(job)
+
+
 @notify_celery.task(name="delete-notifications-older-than-retention")
 def delete_notifications_older_than_retention():
     delete_email_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING)
@@ -134,7 +149,6 @@ def timeout_notifications():
     notifications = dao_timeout_notifications(cutoff_time)

     for notification in notifications:
-        statsd_client.incr(f'timeout-sending.{notification.sent_by}')
         check_and_queue_callback_task(notification)
     current_app.logger.info(
@@ -162,6 +176,7 @@ def delete_inbound_sms():

 @notify_celery.task(name='save-daily-notification-processing-time')
 @cronitor("save-daily-notification-processing-time")
 def save_daily_notification_processing_time(local_date=None):
+    # local_date is a string in the format of "YYYY-MM-DD"
     if local_date is None:
         # if a date is not provided, we run against yesterday's data
diff --git a/app/celery/process_ses_receipts_tasks.py b/app/celery/process_ses_receipts_tasks.py
index 8a0b3417f..d36a4e204 100644
--- a/app/celery/process_ses_receipts_tasks.py
+++ b/app/celery/process_ses_receipts_tasks.py
@@ -5,7 +5,7 @@ from celery.exceptions import Retry
 from flask import current_app, json
 from sqlalchemy.orm.exc import NoResultFound

-from app import notify_celery, statsd_client
+from app import notify_celery
 from app.celery.service_callback_tasks import (
     create_complaint_callback_data,
     create_delivery_status_callback_data,
@@ -92,11 +92,6 @@ def process_ses_results(self, response):
                 "SES callback return status of {} for notification: {}".format(notification_status, notification.id)
             )

-        statsd_client.incr("callback.ses.{}".format(notification_status))
-
-        if notification.sent_at:
-            statsd_client.timing_with_dates("callback.ses.elapsed-time", datetime.utcnow(), notification.sent_at)
-
         check_and_queue_callback_task(notification)

         return True
diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py
index 42a37215e..7ebb44bda 100644
--- a/app/celery/provider_tasks.py
+++ b/app/celery/provider_tasks.py
@@ -1,7 +1,11 @@
+from datetime import datetime, timedelta
+from time import time
+from zoneinfo import ZoneInfo
+
 from flask import current_app
 from sqlalchemy.orm.exc import NoResultFound

-from app import notify_celery
+from app import aws_cloudwatch_client, notify_celery
 from app.clients.email import EmailClientNonRetryableException
 from app.clients.email.aws_ses import AwsSesClientThrottlingSendRateException
 from app.clients.sms import SmsClientResponseException
@@ -13,17 +17,51 @@ from app.dao.notifications_dao import (
 )
 from app.delivery import send_to_providers
 from app.exceptions import NotificationTechnicalFailureException
-from app.models import NOTIFICATION_TECHNICAL_FAILURE
+from app.models import (
+    NOTIFICATION_FAILED,
+    NOTIFICATION_SENT,
+    NOTIFICATION_TECHNICAL_FAILURE,
+)
+
+
+@notify_celery.task(bind=True, name="check_sms_delivery_receipt", max_retries=48, default_retry_delay=300)
+def check_sms_delivery_receipt(self, message_id, notification_id, sent_at):
+    """
+    This is called after deliver_sms to check the status of the message. This uses the same number of
+    retries and the same delay period as deliver_sms. In addition, this fires five minutes after
+    deliver_sms initially. So the idea is that most messages will succeed and show up in the logs quickly.
+    Other messages will resolve successfully after a retry or two. A few will fail, but it will take up to
+    4 hours to know for sure. The call to check_sms will raise an exception if neither a success nor a
+    failure appears in the cloudwatch logs, so this should keep retrying until the log appears, or until
+    we run out of retries.
+ """ + status, provider_response = aws_cloudwatch_client.check_sms(message_id, notification_id, sent_at) + if status == 'success': + status = NOTIFICATION_SENT + else: + status = NOTIFICATION_FAILED + update_notification_status_by_id(notification_id, status, provider_response=provider_response) + current_app.logger.info(f"Updated notification {notification_id} with response '{provider_response}'") @notify_celery.task(bind=True, name="deliver_sms", max_retries=48, default_retry_delay=300) def deliver_sms(self, notification_id): try: + # Get the time we are doing the sending, to minimize the time period we need to check over for receipt + now = round(time() * 1000) current_app.logger.info("Start sending SMS for notification id: {}".format(notification_id)) notification = notifications_dao.get_notification_by_id(notification_id) if not notification: raise NoResultFound() - send_to_providers.send_sms_to_provider(notification) + message_id = send_to_providers.send_sms_to_provider(notification) + # We have to put it in the default US/Eastern timezone. From zones west of there, the delay + # will be ignored and it will fire immediately (although this probably only affects developer testing) + my_eta = datetime.now(ZoneInfo('US/Eastern')) + timedelta(seconds=300) + check_sms_delivery_receipt.apply_async( + [message_id, notification_id, now], + eta=my_eta, + queue=QueueNames.CHECK_SMS + ) except Exception as e: if isinstance(e, SmsClientResponseException): current_app.logger.warning( diff --git a/app/celery/research_mode_tasks.py b/app/celery/research_mode_tasks.py index 62344a41a..6e9c2d68b 100644 --- a/app/celery/research_mode_tasks.py +++ b/app/celery/research_mode_tasks.py @@ -5,6 +5,7 @@ from requests import HTTPError, request from app.celery.process_ses_receipts_tasks import process_ses_results from app.config import QueueNames +from app.dao.notifications_dao import get_notification_by_id from app.models import SMS_TYPE temp_fail = "2028675303" @@ -16,8 +17,8 @@ perm_fail_email = "perm-fail@simulator.notify" temp_fail_email = "temp-fail@simulator.notify" -def send_sms_response(provider, reference, to): - body = sns_callback(reference, to) +def send_sms_response(provider, reference): + body = sns_callback(reference) headers = {"Content-type": "application/json"} make_request(SMS_TYPE, provider, body, headers) @@ -59,25 +60,16 @@ def make_request(notification_type, provider, data, headers): return response.json() -def sns_callback(notification_id, to): - raise Exception("Need to update for SNS callback format along with test_send_to_providers") +def sns_callback(notification_id): + notification = get_notification_by_id(notification_id) - # example from mmg_callback - # if to.strip().endswith(temp_fail): - # # status: 4 - expired (temp failure) - # status = "4" - # elif to.strip().endswith(perm_fail): - # # status: 5 - rejected (perm failure) - # status = "5" - # else: - # # status: 3 - delivered - # status = "3" - - # return json.dumps({"reference": "mmg_reference", - # "CID": str(notification_id), - # "MSISDN": to, - # "status": status, - # "deliverytime": "2016-04-05 16:01:07"}) + # This will only work if all notifications, including successful ones, are in the notifications table + # If we decide to delete successful notifications, we will have to get this from notifications history + return json.dumps({ + "CID": str(notification_id), + "status": notification.status, + # "deliverytime": notification.completed_at + }) def ses_notification_callback(reference): diff --git 
a/app/clients/cloudwatch/__init__.py b/app/clients/cloudwatch/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/app/clients/cloudwatch/aws_cloudwatch.py b/app/clients/cloudwatch/aws_cloudwatch.py new file mode 100644 index 000000000..97de58219 --- /dev/null +++ b/app/clients/cloudwatch/aws_cloudwatch.py @@ -0,0 +1,89 @@ +import json +import re +import time + +from boto3 import client + +from app.clients import Client +from app.cloudfoundry_config import cloud_config + + +class AwsCloudwatchClient(Client): + """ + This client is responsible for retrieving sms delivery receipts from cloudwatch. + """ + + def init_app(self, current_app, *args, **kwargs): + self._client = client( + "logs", + region_name=cloud_config.sns_region, + aws_access_key_id=cloud_config.sns_access_key, + aws_secret_access_key=cloud_config.sns_secret_key + ) + super(Client, self).__init__(*args, **kwargs) + self.current_app = current_app + self._valid_sender_regex = re.compile(r"^\+?\d{5,14}$") + + @property + def name(self): + return 'cloudwatch' + + def _get_log(self, my_filter, log_group_name, sent_at): + + # Check all cloudwatch logs from the time the notification was sent (currently 5 minutes previously) until now + now = round(time.time() * 1000) + beginning = sent_at + next_token = None + all_log_events = [] + while True: + if next_token: + response = self._client.filter_log_events( + logGroupName=log_group_name, + filterPattern=my_filter, + nextToken=next_token, + startTime=beginning, + endTime=now + ) + else: + response = self._client.filter_log_events( + logGroupName=log_group_name, + filterPattern=my_filter, + startTime=beginning, + endTime=now + ) + log_events = response.get('events', []) + all_log_events.extend(log_events) + if len(log_events) > 0: + # We found it + break + next_token = response.get('nextToken') + if not next_token: + break + return all_log_events + + def check_sms(self, message_id, notification_id, created_at): + + # TODO this clumsy approach to getting the account number will be fixed as part of notify-api #258 + account_number = cloud_config.ses_domain_arn + account_number = account_number.replace('arn:aws:ses:us-west-2:', '') + account_number = account_number.split(":") + account_number = account_number[0] + + log_group_name = f'sns/us-west-2/{account_number}/DirectPublishToPhoneNumber' + filter_pattern = '{$.notification.messageId="XXXXX"}' + filter_pattern = filter_pattern.replace("XXXXX", message_id) + all_log_events = self._get_log(filter_pattern, log_group_name, created_at) + + if all_log_events and len(all_log_events) > 0: + event = all_log_events[0] + message = json.loads(event['message']) + return "success", message['delivery']['providerResponse'] + + log_group_name = f'sns/us-west-2/{account_number}/DirectPublishToPhoneNumber/Failure' + all_failed_events = self._get_log(filter_pattern, log_group_name, created_at) + if all_failed_events and len(all_failed_events) > 0: + event = all_failed_events[0] + message = json.loads(event['message']) + return "fail", message['delivery']['providerResponse'] + + raise Exception(f'No event found for message_id {message_id} notification_id {notification_id}') diff --git a/app/clients/email/aws_ses.py b/app/clients/email/aws_ses.py index 59d2243ac..6bd4050df 100644 --- a/app/clients/email/aws_ses.py +++ b/app/clients/email/aws_ses.py @@ -57,7 +57,7 @@ class AwsSesClient(EmailClient): Amazon SES email client. 
''' - def init_app(self, statsd_client, *args, **kwargs): + def init_app(self, *args, **kwargs): self._client = client( 'ses', region_name=cloud_config.ses_region, @@ -65,7 +65,6 @@ class AwsSesClient(EmailClient): aws_secret_access_key=cloud_config.ses_secret_key ) super(AwsSesClient, self).__init__(*args, **kwargs) - self.statsd_client = statsd_client @property def name(self): @@ -110,7 +109,6 @@ class AwsSesClient(EmailClient): ReplyToAddresses=[punycode_encode_email(addr) for addr in reply_to_addresses] ) except botocore.exceptions.ClientError as e: - self.statsd_client.incr("clients.ses.error") # http://docs.aws.amazon.com/ses/latest/DeveloperGuide/api-error-codes.html if e.response['Error']['Code'] == 'InvalidParameterValue': @@ -121,16 +119,12 @@ class AwsSesClient(EmailClient): ): raise AwsSesClientThrottlingSendRateException(str(e)) else: - self.statsd_client.incr("clients.ses.error") raise AwsSesClientException(str(e)) except Exception as e: - self.statsd_client.incr("clients.ses.error") raise AwsSesClientException(str(e)) else: elapsed_time = monotonic() - start_time current_app.logger.info("AWS SES request finished in {}".format(elapsed_time)) - self.statsd_client.timing("clients.ses.request-time", elapsed_time) - self.statsd_client.incr("clients.ses.success") return response['MessageId'] diff --git a/app/clients/email/aws_ses_stub.py b/app/clients/email/aws_ses_stub.py index ef5fc8c13..6322beee4 100644 --- a/app/clients/email/aws_ses_stub.py +++ b/app/clients/email/aws_ses_stub.py @@ -12,8 +12,7 @@ class AwsSesStubClientException(EmailClientException): class AwsSesStubClient(EmailClient): - def init_app(self, statsd_client, stub_url): - self.statsd_client = statsd_client + def init_app(self, stub_url): self.url = stub_url @property @@ -39,11 +38,8 @@ class AwsSesStubClient(EmailClient): response_json = json.loads(response.text) except Exception as e: - self.statsd_client.incr("clients.ses_stub.error") raise AwsSesStubClientException(str(e)) else: elapsed_time = monotonic() - start_time current_app.logger.info("AWS SES stub request finished in {}".format(elapsed_time)) - self.statsd_client.timing("clients.ses_stub.request-time", elapsed_time) - self.statsd_client.incr("clients.ses_stub.success") return response_json['MessageId'] diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 3f57c44c3..45bff2917 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -14,7 +14,7 @@ class AwsSnsClient(SmsClient): AwsSns sms client """ - def init_app(self, current_app, statsd_client, *args, **kwargs): + def init_app(self, current_app, *args, **kwargs): self._client = client( "sns", region_name=cloud_config.sns_region, @@ -23,7 +23,6 @@ class AwsSnsClient(SmsClient): ) super(SmsClient, self).__init__(*args, **kwargs) self.current_app = current_app - self.statsd_client = statsd_client self._valid_sender_regex = re.compile(r"^\+?\d{5,14}$") @property @@ -67,19 +66,14 @@ class AwsSnsClient(SmsClient): start_time = monotonic() response = self._client.publish(PhoneNumber=to, Message=content, MessageAttributes=attributes) except botocore.exceptions.ClientError as e: - self.statsd_client.incr("clients.sns.error") raise str(e) except Exception as e: - self.statsd_client.incr("clients.sns.error") raise str(e) finally: elapsed_time = monotonic() - start_time self.current_app.logger.info("AWS SNS request finished in {}".format(elapsed_time)) - self.statsd_client.timing("clients.sns.request-time", elapsed_time) - self.statsd_client.incr("clients.sns.success") 
     return response["MessageId"]

         if not matched:
-            self.statsd_client.incr("clients.sns.error")
             self.current_app.logger.error("No valid numbers found in {}".format(to))
             raise ValueError("No valid numbers found for SMS delivery")
diff --git a/app/cloudfoundry_config.py b/app/cloudfoundry_config.py
index 7fda0184d..62527c797 100644
--- a/app/cloudfoundry_config.py
+++ b/app/cloudfoundry_config.py
@@ -39,6 +39,15 @@ class CloudfoundryConfig:
             domain_arn = getenv('SES_DOMAIN_ARN', 'dev.notify.gov')
         return domain_arn.split('/')[-1]

+    # TODO remove this after notifications-api #258
+    @property
+    def ses_domain_arn(self):
+        try:
+            domain_arn = self._ses_credentials('domain_arn')
+        except KeyError:
+            domain_arn = getenv('SES_DOMAIN_ARN', 'dev.notify.gov')
+        return domain_arn
+
     @property
     def ses_region(self):
         try:
diff --git a/app/commands.py b/app/commands.py
index 40118bd7c..18ac0ae6f 100644
--- a/app/commands.py
+++ b/app/commands.py
@@ -11,7 +11,6 @@
 from click_datetime import Datetime as click_dt
 from flask import current_app, json
 from notifications_python_client.authentication import create_jwt_token
 from notifications_utils.recipients import RecipientCSV
-from notifications_utils.statsd_decorators import statsd
 from notifications_utils.template import SMSMessageTemplate
 from sqlalchemy import and_
 from sqlalchemy.exc import IntegrityError
@@ -19,6 +18,7 @@ from sqlalchemy.orm.exc import NoResultFound

 from app import db
 from app.aws import s3
+from app.celery.nightly_tasks import cleanup_unfinished_jobs
 from app.celery.tasks import process_row
 from app.dao.annual_billing_dao import (
     dao_create_or_update_annual_billing_for_year,
@@ -247,7 +247,6 @@ def bulk_invite_user_to_service(file_name, service_id, user_id, auth_type, permi
 @notify_command(name='archive-jobs-created-between-dates')
 @click.option('-s', '--start_date', required=True, help="start date inclusive", type=click_dt(format='%Y-%m-%d'))
 @click.option('-e', '--end_date', required=True, help="end date inclusive", type=click_dt(format='%Y-%m-%d'))
-@statsd(namespace="tasks")
 def update_jobs_archived_flag(start_date, end_date):
     current_app.logger.info('Archiving jobs created between {} to {}'.format(start_date, end_date))
@@ -466,6 +465,12 @@ def fix_billable_units():
     print("End fix_billable_units")

+@notify_command(name='delete-unfinished-jobs')
+def delete_unfinished_jobs():
+    cleanup_unfinished_jobs()
+    print("End cleanup_unfinished_jobs")
+
+
 @notify_command(name='process-row-from-job')
 @click.option('-j', '--job_id', required=True, help='Job id')
 @click.option('-n', '--job_row_number', type=int, required=True, help='Job row number')
diff --git a/app/config.py b/app/config.py
index a2f350986..2e9f8cf69 100644
--- a/app/config.py
+++ b/app/config.py
@@ -13,6 +13,7 @@ class QueueNames(object):
     PRIORITY = 'priority-tasks'
     DATABASE = 'database-tasks'
     SEND_SMS = 'send-sms-tasks'
+    CHECK_SMS = 'check-sms-tasks'
     SEND_EMAIL = 'send-email-tasks'
     RESEARCH_MODE = 'research-mode-tasks'
     REPORTING = 'reporting-tasks'
@@ -33,6 +34,7 @@ class QueueNames(object):
         QueueNames.PERIODIC,
         QueueNames.DATABASE,
         QueueNames.SEND_SMS,
+        QueueNames.CHECK_SMS,
         QueueNames.SEND_EMAIL,
         QueueNames.RESEARCH_MODE,
         QueueNames.REPORTING,
@@ -116,9 +118,6 @@ class Config(object):
     # Monitoring
     CRONITOR_ENABLED = False
     CRONITOR_KEYS = json.loads(getenv('CRONITOR_KEYS', '{}'))
-    STATSD_HOST = getenv('STATSD_HOST')
-    STATSD_PORT = 8125
-    STATSD_ENABLED = bool(STATSD_HOST)

     # Antivirus
     ANTIVIRUS_ENABLED = getenv('ANTIVIRUS_ENABLED', '1') == '1'
@@ -241,6 +240,11 @@
'schedule': crontab(hour=2, minute=0), 'options': {'queue': QueueNames.PERIODIC} }, + 'cleanup-unfinished-jobs': { + 'task': 'cleanup-unfinished-jobs', + 'schedule': crontab(hour=0, minute=5), + 'options': {'queue': QueueNames.PERIODIC} + }, 'remove_sms_email_jobs': { 'task': 'remove_sms_email_jobs', 'schedule': crontab(hour=4, minute=0), @@ -291,6 +295,7 @@ def _s3_credentials_from_env(bucket_prefix): class Development(Config): DEBUG = True + NOTIFY_LOG_LEVEL = "DEBUG" SQLALCHEMY_ECHO = False DVLA_EMAIL_ADDRESSES = ['success@simulator.amazonses.com'] diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index da829171b..8fd7f22c0 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -43,6 +43,10 @@ def dao_get_job_by_service_id_and_job_id(service_id, job_id): return Job.query.filter_by(service_id=service_id, id=job_id).one() +def dao_get_unfinished_jobs(): + return Job.query.filter(Job.processing_finished.is_(None)).all() + + def dao_get_jobs_by_service_id( service_id, *, diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f602e13ff..4f8bdf459 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -95,7 +95,7 @@ def _update_notification_status(notification, status, provider_response=None): @autocommit -def update_notification_status_by_id(notification_id, status, sent_by=None): +def update_notification_status_by_id(notification_id, status, sent_by=None, provider_response=None): notification = Notification.query.with_for_update().filter(Notification.id == notification_id).first() if not notification: @@ -121,6 +121,8 @@ def update_notification_status_by_id(notification_id, status, sent_by=None): and not country_records_delivery(notification.phone_prefix) ): return None + if provider_response: + notification.provider_response = provider_response if not notification.sent_by and sent_by: notification.sent_by = sent_by return _update_notification_status( diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 84feb52cc..cd4a766a3 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -10,7 +10,7 @@ from notifications_utils.template import ( SMSMessageTemplate, ) -from app import create_uuid, db, notification_provider_clients, statsd_client +from app import create_uuid, db, notification_provider_clients from app.celery.research_mode_tasks import ( send_email_response, send_sms_response, @@ -38,7 +38,7 @@ from app.serialised_models import SerialisedService, SerialisedTemplate def send_sms_to_provider(notification): service = SerialisedService.from_id(notification.service_id) - + message_id = None if not service.active: technical_failure(notification=notification) return @@ -59,11 +59,9 @@ def send_sms_to_provider(notification): prefix=service.name, show_prefix=service.prefix_sms, ) - created_at = notification.created_at - key_type = notification.key_type if service.research_mode or notification.key_type == KEY_TYPE_TEST: update_notification_to_sending(notification, provider) - send_sms_response(provider.name, str(notification.id), notification.to) + send_sms_response(provider.name, str(notification.id)) else: try: @@ -81,7 +79,7 @@ def send_sms_to_provider(notification): 'international': notification.international, } db.session.close() # no commit needed as no changes to objects have been made above - provider.send_sms(**send_sms_kwargs) + message_id = provider.send_sms(**send_sms_kwargs) except Exception as e: notification.billable_units = template.fragment_count 
dao_update_notification(notification) @@ -90,18 +88,7 @@ def send_sms_to_provider(notification): else: notification.billable_units = template.fragment_count update_notification_to_sending(notification, provider) - - delta_seconds = (datetime.utcnow() - created_at).total_seconds() - statsd_client.timing("sms.total-time", delta_seconds) - - if key_type == KEY_TYPE_TEST: - statsd_client.timing("sms.test-key.total-time", delta_seconds) - else: - statsd_client.timing("sms.live-key.total-time", delta_seconds) - if service.high_volume: - statsd_client.timing("sms.live-key.high-volume.total-time", delta_seconds) - else: - statsd_client.timing("sms.live-key.not-high-volume.total-time", delta_seconds) + return message_id def send_email_to_provider(notification): @@ -112,7 +99,6 @@ def send_email_to_provider(notification): return if notification.status == 'created': provider = provider_to_use(EMAIL_TYPE, False) - template_dict = SerialisedTemplate.from_id_and_service_id( template_id=notification.template_id, service_id=service.id, version=notification.template_version ).__dict__ @@ -127,8 +113,6 @@ def send_email_to_provider(notification): template_dict, values=notification.personalisation ) - created_at = notification.created_at - key_type = notification.key_type if service.research_mode or notification.key_type == KEY_TYPE_TEST: notification.reference = str(create_uuid()) update_notification_to_sending(notification, provider) @@ -147,16 +131,6 @@ def send_email_to_provider(notification): ) notification.reference = reference update_notification_to_sending(notification, provider) - delta_seconds = (datetime.utcnow() - created_at).total_seconds() - - if key_type == KEY_TYPE_TEST: - statsd_client.timing("email.test-key.total-time", delta_seconds) - else: - statsd_client.timing("email.live-key.total-time", delta_seconds) - if service.high_volume: - statsd_client.timing("email.live-key.high-volume.total-time", delta_seconds) - else: - statsd_client.timing("email.live-key.not-high-volume.total-time", delta_seconds) def update_notification_to_sending(notification, provider): diff --git a/app/models.py b/app/models.py index 31741b41a..23eb4d107 100644 --- a/app/models.py +++ b/app/models.py @@ -109,7 +109,7 @@ class User(db.Model): platform_admin = db.Column(db.Boolean, nullable=False, default=False) current_session_id = db.Column(UUID(as_uuid=True), nullable=True) auth_type = db.Column( - db.String, db.ForeignKey('auth_type.name'), index=True, nullable=False, default=EMAIL_AUTH_TYPE + db.String, db.ForeignKey('auth_type.name'), index=True, nullable=False, default=SMS_AUTH_TYPE ) email_access_validated_at = db.Column( db.DateTime, index=False, unique=False, nullable=False, default=datetime.datetime.utcnow @@ -1653,7 +1653,7 @@ class InvitedUser(db.Model): db.ForeignKey('auth_type.name'), index=True, nullable=False, - default=EMAIL_AUTH_TYPE + default=SMS_AUTH_TYPE ) folder_permissions = db.Column(JSONB(none_as_null=True), nullable=False, default=[]) diff --git a/app/notifications/sns_cert_validator.py b/app/notifications/sns_cert_validator.py index 1b3f7ea3d..c06d06c49 100644 --- a/app/notifications/sns_cert_validator.py +++ b/app/notifications/sns_cert_validator.py @@ -16,7 +16,7 @@ VALID_SNS_TOPICS = Config.VALID_SNS_TOPICS _signing_cert_cache = {} _cert_url_re = re.compile( - r'sns\.([a-z]{1,3}-[a-z]+-[0-9]{1,2})\.amazonaws\.com', + r'sns\.([a-z]{1,3}(?:-gov)?-[a-z]+-[0-9]{1,2})\.amazonaws\.com', ) diff --git a/deploy-config/egress_proxy/notify-api-production.allow.acl 
b/deploy-config/egress_proxy/notify-api-production.allow.acl
new file mode 100644
index 000000000..a6e4a2f65
--- /dev/null
+++ b/deploy-config/egress_proxy/notify-api-production.allow.acl
@@ -0,0 +1,4 @@
+email.us-gov-west-1.amazonaws.com
+sns.us-gov-west-1.amazonaws.com
+gov-collector.newrelic.com
+egress-proxy-notify-api-production.apps.internal
diff --git a/deploy-config/egress_proxy/notify-api-production.deny.acl b/deploy-config/egress_proxy/notify-api-production.deny.acl
new file mode 100644
index 000000000..e69de29bb
diff --git a/deploy-config/egress_proxy/notify-api-production.deploy.acl b/deploy-config/egress_proxy/notify-api-production.deploy.acl
new file mode 100644
index 000000000..e5a3a541d
--- /dev/null
+++ b/deploy-config/egress_proxy/notify-api-production.deploy.acl
@@ -0,0 +1 @@
+Update this file to force a re-deploy of the egress proxy even when the notify-api-production.*.acl files haven't changed
diff --git a/docs/deploying.md b/docs/deploying.md
index 2763a57a5..916a5279f 100644
--- a/docs/deploying.md
+++ b/docs/deploying.md
@@ -1,19 +1,20 @@
 # Deploying

-We deploy automatically to cloud.gov for demo and staging environments.
+We deploy automatically to cloud.gov for production, demo, and staging environments.

 Deployment to staging runs via the [base deployment action](../.github/workflows/deploy.yml) on GitHub, which pulls credentials from GitHub's secrets store in the staging environment.

 Deployment to demo runs via the [demo deployment action](../.github/workflows/deploy-demo.yml) on GitHub, which pulls credentials from GitHub's secrets store in the demo environment.

+Deployment to production runs via the [production deployment action](../.github/workflows/deploy-prod.yml) on GitHub, which pulls credentials from GitHub's secrets store in the production environment.
+
 The [action that we use](https://github.com/18F/cg-deploy-action) deploys using [a rolling strategy](https://docs.cloudfoundry.org/devguide/deploy-apps/rolling-deploy.html), so all deployments should have zero downtime.

-The API has 2 deployment environments:
+The API has 3 deployment environments:

 - Staging, which deploys from `main`
 - Demo, which deploys from `production`
-
-In the future, we will add a Production deploy environment, which will deploy in parallel to Demo.
+- Production, which deploys from `production`

 Configurations for these are located in [the `deploy-config` folder](../deploy-config/).
diff --git a/docs/infra-overview.md b/docs/infra-overview.md
index 7f366af79..920cf6199 100644
--- a/docs/infra-overview.md
+++ b/docs/infra-overview.md
@@ -49,9 +49,24 @@
 Credentials for these services are created by running:

 1. `cd terraform/development`
 1. `./run.sh`

+in both the api and admin repositories.
+
 This will append credentials to your `.env` file. You will need to manually clean up any prior runs from that file if you run that command again.

-Offboarding: Service key bindings can be cleaned up from cloud.gov by running `./run.sh -d` yourself, or another developer running `./run.sh -d -u USER_TO_CLEANUP`
+You can remove your development infrastructure by running `./run.sh -d`.
+
+#### Resetting
+
+`./reset.sh` can be used to re-import your development infrastructure information if you are on a new computer or a new working tree and the old terraform state file was not transferred.
+
+#### Offboarding
+
+`./reset.sh -u USER_TO_OFFBOARD` can be used to import another user's development resources in order to clean them up. Steps for use:
+
+1. 
Move your existing terraform state file aside temporarily, so it is not overwritten. +1. `./reset.sh -u USER_TO_OFFBOARD` +1. Answer no to the prompt about creating missing resources. +1. Run `./run.sh -u USER_TO_OFFBOARD -d` to fully remove the rest of that user's resources. ### Cloud.gov @@ -87,6 +102,24 @@ We are using [New Relic](https://one.newrelic.com/nr1-core?account=3389907) for These steps are required for new cloud.gov environments. Local development borrows SES & SNS infrastructure from the `notify-staging` cloud.gov space, so these steps are not required for new developers. +### Steps to do a clean prod deploy to cloud.gov + +Steps for deploying production from scratch. These can be updated for a new cloud.gov environment by subbing out `prod` or `production` for your desired environment within the steps. + +1. Deploy API app + 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. + 1. Ensure that the `domain` module is commented out in `terraform/production/main.tf` + 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` + 1. Create any necessary DNS records (check `notify-api-ses-production` service credentials for instructions) within https://github.com/18f/dns + 1. Follow the `Steps to prepare SES` below + 1. (Optional) if using a public API route, uncomment the `domain` module and re-trigger a deploy +1. Deploy Admin app + 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. + 1. Ensure that the `api_network_route` and `domain` modules are commented out in `terraform/production/main.tf` + 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` + 1. Create DNS records for `domain` module within https://github.com/18f/dns + 1. Uncomment the `api_network_route` and `domain` modules and re-trigger a deploy + ### Steps to prepare SES 1. After the first deploy of the application with the SSB-brokered SES service completes: @@ -100,6 +133,8 @@ TODO: create env vars for these origin and destination email addresses for the r #### Move SNS out of sandbox. +This should be complete for all regions U.S. Notify has been deployed to or is currently planned to be deployed to. + 1. Visit the SNS console for the region you will be sending from. Notes: 1. SNS settings are per-region, so each environment must have its own region 1. Pinpoint and SNS have confusing regional availability, so ensure both are available before submitting any requests. @@ -115,10 +150,8 @@ TODO: create env vars for these origin and destination email addresses for the r 1. Select `Toll-free registrations` and `Create registration` 1. Select the number you just created and then `Register existing toll-free number` 1. Complete and submit the form. Approval usually takes about 2 weeks. -1. Set this phone number as the `AWS_US_TOLL_FREE_NUMBER` in the environment you are creating +1. See the [run book](./run-book.md) for information on how to set those numbers. -#### Current Production Phone Numbers +Example answers for toll-free registration form -* +18447952263 - in use as default number. 
Notify's OTP messages and trial service messages are sent from this number
-* +18447891134 - to be used by Pilot Partner 1
-* +18888402596 - to be used by Pilot Partner 2
+![example answers for toll-free registration form](./toll-free-registration.png)
diff --git a/docs/message-sending-path.md b/docs/message-sending-path.md
new file mode 100644
index 000000000..4d3774756
--- /dev/null
+++ b/docs/message-sending-path.md
@@ -0,0 +1,15 @@
+# How messages are queued and sent
+
+There are several ways for notifications to come into the API.
+
+- Messages sent through the API enter through `app/notifications/post_notifications.py`
+- One-off messages sent from the UI enter through `create_one_off_notification` in `app/service/rest.py`
+- CSV uploads enter through `app/job/rest.py`
+
+API messages and one-off UI messages come in one at a time, and take slightly separate routes
+that both end up at `persist_notification`, which writes to the database, and `provider_tasks.deliver_sms`,
+which enqueues the sending.
+
+For CSV uploads, the CSV is first stored in S3 and queued as a `Job`. When the job runs, it iterates
+through the rows, running `process_job.save_sms` to send notifications through `persist_notification` and
+`provider_tasks.deliver_sms`.
diff --git a/docs/run-book.md b/docs/run-book.md
new file mode 100644
index 000000000..3619a377f
--- /dev/null
+++ b/docs/run-book.md
@@ -0,0 +1,199 @@
+Run Book
+========
+
+Policies and Procedures needed before and during US Notify Operations. Many of these policies are taken from the U.S. Notify System Security & Privacy Plan (SSPP).
+
+Any changes to policies and procedures defined both here and in the SSPP must be kept in sync, and should be done collaboratively with the System ISSO and ISSM to ensure
+that the security of the system is maintained.
+
+1. [Alerts, Notifications, Monitoring](#alerts)
+1. [Restaging Apps](#restaging-apps)
+1. [Smoke-testing the App](#smoke-testing)
+1. [Configuration Management](#cm)
+1. [DNS Changes](#dns)
+1. [Known Gotchas](#gotcha)
+1. [User Account Management](#ac)
+1. [SMS Phone Number Management](#phone-numbers)
+
+## Alerts, Notifications, Monitoring
+
+Operational alerts are posted to the [#pb-notify-alerts](https://gsa-tts.slack.com/archives/C04U9BGHUDB) Slack channel. Please join this channel and enable push notifications for all messages whenever you are on call.
+
+[NewRelic](https://one.newrelic.com/) is used for monitoring the application. The [NewRelic Dashboard](https://onenr.io/08wokrnrvwx) can be filtered by environment and by API, Admin, or both.
+
+[Cloud.gov Logging](https://logs.fr.cloud.gov/) is used to view and search application and platform logs.
+
+In addition to the application logs, there are several tables in the application that store useful information for audit logging purposes:
+
+* `events`
+* the various `*_history` tables
+
+
+## Restaging Apps
+
+Our apps must be restaged whenever cloud.gov releases updates to buildpacks. Cloud.gov will send email notifications whenever buildpack updates affect a deployed app.
+
+Restaging the apps rebuilds them with the new buildpack, enabling us to take advantage of whatever bugfixes or security updates are present in the new buildpack.
+
+There are two GitHub Actions that automate this process. Each is run manually and must be run once for each environment, so that changes can be tested in staging before they are run in the demo and production environments.
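These restage workflows can also be dispatched from a script instead of the browser, using GitHub's workflow-dispatch REST endpoint. The sketch below is hypothetical: the workflow file name comes from the steps that follow, while the token's permissions and the `environment` input name (inferred from the dropdown described below) are assumptions for illustration.

```python
# Hypothetical sketch: trigger the restage-apps workflow via the GitHub REST
# API instead of the browser UI. Assumes a token allowed to dispatch workflow
# runs, and that the workflow's dropdown input is named "environment".
import requests

def dispatch_restage(repo: str, environment: str, token: str) -> None:
    url = f"https://api.github.com/repos/{repo}/actions/workflows/restage-apps.yml/dispatches"
    response = requests.post(
        url,
        headers={
            "Authorization": f"Bearer {token}",
            "Accept": "application/vnd.github+json",
        },
        json={"ref": "main", "inputs": {"environment": environment}},
        timeout=30,
    )
    response.raise_for_status()  # GitHub replies 204 No Content on success

# e.g. dispatch_restage("GSA/notifications-api", "staging", token)
```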
+
+When `notify-api-`, `notify-admin-`, `egress-proxy-notify-api-`, and/or `egress-proxy-notify-admin-` need to be restaged:
+
+1. Navigate to [the Restage apps GitHub Action](https://github.com/GSA/notifications-api/actions/workflows/restage-apps.yml)
+1. Click the `Run workflow` button to open a popup
+1. Leave `Use workflow from` on its default of `Branch: main`
+1. Select the environment you need to restage from the dropdown
+1. Click `Run workflow` within the popup
+1. Repeat for other environments
+
+When `ssb-sms` and/or `ssb-smtp` need to be restaged:
+
+1. Navigate to the [SSB Restage apps GitHub Action](https://github.com/GSA/usnotify-ssb/actions/workflows/restage-apps.yml)
+1. Click the `Run workflow` button to open a popup
+1. Leave `Use workflow from` on its default of `Branch: main`
+1. Select the environment (either `staging` or `production`) you need to restage from the dropdown
+1. Click `Run workflow` within the popup
+1. Repeat for other environments
+
+When `ssb-devel-sms` and/or `ssb-devel-smtp` need to be restaged:
+
+1. Navigate to the [SSB Restage apps GitHub Action](https://github.com/GSA/usnotify-ssb/actions/workflows/restage-apps.yml)
+1. Click the `Run workflow` button to open a popup
+1. Leave `Use workflow from` on its default of `Branch: main`
+1. Select the `development` environment from the dropdown
+1. Click `Run workflow` within the popup
+
+
+## Smoke-testing the App
+
+To ensure that notifications are passing through the application properly, take the following steps to confirm that all parts are operating correctly:
+
+1. Send yourself a password reset email. This will verify SES integration. The email can be deleted once received if you don't wish to change your password.
+1. Log into the app. This will verify SNS integration for a one-off message.
+1. Upload a CSV and schedule send for the soonest time after "Now". This will verify S3 connections as well as confirm that the scheduler and worker processes are running properly.
+
+## Configuration Management
+
+Also known as: **How to move code from my machine to production**
+
+### Common Policies and Procedures
+
+1. All changes must be made in a feature branch and opened as a PR targeting the `main` branch.
+1. All PRs must be approved by another developer.
+1. PRs to the `main` and `production` branches must be merged by someone with the `Administrator` role.
+1. PR documentation includes a Security Impact Analysis.
+1. PRs that will impact the Security Posture must be approved by the US Notify ISSO.
+1. Any PRs waiting for approval should be discussed during daily standup meetings.
+
+### notifications-api & notifications-admin
+
+1. Changes are deployed to the `staging` environment after a successful `checks.yml` run on the `main` branch. Branch Protections prevent pushing directly to `main`.
+1. Changes are deployed to the `demo` _and_ `production` environments after merging `main` into `production`. Branch Protections prevent pushing directly to `production`.
+
+### usnotify-ssb
+
+1. Changes are deployed to the `staging` and `production` environments after merging to the `main` branch. The `staging` deployment must be successful before `production` is attempted. Branch Protections prevent pushing directly to `main`.
+
+### ttsnotify-brokerpak-sms
+
+1. A new release is created by pushing a tag to the repository on the `main` branch.
+1. To include the new version in released SSB code, create a PR in the `usnotify-ssb` repo updating the version in use in `app-setup-sms.sh`.
+
+### datagov-brokerpak-smtp
+
+1. 
To include new versions of the SMTP brokerpak in released SSB code, create a PR in the `usnotify-ssb` repo updating the version in use in `app-setup-smtp.sh`. + +### Vulnerability Mitigation Changes + +US Notify Administrators are responsible for ensuring that remediations for vulnerabilities are implemented. Response times vary based on the severity of the vulnerability, as follows: + +* Critical (Very High) - 15 days +* High - 30 days +* Medium - 90 days +* Low - 180 days +* Informational - 365 days (depending on the analysis of the issue) + +## DNS Changes + +U.S. Notify DNS records are maintained within [the 18F/dns repository](https://github.com/18F/dns/blob/main/terraform/notify.gov.tf). To create new DNS records for notify.gov or any subdomains: + +1. Update the `notify.gov.tf` terraform to update or create the new records within Route53 and push the branch to the 18F/dns repository. +1. Open a PR. +1. Verify that the plan output within CircleCI creates the records that you expect. +1. Request a PR review from the 18F/tts-tech-portfolio team. +1. Once the PR is approved and merged, verify that the apply step completed correctly within [CircleCI](https://app.circleci.com/pipelines/github/18F/dns) + + +## Known Gotchas + +### SSB Service Bindings are failing
+**Problem:** Creating or deleting service keys is failing, and the SSB logs reference a certificate verification failure ("certificate valid for GUID A but not for GUID B"). + +**Solution:** Restage the SSB apps using the restage apps action (see [Restaging Apps](#restaging-apps)). + +### SNS Topic Subscriptions Don't Succeed + +**Problem:** When deploying a new environment, a race condition can prevent SNS topic subscriptions from being successfully verified on the AWS side. + +**Solution:** Manually re-request subscription confirmation from the AWS Console.
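Before falling back to the console, it can help to confirm that a subscription really is stuck. A minimal boto3 sketch, assuming AWS credentials are already configured; the topic ARN in the example is a placeholder:

```
# Sketch: list subscriptions on a topic and flag any awaiting confirmation.
import boto3


def pending_subscriptions(topic_arn):
    sns = boto3.client("sns")
    pending = []
    paginator = sns.get_paginator("list_subscriptions_by_topic")
    for page in paginator.paginate(TopicArn=topic_arn):
        for sub in page["Subscriptions"]:
            # Unconfirmed subscriptions report this sentinel instead of an ARN.
            if sub["SubscriptionArn"] == "PendingConfirmation":
                pending.append(sub["Endpoint"])
    return pending


# Example (placeholder ARN):
# print(pending_subscriptions("arn:aws:sns:us-west-2:123456789012:example-topic"))
```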
+ +## User Account Management + +Important policies: + +* Infrastructure Accounts and Application Platform Administrators must be approved by the System Owner (Amy) before creation, but people with `Administrator` role can actually do the creation and role assignments. +* At least one agency partner must act as the `User Manager` for their service, with permissions to manage their team according to their agency's policies and procedures. +* All users must utilize `.gov` email addresses. +* Users who leave the team or otherwise have role changes must have their accounts updated to reflect the new roles required (or disabled) within 14 days. +* SpaceDeployer credentials must be rotated within 14 days of anyone with SpaceDeveloper cloud.gov access leaving the team. +* A user report must be created annually (See AC-2(j)). `make cloudgov-user-report` can be used to create a full report of all cloud.gov users. + +### Types of Infrastructure Users + +| Role Name | System | Permissions | Who | Responsibilities | +| --------- | ------ | ----------- | --- | ---------------- | +| Administrator | GitHub | Admin | PBS Fed | Approve & Merge PRs into main and production | +| Administrator | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed | Read audit logs, verify & fix any AWS service issues within Production AWS account | +| Administrator | Cloud.gov | `OrgManager` | PBS Fed | Manage cloud.gov roles and permissions. Access to production spaces | +| DevOps Engineer | Cloud.gov | `SpaceManager` | PBS Fed or Contractor | Access to non-production spaces | +| DevOps Engineer | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed or Contractor | Access to non-production AWS accounts to verify & fix any AWS issues in the lower environments | +| Engineer | GitHub | Write | PBS Fed or Contractor | Write code & issues, submit PRs | + +### Types of Application Users + +| Role Name | Permissions | Who | Responsibilities | +| --------- | ----------- | --- | ---------------- | +| Platform Administrator | `platform_admin` | PBS Fed | Administer system settings within US Notify across Services | +| User Manager | `MANAGE_USERS` | Agency Partner | Manage service team members | +| User | any except `MANAGE_USERS` | Agency Partner | Use US Notify | + +### Service Accounts + +| Role Name | System | Permissions | Notes | +| --------- | ------ | ----------- | ----- | +| Cloud.gov Service Account | Cloud.gov | `OrgManager` and `SpaceDeveloper` | Creds stored in GitHub Environment secrets within api and admin app repos | +| SSB Deployment Account | AWS | `IAMFullAccess` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | +| SSB Cloud.gov Service Account | Cloud.gov | `SpaceDeveloper` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | +| SSB AWS Accounts | AWS | `sms_broker` or `smtp_broker` IAM role | Creds created and maintained by usnotify-ssb terraform | + +## SMS Phone Number Management + +See [Infrastructure Overview](./infra-overview.md#request-new-phone-numbers) for information about SMS phone numbers in AWS. + +Once you have a number, it must be set in the app in one of two ways: + +* For the default phone number, to be used by Notify itself for OTP codes and the default from number for services, set the phone number as the `AWS_US_TOLL_FREE_NUMBER` ENV variable in the environment you are creating +* For service-specific phone numbers, set the phone number in the Service's `Text message senders` in the settings tab. 
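As a sketch of the first option, the default number is read from the environment. `AWS_US_TOLL_FREE_NUMBER` is the variable named above; the config class and helper here are illustrative assumptions, not the application's actual config code:

```
# Sketch: read the default toll-free number from the environment.
import os


class Config:
    # Illustrative; the real config class is not shown in this document.
    AWS_US_TOLL_FREE_NUMBER = os.environ.get("AWS_US_TOLL_FREE_NUMBER")


def default_sms_sender():
    # Fail loudly rather than silently sending from the wrong number.
    number = Config.AWS_US_TOLL_FREE_NUMBER
    if not number:
        raise RuntimeError("AWS_US_TOLL_FREE_NUMBER is not set for this environment")
    return number
```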
+ +### Current Production Phone Numbers + +* +18447952263 - in use as default number. Notify's OTP messages and trial service messages are sent from this number +* +18447891134 - to be used by Pilot Partner 1 +* +18888402596 - to be used by Pilot Partner 2 diff --git a/docs/testing.md b/docs/testing.md index f4b1f5e93..40012b245 100644 --- a/docs/testing.md +++ b/docs/testing.md @@ -56,5 +56,5 @@ docker run -v $(pwd):/zap/wrk/:rw --network="notify-network" -t owasp/zap2docker The equivalent command if you are running the API locally: ``` -docker run -v $(pwd):/zap/wrk/:rw -t owasp/zap2docker-weekly zap-api-scan.py -t http://host.docker.internal:6011/docs/openapi.yml -f openapi -c zap.conf +docker run -v $(pwd):/zap/wrk/:rw -t owasp/zap2docker-weekly zap-api-scan.py -t http://host.docker.internal:6011/docs/openapi.yml -f openapi -c zap.conf -r report.html ``` diff --git a/docs/toll-free-registration.png b/docs/toll-free-registration.png new file mode 100644 index 000000000..bca4c27b2 Binary files /dev/null and b/docs/toll-free-registration.png differ diff --git a/manifest.yml b/manifest.yml index 021f934da..44dd270c4 100644 --- a/manifest.yml +++ b/manifest.yml @@ -2,6 +2,7 @@ applications: - name: notify-api-((env)) buildpack: python_buildpack + stack: cflinuxfs4 instances: 1 disk_quota: 1G routes: diff --git a/migrations/versions/0395_remove_intl_letters_perm.py b/migrations/versions/0395_remove_intl_letters_perm.py new file mode 100644 index 000000000..dd120b9b9 --- /dev/null +++ b/migrations/versions/0395_remove_intl_letters_perm.py @@ -0,0 +1,28 @@ +""" + +Revision ID: 0395_remove_international_letters_permission +Revises: 0394_remove_contact_list +Create Date: 2023-05-23 10:03:10.485368 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0395_remove_intl_letters_perm' +down_revision = '0394_remove_contact_list' + + +def upgrade(): + sql = """ + DELETE + FROM service_permissions + WHERE permission = 'international_letters' + """ + + conn = op.get_bind() + conn.execute(sql) + + +def downgrade(): + pass diff --git a/newrelic.ini b/newrelic.ini index 394ae3c1a..5f5fec1e7 100644 --- a/newrelic.ini +++ b/newrelic.ini @@ -216,7 +216,7 @@ app_name = us-notify-api (Demo) monitor_mode = true [newrelic:production] -app_name = us-notify-api +app_name = us-notify-api (Production) monitor_mode = true # --------------------------------------------------------------------------- diff --git a/sample.env b/sample.env index 71a451e1d..643c25c63 100644 --- a/sample.env +++ b/sample.env @@ -30,7 +30,6 @@ DEBUG=True ANTIVIRUS_ENABLED=0 REDIS_ENABLED=1 NOTIFY_ENVIRONMENT=development -STATSD_HOST=localhost SES_STUB_URL=None NOTIFY_APP_NAME=api diff --git a/statsd_mapping.yml b/statsd_mapping.yml deleted file mode 100644 index 83fb306dc..000000000 --- a/statsd_mapping.yml +++ /dev/null @@ -1,11 +0,0 @@ -defaults: - timer_type: histogram - buckets: [.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 25] - ttl: 0 # metrics do not expire - -mappings: -- match: (\w+)\.notifications\.(.+) - match_type: regex - name: "notifications_${2}" - labels: - space: "$1" diff --git a/terraform/development/providers.tf b/terraform/development/providers.tf index 59bb98a70..d8ae4488e 100644 --- a/terraform/development/providers.tf +++ b/terraform/development/providers.tf @@ -3,7 +3,7 @@ terraform { required_providers { cloudfoundry = { source = "cloudfoundry-community/cloudfoundry" - version = "0.50.5" + version = "0.50.7" } } } diff --git 
a/terraform/development/reset.sh b/terraform/development/reset.sh new file mode 100755 index 000000000..4e11c0e28 --- /dev/null +++ b/terraform/development/reset.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash + +username=`whoami` +org="gsa-tts-benefits-studio-prototyping" + +usage=" +$0: Reset terraform state so run.sh can be run again or for a new username + +Usage: + $0 -h + $0 [-u <username>] + +Options: +-h: show help and exit +-u <username>: your username. Default: $username + +Notes: +* Requires cf-cli@8 +" + +while getopts ":hu:" opt; do + case "$opt" in + u) + username=${OPTARG} + ;; + h) + echo "$usage" + exit 0 + ;; + esac +done + +read -p "Are you sure you want to import terraform state and remove existing service keys for $username (y/n)? " verify + +if [[ $verify != "y" ]]; then + exit 0 +fi + +# ensure we're in the correct directory +cd $(dirname $0) + +service_account="$username-terraform" + +if [[ ! -s "secrets.auto.tfvars" ]]; then + # create user in notify-local-dev space to create s3 buckets + ../create_service_account.sh -s notify-local-dev -u $service_account > secrets.auto.tfvars + + # grant user access to notify-staging to create a service key for SES and SNS + cg_username=`cf service-key $service_account service-account-key | tail -n +2 | jq -r '.credentials.username'` + cf set-space-role $cg_username $org notify-staging SpaceDeveloper +fi + +echo "Importing terraform state for $username" +terraform init + +key_name=$username-api-dev-key + +cf t -s notify-local-dev +terraform import -var "username=$username" module.csv_upload_bucket.cloudfoundry_service_instance.bucket $(cf service --guid $username-csv-upload-bucket) +cf delete-service-key -f $username-csv-upload-bucket $key_name +cf t -s notify-staging +cf delete-service-key -f notify-api-ses-staging $key_name +cf delete-service-key -f notify-api-sns-staging $key_name + +./run.sh -u $username diff --git a/terraform/development/run.sh b/terraform/development/run.sh index 0285d0946..c0297b2bb 100755 --- a/terraform/development/run.sh +++ b/terraform/development/run.sh @@ -8,7 +8,7 @@ $0: Create development infrastructure Usage: $0 -h - $0 [-u <username>] [-k] + $0 [-u <username>] [-k] [-d] Options: -h: show help and exit diff --git a/terraform/ops/cloudgov_user_report.py b/terraform/ops/cloudgov_user_report.py new file mode 100644 index 000000000..7a26fe28c --- /dev/null +++ b/terraform/ops/cloudgov_user_report.py @@ -0,0 +1,84 @@ +from subprocess import check_output + +from cloudfoundry_client.client import CloudFoundryClient + +ORG_NAME = "gsa-tts-benefits-studio-prototyping" + + +client = CloudFoundryClient.build_from_cf_config() +org_guid = check_output(f"cf org {ORG_NAME} --guid", shell=True).decode().strip() +space_guids = list(map(lambda item: item['guid'], client.v3.spaces.list(organization_guids=org_guid))) + + +class RoleCollector: + def __init__(self): + self._map = {} + + def add(self, role): + user = role.user + if self._map.get(user.guid) is None: + self._map[user.guid] = { + "user": user, + "roles": [role] + } + else: + self._map[user.guid]["roles"].append(role) + + def print(self): + for user_roles in self._map.values(): + user = user_roles['user'] + print(f"{user.type}: {user.username} has roles:") + for role in user_roles['roles']: + if role.space: + print(f" {role.type} in {role.space.name}") + else: + print(f" {role.type}") + + +role_collector = RoleCollector() + + +class User: + def __init__(self, entity): + self.guid = entity['guid'] + self._username = entity['username'] + self._is_service_account = entity['origin'] != 'gsa.gov' + self.type
= 'Bot' if self._is_service_account else 'User' + + @property + def username(self): + if self._is_service_account: + return client.v3.service_credential_bindings.get( + self._username, include="service_instance" + ).service_instance()['name'] + else: + return self._username + + +class Space: + def __init__(self, entity): + self.name = entity['name'] + + +class Role: + def __init__(self, entity): + self._fields = entity + self.type = entity['type'] + self.user = User(entity.user()) + + @property + def space(self): + try: + return Space(self._fields.space()) + except AttributeError: + return None + + +for role in map(Role, client.v3.roles.list(organization_guids=org_guid, include="user")): + role_collector.add(role) +for role in map(Role, client.v3.roles.list(space_guids=space_guids, include="user")): + role_collector.add(role) + + +if __name__ == '__main__': + role_collector.print() diff --git a/terraform/production/main.tf b/terraform/production/main.tf index afe132ad4..574ae0741 100644 --- a/terraform/production/main.tf +++ b/terraform/production/main.tf @@ -13,7 +13,7 @@ module "database" { cf_space_name = local.cf_space_name name = "${local.app_name}-rds-${local.env}" recursive_delete = local.recursive_delete - rds_plan_name = "TKTK-production-rds-plan" + rds_plan_name = "small-psql-redundant" } module "redis" { @@ -23,7 +23,7 @@ module "redis" { cf_space_name = local.cf_space_name name = "${local.app_name}-redis-${local.env}" recursive_delete = local.recursive_delete - redis_plan_name = "TKTK-production-redis-plan" + redis_plan_name = "redis-3node-large" } module "csv_upload_bucket" { @@ -72,9 +72,10 @@ module "sns_sms" { ########################################################################### # The following lines need to be commented out for the initial `terraform apply` # It can be re-enabled after: +# TODO: decide on public API domain name # 1) the app has first been deployed # 2) the route has been manually created by an OrgManager: -# `cf create-domain TKTK-org-name TKTK-production-domain-name` +# `cf create-domain gsa-tts-benefits-studio-prototyping api.notify.gov` ########################################################################### # module "domain" { # source = "github.com/18f/terraform-cloudgov//domain?ref=v0.2.0" @@ -85,5 +86,5 @@ module "sns_sms" { # name = "${local.app_name}-domain-${local.env}" # recursive_delete = local.recursive_delete # cdn_plan_name = "domain" -# domain_name = "TKTK-production-domain-name" +# domain_name = "api.notify.gov" # } diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 0599799e4..dd1191ae9 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -7,6 +7,7 @@ from freezegun import freeze_time from app.celery import nightly_tasks from app.celery.nightly_tasks import ( _delete_notifications_older_than_retention_by_type, + cleanup_unfinished_jobs, delete_email_notifications_older_than_retention, delete_inbound_sms, delete_sms_notifications_older_than_retention, @@ -15,7 +16,7 @@ from app.celery.nightly_tasks import ( save_daily_notification_processing_time, timeout_notifications, ) -from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime +from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime, Job from tests.app.db import ( create_job, create_notification, @@ -313,3 +314,17 @@ def test_delete_notifications_task_calls_task_for_services_that_have_sent_notifi 'datetime_to_delete_before': datetime(2021, 3, 26, 4, 0) }), ]) + + +def 
test_cleanup_unfinished_jobs(mocker): + mock_s3 = mocker.patch('app.celery.nightly_tasks.remove_csv_object') + mock_dao_archive = mocker.patch('app.celery.nightly_tasks.dao_archive_job') + mock_dao = mocker.patch('app.celery.nightly_tasks.dao_get_unfinished_jobs') + mock_job_unfinished = Job() + mock_job_unfinished.processing_started = datetime(2023, 1, 1, 0, 0, 0) + mock_job_unfinished.original_file_name = "blah" + + mock_dao.return_value = [mock_job_unfinished] + cleanup_unfinished_jobs() + mock_s3.assert_called_once_with('blah') + mock_dao_archive.assert_called_once_with(mock_job_unfinished) diff --git a/tests/app/celery/test_process_ses_receipts_tasks.py b/tests/app/celery/test_process_ses_receipts_tasks.py index 1ec8afcea..00225acce 100644 --- a/tests/app/celery/test_process_ses_receipts_tasks.py +++ b/tests/app/celery/test_process_ses_receipts_tasks.py @@ -4,7 +4,7 @@ from unittest.mock import ANY from freezegun import freeze_time -from app import encryption, statsd_client +from app import encryption from app.celery.process_ses_receipts_tasks import ( process_ses_results, remove_emails_from_bounce, @@ -141,8 +141,6 @@ def test_ses_callback_should_update_notification_status( sample_email_template, mocker): with freeze_time('2001-01-01T12:00:00'): - mocker.patch('app.statsd_client.incr') - mocker.patch('app.statsd_client.timing_with_dates') send_mock = mocker.patch( 'app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async' ) @@ -161,10 +159,6 @@ def test_ses_callback_should_update_notification_status( assert get_notification_by_id(notification.id).status == 'sending' assert process_ses_results(ses_notification_callback(reference='ref')) assert get_notification_by_id(notification.id).status == 'delivered' - statsd_client.timing_with_dates.assert_any_call( - "callback.ses.elapsed-time", datetime.utcnow(), notification.sent_at - ) - statsd_client.incr.assert_any_call("callback.ses.delivered") send_mock.assert_called_once_with([str(notification.id), ANY], queue="service-callbacks") # assert second arg is an encrypted string assert isinstance(send_mock.call_args.args[0][1], str) diff --git a/tests/app/celery/test_provider_tasks.py b/tests/app/celery/test_provider_tasks.py index f373c08d8..84382ef42 100644 --- a/tests/app/celery/test_provider_tasks.py +++ b/tests/app/celery/test_provider_tasks.py @@ -23,6 +23,7 @@ def test_should_call_send_sms_to_provider_from_deliver_sms_task( sample_notification, mocker): mocker.patch('app.delivery.send_to_providers.send_sms_to_provider') + mocker.patch('app.celery.provider_tasks.check_sms_delivery_receipt') deliver_sms(sample_notification.id) app.delivery.send_to_providers.send_sms_to_provider.assert_called_with(sample_notification) @@ -88,7 +89,6 @@ def test_should_retry_and_log_exception_for_non_SmsClientResponseException_excep assert mock_logger_exception.called -@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason") def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_sms_task(sample_notification, mocker): mocker.patch('app.delivery.send_to_providers.send_sms_to_provider', side_effect=Exception("EXPECTED")) mocker.patch('app.celery.provider_tasks.deliver_sms.retry', side_effect=MaxRetriesExceededError()) @@ -127,7 +127,6 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_email_ta app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks") -@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason") 
@pytest.mark.parametrize( 'exception_class', [ Exception(), @@ -149,7 +148,6 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_email_task assert sample_notification.status == 'technical-failure' -@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason") def test_should_technical_error_and_not_retry_if_EmailClientNonRetryableException(sample_notification, mocker): mocker.patch( 'app.delivery.send_to_providers.send_email_to_provider', diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 987e4bc8f..02f5c0e2a 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -108,10 +108,9 @@ def test_create_nightly_notification_status_triggers_relevant_tasks( assert types == expected_types_aggregated -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_billing_for_day_checks_history( sample_service, - sample_sms_template, + sample_template, mocker ): yesterday = datetime.now() - timedelta(days=1) @@ -119,13 +118,13 @@ def test_create_nightly_billing_for_day_checks_history( create_notification( created_at=yesterday, - template=sample_sms_template, + template=sample_template, status='sending', ) create_notification_history( created_at=yesterday, - template=sample_sms_template, + template=sample_template, status='delivered', ) @@ -141,7 +140,6 @@ def test_create_nightly_billing_for_day_checks_history( assert record.notifications_sent == 2 -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") @pytest.mark.parametrize('second_rate, records_num, billable_units, multiplier', [(1.0, 1, 2, [1]), (2.0, 2, 1, [1, 2])]) @@ -193,7 +191,6 @@ def test_create_nightly_billing_for_day_sms_rate_multiplier( assert record.rate_multiplier == multiplier[i] -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_billing_for_day_different_templates( sample_service, sample_template, @@ -240,8 +237,7 @@ def test_create_nightly_billing_for_day_different_templates( assert record.rate_multiplier == multiplier[i] -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") -def test_create_nightly_billing_for_day_different_sent_by( +def test_create_nightly_billing_for_day_same_sent_by( sample_service, sample_template, sample_email_template, @@ -276,16 +272,15 @@ def test_create_nightly_billing_for_day_different_sent_by( create_nightly_billing_for_day(str(yesterday.date())) records = FactBilling.query.order_by('rate_multiplier').all() - assert len(records) == 2 + assert len(records) == 1 for _, record in enumerate(records): assert record.local_date == datetime.date(yesterday) assert record.rate == Decimal(1.33) - assert record.billable_units == 1 + assert record.billable_units == 2 assert record.rate_multiplier == 1.0 -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_billing_for_day_null_sent_by_sms( sample_service, sample_template, @@ -497,7 +492,6 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio assert sms_delivered_row.key_type == KEY_TYPE_NORMAL -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_notification_status_for_service_and_day_overwrites_old_data(notify_db_session): first_service = create_service(service_name='First Service') first_template = create_template(service=first_service) diff --git a/tests/app/celery/test_research_mode_tasks.py 
b/tests/app/celery/test_research_mode_tasks.py index 5bc401d54..6c9bad953 100644 --- a/tests/app/celery/test_research_mode_tasks.py +++ b/tests/app/celery/test_research_mode_tasks.py @@ -12,6 +12,7 @@ from app.celery.research_mode_tasks import ( sns_callback, ) from app.config import QueueNames +from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_FAILED, Notification from tests.conftest import Matcher dvla_response_file_matcher = Matcher( @@ -20,24 +21,33 @@ dvla_response_file_matcher = Matcher( ) -@pytest.mark.skip(reason="Re-enable when SMS receipts exist") -def test_make_sns_callback(notify_api, rmock): +def test_make_sns_callback(notify_api, rmock, mocker): endpoint = "http://localhost:6011/notifications/sms/sns" + get_notification_by_id = mocker.patch('app.celery.research_mode_tasks.get_notification_by_id') + n = Notification() + n.id = 1234 + n.status = NOTIFICATION_DELIVERED + get_notification_by_id.return_value = n rmock.request( "POST", endpoint, json={"status": "success"}, status_code=200) - send_sms_response("sns", "1234", "2028675309") + send_sms_response("sns", "1234") assert rmock.called assert rmock.request_history[0].url == endpoint - assert json.loads(rmock.request_history[0].text)['MSISDN'] == '2028675309' + assert json.loads(rmock.request_history[0].text)['status'] == 'delivered' -@pytest.mark.skip(reason="Re-enable when SMS receipts exist") def test_callback_logs_on_api_call_failure(notify_api, rmock, mocker): endpoint = "http://localhost:6011/notifications/sms/sns" + get_notification_by_id = mocker.patch('app.celery.research_mode_tasks.get_notification_by_id') + n = Notification() + n.id = 1234 + n.status = NOTIFICATION_FAILED + get_notification_by_id.return_value = n + rmock.request( "POST", endpoint, @@ -46,12 +56,12 @@ def test_callback_logs_on_api_call_failure(notify_api, rmock, mocker): mock_logger = mocker.patch('app.celery.tasks.current_app.logger.error') with pytest.raises(HTTPError): - send_sms_response("mmg", "1234", "07700900001") + send_sms_response("sns", "1234") assert rmock.called assert rmock.request_history[0].url == endpoint mock_logger.assert_called_once_with( - 'API POST request on http://localhost:6011/notifications/sms/mmg failed with status 500' + 'API POST request on http://localhost:6011/notifications/sms/sns failed with status 500' ) @@ -65,31 +75,13 @@ def test_make_ses_callback(notify_api, mocker): assert mock_task.apply_async.call_args[0][0][0] == ses_notification_callback(some_ref) -@pytest.mark.skip(reason="Re-enable when SNS delivery receipts exist") -def test_delievered_sns_callback(): - phone_number = "2028675309" - data = json.loads(sns_callback("1234", phone_number)) - assert data['MSISDN'] == phone_number - assert data['status'] == "3" - assert data['reference'] == "sns_reference" - assert data['CID'] == "1234" - - -@pytest.mark.skip(reason="Re-enable when SNS delivery receipts exist") -def test_perm_failure_sns_callback(): - phone_number = "2028675302" - data = json.loads(sns_callback("1234", phone_number)) - assert data['MSISDN'] == phone_number - assert data['status'] == "5" - assert data['reference'] == "sns_reference" - assert data['CID'] == "1234" - - -@pytest.mark.skip(reason="Re-enable when SNS delivery receipts exist") -def test_temp_failure_sns_callback(): - phone_number = "2028675303" - data = json.loads(sns_callback("1234", phone_number)) - assert data['MSISDN'] == phone_number - assert data['status'] == "4" - assert data['reference'] == "sns_reference" +def test_delivered_sns_callback(mocker): + 
get_notification_by_id = mocker.patch('app.celery.research_mode_tasks.get_notification_by_id') + n = Notification() + n.id = 1234 + n.status = NOTIFICATION_DELIVERED + get_notification_by_id.return_value = n + + data = json.loads(sns_callback("1234")) + assert data['status'] == "delivered" assert data['CID'] == "1234" diff --git a/tests/app/clients/test_aws_cloudwatch.py b/tests/app/clients/test_aws_cloudwatch.py new file mode 100644 index 000000000..5a54383b5 --- /dev/null +++ b/tests/app/clients/test_aws_cloudwatch.py @@ -0,0 +1,87 @@ +import pytest +from flask import current_app + +from app import aws_cloudwatch_client + + +def test_check_sms_no_event_error_condition(notify_api, mocker): + boto_mock = mocker.patch.object(aws_cloudwatch_client, '_client', create=True) + # TODO + # we do this to get the AWS account number, and it seems like unit tests locally have + # access to the env variables but when we push the PR they do not. Is there a better way to get it? + mocker.patch.dict('os.environ', {"SES_DOMAIN_ARN": "1111:"}) + message_id = 'aaa' + notification_id = 'bbb' + boto_mock.filter_log_events.return_value = [] + with notify_api.app_context(): + aws_cloudwatch_client.init_app(current_app) + with pytest.raises(Exception): + aws_cloudwatch_client.check_sms(message_id, notification_id) + + +def side_effect(filterPattern, logGroupName, startTime, endTime): + if "Failure" in logGroupName and 'fail' in filterPattern: + return { + "events": + [ + { + 'logStreamName': '89db9712-c6d1-49f9-be7c-4caa7ed9efb1', + 'message': '{"delivery":{"destination":"+1661","providerResponse":"Invalid phone number"}}', + 'eventId': '37535432778099870001723210579798865345508698025292922880' + } + ] + } + + elif 'succeed' in filterPattern: + return { + "events": + [ + { + 'logStreamName': '89db9712-c6d1-49f9-be7c-4caa7ed9efb1', + 'timestamp': 1683147017911, + 'message': '{"delivery":{"destination":"+1661","providerResponse":"Phone accepted msg"}}', + 'ingestionTime': 1683147018026, + 'eventId': '37535432778099870001723210579798865345508698025292922880' + } + ] + } + else: + return {"events": []} + + +def test_check_sms_success(notify_api, mocker): + aws_cloudwatch_client.init_app(current_app) + boto_mock = mocker.patch.object(aws_cloudwatch_client, '_client', create=True) + boto_mock.filter_log_events.side_effect = side_effect + mocker.patch.dict('os.environ', {"SES_DOMAIN_ARN": "1111:"}) + + message_id = 'succeed' + notification_id = 'ccc' + with notify_api.app_context(): + aws_cloudwatch_client.check_sms(message_id, notification_id, 1000000000000) + + # We check the 'success' log group first and if we find the message_id, we are done, so there is only 1 call + assert boto_mock.filter_log_events.call_count == 1 + mock_call = str(boto_mock.filter_log_events.mock_calls[0]) + assert 'Failure' not in mock_call + assert 'succeed' in mock_call + assert 'notification.messageId' in mock_call + + +def test_check_sms_failure(notify_api, mocker): + aws_cloudwatch_client.init_app(current_app) + boto_mock = mocker.patch.object(aws_cloudwatch_client, '_client', create=True) + boto_mock.filter_log_events.side_effect = side_effect + mocker.patch.dict('os.environ', {"SES_DOMAIN_ARN": "1111:"}) + + message_id = 'fail' + notification_id = 'bbb' + with notify_api.app_context(): + aws_cloudwatch_client.check_sms(message_id, notification_id, 1000000000000) + + # We check the 'success' log group and find nothing, so we then check the 'fail' log group -- two calls. 
+ assert boto_mock.filter_log_events.call_count == 2 + mock_call = str(boto_mock.filter_log_events.mock_calls[1]) + assert 'Failure' in mock_call + assert 'fail' in mock_call + assert 'notification.messageId' in mock_call diff --git a/tests/app/clients/test_aws_ses.py b/tests/app/clients/test_aws_ses.py index fe1384dd7..7e60a1f77 100644 --- a/tests/app/clients/test_aws_ses.py +++ b/tests/app/clients/test_aws_ses.py @@ -57,7 +57,6 @@ def test_should_be_none_if_unrecognised_status_code(): ], ids=['empty', 'single_email', 'punycode']) def test_send_email_handles_reply_to_address(notify_api, mocker, reply_to_address, expected_value): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) with notify_api.app_context(): aws_ses_client.send_email( @@ -78,7 +77,6 @@ def test_send_email_handles_reply_to_address(notify_api, mocker, reply_to_addres def test_send_email_handles_punycode_to_address(notify_api, mocker): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) with notify_api.app_context(): aws_ses_client.send_email( @@ -98,7 +96,6 @@ def test_send_email_handles_punycode_to_address(notify_api, mocker): def test_send_email_raises_invalid_parameter_value_error_as_EmailClientNonRetryableException(mocker): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) error_response = { 'Error': { 'Code': 'InvalidParameterValue', @@ -107,7 +104,6 @@ def test_send_email_raises_invalid_parameter_value_error_as_EmailClientNonRetrya } } boto_mock.send_email.side_effect = botocore.exceptions.ClientError(error_response, 'opname') - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) with pytest.raises(EmailClientNonRetryableException) as excinfo: aws_ses_client.send_email( @@ -122,7 +118,6 @@ def test_send_email_raises_invalid_parameter_value_error_as_EmailClientNonRetrya def test_send_email_raises_send_rate_throttling_as_AwsSesClientThrottlingSendRateException(mocker): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) error_response = { 'Error': { 'Code': 'Throttling', @@ -143,7 +138,6 @@ def test_send_email_raises_send_rate_throttling_as_AwsSesClientThrottlingSendRat def test_send_email_does_not_raise_AwsSesClientThrottlingSendRateException_if_non_send_rate_throttling(mocker): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) error_response = { 'Error': { 'Code': 'Throttling', @@ -164,7 +158,6 @@ def test_send_email_does_not_raise_AwsSesClientThrottlingSendRateException_if_no def test_send_email_raises_other_errs_as_AwsSesClientException(mocker): boto_mock = mocker.patch.object(aws_ses_client, '_client', create=True) - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) error_response = { 'Error': { 'Code': 'ServiceUnavailable', @@ -173,7 +166,6 @@ def test_send_email_raises_other_errs_as_AwsSesClientException(mocker): } } boto_mock.send_email.side_effect = botocore.exceptions.ClientError(error_response, 'opname') - mocker.patch.object(aws_ses_client, 'statsd_client', create=True) with pytest.raises(AwsSesClientException) as excinfo: aws_ses_client.send_email( diff --git a/tests/app/clients/test_aws_sns.py b/tests/app/clients/test_aws_sns.py index 
514e19d84..e730623f9 100644 --- a/tests/app/clients/test_aws_sns.py +++ b/tests/app/clients/test_aws_sns.py @@ -5,7 +5,6 @@ from app import aws_sns_client def test_send_sms_successful_returns_aws_sns_response(notify_api, mocker): boto_mock = mocker.patch.object(aws_sns_client, '_client', create=True) - mocker.patch.object(aws_sns_client, 'statsd_client', create=True) to = "6135555555" content = reference = 'foo' with notify_api.app_context(): @@ -22,7 +21,6 @@ def test_send_sms_successful_returns_aws_sns_response(notify_api, mocker): def test_send_sms_returns_raises_error_if_there_is_no_valid_number_is_found(notify_api, mocker): mocker.patch.object(aws_sns_client, '_client', create=True) - mocker.patch.object(aws_sns_client, 'statsd_client', create=True) to = "" content = reference = 'foo' with pytest.raises(ValueError) as excinfo: diff --git a/tests/app/clients/test_sms.py b/tests/app/clients/test_sms.py index 59d053845..5bd9bb758 100644 --- a/tests/app/clients/test_sms.py +++ b/tests/app/clients/test_sms.py @@ -1,6 +1,5 @@ import pytest -from app import statsd_client from app.clients.sms import SmsClient, SmsClientResponseException @@ -12,13 +11,12 @@ def fake_client(notify_api): return 'fake' fake_client = FakeSmsClient() - fake_client.init_app(notify_api, statsd_client) + # fake_client.init_app(notify_api) return fake_client -@pytest.mark.skip(reason="Needs updating for TTS: New SMS client") def test_send_sms(fake_client, mocker): - mock_send = mocker.patch.object(fake_client, 'try_send_sms') + mock_send = mocker.patch.object(fake_client, 'send_sms') fake_client.send_sms( to='to', @@ -29,14 +27,13 @@ def test_send_sms(fake_client, mocker): ) mock_send.assert_called_with( - 'to', 'content', 'reference', False, 'testing' + to='to', content='content', reference='reference', international=False, sender='testing' ) -@pytest.mark.skip(reason="Needs updating for TTS: New SMS client") def test_send_sms_error(fake_client, mocker): mocker.patch.object( - fake_client, 'try_send_sms', side_effect=SmsClientResponseException('error') + fake_client, 'send_sms', side_effect=SmsClientResponseException('error') ) with pytest.raises(SmsClientResponseException): diff --git a/tests/app/dao/test_fact_billing_dao.py b/tests/app/dao/test_fact_billing_dao.py index edb4e3692..f1f7650e3 100644 --- a/tests/app/dao/test_fact_billing_dao.py +++ b/tests/app/dao/test_fact_billing_dao.py @@ -211,8 +211,6 @@ def test_fetch_billing_data_for_day_returns_empty_list(notify_db_session): assert results == [] -# TODO: ready for reactivation? 
-@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_fetch_billing_data_for_day_uses_correct_table(notify_db_session): service = create_service() create_service_data_retention(service, notification_type='email', days_of_retention=3) diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 335dece62..6eb1d0206 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -116,7 +116,6 @@ def test_get_jobs_for_service(sample_template): assert one_job_from_db != other_job_from_db -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_get_jobs_for_service_with_limit_days_param(sample_template): one_job = create_job(sample_template) old_job = create_job(sample_template, created_at=datetime.now() - timedelta(days=8)) diff --git a/tests/app/dao/test_organisation_dao.py b/tests/app/dao/test_organisation_dao.py index 45cf5f589..eeb254616 100644 --- a/tests/app/dao/test_organisation_dao.py +++ b/tests/app/dao/test_organisation_dao.py @@ -90,10 +90,6 @@ def test_update_organisation(notify_db_session): (['ABC', 'DEF'], {'abc', 'def'}), ([], set()), (None, {'123', '456'}), - pytest.param( - ['abc', 'ABC'], {'abc'}, - marks=pytest.mark.xfail(raises=IntegrityError) - ), )) def test_update_organisation_domains_lowercases( notify_db_session, @@ -113,6 +109,29 @@ def test_update_organisation_domains_lowercases( assert {domain.domain for domain in organisation.domains} == expected_domains +@pytest.mark.parametrize('domain_list, expected_domains', ( + (['abc', 'ABC'], {'abc'}), +)) +def test_update_organisation_domains_lowercases_integrity_error( + notify_db_session, + domain_list, + expected_domains, +): + create_organisation() + + organisation = Organisation.query.one() + + # Seed some domains + dao_update_organisation(organisation.id, domains=['123', '456']) + + with pytest.raises(expected_exception=IntegrityError): + + # This should overwrite the seeded domains + dao_update_organisation(organisation.id, domains=domain_list) + + assert {domain.domain for domain in organisation.domains} == expected_domains + + def test_update_organisation_does_not_update_the_service_if_certain_attributes_not_provided( sample_service, sample_organisation, diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 1ca40cedd..e55935c13 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -193,44 +193,6 @@ def test_get_sms_providers_for_update_returns_nothing_if_recent_updates(restore_ assert not resp -@pytest.mark.skip(reason="Reenable if/when we add a second SMS provider") -@pytest.mark.parametrize(['starting_priorities', 'expected_priorities'], [ - ({'sns': 50, 'other': 50}, {'sns': 40, 'other': 60}), - ({'sns': 0, 'other': 20}, {'sns': 0, 'other': 30}), # lower bound respected - ({'sns': 50, 'other': 100}, {'sns': 40, 'other': 100}), # upper bound respected - - # document what happens if they have unexpected values outside of the 0 - 100 range (due to manual setting from - # the admin app). the code never causes further issues, but sometimes doesn't actively reset the vaues to 0-100. 
- ({'sns': 150, 'other': 50}, {'sns': 140, 'other': 60}), - ({'sns': 50, 'other': 150}, {'sns': 40, 'other': 100}), - - ({'sns': -100, 'other': 50}, {'sns': 0, 'other': 60}), - ({'sns': 50, 'other': -100}, {'sns': 40, 'other': -90}), -]) -def test_reduce_sms_provider_priority_adjusts_provider_priorities( - mocker, - restore_provider_details, - notify_user, - starting_priorities, - expected_priorities, -): - mock_adjust = mocker.patch('app.dao.provider_details_dao._adjust_provider_priority') - - sns = get_provider_details_by_identifier('sns') - other = get_provider_details_by_identifier('other') - - sns.priority = starting_priorities['sns'] - other.priority = starting_priorities['other'] - # need to update these manually to avoid triggering the `onupdate` clause of the updated_at column - ProviderDetails.query.filter(ProviderDetails.notification_type == 'sms').update({'updated_at': datetime.min}) - - # switch away from sns. currently both 50/50 - dao_reduce_sms_provider_priority('sns', time_threshold=timedelta(minutes=10)) - - mock_adjust.assert_any_call(other, expected_priorities['other']) - mock_adjust.assert_any_call(sns, expected_priorities['sns']) - - def test_reduce_sms_provider_priority_does_nothing_if_providers_have_recently_changed( mocker, restore_provider_details, @@ -255,38 +217,6 @@ def test_reduce_sms_provider_priority_does_nothing_if_there_is_only_one_active_p assert mock_adjust.called is False -@pytest.mark.skip(reason="Reenable if/when we add a second SMS provider") -@pytest.mark.parametrize('existing_sns, existing_other, new_sns, new_other', [ - (50, 50, 60, 40), # not just 50/50 - 60/40 specifically - (65, 35, 60, 40), # doesn't overshoot if there's less than 10 difference - (0, 100, 10, 90), # only adjusts by 10 - (100, 100, 90, 90), # it tries to fix weird data - it will reduce both if needs be -]) -def test_adjust_provider_priority_back_to_resting_points_updates_all_providers( - restore_provider_details, - mocker, - existing_sns, - existing_other, - new_sns, - new_other -): - sns = get_provider_details_by_identifier('sns') - other = get_provider_details_by_identifier('other') - sns.priority = existing_sns - other.priority = existing_other - - mock_adjust = mocker.patch('app.dao.provider_details_dao._adjust_provider_priority') - mock_get_providers = mocker.patch('app.dao.provider_details_dao._get_sms_providers_for_update', return_value=[ - sns, other - ]) - - dao_adjust_provider_priority_back_to_resting_points() - - mock_get_providers.assert_called_once_with(timedelta(hours=1)) - mock_adjust.assert_any_call(sns, new_sns) - mock_adjust.assert_any_call(other, new_other) - - def test_adjust_provider_priority_back_to_resting_points_does_nothing_if_theyre_already_at_right_values( restore_provider_details, mocker, diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index d6ec13c3f..405035cc0 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from unittest import mock import pytest +import sqlalchemy from freezegun import freeze_time from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -585,7 +586,6 @@ def test_update_service_permission_creates_a_history_record_with_current_data(no assert history[2].version == 3 -@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason") def test_create_service_and_history_is_transactional(notify_db_session): user = create_user() assert Service.query.count() == 
0 @@ -596,10 +596,11 @@ def test_create_service_and_history_is_transactional(notify_db_session): restricted=False, created_by=user) - with pytest.raises(IntegrityError) as excinfo: + try: dao_create_service(service, user) + except sqlalchemy.exc.IntegrityError as seeei: + assert 'null value in column "name" of relation "services_history" violates not-null constraint' in str(seeei) - assert 'column "name" violates not-null constraint' in str(excinfo.value) assert Service.query.count() == 0 assert Service.get_history_model().query.count() == 0 @@ -826,7 +827,6 @@ def test_dao_fetch_todays_stats_for_service_only_includes_today_during_bst(notif assert not stats.get('permanent-failure') -@pytest.mark.skip(reason="Need a better way to test variable DST date") def test_dao_fetch_todays_stats_for_service_only_includes_today_when_clocks_fall_back(notify_db_session): template = create_template(service=create_service()) with freeze_time('2021-10-30T22:59:59'): @@ -871,7 +871,6 @@ def test_dao_fetch_todays_stats_for_service_only_includes_during_utc(notify_db_s assert not stats.get('permanent-failure') -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_dao_fetch_todays_stats_for_all_services_includes_all_services(notify_db_session): # two services, each with an email and sms notification service1 = create_service(service_name='service 1', email_from='service.1') @@ -910,7 +909,6 @@ def test_dao_fetch_todays_stats_for_all_services_only_includes_today(notify_db_s assert stats['failed'] == 1 -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_dao_fetch_todays_stats_for_all_services_groups_correctly(notify_db_session): service1 = create_service(service_name='service 1', email_from='service.1') service2 = create_service(service_name='service 2', email_from='service.2') @@ -937,7 +935,6 @@ def test_dao_fetch_todays_stats_for_all_services_groups_correctly(notify_db_sess service2.created_at, 'sms', 'created', 1) in stats -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_dao_fetch_todays_stats_for_all_services_includes_all_keys_by_default(notify_db_session): template = create_template(service=create_service()) create_notification(template=template, key_type=KEY_TYPE_NORMAL) @@ -950,7 +947,6 @@ def test_dao_fetch_todays_stats_for_all_services_includes_all_keys_by_default(no assert stats[0].count == 3 -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_dao_fetch_todays_stats_for_all_services_can_exclude_from_test_key(notify_db_session): template = create_template(service=create_service()) create_notification(template=template, key_type=KEY_TYPE_NORMAL) diff --git a/tests/app/dao/test_uploads_dao.py b/tests/app/dao/test_uploads_dao.py index a17a20a2e..4251a703a 100644 --- a/tests/app/dao/test_uploads_dao.py +++ b/tests/app/dao/test_uploads_dao.py @@ -1,6 +1,5 @@ from datetime import datetime, timedelta -import pytest from freezegun import freeze_time from app.dao.uploads_dao import dao_get_uploads_by_service_id @@ -38,39 +37,21 @@ def create_uploaded_template(service): ) -@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time("2020-02-02 09:00") # GMT time def test_get_uploads_for_service(sample_template): create_service_data_retention(sample_template.service, 'sms', days_of_retention=9) job = create_job(sample_template, processing_started=datetime.utcnow()) - letter_template = create_uploaded_template(sample_template.service) - letter = 
create_uploaded_letter(letter_template, sample_template.service) other_service = create_service(service_name="other service") other_template = create_template(service=other_service) other_job = create_job(other_template, processing_started=datetime.utcnow()) - other_letter_template = create_uploaded_template(other_service) - create_uploaded_letter(other_letter_template, other_service) uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items other_uploads_from_db = dao_get_uploads_by_service_id(other_job.service_id).items - assert len(uploads_from_db) == 2 + assert len(uploads_from_db) == 1 assert uploads_from_db[0] == ( - None, - 'Uploaded letters', - 1, - 'letter', - None, - letter.created_at.replace(hour=22, minute=30, second=0, microsecond=0), - None, - letter.created_at.replace(hour=22, minute=30, second=0, microsecond=0), - None, - 'letter_day', - None, - ) - assert uploads_from_db[1] == ( job.id, job.original_file_name, job.notification_count, @@ -84,21 +65,8 @@ def test_get_uploads_for_service(sample_template): None, ) - assert len(other_uploads_from_db) == 2 - assert other_uploads_from_db[0] == ( - None, - 'Uploaded letters', - 1, - 'letter', - None, - letter.created_at.replace(hour=22, minute=30, second=0, microsecond=0), - None, - letter.created_at.replace(hour=22, minute=30, second=0, microsecond=0), - None, - "letter_day", - None, - ) - assert other_uploads_from_db[1] == (other_job.id, + assert len(other_uploads_from_db) == 1 + assert other_uploads_from_db[0] == (other_job.id, other_job.original_file_name, other_job.notification_count, other_job.template.template_type, @@ -110,81 +78,9 @@ def test_get_uploads_for_service(sample_template): "job", None) - assert uploads_from_db[1] != other_uploads_from_db[1] + assert uploads_from_db[0] != other_uploads_from_db[0] -@pytest.mark.skip(reason="Investigate what remains after removing letters") -@freeze_time("2020-02-02 18:00") -def test_get_uploads_for_service_groups_letters(sample_template): - letter_template = create_uploaded_template(sample_template.service) - - # Just gets into yesterday’s print run - create_uploaded_letter(letter_template, sample_template.service, created_at=( - datetime(2020, 2, 1, 22, 29, 59) - )) - - # Yesterday but in today’s print run - create_uploaded_letter(letter_template, sample_template.service, created_at=( - datetime(2020, 2, 1, 22, 30) - )) - # First thing today - create_uploaded_letter(letter_template, sample_template.service, created_at=( - datetime(2020, 2, 2, 5, 0) - )) - # Just before today’s print deadline - create_uploaded_letter(letter_template, sample_template.service, created_at=( - datetime(2020, 2, 2, 22, 29, 59) - )) - - # Just missed today’s print deadline - create_uploaded_letter(letter_template, sample_template.service, created_at=( - datetime(2020, 2, 2, 22, 30) - )) - - uploads_from_db = dao_get_uploads_by_service_id(sample_template.service_id).items - - assert [ - (upload.notification_count, upload.created_at) - for upload in uploads_from_db - ] == [ - (1, datetime(2020, 2, 3, 22, 30)), - (3, datetime(2020, 2, 2, 22, 30)), - (1, datetime(2020, 2, 1, 22, 30)), - ] - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") -def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template): - create_job(sample_template, job_status='scheduled') - create_job(sample_template, job_status='cancelled') - letter_template = create_uploaded_template(sample_template.service) - create_uploaded_letter(letter_template, sample_template.service, 
status='cancelled') - - assert len(dao_get_uploads_by_service_id(sample_template.service_id).items) == 0 - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") -def test_get_uploads_orders_by_created_at_desc(sample_template): - letter_template = create_uploaded_template(sample_template.service) - - upload_1 = create_job(sample_template, processing_started=datetime.utcnow(), - job_status=JOB_STATUS_IN_PROGRESS) - upload_2 = create_job(sample_template, processing_started=datetime.utcnow(), - job_status=JOB_STATUS_IN_PROGRESS) - create_uploaded_letter(letter_template, sample_template.service, status='delivered') - - results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items - - assert [ - (result.id, result.upload_type) for result in results - ] == [ - (None, 'letter_day'), - (upload_2.id, 'job'), - (upload_1.id, 'job'), - ] - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_orders_by_processing_started_desc(sample_template): days_ago = datetime.utcnow() - timedelta(days=3) upload_1 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1), @@ -201,121 +97,6 @@ def test_get_uploads_orders_by_processing_started_desc(sample_template): assert results[1].id == upload_2.id -@pytest.mark.skip(reason="Investigate what remains after removing letters") -@freeze_time("2020-10-27 16:15") # GMT time -def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template): - letter_template = create_uploaded_template(sample_template.service) - - days_ago = datetime.utcnow() - timedelta(days=4) - create_uploaded_letter(letter_template, service=letter_template.service) - upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1), - created_at=days_ago, - job_status=JOB_STATUS_IN_PROGRESS) - upload_3 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2), - created_at=days_ago, - job_status=JOB_STATUS_IN_PROGRESS) - create_uploaded_letter(letter_template, service=letter_template.service, - created_at=datetime.utcnow() - timedelta(days=3)) - - results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items - - assert len(results) == 4 - assert results[0].id is None - assert results[1].id == upload_2.id - assert results[2].id == upload_3.id - assert results[3].id is None - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") -@freeze_time('2020-04-02 14:00') # Few days after the clocks go forward -def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template): - letter_template = create_uploaded_template(sample_template.service) - create_service_data_retention(sample_template.service, 'sms', days_of_retention=3) - - days_ago = datetime.utcnow() - timedelta(days=4) - upload_1 = create_uploaded_letter(letter_template, service=letter_template.service) - upload_2 = create_job( - sample_template, processing_started=datetime.utcnow() - timedelta(days=1), created_at=days_ago, - job_status=JOB_STATUS_IN_PROGRESS - ) - # older than custom retention for sms: - create_job( - sample_template, processing_started=datetime.utcnow() - timedelta(days=5), created_at=days_ago, - job_status=JOB_STATUS_IN_PROGRESS - ) - upload_3 = create_uploaded_letter( - letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=3) - ) - - # older than retention for sms but within letter retention: - upload_4 = 
create_uploaded_letter( - letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=6) - ) - - # older than default retention for letters: - create_uploaded_letter( - letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=8) - ) - - results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items - - assert len(results) == 4 - - # Uploaded letters get their `created_at` shifted time of printing - # 21:30 EST == 16:30 UTC - assert results[0].created_at == upload_1.created_at.replace(hour=21, minute=30, second=0, microsecond=0) - - # Jobs keep their original `created_at` - assert results[1].created_at == upload_2.created_at.replace(hour=14, minute=00, second=0, microsecond=0) - - # Still in BST here… - assert results[2].created_at == upload_3.created_at.replace(hour=21, minute=30, second=0, microsecond=0) - - # Now we’ve gone far enough back to be in GMT - # 17:30 GMT == 17:30 UTC - assert results[3].created_at == upload_4.created_at.replace(hour=21, minute=30, second=0, microsecond=0) - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") -@freeze_time('2020-02-02 14:00') -def test_get_uploads_is_paginated(sample_template): - letter_template = create_uploaded_template(sample_template.service) - - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=3), - ) - create_job( - sample_template, processing_started=datetime.utcnow() - timedelta(minutes=2), - job_status=JOB_STATUS_IN_PROGRESS, - ) - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=1), - ) - create_job( - sample_template, processing_started=datetime.utcnow(), - job_status=JOB_STATUS_IN_PROGRESS, - ) - - results = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=1) - - assert results.per_page == 1 - assert results.total == 3 - assert len(results.items) == 1 - assert results.items[0].created_at == datetime.utcnow().replace(hour=22, minute=30, second=0, microsecond=0) - assert results.items[0].notification_count == 2 - assert results.items[0].upload_type == 'letter_day' - - results = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=1) - - assert len(results.items) == 1 - assert results.items[0].created_at == datetime.utcnow().replace(hour=14, minute=0, second=0, microsecond=0) - assert results.items[0].notification_count == 1 - assert results.items[0].upload_type == 'job' - - -@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_returns_empty_list(sample_service): items = dao_get_uploads_by_service_id(sample_service.id).items assert items == [] diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index c5bd57648..21e854c26 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -45,36 +45,6 @@ def setup_function(_function): send_to_providers.provider_cache.clear() -@pytest.mark.skip(reason="Reenable when we have more than 1 SMS provider") -def test_provider_to_use_should_return_random_provider(mocker, notify_db_session): - sns = get_provider_details_by_identifier('sns') - other = get_provider_details_by_identifier('other') - sns.priority = 60 - other.priority = 40 - mock_choices = 
-
-    ret = send_to_providers.provider_to_use('sms', international=True)
-
-    mock_choices.assert_called_once_with([sns, other], weights=[60, 40])
-    assert ret.name == 'sns'
-
-
-@pytest.mark.skip(reason="Reenable when we have more than 1 SMS provider")
-def test_provider_to_use_should_cache_repeated_calls(mocker, notify_db_session):
-    mock_choices = mocker.patch(
-        'app.delivery.send_to_providers.random.choices',
-        wraps=send_to_providers.random.choices,
-    )
-
-    results = [
-        send_to_providers.provider_to_use('sms', international=False)
-        for _ in range(10)
-    ]
-
-    assert all(result == results[0] for result in results)
-    assert len(mock_choices.call_args_list) == 1
-
-
 @pytest.mark.parametrize('international_provider_priority', (
     # Since there’s only one international provider it should always
     # be used, no matter what its priority is set to
@@ -93,18 +63,6 @@ def test_provider_to_use_should_only_return_sns_for_international(
     assert ret.name == 'sns'
 
 
-@pytest.mark.skip(reason="Reenable when we have more than 1 SMS provider")
-def test_provider_to_use_should_only_return_active_providers(mocker, restore_provider_details):
-    sns = get_provider_details_by_identifier('sns')
-    other = get_provider_details_by_identifier('other')
-    sns.active = False
-    other.active = True
-
-    ret = send_to_providers.provider_to_use('sms')
-
-    assert ret.name == 'other'
-
-
 def test_provider_to_use_raises_if_no_active_providers(mocker, restore_provider_details):
     sns = get_provider_details_by_identifier('sns')
     sns.active = False
@@ -261,7 +219,7 @@ def test_should_call_send_sms_response_task_if_research_mode(
     notify_db_session, sample_service, sample_notification, mocker, research_mode, key_type
 ):
     mocker.patch('app.aws_sns_client.send_sms')
-    mocker.patch('app.delivery.send_to_providers.send_sms_response')
+    send_sms_response = mocker.patch('app.delivery.send_to_providers.send_sms_response')
 
     if research_mode:
         sample_service.research_mode = True
@@ -275,8 +233,8 @@ def test_should_call_send_sms_response_task_if_research_mode(
     )
 
     assert not aws_sns_client.send_sms.called
-    app.delivery.send_to_providers.send_sms_response.assert_called_once_with(
-        'sns', str(sample_notification.id), sample_notification.to
+    send_sms_response.assert_called_once_with(
+        'sns', str(sample_notification.id)
     )
 
     persisted_notification = notifications_dao.get_notification_by_id(sample_notification.id)
@@ -288,17 +246,15 @@ def test_should_call_send_sms_response_task_if_research_mode(
     assert not persisted_notification.personalisation
 
 
-@pytest.mark.skip(reason="Needs updating when we get SMS delivery receipts done")
 def test_should_have_sending_status_if_fake_callback_function_fails(sample_notification, mocker):
     mocker.patch('app.delivery.send_to_providers.send_sms_response', side_effect=HTTPError)
     sample_notification.key_type = KEY_TYPE_TEST
-
     with pytest.raises(HTTPError):
         send_to_providers.send_sms_to_provider(
             sample_notification
         )
-    assert sample_notification.status == 'sending'
+    assert sample_notification.status == 'sent'
     assert sample_notification.sent_by == 'sns'
 
 
@@ -389,14 +345,14 @@ def test_send_email_to_provider_should_call_research_mode_task_response_task_if_
     reference = uuid.uuid4()
     mocker.patch('app.uuid.uuid4', return_value=reference)
     mocker.patch('app.aws_ses_client.send_email')
-    mocker.patch('app.delivery.send_to_providers.send_email_response')
+    send_email_response = mocker.patch('app.delivery.send_to_providers.send_email_response')
     send_to_providers.send_email_to_provider(
         notification
     )
 
     assert not app.aws_ses_client.send_email.called
-    app.delivery.send_to_providers.send_email_response.assert_called_once_with(str(reference), 'john@smith.com')
+    send_email_response.assert_called_once_with(str(reference), 'john@smith.com')
 
     persisted_notification = Notification.query.filter_by(id=notification.id).one()
     assert persisted_notification.to == 'john@smith.com'
     assert persisted_notification.template_id == sample_email_template.id
diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py
index 0b7cd71f2..33027e9b5 100644
--- a/tests/app/inbound_sms/test_rest.py
+++ b/tests/app/inbound_sms/test_rest.py
@@ -76,7 +76,6 @@ def test_post_to_get_inbound_sms_filters_international_user_number(admin_request
     )['data']
 
     assert len(sms) == 2
-    print(f'sms is: {sms}')
     assert sms[1]['id'] == str(one.id)
     assert sms[1]['user_number'] == str(one.user_number)
diff --git a/tests/app/notifications/test_notifications_ses_callback.py b/tests/app/notifications/test_notifications_ses_callback.py
index 1bff3bab2..da0bf4f50 100644
--- a/tests/app/notifications/test_notifications_ses_callback.py
+++ b/tests/app/notifications/test_notifications_ses_callback.py
@@ -86,7 +86,6 @@ def test_check_and_queue_callback_task(mocker, sample_notification):
 
     # callback_api doesn't match by equality for some
     # reason, so we need to take this approach instead
-    print(f'mock_create.mock_calls is: {mock_create.mock_calls}')
     mock_create_args = mock_create.mock_calls[0][1]
     assert mock_create_args[0] == sample_notification
     assert mock_create_args[1].id == callback_api.id
diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py
index bb764a80a..52e797ebe 100644
--- a/tests/app/notifications/test_validators.py
+++ b/tests/app/notifications/test_validators.py
@@ -587,7 +587,6 @@ def test_check_reply_to_sms_type(sample_service):
     assert check_reply_to(sample_service.id, sms_sender.id, SMS_TYPE) == '123456'
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_check_if_service_can_send_files_by_email_raises_if_no_contact_link_set(sample_service):
     with pytest.raises(BadRequestError) as e:
         check_if_service_can_send_files_by_email(
diff --git a/tests/app/organisation/test_invite_rest.py b/tests/app/organisation/test_invite_rest.py
index a3e544370..905fa016f 100644
--- a/tests/app/organisation/test_invite_rest.py
+++ b/tests/app/organisation/test_invite_rest.py
@@ -10,7 +10,6 @@ from tests import create_admin_authorization_header
 from tests.app.db import create_invited_org_user
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 @pytest.mark.parametrize('platform_admin, expected_invited_by', (
     (True, 'The GOV.UK Notify team'),
     (False, 'Test User')
diff --git a/tests/app/organisation/test_rest.py b/tests/app/organisation/test_rest.py
index 5aa2763f6..40de5aafa 100644
--- a/tests/app/organisation/test_rest.py
+++ b/tests/app/organisation/test_rest.py
@@ -127,10 +127,7 @@ def test_get_organisation_by_id_returns_domains(admin_request, notify_db_session
     ('foo.gov.uk', 200),
     ('bar.gov.uk', 200),
     ('oof.gov.uk', 404),
-    pytest.param(
-        'rab.gov.uk', 200,
-        marks=pytest.mark.xfail(raises=AssertionError),
-    ),
+    ('rab.gov.uk', 200),
     (None, 400),
     ('personally.identifying.information@example.com', 400),
 ))
@@ -152,7 +149,9 @@ def test_get_organisation_by_domain(
         domain=domain,
     )
 
-    if expected_status == 200:
+    if domain == 'rab.gov.uk' and expected_status == 200:
+        assert response['id'] == str(other_org.id)
+    elif expected_status == 200:
         assert response['id'] == str(org.id)
     else:
         assert response['result'] == 'error'
@@ -490,7 +489,6 @@ def test_post_update_organisation_set_mou_doesnt_email_if_no_signed_by(
     assert queue_mock.called is False
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 @pytest.mark.parametrize('on_behalf_of_name, on_behalf_of_email_address, templates_and_recipients', [
     (
         None,
diff --git a/tests/app/performance_dashboard/test_rest.py b/tests/app/performance_dashboard/test_rest.py
index 5ac33c1b0..0fe55dc04 100644
--- a/tests/app/performance_dashboard/test_rest.py
+++ b/tests/app/performance_dashboard/test_rest.py
@@ -1,7 +1,5 @@
 from datetime import date
 
-import pytest
-
 from tests.app.db import (
     create_ft_notification_status,
     create_process_time,
@@ -9,11 +7,9 @@ from tests.app.db import (
 )
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Needs updating for new providers")
 def test_performance_dashboard(sample_service, admin_request):
     template_sms = create_template(service=sample_service, template_type='sms', template_name='a')
     template_email = create_template(service=sample_service, template_type='email', template_name='b')
-    template_letter = create_template(service=sample_service, template_type='letter', template_name='c')
     create_ft_notification_status(local_date=date(2021, 2, 28),
                                   service=template_email.service,
                                   template=template_email,
@@ -22,10 +18,6 @@ def test_performance_dashboard(sample_service, admin_request):
                                   service=template_sms.service,
                                   template=template_sms,
                                   count=5)
-    create_ft_notification_status(local_date=date(2021, 2, 28),
-                                  service=template_letter.service,
-                                  template=template_letter,
-                                  count=2)
     create_ft_notification_status(local_date=date(2021, 3, 1),
                                   service=template_email.service,
                                   template=template_email,
@@ -34,10 +26,6 @@ def test_performance_dashboard(sample_service, admin_request):
                                   service=template_sms.service,
                                   template=template_sms,
                                   count=20)
-    create_ft_notification_status(local_date=date(2021, 3, 1),
-                                  service=template_letter.service,
-                                  template=template_letter,
-                                  count=3)
     create_ft_notification_status(local_date=date(2021, 3, 2),
                                   service=template_email.service,
                                   template=template_email,
@@ -46,10 +34,6 @@ def test_performance_dashboard(sample_service, admin_request):
                                   service=template_sms.service,
                                   template=template_sms,
                                   count=30)
-    create_ft_notification_status(local_date=date(2021, 3, 2),
-                                  service=template_letter.service,
-                                  template=template_letter,
-                                  count=10)
     create_ft_notification_status(local_date=date(2021, 3, 3),
                                   service=template_email.service,
                                   template=template_email,
@@ -58,10 +42,6 @@ def test_performance_dashboard(sample_service, admin_request):
                                   service=template_sms.service,
                                   template=template_sms,
                                   count=35)
-    create_ft_notification_status(local_date=date(2021, 3, 3),
-                                  service=template_letter.service,
-                                  template=template_letter,
-                                  count=15)
 
     create_process_time(local_date='2021-02-28', messages_total=15, messages_within_10_secs=14)
     create_process_time(local_date='2021-03-01', messages_total=35, messages_within_10_secs=34)
@@ -72,13 +52,12 @@ def test_performance_dashboard(sample_service, admin_request):
                                        start_date='2021-03-01',
                                        end_date='2021-03-02')
 
-    assert results['total_notifications'] == 10+5+2+15+20+3+25+30+10+45+35+15
+    assert results['total_notifications'] == 185
     assert results['email_notifications'] == 10+15+25+45
     assert results['sms_notifications'] == 5+20+30+35
-    assert results['letter_notifications'] == 2+3+10+15
-    assert results['notifications_by_type'] == [{"date": '2021-03-01', "emails": 15, "sms": 20, "letters": 3},
-                                                {"date": '2021-03-02', "emails": 25, "sms": 30, "letters": 10}]
-    assert results['processing_time'] == [{"date": "2021-03-01", "percentage_under_10_seconds": 97.1428571428571},
+    assert results['notifications_by_type'] == [{"date": '2021-03-01', "emails": 15, "sms": 20},
+                                                {"date": '2021-03-02', "emails": 25, "sms": 30}]
+    assert results['processing_time'] == [{"date": "2021-03-01", "percentage_under_10_seconds": 97.14285714285714},
                                           {"date": "2021-03-02", "percentage_under_10_seconds": 80.0}]
     assert results["live_service_count"] == 1
     assert results["services_using_notify"][0]["service_name"] == sample_service.name
diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py
index 83122dcbc..dbd8b8d85 100644
--- a/tests/app/service/send_notification/test_send_notification.py
+++ b/tests/app/service/send_notification/test_send_notification.py
@@ -169,12 +169,6 @@ def test_send_notification_with_placeholders_replaced(notify_api, sample_email_t
         ),
         '6',
     ),
-    pytest.param(
-        None,
-        ('we consider None equivalent to missing personalisation'),
-        '',
-        marks=pytest.mark.xfail
-    ),
 ])
 def test_send_notification_with_placeholders_replaced_with_unusual_types(
     client,
@@ -209,6 +203,43 @@ def test_send_notification_with_placeholders_replaced_with_unusual_types(
     assert response_data['subject'] == expected_subject
 
 
+@pytest.mark.parametrize('personalisation, expected_body, expected_subject', [
+    (
+        None,
+        ('we consider None equivalent to missing personalisation'),
+        '',
+    ),
+])
+def test_send_notification_with_placeholders_replaced_with_unusual_types_no_personalization(
+    client,
+    sample_email_template_with_placeholders,
+    mocker,
+    personalisation,
+    expected_body,
+    expected_subject,
+):
+    mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
+
+    response = client.post(
+        path='/notifications/email',
+        data=json.dumps(
+            {
+                'to': 'ok@ok.com',
+                'template': str(sample_email_template_with_placeholders.id),
+                'personalisation': {
+                    'name': personalisation
+                }
+            }
+        ),
+        headers=[
+            ('Content-Type', 'application/json'),
+            create_service_authorization_header(service_id=sample_email_template_with_placeholders.service.id)
+        ]
+    )
+
+    assert response.status_code == 400
+
+
 def test_should_not_send_notification_for_archived_template(notify_api, sample_template):
     with notify_api.test_request_context():
         with notify_api.test_client() as client:
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index 3f9c98563..4c50545bd 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1944,7 +1944,6 @@ def test_get_detailed_service(sample_template, client, sample_service, today_onl
     assert service['statistics'][SMS_TYPE] == stats
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling")
 def test_get_services_with_detailed_flag(client, sample_template):
     notifications = [
         create_notification(sample_template),
@@ -1967,7 +1966,6 @@ def test_get_services_with_detailed_flag(client, sample_template):
     }
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling")
 def test_get_services_with_detailed_flag_excluding_from_test_key(client, sample_template):
     create_notification(sample_template, key_type=KEY_TYPE_NORMAL)
     create_notification(sample_template, key_type=KEY_TYPE_TEAM)
@@ -2023,7 +2021,6 @@ def test_get_services_with_detailed_flag_defaults_to_today(client, mocker):
     assert resp.status_code == 200
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling")
 def test_get_detailed_services_groups_by_service(notify_db_session):
     from app.service.rest import get_detailed_services
 
@@ -2054,7 +2051,6 @@ def test_get_detailed_services_groups_by_service(notify_db_session):
     }
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling")
 def test_get_detailed_services_includes_services_with_no_notifications(notify_db_session):
     from app.service.rest import get_detailed_services
 
diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py
index b413fca07..f27bf8acf 100644
--- a/tests/app/service_invite/test_service_invite_rest.py
+++ b/tests/app/service_invite/test_service_invite_rest.py
@@ -11,7 +11,6 @@ from tests import create_admin_authorization_header
 from tests.app.db import create_invited_user
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 @pytest.mark.parametrize('extra_args, expected_start_of_invite_url', [
     (
         {},
@@ -73,7 +72,6 @@ def test_create_invited_user(
     mocked.assert_called_once_with([(str(notification.id))], queue="notify-internal-tasks")
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_create_invited_user_without_auth_type(admin_request, sample_service, mocker, invitation_email_template):
     mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
     email_address = 'invited_user@service.gov.uk'
@@ -126,13 +124,11 @@ def test_create_invited_user_invalid_email(client, sample_service, mocker, fake_
     assert mocked.call_count == 0
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_get_all_invited_users_by_service(client, notify_db_session, sample_service):
     invites = []
     for i in range(0, 5):
         email = 'invited_user_{}@service.gov.uk'.format(i)
         invited_user = create_invited_user(sample_service, to_email_address=email)
-        invites.append(invited_user)
 
     url = '/service/{}/invite'.format(sample_service.id)
 
diff --git a/tests/app/test_config.py b/tests/app/test_config.py
index fe2fef296..23d67aafa 100644
--- a/tests/app/test_config.py
+++ b/tests/app/test_config.py
@@ -4,12 +4,13 @@ from app.config import QueueNames
 def test_queue_names_all_queues_correct():
     # Need to ensure that all_queues() only returns queue names used in API
     queues = QueueNames.all_queues()
-    assert len(queues) == 15
+    assert len(queues) == 16
    assert set([
         QueueNames.PRIORITY,
         QueueNames.PERIODIC,
         QueueNames.DATABASE,
         QueueNames.SEND_SMS,
+        QueueNames.CHECK_SMS,
         QueueNames.SEND_EMAIL,
         QueueNames.RESEARCH_MODE,
         QueueNames.REPORTING,
diff --git a/tests/app/test_route_authentication.py b/tests/app/test_route_authentication.py
index f1c2dec56..e8ed40583 100644
--- a/tests/app/test_route_authentication.py
+++ b/tests/app/test_route_authentication.py
@@ -1,8 +1,3 @@
-
-import pytest
-
-
-@pytest.mark.skip(reason="Needs updating for TTS")
 def test_all_routes_have_authentication(client):
     # This tests that each blueprint registered on the application has a before_request function registered.
     # The None row is removed from the comparison as that is not blueprint specific but app specific.
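Aside on the re-enabled `test_all_routes_have_authentication` above: Flask records `before_request` hooks in `app.before_request_funcs`, a dict keyed by blueprint name, with the `None` key holding app-wide hooks. The comparison the test's comments describe can be sketched roughly as follows; the real test body lives outside this hunk, so the exact assertion below is an assumption:

    def test_all_routes_have_authentication(client):
        app = client.application
        # Blueprint names that registered a before_request hook (e.g. an
        # auth check); drop the None row, which holds app-level hooks
        # rather than blueprint-level ones.
        blueprints_with_hooks = set(app.before_request_funcs) - {None}
        assert set(app.blueprints) == blueprints_with_hooks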
diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py
index f06a26275..ce38f42e7 100644
--- a/tests/app/user/test_rest.py
+++ b/tests/app/user/test_rest.py
@@ -47,7 +47,6 @@ def test_get_user_list(admin_request, sample_service):
         assert sorted(expected_permissions) == sorted(fetched['permissions'][str(sample_service.id)])
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_get_user(admin_request, sample_service, sample_organisation):
     """
     Tests GET endpoint '/' to retrieve a single service.
@@ -123,7 +122,6 @@ def test_post_user(admin_request, notify_db_session):
     assert user.auth_type == EMAIL_AUTH_TYPE
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_post_user_without_auth_type(admin_request, notify_db_session):
     User.query.delete()
     data = {
@@ -618,7 +616,6 @@ def test_send_user_reset_password_should_send_reset_password_link(admin_request,
     assert notification.reply_to_text == notify_service.get_default_reply_to_email_address()
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 @pytest.mark.parametrize('data, expected_url', (
     ({
         'email': 'notify@digital.cabinet-office.gov.uk',
@@ -820,7 +817,6 @@ def test_activate_user_fails_if_already_active(admin_request, sample_user):
     assert sample_user.state == 'active'
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 def test_update_user_auth_type(admin_request, sample_user):
     assert sample_user.auth_type == 'sms_auth'
     resp = admin_request.post(
diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py
index 65217f0b2..f2d38002f 100644
--- a/tests/app/user/test_rest_verify.py
+++ b/tests/app/user/test_rest_verify.py
@@ -292,7 +292,6 @@ def test_send_sms_code_returns_204_when_too_many_codes_already_created(client, s
     assert VerifyCode.query.count() == 5
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 @pytest.mark.parametrize('post_data, expected_url_starts_with', (
     (
         {},
@@ -387,7 +386,6 @@ def test_reset_failed_login_count_returns_404_when_user_does_not_exist(client):
     assert resp.status_code == 404
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
 # we send sms_auth users and webauthn_auth users email code to validate their email access
 @pytest.mark.parametrize('auth_type', USER_AUTH_TYPES)
 @pytest.mark.parametrize('data, expected_auth_url', (
diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py
index 91189a3a1..adcadc24a 100644
--- a/tests/app/v2/notifications/test_post_notifications.py
+++ b/tests/app/v2/notifications/test_post_notifications.py
@@ -238,7 +238,6 @@ def test_should_cache_template_lookups_in_memory(mocker, client, sample_template
     assert Notification.query.count() == 5
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: cloud.gov redis fails, local docker works, mock redis fails")
 def test_should_cache_template_and_service_in_redis(mocker, client, sample_template):
     from app.schemas import service_schema, template_schema
 
@@ -289,7 +288,6 @@ def test_should_cache_template_and_service_in_redis(mocker, client, sample_templ
     assert templates_call[1]['ex'] == 604_800
 
 
-@pytest.mark.skip(reason="Needs updating for TTS: cloud.gov redis fails, local docker works, mock redis fails")
 def test_should_return_template_if_found_in_redis(mocker, client, sample_template):
     from app.schemas import service_schema, template_schema
 
@@ -313,7 +311,6 @@ def test_should_return_template_if_found_in_redis(mocker, client, sample_templat
     mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
 
     data = {
-        'phone_number': '+447700900855',
+        'phone_number': '+16615555555',
         'template_id': str(sample_template.id),
     }
 
diff --git a/tests/app/v2/test_errors.py b/tests/app/v2/test_errors.py
index 455829fda..b12357333 100644
--- a/tests/app/v2/test_errors.py
+++ b/tests/app/v2/test_errors.py
@@ -15,8 +15,6 @@ def app_for_test():
     app = flask.Flask(__name__)
     app.config['TESTING'] = True
     init_app(app)
-    from app import statsd_client
-    statsd_client.init_app(app)
     from app.v2.errors import register_errors
 
     blue = Blueprint("v2_under_test", __name__, url_prefix='/v2/under_test')