From 5b2446ec627aedff50440d00d275f0c333375d9f Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 8 Apr 2024 09:35:51 -0700 Subject: [PATCH 01/40] add examples of common commands --- docs/all.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/all.md b/docs/all.md index 2e98b84d1..0ad78ae2b 100644 --- a/docs/all.md +++ b/docs/all.md @@ -531,6 +531,16 @@ cf run-task CLOUD-GOV-APP --command "flask command update-templates" --name YOUR [Here's more documentation](https://docs.cloudfoundry.org/devguide/using-tasks.html) about Cloud Foundry tasks. +# Commonly run commands + +(Note: to obtain the CLOUD_GOV_APP name, run `cf apps` and find the name of the app for the tier you are targeting) + +To promote a user to platform admin: +cf run-task --command "flask command promote-user-to-platform-admin --user-email-address=" + +To update templates: +cf run-task --command "flask command update-templates" + # Commands for test loading the local dev database All commands use the `-g` or `--generate` to determine how many instances to load to the db. The `-g` or `--generate` option is required and will always defult to 1. An example: `flask command add-test-uses-to-db -g 6` will generate 6 random users and insert them into the db. From 8d176c89988359107f6dcbac31d7903b6bb31c12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 21:57:21 +0000 Subject: [PATCH 02/40] Bump werkzeug from 3.0.1 to 3.0.2 Bumps [werkzeug](https://github.com/pallets/werkzeug) from 3.0.1 to 3.0.2. - [Release notes](https://github.com/pallets/werkzeug/releases) - [Changelog](https://github.com/pallets/werkzeug/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/werkzeug/compare/3.0.1...3.0.2) --- updated-dependencies: - dependency-name: werkzeug dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 10 ++++++---- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1a322847b..8c2e521c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2071,6 +2071,7 @@ files = [ {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, @@ -3528,6 +3529,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4487,13 +4489,13 @@ test = ["websockets"] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.0.2" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, ] [package.dependencies] @@ -4771,4 +4773,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "080f65216f06220f44ef2818095ff19bd790a37852c44e14fc44e91371c9925e" +content-hash = "3dcc493bc45068a1875df77b6488c9311e91f9a192fad1df14aa62b0a40879c4" diff --git a/pyproject.toml b/pyproject.toml index 9c74ba0a5..916bd20de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ psycopg2-binary = "==2.9.9" pyjwt = "==2.8.0" python-dotenv = "==1.0.0" sqlalchemy = "==1.4.40" -werkzeug = "^3.0.1" +werkzeug = "^3.0.2" faker = "^24.4.0" setuptools = "^69.2.0" From dc334e6686bbded8674accd2f9f3cac757303787 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Mon, 8 Apr 2024 15:15:01 -0700 Subject: [PATCH 03/40] Re-order Terraform README steps Co-authored-by: Ryan Ahearn --- 
terraform/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 40ab78a19..530920433 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -26,6 +26,7 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to 1. Follow instructions under `Use bootstrap credentials` 1. Ensure that `import.sh` includes a line and correct IDs for any resources created 1. Run `./teardown_creds.sh` to remove the space deployer account used to create the s3 bucket +1. Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` ### To make changes to the bootstrap module @@ -40,6 +41,7 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to ### Retrieving existing bucket credentials +1. Run `./import.sh` to pull existing terraform state into the local state 1. Run `./run.sh show` 1. Follow instructions under `Use bootstrap credentials` @@ -52,8 +54,6 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to aws_secret_access_key = ``` -1. 
Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` - ## SpaceDeployers A [SpaceDeployer](https://cloud.gov/docs/services/cloud-gov-service-account/) account is required to run terraform or From 1f3b69ad530769ad18b906e9b0400ad833db8113 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Mon, 8 Apr 2024 15:28:34 -0700 Subject: [PATCH 04/40] Note "initial setup" section is outdated --- terraform/README.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 530920433..4523ff032 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -6,13 +6,15 @@ Prerequisite: install the `jq` JSON processor: `brew install jq` ## Initial setup +These instructions were used for deploying the project for the first time, years ago. We should not have to perfrom these steps again. They are provided here for reference. + 1. Manually run the bootstrap module following instructions under `Terraform State Credentials` 1. Setup CI/CD Pipeline to run Terraform - 1. Copy bootstrap credentials to your CI/CD secrets using the instructions in the base README - 1. Create a cloud.gov SpaceDeployer by following the instructions under `SpaceDeployers` - 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README + 1. Copy bootstrap credentials to your CI/CD secrets using the instructions in the base README + 1. Create a cloud.gov SpaceDeployer by following the instructions under `SpaceDeployers` + 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README 1. Manually Running Terraform - 1. Follow instructions under `Set up a new environment` to create your infrastructure + 1. 
Follow instructions under `Set up a new environment` to create your infrastructure ## Terraform State Credentials From f2ef5efb1df140b408f98adc6209eb108d33f84f Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Mon, 8 Apr 2024 15:35:50 -0700 Subject: [PATCH 05/40] References to headings into anchor links --- terraform/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 4523ff032..52b4fc168 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -6,17 +6,17 @@ Prerequisite: install the `jq` JSON processor: `brew install jq` ## Initial setup -These instructions were used for deploying the project for the first time, years ago. We should not have to perfrom these steps again. They are provided here for reference. +These instructions were used for deploying the project for the first time, years ago. We should not have to perform these steps again. They are provided here for reference. -1. Manually run the bootstrap module following instructions under `Terraform State Credentials` +1. Manually run the bootstrap module following instructions under [Terraform State Credentials](#terraform-state-credentials) 1. Setup CI/CD Pipeline to run Terraform 1. Copy bootstrap credentials to your CI/CD secrets using the instructions in the base README - 1. Create a cloud.gov SpaceDeployer by following the instructions under `SpaceDeployers` + 1. Create a cloud.gov SpaceDeployer by following the instructions under [SpaceDeployers](#spacedeployers) 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README 1. Manually Running Terraform - 1. Follow instructions under `Set up a new environment` to create your infrastructure + 1. 
Follow instructions under [Set up a new environment manually](#set-up-a-new-environment-manually) to create your infrastructure -## Terraform State Credentials +## Terraform state credentials The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. @@ -25,7 +25,7 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to 1. Run `terraform init` 1. Run `./run.sh plan` to verify that the changes are what you expect 1. Run `./run.sh apply` to set up the bucket and retrieve credentials -1. Follow instructions under `Use bootstrap credentials` +1. Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) 1. Ensure that `import.sh` includes a line and correct IDs for any resources created 1. Run `./teardown_creds.sh` to remove the space deployer account used to create the s3 bucket 1. Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` From e054d9b03f6aa39b1f89abd32360bde588a5c7c0 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Mon, 8 Apr 2024 17:11:20 -0700 Subject: [PATCH 06/40] Remove insecure output of creds, adjust README --- terraform/README.md | 38 ++++++++++++++++++++----------------- terraform/bootstrap/main.tf | 4 ---- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 52b4fc168..239865f9c 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -4,6 +4,25 @@ This directory holds the terraform modules for maintaining your complete persist Prerequisite: install the `jq` JSON processor: `brew install jq` +## Retrieving existing bucket credentials + +Assuming [initial setup](#initial-setup) is complete, new developers start here! + +1. Enter the bootstrap module with `cd bootstrap` +1. Run `./import.sh` to pull existing terraform state into the local state +1. 
Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) + +### Use bootstrap credentials + +1. Run `./run.sh show -json`. +1. In the output, locate `access_key_id` and `secret_access_key` within `bucket_credentials`. These values are secret, so, don't share them with anyone or copy them to anywhere online. +1. Add the following to `~/.aws/credentials`: + ``` + [notify-terraform-backend] + aws_access_key_id = + aws_secret_access_key = + ``` + ## Initial setup These instructions were used for deploying the project for the first time, years ago. We should not have to perform these steps again. They are provided here for reference. @@ -22,9 +41,9 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to ### Bootstrapping the state storage s3 buckets for the first time -1. Run `terraform init` +1. Within the `bootstrap` directory, run `terraform init` 1. Run `./run.sh plan` to verify that the changes are what you expect -1. Run `./run.sh apply` to set up the bucket and retrieve credentials +1. Run `./run.sh apply` to set up the bucket 1. Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) 1. Ensure that `import.sh` includes a line and correct IDs for any resources created 1. Run `./teardown_creds.sh` to remove the space deployer account used to create the s3 bucket @@ -41,21 +60,6 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to 1. Make your changes 1. Continue from step 2 of the boostrapping instructions -### Retrieving existing bucket credentials - -1. Run `./import.sh` to pull existing terraform state into the local state -1. Run `./run.sh show` -1. Follow instructions under `Use bootstrap credentials` - -#### Use bootstrap credentials - -1. 
Add the following to `~/.aws/credentials` - ``` - [notify-terraform-backend] - aws_access_key_id = - aws_secret_access_key = - ``` - ## SpaceDeployers A [SpaceDeployer](https://cloud.gov/docs/services/cloud-gov-service-account/) account is required to run terraform or diff --git a/terraform/bootstrap/main.tf b/terraform/bootstrap/main.tf index 625cb8093..2394f4ab8 100644 --- a/terraform/bootstrap/main.tf +++ b/terraform/bootstrap/main.tf @@ -14,7 +14,3 @@ resource "cloudfoundry_service_key" "bucket_creds" { name = "${local.s3_service_name}-access" service_instance = module.s3.bucket_id } - -output "bucket_credentials" { - value = cloudfoundry_service_key.bucket_creds.credentials -} From 150cbeb54f1c58eca5a751d9a3b600c4c45b2079 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Mon, 8 Apr 2024 18:32:20 -0700 Subject: [PATCH 07/40] Explain directory structure --- terraform/README.md | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 239865f9c..1890e409d 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -1,12 +1,10 @@ # Terraform -This directory holds the terraform modules for maintaining your complete persistent infrastructure. - -Prerequisite: install the `jq` JSON processor: `brew install jq` +This directory holds the Terraform modules for maintaining Notify.gov's infrastructure. You can [read about the structure](#structure) or [get set up to develop](#retrieving-existing-bucket-credentials). ## Retrieving existing bucket credentials -Assuming [initial setup](#initial-setup) is complete, new developers start here! +:green_book: Assuming [initial setup](#initial-setup) is complete, new developers start here! 1. Enter the bootstrap module with `cd bootstrap` 1. Run `./import.sh` to pull existing terraform state into the local state @@ -35,11 +33,11 @@ These instructions were used for deploying the project for the first time, years 1. Manually Running Terraform 1. 
Follow instructions under [Set up a new environment manually](#set-up-a-new-environment-manually) to create your infrastructure -## Terraform state credentials +### Terraform state credentials The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. -### Bootstrapping the state storage s3 buckets for the first time +#### Bootstrapping the state storage s3 buckets for the first time 1. Within the `bootstrap` directory, run `terraform init` 1. Run `./run.sh plan` to verify that the changes are what you expect @@ -49,7 +47,7 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to 1. Run `./teardown_creds.sh` to remove the space deployer account used to create the s3 bucket 1. Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` -### To make changes to the bootstrap module +#### To make changes to the bootstrap module *This should not be necessary in most cases* @@ -106,7 +104,13 @@ The below steps rely on you first configuring access to the Terraform state in s ## Structure -Each environment has its own module, which relies on a shared module for everything except the providers code and environment specific variables and settings. +The `terraform` directory contains sub-directories (`staging`, `production`, etc.) named for deployment environments. Each of these is a *module*, which is just Terraform's word for a directory with some .tf files in it. Each module governs the infrastructure of the environment for which it is named. This directory structure forms "[bulkheads](https://blog.gruntwork.io/how-to-manage-terraform-state-28f5697e68fa)" which isolate Terraform commands to a single environment, limiting accidental damage. + +The `development` module is rather different from the other environment modules. 
While the other environments can be used to create (or destroy) cloud resources, the development module mostly just sets up access to pre-existing resources needed for local software development. + +The `bootstrap` directory is not an environment module. Instead, it sets up infrastructure needed to deploy Terraform in any of the environments. If you are new to the project, [this is where you should start](#retrieving-existing-bucket-credentials). Similarly, `shared` is not an environment; this module lends code to all the environments. + +Files within these directories look like this: ``` - bootstrap/ From e228a0265373e130a9dca0700514ef6fb38936e9 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Tue, 9 Apr 2024 11:26:50 -0700 Subject: [PATCH 08/40] Explanation of the purpose of bucket credentials --- terraform/README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/terraform/README.md b/terraform/README.md index 1890e409d..096c360aa 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -4,7 +4,9 @@ This directory holds the Terraform modules for maintaining Notify.gov's infrastr ## Retrieving existing bucket credentials -:green_book: Assuming [initial setup](#initial-setup) is complete, new developers start here! +:green_book: new developers start here! + +Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a remote backend. If you are going to be developing Terraform, you'll need to hook up to this backend: 1. Enter the bootstrap module with `cd bootstrap` 1. Run `./import.sh` to pull existing terraform state into the local state @@ -21,6 +23,8 @@ This directory holds the Terraform modules for maintaining Notify.gov's infrastr aws_secret_access_key = ``` +These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. 
+ ## Initial setup These instructions were used for deploying the project for the first time, years ago. We should not have to perform these steps again. They are provided here for reference. From 0bb80c9d22f218b8b80176920f8bb44277bf70fd Mon Sep 17 00:00:00 2001 From: Anastasia Gradova Date: Wed, 10 Apr 2024 19:57:53 -0600 Subject: [PATCH 09/40] Added sqlalchemy-utils and put methods in to create and delete the test_notifications_api db before and after the tests run. --- poetry.lock | 33 ++++++++++++++++++++++++++++++--- pyproject.toml | 1 + tests/conftest.py | 11 ++++++++--- 3 files changed, 39 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8c2e521c6..888ab0361 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2488,7 +2488,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, ] [[package]] @@ -3529,7 +3529,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4282,6 +4281,34 @@ postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +description = "Various utility functions for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", 
"intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + [[package]] name = "stevedore" version = "5.2.0" @@ -4773,4 +4800,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "3dcc493bc45068a1875df77b6488c9311e91f9a192fad1df14aa62b0a40879c4" +content-hash = "4c2122299adba83cf4d541bdc16a6b5d3a5350cb96158d2a6a4a033f78ee612b" diff --git a/pyproject.toml b/pyproject.toml index 916bd20de..87ea476ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ sqlalchemy = "==1.4.40" werkzeug = "^3.0.2" faker = "^24.4.0" setuptools = "^69.2.0" +sqlalchemy-utils = "^0.41.2" [tool.poetry.group.dev.dependencies] diff --git a/tests/conftest.py b/tests/conftest.py index 7f0c2150b..d593c1b6f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ import pytest from alembic.command import upgrade from alembic.config import Config from flask import Flask +from sqlalchemy_utils import database_exists, create_database, drop_database from app import create_app from app.dao.provider_details_dao import get_provider_details_by_identifier @@ -52,9 +53,10 @@ def _notify_db(notify_api): """ with notify_api.app_context() as app_context: db = app_context.app.extensions["sqlalchemy"] - assert ( - "test_notification_api" in db.engine.url.database - ), "dont run tests against main db" + + # Check if test_notification_api exists, if not, create + if not database_exists(db.engine.url): + create_database(db.engine.url) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) ALEMBIC_CONFIG = os.path.join(BASE_DIR, "migrations") @@ -70,6 +72,9 @@ def _notify_db(notify_api): yield db db.session.remove() + # Check if 
test_notification_api exists, if so, drop + if database_exists(db.engine.url): + drop_database(db.engine.url) db.engine.dispose() From 3c92b3e9d8bac1989a3639dfc7d6899ab65b3f76 Mon Sep 17 00:00:00 2001 From: Anastasia Gradova Date: Wed, 10 Apr 2024 20:15:18 -0600 Subject: [PATCH 10/40] Updated for isort --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index d593c1b6f..4d9b60150 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,7 @@ import pytest from alembic.command import upgrade from alembic.config import Config from flask import Flask -from sqlalchemy_utils import database_exists, create_database, drop_database +from sqlalchemy_utils import create_database, database_exists, drop_database from app import create_app from app.dao.provider_details_dao import get_provider_details_by_identifier From 7dbcc1793db90268bdc749c4dd8cb48b10fb2f1d Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Wed, 10 Apr 2024 19:18:29 -0700 Subject: [PATCH 11/40] Add Troubleshooting section, note about local state in dev env --- terraform/README.md | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 096c360aa..0f86112e2 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -4,9 +4,9 @@ This directory holds the Terraform modules for maintaining Notify.gov's infrastr ## Retrieving existing bucket credentials -:green_book: new developers start here! +:green_book: New developers start here! -Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a remote backend. If you are going to be developing Terraform, you'll need to hook up to this backend: +Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a shared remote backend. 
If you are going to be writing Terraform for any of our deployment environments you'll need to hook up to this backend. (You don't need to do this if you are just writing code for the `development` module, becase it stores state locally on your laptop.) 1. Enter the bootstrap module with `cd bootstrap` 1. Run `./import.sh` to pull existing terraform state into the local state @@ -15,12 +15,12 @@ Assuming [initial setup](#initial-setup) is complete — which it should be ### Use bootstrap credentials 1. Run `./run.sh show -json`. -1. In the output, locate `access_key_id` and `secret_access_key` within `bucket_credentials`. These values are secret, so, don't share them with anyone or copy them to anywhere online. +1. In the output, locate `access_key_id` and `secret_access_key` within the `bucket_creds` resource. These values are secret, so, don't share them with anyone or copy them to anywhere online. 1. Add the following to `~/.aws/credentials`: ``` [notify-terraform-backend] - aws_access_key_id = - aws_secret_access_key = + aws_access_key_id = + aws_secret_access_key = ``` These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. @@ -144,3 +144,16 @@ In the bootstrap module: - `run.sh` Helper script to set up a space deployer and run terraform. The terraform action (`show`/`plan`/`apply`/`destroy`) is passed as an argument - `teardown_creds.sh` Helper script to remove the space deployer setup as part of `run.sh` - `import.sh` Helper script to create a new local state file in case terraform changes are needed + +## Troubleshooting + +### Expired token + +``` +The token expired, was revoked, or the token ID is incorrect. Please log back in to re-authenticate. +``` +You need to re-authenticate with the Cloud Foundry CLI +``` +cf login -a api.fr.cloud.gov --sso +``` +You may also need to log in again to the Cloud.gov website. 
From e426402247eac50bd12200cc6dde1f7073ebc2b4 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Wed, 10 Apr 2024 19:30:05 -0700 Subject: [PATCH 12/40] Instruction on switching AWS CLI profiles --- terraform/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/terraform/README.md b/terraform/README.md index 0f86112e2..eae17cb15 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -22,6 +22,7 @@ Assuming [initial setup](#initial-setup) is complete — which it should be aws_access_key_id = aws_secret_access_key = ``` +1. Check which AWS profile you are using with `aws configure list`. If needed, use `export AWS_PROFILE=notify-terraform-backend` to change to profile and credentials you just added. These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. From 5508787c248b99d986878e05e34646e48c1ed844 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Thu, 11 Apr 2024 09:04:46 -0400 Subject: [PATCH 13/40] If the templates.json file changes, update it. 
Signed-off-by: Cliff Hill --- .github/workflows/deploy.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 049c49b36..104fa1521 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -79,6 +79,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-staging --command "flask command update-templates" + - name: Check for changes to egress config id: changed-egress-config uses: tj-actions/changed-files@v41 From d7b23832b8e88aef35d601aa0b2ee4e33a8b68ac Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Thu, 11 Apr 2024 09:25:00 -0400 Subject: [PATCH 14/40] Making minor change to templates.json. Signed-off-by: Cliff Hill --- app/config_files/templates.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/config_files/templates.json b/app/config_files/templates.json index ebc379755..a37bebaf0 100644 --- a/app/config_files/templates.json +++ b/app/config_files/templates.json @@ -34,7 +34,7 @@ "name": "Notify SMS verify code", "type": "sms", "subject": "", - "content": ["((verify_code)) is your Notify.gov authentication code"] + "content": ["((verify_code)) is your Notify.gov authentication code."] }, { "id": "474e9242-823b-4f99-813d-ed392e7f1201", From 7b73e995304e02a486f0b15e0ae3c7b13c164f06 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Thu, 11 Apr 2024 10:07:50 -0400 Subject: [PATCH 15/40] Making template update steps for demo and prod. 
Signed-off-by: Cliff Hill --- .github/workflows/deploy-demo.yml | 10 ++++++++++ .github/workflows/deploy-prod.yml | 10 ++++++++++ 2 files changed, 20 insertions(+) diff --git a/.github/workflows/deploy-demo.yml b/.github/workflows/deploy-demo.yml index 06f3f8091..945540b19 100644 --- a/.github/workflows/deploy-demo.yml +++ b/.github/workflows/deploy-demo.yml @@ -74,6 +74,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-demo --command "flask command update-templates" + - name: Check for changes to egress config id: changed-egress-config uses: tj-actions/changed-files@v41 diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml index fb0257ddc..20d452b4a 100644 --- a/.github/workflows/deploy-prod.yml +++ b/.github/workflows/deploy-prod.yml @@ -78,6 +78,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-production --command "flask command update-templates" + - name: Check for changes to egress config id: changed-egress-config uses: tj-actions/changed-files@v41 From 1ff05db360d83950b4a1d9dd2f089f4f8f8fb3f1 Mon Sep 17 00:00:00 2001 From: Anastasia Gradova Date: Thu, 11 Apr 2024 10:18:14 -0600 Subject: [PATCH 16/40] Moved sqlalchemy-utils to dev dependencies --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 87ea476ca..db0248ae4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,6 @@ sqlalchemy = "==1.4.40" werkzeug = "^3.0.2" faker = "^24.4.0" setuptools = "^69.2.0" -sqlalchemy-utils = "^0.41.2" [tool.poetry.group.dev.dependencies] @@ -78,6 +77,7 @@ pytest-xdist = "^3.5.0" radon = "^6.0.1" requests-mock = "^1.11.0" setuptools = "^69.0.3" +sqlalchemy-utils = "^0.41.2" vulture = "^2.10" From 39101f0789d51552a60bb76c5b6b8bf931492a1a Mon Sep 17 00:00:00 2001 From: Anastasia Gradova Date: Thu, 11 Apr 2024 10:24:06 -0600 Subject: [PATCH 17/40] updated lock file --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 888ab0361..b31b0a2d5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4800,4 +4800,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "4c2122299adba83cf4d541bdc16a6b5d3a5350cb96158d2a6a4a033f78ee612b" +content-hash = "fa95df65f7a51c8bc919a756d57b7f1fa36ad7ffbf5725be1f929bea21be6c20" From c5be822775b863b081d73acdd5d164bdd0c39178 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 17:00:36 +0000 Subject: [PATCH 18/40] Bump moto from 5.0.3 to 5.0.5 Bumps [moto](https://github.com/getmoto/moto) from 5.0.3 to 5.0.5. - [Release notes](https://github.com/getmoto/moto/releases) - [Changelog](https://github.com/getmoto/moto/blob/master/CHANGELOG.md) - [Commits](https://github.com/getmoto/moto/compare/5.0.3...5.0.5) --- updated-dependencies: - dependency-name: moto dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 30 ++++++++++++++++-------------- pyproject.toml | 2 +- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/poetry.lock b/poetry.lock index b31b0a2d5..c4c8a6be7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2385,13 +2385,13 @@ files = [ [[package]] name = "moto" -version = "5.0.3" +version = "5.0.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.3-py2.py3-none-any.whl", hash = "sha256:261d312d1d69c2afccb450a0566666d7b75d76ed6a7d00aac278a9633b073ff0"}, - {file = "moto-5.0.3.tar.gz", hash = "sha256:070ac2edf89ad7aee28534481ce68e2f344c8a6a8fefec5427eea0d599bfdbdb"}, + {file = "moto-5.0.5-py2.py3-none-any.whl", hash = "sha256:4ecdd4084491a2f25f7a7925416dcf07eee0031ce724957439a32ef764b22874"}, + {file = "moto-5.0.5.tar.gz", hash = "sha256:2eaca2df7758f6868df420bf0725cd0b93d98709606f1fb8b2343b5bdc822d91"}, ] [package.dependencies] @@ -2406,24 +2406,25 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker 
(>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] cognitoidp = ["joserfc (>=0.9.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.4)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.4)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", 
"openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.4)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.4)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] [[package]] @@ -2488,7 +2489,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] [[package]] @@ -3529,6 +3530,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4800,4 +4802,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "fa95df65f7a51c8bc919a756d57b7f1fa36ad7ffbf5725be1f929bea21be6c20" +content-hash = "afef1d66bba595e006cfa7ce531c3f4871cb5a4f21a70e29a5b33c97b694646b" diff --git a/pyproject.toml b/pyproject.toml index db0248ae4..f7b20d9a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ freezegun = "^1.4.0" honcho = "*" isort = "^5.13.2" jinja2-cli = {version = "==0.8.2", extras = ["yaml"]} -moto = "==5.0.3" +moto = "==5.0.5" pip-audit = "*" pre-commit = "^3.6.0" pytest = "^7.4.4" From 8fba67170e30427e04bf8bb21ff35f030a584c8c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 11 Apr 2024 10:28:09 -0700 Subject: [PATCH 19/40] remove deprecated notification --- app/celery/scheduled_tasks.py | 85 +----------------------- poetry.lock | 2 +- tests/app/celery/test_scheduled_tasks.py | 48 ------------- tests/app/user/test_rest_verify.py | 16 ++--- 4 files changed, 11 insertions(+), 140 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 1742a310c..2c4d31d8c 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,4 +1,3 @@ -import os 
from datetime import datetime, timedelta from flask import current_app @@ -6,7 +5,7 @@ from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTick from sqlalchemy import between from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery, redis_store, zendesk_client +from app import notify_celery, zendesk_client from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, process_incomplete_jobs, @@ -24,16 +23,12 @@ from app.dao.jobs_dao import ( find_jobs_with_missing_rows, find_missing_row_for_job, ) -from app.dao.notifications_dao import ( - dao_get_failed_notification_count, - notifications_not_yet_sent, -) +from app.dao.notifications_dao import notifications_not_yet_sent from app.dao.services_dao import ( dao_find_services_sending_to_tv_numbers, dao_find_services_with_high_failure_rates, ) from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago -from app.delivery.send_to_providers import provider_to_use from app.enums import JobStatus, NotificationType from app.models import Job from app.notifications.process_notifications import send_notification_to_queue @@ -92,82 +87,6 @@ def expire_or_delete_invitations(): raise -# TODO THIS IS ACTUALLY DEPRECATED, WE ARE REMOVING PHONE NUMBERS FROM THE DB -# SO THERE WILL BE NO REASON TO KEEP TRACK OF THIS COUNT -@notify_celery.task(name="check-db-notification-fails") -def check_db_notification_fails(): - """ - We are going to use redis to keep track of the previous fail count. - - If the number of fails is more than 100% of the limit, we want to send an alert every time this - runs, because it is urgent to fix it. - - If the number is more than 25%, 50% or 75% of the limit, we only want to send an alert - on a breach. I.e., if the last number was at 23% and the current number is 27%, send an email. - But if the last number was 26% and the current is 27%, don't. 
- """ - last_value = redis_store.get("LAST_DB_NOTIFICATION_COUNT") - if not last_value: - last_value = 0 - else: - last_value = int(last_value.decode("utf-8")) - - failed_count = dao_get_failed_notification_count() - if failed_count > last_value: - redis_store.set("LAST_DB_NOTIFICATION_COUNT", failed_count) - message = "" - curr_env = os.getenv("ENVIRONMENT") - if failed_count >= MAX_NOTIFICATION_FAILS: - message = f"We are over 100% in the db for failed notifications on {curr_env}" - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.9 - and last_value < MAX_NOTIFICATION_FAILS * 0.9 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 90% in the db for failed notifications on {curr_env}", - ) - - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.75 - and last_value < MAX_NOTIFICATION_FAILS * 0.75 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 75% in the db for failed notifications on {curr_env}", - ) - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.5 - and last_value < MAX_NOTIFICATION_FAILS * 0.5 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 50% in the db for failed notifications on {curr_env}", - ) - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.25 - and last_value < MAX_NOTIFICATION_FAILS * 0.25 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 25% in the db for failed notifications on {curr_env}", - ) - # suppress any spam coming from development tier - if message and curr_env != "development": - provider = provider_to_use(NotificationType.EMAIL, False) - from_address = '"{}" <{}@{}>'.format( - "Failed Notification Count Alert", - "test_sender", - current_app.config["NOTIFY_EMAIL_DOMAIN"], - ) - provider.send_email( - from_address, - "tts-notify-alerts@gsa.gov", - "DB Notification Failures Level Breached", - body=str(message), - ) - - @notify_celery.task(name="check-job-status") def check_job_status(): """ diff --git a/poetry.lock b/poetry.lock index 
8c2e521c6..d2b32cc39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2488,6 +2488,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -3529,7 +3530,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 94b586a3a..1652700f0 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ 
b/tests/app/celery/test_scheduled_tasks.py @@ -8,7 +8,6 @@ from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTick from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( - check_db_notification_fails, check_for_missing_rows_in_completed_jobs, check_for_services_with_high_failure_rates_or_sending_to_tv_numbers, check_job_status, @@ -49,53 +48,6 @@ def test_should_call_expire_or_delete_invotations_on_expire_or_delete_invitation ) -def test_should_check_db_notification_fails_task_over_100_percent( - notify_db_session, mocker -): - mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_dao.return_value = 100000 - check_db_notification_fails() - assert mock_provider.call_count == 1 - - -def test_should_check_db_notification_fails_task_less_than_25_percent( - notify_db_session, mocker -): - mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_redis = mocker.patch("app.celery.scheduled_tasks.redis_store") - mock_redis.get.return_value = 0 - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_dao.return_value = 10 - check_db_notification_fails() - assert mock_provider.call_count == 0 - - -def test_should_check_db_notification_fails_task_over_50_percent( - notify_db_session, mocker -): - # This tests that we only send an alert the 1st time we cross over 50%. We don't want - # to be sending the same alert every hour, especially as it might be quite normal for the db - # fails to be at 25 or 50 for long periods of time. 
- mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_redis = mocker.patch("app.celery.scheduled_tasks.redis_store") - mock_dao.return_value = 5001 - mock_redis.get.return_value = "0".encode("utf-8") - check_db_notification_fails() - assert mock_provider.call_count == 1 - - mock_redis.get.return_value = "5001".encode("utf-8") - check_db_notification_fails() - assert mock_provider.call_count == 1 - - def test_should_update_scheduled_jobs_and_put_on_queue(mocker, sample_template): mocked = mocker.patch("app.celery.tasks.process_job.apply_async") diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index 74d90aaaf..21182972d 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -200,10 +200,10 @@ def test_send_user_sms_code(client, sample_user, sms_code_template, mocker): """ notify_service = dao_fetch_service_by_id(current_app.config["NOTIFY_SERVICE_ID"]) - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") auth_header = create_admin_authorization_header() mocked = mocker.patch("app.user.rest.create_secret_code", return_value="11111") mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") @@ -241,10 +241,10 @@ def test_send_user_code_for_sms_with_optional_to_field( Tests POST endpoint /user//sms-code with optional to field """ - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + 
mocker.patch("app.user.rest.redis_store.raw_set") to_number = "+447119876757" mocked = mocker.patch("app.user.rest.create_secret_code", return_value="11111") mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") @@ -468,10 +468,10 @@ def test_send_user_email_code( deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") sample_user.auth_type = auth_type - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") admin_request.post( "user.send_user_2fa_code", @@ -584,10 +584,10 @@ def test_user_verify_email_code_fails_if_code_already_used( def test_send_user_2fa_code_sends_from_number_for_international_numbers( client, sample_user, mocker, sms_code_template ): - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") sample_user.mobile_number = "+601117224412" auth_header = create_admin_authorization_header() From 6f6ab1a0e2bc427ef4981ff5c864a463c5356353 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 21:29:41 +0000 Subject: [PATCH 20/40] Bump flask-migrate from 4.0.5 to 4.0.7 Bumps [flask-migrate](https://github.com/miguelgrinberg/flask-migrate) from 4.0.5 to 4.0.7. 
- [Release notes](https://github.com/miguelgrinberg/flask-migrate/releases) - [Changelog](https://github.com/miguelgrinberg/Flask-Migrate/blob/main/CHANGES.md) - [Commits](https://github.com/miguelgrinberg/flask-migrate/compare/v4.0.5...v4.0.7) --- updated-dependencies: - dependency-name: flask-migrate dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 945f67ef5..1eb977949 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1378,13 +1378,13 @@ tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-s [[package]] name = "flask-migrate" -version = "4.0.5" +version = "4.0.7" description = "SQLAlchemy database migrations for Flask applications using Alembic." optional = false python-versions = ">=3.6" files = [ - {file = "Flask-Migrate-4.0.5.tar.gz", hash = "sha256:d3f437a8b5f3849d1bb1b60e1b818efc564c66e3fefe90b62e5db08db295e1b1"}, - {file = "Flask_Migrate-4.0.5-py3-none-any.whl", hash = "sha256:613a2df703998e78716cace68cd83972960834424457f5b67f56e74fff950aef"}, + {file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"}, + {file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"}, ] [package.dependencies] @@ -2489,7 +2489,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = 
"sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -3531,6 +3530,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4802,4 +4802,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "afef1d66bba595e006cfa7ce531c3f4871cb5a4f21a70e29a5b33c97b694646b" +content-hash = "dcbbd4dd7bd9336e8c13f171c9dfe7e54194254c87d10711b1cd7748d38ce19c" diff --git a/pyproject.toml b/pyproject.toml index f7b20d9a9..47b6f1d58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ expiringdict = "==1.2.2" flask = "~=2.3" flask-bcrypt = "==1.0.1" flask-marshmallow = "==0.14.0" -flask-migrate = "==4.0.5" +flask-migrate = "==4.0.7" flask-redis = "==0.4.0" flask-sqlalchemy = "==3.0.5" gunicorn = {version = "==21.2.0", extras = ["eventlet"]} From 906609e4dede51e62359acf285899e95e4fefaae Mon Sep 17 00:00:00 2001 From: 
Carlo Costino Date: Thu, 11 Apr 2024 18:01:09 -0400 Subject: [PATCH 21/40] Update utils to 0.4.6 This changeset updates notifications-utils to 0.4.6 in order to address some dependency updates and incorporate a few minor changes. Signed-off-by: Carlo Costino --- poetry.lock | 40 +++++++++++++++++++--------------------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1eb977949..b21e31af4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -204,17 +204,17 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "awscli" -version = "1.32.79" +version = "1.32.83" description = "Universal Command Line Environment for AWS." optional = false python-versions = ">=3.8" files = [ - {file = "awscli-1.32.79-py3-none-any.whl", hash = "sha256:0d74c5aac7531094ec99cf9d15fe571b8bf1c7a8e08e5a9b611d283d1ad8fd84"}, - {file = "awscli-1.32.79.tar.gz", hash = "sha256:865179b663fafabd774128644ae102dfcfea751211d3054a336eea956cf43b22"}, + {file = "awscli-1.32.83-py3-none-any.whl", hash = "sha256:2fa897df5f1f150fa1d1c146b8acaf11963356dd9efcd6d201a1c77ad898b2ad"}, + {file = "awscli-1.32.83.tar.gz", hash = "sha256:c7e480ee911df228f98b284fb4d01e2bd1fe13a18998aecb4525f3a1993eabba"}, ] [package.dependencies] -botocore = "1.34.79" +botocore = "1.34.83" colorama = ">=0.2.5,<0.4.5" docutils = ">=0.10,<0.17" PyYAML = ">=3.10,<6.1" @@ -403,17 +403,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.79" +version = "1.34.83" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.79-py3-none-any.whl", hash = "sha256:265b0b4865e8c07e27abb32a31d2bd9129bb009b1d89ca0783776ec084886123"}, - {file = "boto3-1.34.79.tar.gz", hash = "sha256:139dd2d94eaa0e3213ff37ba7cf4cb2e3823269178fe8f3e33c965f680a9ddde"}, + {file = "boto3-1.34.83-py3-none-any.whl", hash = "sha256:33cf93f6de5176f1188c923f4de1ae149ed723b89ed12e434f2b2f628491769e"}, + {file = 
"boto3-1.34.83.tar.gz", hash = "sha256:9733ce811bd82feab506ad9309e375a79cabe8c6149061971c17754ce8997551"}, ] [package.dependencies] -botocore = ">=1.34.79,<1.35.0" +botocore = ">=1.34.83,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -422,13 +422,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.79" +version = "1.34.83" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.79-py3-none-any.whl", hash = "sha256:a42a014d3dbaa9ef123810592af69f9e55b456c5be3ac9efc037325685519e83"}, - {file = "botocore-1.34.79.tar.gz", hash = "sha256:6b59b0f7de219d383a2a633f6718c2600642ebcb707749dc6c67a6a436474b7a"}, + {file = "botocore-1.34.83-py3-none-any.whl", hash = "sha256:0a3fbbe018416aeefa8978454fb0b8129adbaf556647b72269bf02e4bf1f4161"}, + {file = "botocore-1.34.83.tar.gz", hash = "sha256:0f302aa76283d4df62b4fbb6d3d20115c1a8957fc02171257fc93904d69d5636"}, ] [package.dependencies] @@ -1708,13 +1708,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -2071,7 +2071,6 @@ files = [ {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, {file = 
"lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, @@ -2674,7 +2673,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "0.4.5" +version = "0.4.6" description = "" optional = false python-versions = "^3.12.2" @@ -2685,8 +2684,8 @@ develop = false async-timeout = "^4.0.2" bleach = "^6.1.0" blinker = "^1.6.2" -boto3 = "^1.34.77" -botocore = "^1.34.79" +boto3 = "^1.34.83" +botocore = "^1.34.83" cachetools = "^5.3.0" certifi = "^2024.2.2" cffi = "^1.16.0" @@ -2697,7 +2696,7 @@ flask = "^2.3.2" flask-redis = "^0.4.0" geojson = "^3.0.1" govuk-bank-holidays = "^0.14" -idna = "^3.6" +idna = "^3.7" itsdangerous = "^2.1.2" jinja2 = "^3.1.3" jmespath = "^1.0.1" @@ -2726,7 +2725,7 @@ werkzeug = "^3.0.1" type = "git" url = "https://github.com/GSA/notifications-utils.git" reference = "HEAD" -resolved_reference = "7d1d2e9bb3791316231e97433c71da6a70c4d2ab" +resolved_reference = "d0db6073406bd160d2007edb9d00e41c9d5d44b7" [[package]] name = "numpy" @@ -3530,7 +3529,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, From a5e7383871ac7d3566900174e9ee3f223ef09eb1 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Thu, 11 Apr 2024 15:10:19 -0700 Subject: [PATCH 22/40] Rewrite manual new env instructions to be general workflow --- terraform/README.md | 63 ++++++++++++++++++++++++++++++--------------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index eae17cb15..73270129b 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -9,7 +9,7 @@ This directory holds the Terraform modules for maintaining Notify.gov's infrastr Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a shared remote backend. If you are going to be writing Terraform for any of our deployment environments you'll need to hook up to this backend. (You don't need to do this if you are just writing code for the `development` module, becase it stores state locally on your laptop.) 1. Enter the bootstrap module with `cd bootstrap` -1. 
Run `./import.sh` to pull existing terraform state into the local state +1. Run `./import.sh` to import the bucket containing remote terraform state into your local state 1. Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) ### Use bootstrap credentials @@ -24,7 +24,7 @@ Assuming [initial setup](#initial-setup) is complete — which it should be ``` 1. Check which AWS profile you are using with `aws configure list`. If needed, use `export AWS_PROFILE=notify-terraform-backend` to change to profile and credentials you just added. -These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. +These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. Now you are ready to develop Terraform using the [Workflow for deployed environments](#workflow-for-deployed-environments). ## Initial setup @@ -36,11 +36,11 @@ These instructions were used for deploying the project for the first time, years 1. Create a cloud.gov SpaceDeployer by following the instructions under [SpaceDeployers](#spacedeployers) 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README 1. Manually Running Terraform - 1. Follow instructions under [Set up a new environment manually](#set-up-a-new-environment-manually) to create your infrastructure + 1. Follow instructions under [Workflow for deployed environments](#workflow-for-deployed-environments) to create your infrastructure ### Terraform state credentials -The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. +The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. 
(If the bucket is already created, you should [Use bootstrap credentials](#use-bootstrap-credentials)) #### Bootstrapping the state storage s3 buckets for the first time @@ -70,43 +70,64 @@ deploy the application from the CI/CD pipeline. Create a new account by running: `./create_service_account.sh -s -u ` -## Set up a new environment manually +## Workflow for deployed environments -The below steps rely on you first configuring access to the Terraform state in s3 as described in [Terraform State Credentials](#terraform-state-credentials). +These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. -1. `cd` to the environment you are working in +:skull: Note that there is one risky step below (`apply`) which is safe only in the `sandbox` environment and **should not** be run in any other deployed environment. -1. Set up a SpaceDeployer +These steps assume shared [Terraform state credentials](#terraform-state-credentials) exist in s3, and that you are [Using those credentials](#use-bootstrap-credentials). + +1. `cd` to the environment you plan to work in. When developing new features/resources, try out your code in `sandbox`. Only once the code is proven should you copy-and-paste it to each higher environment. + +1. Run `cf spaces` and, from the output, copy the space name for the environment you are working in, such as `notify-sandbox`. + +1. Next you will set up a SpaceDeployer. Prepare to fill in these values: + * `` will be the string you copied from the prior step + * `` can be anything, although we recommend something that communicates the purpose of the deployer. 
For example: "circleci-deployer" for the credentials CircleCI uses to deploy the application, or "sandbox-" for credentials to run terraform manually. + Put those two values into this command: ```bash - # create a space deployer service instance that can log in with just a username and password - # the value of < SPACE_NAME > should be `staging` or `prod` depending on where you are working - # the value for < ACCOUNT_NAME > can be anything, although we recommend - # something that communicates the purpose of the deployer - # for example: circleci-deployer for the credentials CircleCI uses to - # deploy the application or -terraform for credentials to run terraform manually ./create_service_account.sh -s -u > secrets.auto.tfvars ``` - The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. Read more in the [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/). + The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. Read more in the [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/). Then, the command uses the redirection operator (`>`) to write that output to the `secrets.auto.tfvars` file. Terraform will find the username and password there, and use them as input variables. - The easiest way to use this script is to redirect the output directly to the `secrets.auto.tfvars` file it needs to be used in - -1. Run terraform from your new environment directory with +1. While till in an environment directory, initialize Terraform: ```bash terraform init + ``` + + If this command fails, you may need to run `terraform init -upgrade` to make sure new module versions are picked up. Or, `terraform init -migrate-state` to bump the remote backend. + +1. 
Then, run Terraform in a non-destructive way: + ```bash terraform plan ``` - If the `terraform init` command fails, you may need to run `terraform init -upgrade` to make sure new module versions are picked up. + This will show you any pending changes that Terraform is ready to make. Now is the time to write any code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. -1. Apply changes with `terraform apply`. +1. **Only if it is safe to do so**, apply your changes. -1. Remove the space deployer service instance if it doesn't need to be used again, such as when manually running terraform once. + > [!CAUTION] + > Applying changes in the wrong directory can mess up a deployed environment that people are relying on + + Double-check what directory you are in, like with the `pwd` command. You should probably only apply while in the `sandbox` directory / environment. + + Once you are sure it is safe, run: + ```bash + terraform apply + ``` + + This command *will deploy your changes* to the cloud. This is a healthy part of testing your code in the sandbox, or if you are creating a new environment. **Do not** apply in enviornments that people are relying upon. + +1. Remove the space deployer service instance when you are done manually running Terraform. ```bash # and have the same values as used above. ./destroy_service_account.sh -s -u ``` + Optionally, you can also `rm secrets.auto.tfvars` + ## Structure The `terraform` directory contains sub-directories (`staging`, `production`, etc.) named for deployment environments. Each of these is a *module*, which is just Terraform's word for a directory with some .tf files in it. Each module governs the infrastructure of the environment for which it is named. This directory structure forms "[bulkheads](https://blog.gruntwork.io/how-to-manage-terraform-state-28f5697e68fa)" which isolate Terraform commands to a single environment, limiting accidental damage. 
From 35bb04f082e1fccbcc8c3a92cc274585356cd804 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Thu, 11 Apr 2024 15:29:36 -0700 Subject: [PATCH 23/40] Note about risk of saving shared code --- terraform/README.md | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 73270129b..6ce0cee6e 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -72,7 +72,7 @@ deploy the application from the CI/CD pipeline. Create a new account by running: ## Workflow for deployed environments -These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. +These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting up a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. :skull: Note that there is one risky step below (`apply`) which is safe only in the `sandbox` environment and **should not** be run in any other deployed environment. @@ -92,7 +92,7 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. Read more in the [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/). 
Then, the command uses the redirection operator (`>`) to write that output to the `secrets.auto.tfvars` file. Terraform will find the username and password there, and use them as input variables. -1. While till in an environment directory, initialize Terraform: +1. While still in an environment directory, initialize Terraform: ```bash terraform init ``` @@ -104,7 +104,7 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent terraform plan ``` - This will show you any pending changes that Terraform is ready to make. Now is the time to write any code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. + This will show you any pending changes that Terraform is ready to make. Now is the time to write any HCL code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. 1. **Only if it is safe to do so**, apply your changes. @@ -118,7 +118,7 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent terraform apply ``` - This command *will deploy your changes* to the cloud. This is a healthy part of testing your code in the sandbox, or if you are creating a new environment. **Do not** apply in enviornments that people are relying upon. + This command *will deploy your changes* to the cloud. This is a healthy part of testing your code in the sandbox, or if you are creating a new environment (a new directory). **Do not** apply in environments that people are relying upon. 1. Remove the space deployer service instance when you are done manually running Terraform. ```bash @@ -134,7 +134,12 @@ The `terraform` directory contains sub-directories (`staging`, `production`, etc The `development` module is rather different from the other environment modules. 
While the other environments can be used to create (or destroy) cloud resources, the development module mostly just sets up access to pre-existing resources needed for local software development. -The `bootstrap` directory is not an environment module. Instead, it sets up infrastructure needed to deploy Terraform in any of the environments. If you are new to the project, [this is where you should start](#retrieving-existing-bucket-credentials). Similarly, `shared` is not an environment; this module lends code to all the environments. +The `bootstrap` directory is not an environment module. Instead, it sets up infrastructure needed to deploy Terraform in any of the environments. If you are new to the project, [this is where you should start](#retrieving-existing-bucket-credentials). + +Similarly, `shared` is not an environment. It is a module that lends code to all the environments. Please note that changes to the `shared` codebase will be applied to all environments the next time CI/CD (or a user) runs Terraform in that environment. + +> [!WARNING] +> Editing `shared` code is risky because it will be applied to production Files within these directories look like this: From 3c141ef82359c732dc8fd0f2951172740d45348a Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Thu, 11 Apr 2024 15:52:21 -0700 Subject: [PATCH 24/40] Does the Caution markdown not work? --- terraform/README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 6ce0cee6e..0220c215b 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -22,7 +22,7 @@ Assuming [initial setup](#initial-setup) is complete — which it should be aws_access_key_id = aws_secret_access_key = ``` -1. Check which AWS profile you are using with `aws configure list`. If needed, use `export AWS_PROFILE=notify-terraform-backend` to change to profile and credentials you just added. +1. 
Check which AWS profile you are using with `aws configure list`. If needed, use `export AWS_PROFILE=notify-terraform-backend` to change to the profile and credentials you just added. These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. Now you are ready to develop Terraform using the [Workflow for deployed environments](#workflow-for-deployed-environments). @@ -85,12 +85,15 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent 1. Next you will set up a SpaceDeployer. Prepare to fill in these values: * `` will be the string you copied from the prior step * `` can be anything, although we recommend something that communicates the purpose of the deployer. For example: "circleci-deployer" for the credentials CircleCI uses to deploy the application, or "sandbox-" for credentials to run terraform manually. + Put those two values into this command: ```bash ./create_service_account.sh -s -u > secrets.auto.tfvars ``` - The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. Read more in the [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/). Then, the command uses the redirection operator (`>`) to write that output to the `secrets.auto.tfvars` file. Terraform will find the username and password there, and use them as input variables. + The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. The [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/) has more information. + + The command uses the redirection operator (`>`) to write that output to the `secrets.auto.tfvars` file. Terraform will find the username and password there, and use them as input variables. 1. 
While still in an environment directory, initialize Terraform: ```bash @@ -104,11 +107,13 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent terraform plan ``` - This will show you any pending changes that Terraform is ready to make. Now is the time to write any HCL code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. + This will show you any pending changes that Terraform is ready to make. + + :pencil: Now is the time to write any HCL code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. Keep in mind that any changes to the codebase that you commit will be run by the CI/CD pipeline. 1. **Only if it is safe to do so**, apply your changes. - > [!CAUTION] + > [!WARNING] > Applying changes in the wrong directory can mess up a deployed environment that people are relying on Double-check what directory you are in, like with the `pwd` command. You should probably only apply while in the `sandbox` directory / environment. From 7e9a504a641f363739031ce95254994484469926 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Thu, 11 Apr 2024 15:56:22 -0700 Subject: [PATCH 25/40] I guess you just cant indent cautions / warnings --- terraform/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/terraform/README.md b/terraform/README.md index 0220c215b..8b57a9d41 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -74,7 +74,8 @@ deploy the application from the CI/CD pipeline. Create a new account by running: These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting up a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. 
You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. -:skull: Note that there is one risky step below (`apply`) which is safe only in the `sandbox` environment and **should not** be run in any other deployed environment. +> [!CAUTION] +> There is one risky step below (`apply`) which is safe only in the `sandbox` environment and **should not** be run in any other deployed environment. These steps assume shared [Terraform state credentials](#terraform-state-credentials) exist in s3, and that you are [Using those credentials](#use-bootstrap-credentials). @@ -113,8 +114,7 @@ These steps assume shared [Terraform state credentials](#terraform-state-credent 1. **Only if it is safe to do so**, apply your changes. - > [!WARNING] - > Applying changes in the wrong directory can mess up a deployed environment that people are relying on + :skull: Applying changes in the wrong directory can mess up a deployed environment that people are relying on Double-check what directory you are in, like with the `pwd` command. You should probably only apply while in the `sandbox` directory / environment. 
From 0f3cdcf56a4f2e2b8e82edc4d8d1110881785c29 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 12 Apr 2024 09:23:22 -0700 Subject: [PATCH 26/40] fix tests --- poetry.lock | 2 + test_csv_files/multiple_sms.csv | 20 +++++----- test_csv_files/sms.csv | 2 +- tests/app/celery/test_tasks.py | 30 +++++++-------- .../test_process_notification.py | 14 +++---- tests/app/notifications/test_validators.py | 6 ++- .../test_send_notification.py | 8 ++-- .../test_send_one_off_notification.py | 1 + tests/app/service/test_rest.py | 30 ++++++++++----- tests/app/test_model.py | 2 +- tests/app/test_schemas.py | 2 +- tests/app/user/test_rest.py | 7 ++-- tests/app/user/test_rest_verify.py | 3 +- .../notifications/test_post_notifications.py | 37 ++++++++++--------- 14 files changed, 93 insertions(+), 71 deletions(-) diff --git a/poetry.lock b/poetry.lock index b21e31af4..df632df80 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2071,6 +2071,7 @@ files = [ {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = 
"sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, @@ -2488,6 +2489,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] diff --git a/test_csv_files/multiple_sms.csv b/test_csv_files/multiple_sms.csv index 2ecad9140..3253e0ae1 100644 --- a/test_csv_files/multiple_sms.csv +++ b/test_csv_files/multiple_sms.csv @@ -1,11 +1,11 @@ PhoneNumber,Name -+441234123121,chris -+441234123122,chris -+441234123123,chris -+441234123124,chris -+441234123125,chris -+441234123126,chris -+441234123127,chris -+441234123128,chris -+441234123129,chris -+441234123120,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris diff --git a/test_csv_files/sms.csv b/test_csv_files/sms.csv index 728639972..2227cbfe6 100644 --- a/test_csv_files/sms.csv +++ b/test_csv_files/sms.csv @@ -1,2 +1,2 @@ PHONE NUMBER, IGNORE THIS COLUMN -+441234123123, nope ++14254147755, nope diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 063770bfc..7b1463d2c 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -100,14 +100,14 @@ def test_should_process_sms_job(sample_job, mocker): s3.get_job_and_metadata_from_s3.assert_called_once_with( service_id=str(sample_job.service.id), 
job_id=str(sample_job.id) ) - assert encryption.encrypt.call_args[0][0]["to"] == "+441234123123" + assert encryption.encrypt.call_args[0][0]["to"] == "+14254147755" assert encryption.encrypt.call_args[0][0]["template"] == str(sample_job.template.id) assert ( encryption.encrypt.call_args[0][0]["template_version"] == sample_job.template.version ) assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "phonenumber": "+441234123123" + "phonenumber": "+14254147755" } assert encryption.encrypt.call_args[0][0]["row_number"] == 0 tasks.save_sms.apply_async.assert_called_once_with( @@ -279,7 +279,7 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mock service_id=str(sample_job_with_placeholdered_template.service.id), job_id=str(sample_job_with_placeholdered_template.id), ) - assert encryption.encrypt.call_args[0][0]["to"] == "+441234123120" + assert encryption.encrypt.call_args[0][0]["to"] == "+14254147755" assert encryption.encrypt.call_args[0][0]["template"] == str( sample_job_with_placeholdered_template.template.id ) @@ -288,7 +288,7 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mock == sample_job_with_placeholdered_template.template.version ) # noqa assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "phonenumber": "+441234123120", + "phonenumber": "+14254147755", "name": "chris", } assert tasks.save_sms.apply_async.call_count == 10 @@ -397,7 +397,7 @@ def test_should_send_template_to_correct_sms_task_and_persist( ): notification = _notification_json( sample_template_with_placeholders, - to="+447234123123", + to="+14254147755", personalisation={"name": "Jo"}, ) @@ -558,7 +558,7 @@ def test_should_not_save_email_if_restricted_service_and_invalid_email_address( def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker): notification = _notification_json( sample_job.template, - to="+447234123123", + to="+14254147755", job_id=sample_job.id, row_number=2, ) @@ 
-813,7 +813,7 @@ def test_should_use_email_template_and_persist_without_personalisation( def test_save_sms_should_go_to_retry_queue_if_database_errors(sample_template, mocker): - notification = _notification_json(sample_template, "+447234123123") + notification = _notification_json(sample_template, "+14254147755") expected_exception = SQLAlchemyError() @@ -1017,7 +1017,7 @@ def test_send_inbound_sms_to_service_post_https_request_to_service( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1063,7 +1063,7 @@ def test_send_inbound_sms_to_service_does_not_sent_request_when_inbound_api_does inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1084,7 +1084,7 @@ def test_send_inbound_sms_to_service_retries_if_request_returns_500( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1109,7 +1109,7 @@ def test_send_inbound_sms_to_service_retries_if_request_throws_unknown( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1134,7 +1134,7 @@ def test_send_inbound_sms_to_service_does_not_retries_if_request_returns_404( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1429,7 +1429,7 @@ def test_save_api_email_or_sms(mocker, sample_service, notification_type): 
data.update({"to": "jane.citizen@example.com"}) expected_queue = QueueNames.SEND_EMAIL else: - data.update({"to": "+447700900855"}) + data.update({"to": "+14254147755"}) expected_queue = QueueNames.SEND_SMS encrypted = encryption.encrypt(data) @@ -1483,7 +1483,7 @@ def test_save_api_email_dont_retry_if_notification_already_exists( data.update({"to": "jane.citizen@example.com"}) expected_queue = QueueNames.SEND_EMAIL else: - data.update({"to": "+447700900855"}) + data.update({"to": "+14254147755"}) expected_queue = QueueNames.SEND_SMS encrypted = encryption.encrypt(data) @@ -1576,7 +1576,7 @@ def test_save_tasks_use_cached_service_and_template( NotificationType.SMS, save_api_sms, QueueNames.SEND_SMS, - "+447700900855", + "+14254147755", ), ( NotificationType.EMAIL, diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 52198071a..160c96f97 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -76,7 +76,7 @@ def test_persist_notification_creates_and_save_to_db( notification = persist_notification( template_id=sample_template.id, template_version=sample_template.version, - recipient="+447111111111", + recipient="+14254147755", service=sample_template.service, personalisation={}, notification_type=NotificationType.SMS, @@ -120,7 +120,7 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ persist_notification( template_id=None, template_version=None, - recipient="+447111111111", + recipient="+14254147755", service=sample_api_key.service, personalisation=None, notification_type=NotificationType.SMS, @@ -178,7 +178,7 @@ def test_persist_notification_cache_is_not_incremented_on_failure_to_create_noti persist_notification( template_id=None, template_version=None, - recipient="+447111111111", + recipient="+14254147755", service=sample_api_key.service, personalisation=None, 
notification_type=NotificationType.SMS, @@ -321,9 +321,9 @@ def test_simulated_recipient(notify_api, to_address, notification_type, expected @pytest.mark.parametrize( "recipient, expected_international, expected_prefix, expected_units", [ - ("+447900900123", True, "44", 1), # UK - ("+73122345678", True, "7", 1), # Russia - ("+360623400400", True, "36", 1), # Hungary + # ("+447900900123", True, "44", 1), # UK + # ("+73122345678", True, "7", 1), # Russia + # ("+360623400400", True, "36", 1), # Hungary ("2028675309", False, "1", 1), ], # USA ) @@ -382,7 +382,7 @@ def test_persist_notification_with_international_info_does_not_store_for_email( @pytest.mark.parametrize( "recipient, expected_recipient_normalised", [ - ("+4407900900123", "+447900900123"), + # ("+4407900900123", "+447900900123"), ("202-867-5309", "+12028675309"), ("1 202-867-5309", "+12028675309"), ("+1 (202) 867-5309", "+12028675309"), diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index 7dcb8dd1d..42d96c93d 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -165,7 +165,7 @@ def test_service_can_send_to_recipient_passes(key_type, notify_db_session): "user_number, recipient_number", [ ["+12028675309", "202-867-5309"], - ["+447513332413", "+44 (07513) 332413"], + # ["+447513332413", "+44 (07513) 332413"], ], ) def test_service_can_send_to_recipient_passes_with_non_normalized_number( @@ -569,6 +569,9 @@ def test_check_rate_limiting_validates_api_rate_limit_and_daily_limit( @pytest.mark.parametrize("key_type", [KeyType.TEST, KeyType.NORMAL]) +@pytest.mark.skip( + "We currently don't support international numbers, our validation fails before here" +) def test_validate_and_format_recipient_fails_when_international_number_and_service_does_not_allow_int_sms( key_type, notify_db_session, @@ -588,6 +591,7 @@ def test_validate_and_format_recipient_fails_when_international_number_and_servi 
@pytest.mark.parametrize("key_type", [KeyType.TEST, KeyType.NORMAL]) +@pytest.mark.skip("We currently don't support international numbers") def test_validate_and_format_recipient_succeeds_with_international_numbers_if_service_does_allow_int_sms( key_type, sample_service_full_permissions ): diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index d85cb939a..b1bd27988 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -89,7 +89,7 @@ def test_should_reject_bad_phone_numbers(notify_api, sample_template, mocker): @pytest.mark.parametrize( "template_type, to", [ - (TemplateType.SMS, "+447700900855"), + (TemplateType.SMS, "+14254147755"), (TemplateType.EMAIL, "ok@ok.com"), ], ) @@ -257,7 +257,7 @@ def test_should_not_send_notification_for_archived_template( sample_template.archived = True dao_update_template(sample_template) json_data = json.dumps( - {"to": "+447700900855", "template": sample_template.id} + {"to": "+14254147755", "template": sample_template.id} ) auth_header = create_service_authorization_header( service_id=sample_template.service_id @@ -276,7 +276,7 @@ def test_should_not_send_notification_for_archived_template( @pytest.mark.parametrize( "template_type, to", [ - (TemplateType.SMS, "+447700900855"), + (TemplateType.SMS, "+16618675309"), (TemplateType.EMAIL, "not-someone-we-trust@email-address.com"), ], ) @@ -1230,6 +1230,7 @@ def test_should_allow_store_original_number_on_sms_notification( assert "1" == notifications[0].to +@pytest.mark.skip("We don't support international at moment") def test_should_not_allow_sending_to_international_number_without_international_permission( client, sample_template, mocker ): @@ -1254,6 +1255,7 @@ def test_should_not_allow_sending_to_international_number_without_international_ assert error_json["message"] == "Cannot send to international 
mobile numbers" +@pytest.mark.skip("We don't support international at the moment") def test_should_allow_sending_to_international_number_with_international_permission( client, sample_service_full_permissions, mocker ): diff --git a/tests/app/service/send_notification/test_send_one_off_notification.py b/tests/app/service/send_notification/test_send_one_off_notification.py index 231b42be0..000e22005 100644 --- a/tests/app/service/send_notification/test_send_one_off_notification.py +++ b/tests/app/service/send_notification/test_send_one_off_notification.py @@ -98,6 +98,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_sms( ) +@pytest.mark.skip("We currently don't support international") def test_send_one_off_notification_calls_persist_correctly_for_international_sms( persist_mock, celery_mock, notify_db_session ): diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index d1691c847..5535f814b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -65,10 +65,20 @@ def test_get_service_list(client, service_factory): response = client.get("/service", headers=[auth_header]) assert response.status_code == 200 json_resp = json.loads(response.get_data(as_text=True)) - assert len(json_resp["data"]) == 3 - assert json_resp["data"][0]["name"] == "one" - assert json_resp["data"][1]["name"] == "two" - assert json_resp["data"][2]["name"] == "three" + + found_service_one = False + found_service_two = False + found_service_three = False + for item in json_resp["data"]: + if item["name"] == "one": + found_service_one = True + elif item["name"] == "two": + found_service_two = True + elif item["name"] == "three": + found_service_three = True + assert found_service_one is True + assert found_service_two is True + assert found_service_three is True def test_get_service_list_with_only_active_flag(client, service_factory): @@ -1262,7 +1272,7 @@ def test_add_existing_user_to_another_service_with_all_permissions( 
name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) # they must exist in db first save_model_user(user_to_add, validated_email_access=True) @@ -1332,7 +1342,7 @@ def test_add_existing_user_to_another_service_with_send_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1382,7 +1392,7 @@ def test_add_existing_user_to_another_service_with_manage_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1433,7 +1443,7 @@ def test_add_existing_user_to_another_service_with_folder_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1474,7 +1484,7 @@ def test_add_existing_user_to_another_service_with_manage_api_keys( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1514,7 +1524,7 @@ def test_add_existing_user_to_non_existing_service_returns404( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) diff --git a/tests/app/test_model.py b/tests/app/test_model.py index bbd670412..aab74fac8 100644 --- a/tests/app/test_model.py +++ b/tests/app/test_model.py @@ -39,7 +39,7 @@ from tests.app.db import ( ) -@pytest.mark.parametrize("mobile_number", ["+447700900855", 
"+12348675309"]) +@pytest.mark.parametrize("mobile_number", ["+14254147755", "+12348675309"]) def test_should_build_service_guest_list_from_mobile_number(mobile_number): service_guest_list = ServiceGuestList.from_string( "service_id", diff --git a/tests/app/test_schemas.py b/tests/app/test_schemas.py index 55be8a6bb..151e319fb 100644 --- a/tests/app/test_schemas.py +++ b/tests/app/test_schemas.py @@ -60,7 +60,7 @@ def test_notification_schema_has_correct_status(sample_notification, schema_name [ ("name", "New User"), ("email_address", "newuser@mail.com"), - ("mobile_number", "+4407700900460"), + ("mobile_number", "+14254147755"), ], ) def test_user_update_schema_accepts_valid_attribute_pairs(user_attribute, user_value): diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 8ba087dcc..a388d264e 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -237,7 +237,7 @@ def test_cannot_create_user_with_empty_strings(admin_request, notify_db_session) [ ("name", "New User"), ("email_address", "newuser@mail.com"), - ("mobile_number", "+4407700900460"), + ("mobile_number", "+14254147755"), ], ) def test_post_user_attribute(admin_request, sample_user, user_attribute, user_value): @@ -273,13 +273,13 @@ def test_post_user_attribute(admin_request, sample_user, user_attribute, user_va ), ( "mobile_number", - "+4407700900460", + "+14254147755", dict( api_key_id=None, key_type=KeyType.NORMAL, notification_type=NotificationType.SMS, personalisation={}, - recipient="+4407700900460", + recipient="+14254147755", reply_to_text="testing", service=mock.ANY, template_id=uuid.UUID("8a31520f-4751-4789-8ea1-fe54496725eb"), @@ -315,6 +315,7 @@ def test_post_user_attribute_with_updated_by( mock_persist_notification.assert_not_called() +@pytest.mark.skip("We don't support international at the moment") def test_post_user_attribute_with_updated_by_sends_notification_to_international_from_number( admin_request, mocker, sample_user, 
team_member_mobile_edit_template ): diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index 21182972d..26eb085a4 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -245,7 +245,7 @@ def test_send_user_code_for_sms_with_optional_to_field( mock_redis_get.return_value = "foo" mocker.patch("app.user.rest.redis_store.raw_set") - to_number = "+447119876757" + to_number = "+14254147755" mocked = mocker.patch("app.user.rest.create_secret_code", return_value="11111") mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") auth_header = create_admin_authorization_header() @@ -581,6 +581,7 @@ def test_user_verify_email_code_fails_if_code_already_used( assert sample_user.current_session_id is None +@pytest.mark.skip("We don't support international at the moment") def test_send_user_2fa_code_sends_from_number_for_international_numbers( client, sample_user, mocker, sms_code_template ): diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index e9399808d..13cb579e3 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -41,7 +41,7 @@ def test_post_sms_notification_returns_201( ): mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, } @@ -92,7 +92,7 @@ def test_post_sms_notification_uses_inbound_number_as_sender( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), "personalisation": {" Name": "Jo"}, } @@ -125,7 +125,7 @@ def test_post_sms_notification_uses_inbound_number_reply_to_as_sender( ) mocked = 
mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), "personalisation": {" Name": "Jo"}, } @@ -156,7 +156,7 @@ def test_post_sms_notification_returns_201_with_sms_sender_id( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, "sms_sender_id": str(sms_sender.id), @@ -188,7 +188,7 @@ def test_post_sms_notification_uses_sms_sender_id_reply_to( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, "sms_sender_id": str(sms_sender.id), @@ -294,7 +294,7 @@ def test_should_cache_template_and_service_in_redis(mocker, client, sample_templ mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template.id), } @@ -373,7 +373,7 @@ def test_should_return_template_if_found_in_redis(mocker, client, sample_templat @pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -402,7 +402,7 @@ def test_post_notification_returns_400_and_missing_template( @pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -432,7 +432,7 @@ def 
test_post_notification_returns_401_and_well_formed_auth_error( @pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -529,7 +529,6 @@ def test_post_email_notification_returns_201( ("simulate-delivered-2@notifications.service.gov.uk", NotificationType.EMAIL), ("simulate-delivered-3@notifications.service.gov.uk", NotificationType.EMAIL), ("+14254147167", NotificationType.SMS), - ("+14254147755", NotificationType.SMS), ], ) def test_should_not_persist_or_send_notification_if_simulated_recipient( @@ -652,6 +651,7 @@ def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded( assert not deliver_mock.called +@pytest.mark.skip("We don't support international at the moment") def test_post_sms_notification_returns_400_if_not_allowed_to_send_int_sms( client, notify_db_session, @@ -689,7 +689,7 @@ def test_post_sms_notification_with_archived_reply_to_id_returns_400( ) mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": sample_template.id, "sms_sender_id": archived_sender.id, } @@ -781,7 +781,7 @@ def test_post_sms_notification_returns_400_if_number_not_in_guest_list( create_api_key(service=service, key_type=KeyType.TEAM) data = { - "phone_number": "+327700900855", + "phone_number": "+16615555555", "template_id": template.id, } auth_header = create_service_authorization_header( @@ -806,6 +806,7 @@ def test_post_sms_notification_returns_400_if_number_not_in_guest_list( ] +@pytest.mark.skip("We don't support international at the moment") def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms( sample_service, sample_template, @@ -832,7 +833,7 @@ def test_post_sms_should_persist_supplied_sms_number( ): mocked = 
mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+(44) 77009-00855", + "phone_number": "+16615555555", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, } @@ -888,7 +889,7 @@ def test_post_notification_with_wrong_type_of_sender( template = sample_template form_label = "email_reply_to_id" data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), form_label: fake_uuid, } @@ -1204,7 +1205,7 @@ def test_post_notification_returns_201_when_content_type_is_missing_but_payload_ if notification_type == NotificationType.EMAIL: valid_json.update({"email_address": sample_service.users[0].email_address}) else: - valid_json.update({"phone_number": "+447700900855"}) + valid_json.update({"phone_number": "+12028675309"}) response = client.post( path=f"/v2/notifications/{notification_type}", data=json.dumps(valid_json), @@ -1274,7 +1275,7 @@ def test_post_notifications_saves_email_or_sms_to_queue( ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( @@ -1343,7 +1344,7 @@ def test_post_notifications_saves_email_or_sms_normally_if_saving_to_queue_fails ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( @@ -1405,7 +1406,7 @@ def test_post_notifications_doesnt_use_save_queue_for_test_notifications( ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( 
path=f"/v2/notifications/{notification_type}", From a1f57ac4c07150cd52e3e7b76eff8544242943e6 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 12 Apr 2024 11:29:33 -0700 Subject: [PATCH 27/40] make it possible to create e2e test user on staging --- app/commands.py | 4 ++-- poetry.lock | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/commands.py b/app/commands.py index 725e7ee99..1a445731f 100644 --- a/app/commands.py +++ b/app/commands.py @@ -721,8 +721,8 @@ def validate_mobile(ctx, param, value): # noqa @click.option("-s", "--state", default="active") @click.option("-d", "--admin", default=False, type=bool) def create_test_user(name, email, mobile_number, password, auth_type, state, admin): - if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test"]: - current_app.logger.error("Can only be run in development") + if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test", "staging"]: + current_app.logger.error("Can only be run in development, test, staging") return data = { diff --git a/poetry.lock b/poetry.lock index 1eb977949..0f0450987 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2489,6 +2489,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -3530,7 +3531,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, From d2d39b210b224d953b24f92f53f0e732cdd72d1f Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Fri, 12 Apr 2024 12:07:37 -0700 Subject: [PATCH 28/40] Note about the purpose of `deployers` in main.tf --- terraform/README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/terraform/README.md b/terraform/README.md index 8b57a9d41..4f8a971a0 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -1,6 +1,10 @@ # Terraform -This directory holds the Terraform modules for maintaining Notify.gov's infrastructure. You can [read about the structure](#structure) or [get set up to develop](#retrieving-existing-bucket-credentials). +This directory holds the Terraform modules for maintaining Notify.gov's API infrastructure. You can might want to: +* [read about the directory structure](#structure), or +* [get set up to develop HCL code](#retrieving-existing-bucket-credentials). 
+ +The Admin app repo [has its own terraform directory](https://github.com/GSA/notifications-admin/tree/main/terraform) but a lot of the below instructions apply to both apps. ## Retrieving existing bucket credentials @@ -70,6 +74,8 @@ deploy the application from the CI/CD pipeline. Create a new account by running: `./create_service_account.sh -s -u ` +SpaceDeployers are also needed to run Terraform locally — they fill user and password input variables (via `deployers` within `main.tf`) that some of our Terraform modules require when they start running. Using a SpaceDeployer account locally is covered in [the next section](#workflow-for-deployed-environments). + ## Workflow for deployed environments These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting up a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. From cdf87c02d1b48e21987597e019a757b510bdeae7 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Fri, 12 Apr 2024 14:43:10 -0700 Subject: [PATCH 29/40] Update terraform/README.md Co-authored-by: Carlo Costino --- terraform/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform/README.md b/terraform/README.md index 4f8a971a0..b7488e99d 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -1,6 +1,6 @@ # Terraform -This directory holds the Terraform modules for maintaining Notify.gov's API infrastructure. You can might want to: +This directory holds the Terraform modules for maintaining Notify.gov's API infrastructure. You might want to: * [read about the directory structure](#structure), or * [get set up to develop HCL code](#retrieving-existing-bucket-credentials). 
From fdfb7e56e88cc61fc5d87458a6aa48b09186b97d Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Fri, 12 Apr 2024 14:43:23 -0700 Subject: [PATCH 30/40] Update terraform/README.md Co-authored-by: Carlo Costino --- terraform/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform/README.md b/terraform/README.md index b7488e99d..27b4042f3 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -10,7 +10,7 @@ The Admin app repo [has its own terraform directory](https://github.com/GSA/noti :green_book: New developers start here! -Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a shared remote backend. If you are going to be writing Terraform for any of our deployment environments you'll need to hook up to this backend. (You don't need to do this if you are just writing code for the `development` module, becase it stores state locally on your laptop.) +Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a shared remote backend. If you are going to be writing Terraform for any of our deployment environments you'll need to hook up to this backend. (You don't need to do this if you are just writing code for the `development` module, because it stores state locally on your laptop.) 1. Enter the bootstrap module with `cd bootstrap` 1. 
Run `./import.sh` to import the bucket containing remote terraform state into your local state From 2cee820e743382e12c8cd274954fd67ef1151be0 Mon Sep 17 00:00:00 2001 From: John Skiles Skinner Date: Fri, 12 Apr 2024 14:44:48 -0700 Subject: [PATCH 31/40] Update terraform/README.md Co-authored-by: Carlo Costino --- terraform/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform/README.md b/terraform/README.md index 27b4042f3..1d75967f0 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -19,7 +19,7 @@ Assuming [initial setup](#initial-setup) is complete — which it should be ### Use bootstrap credentials 1. Run `./run.sh show -json`. -1. In the output, locate `access_key_id` and `secret_access_key` within the `bucket_creds` resource. These values are secret, so, don't share them with anyone or copy them to anywhere online. +1. In the output, locate `access_key_id` and `secret_access_key` within the `bucket_creds` resource. These values are secret, so don't share them with anyone or copy them to anywhere online. 1. Add the following to `~/.aws/credentials`: ``` [notify-terraform-backend] From b950767ca6f284784702cbc55ea590725c4f9669 Mon Sep 17 00:00:00 2001 From: Carlo Costino Date: Tue, 16 Apr 2024 11:55:52 -0400 Subject: [PATCH 32/40] Updated dependencies and ignore gunicorn audit flag This changeset updates a couple of dependencies, including our Python dependency audit check, and specifically ignores a gunicorn audit flag that appeared on 4/16/2024. As soon as there is an update available for gunicorn that addresses the issue we will remove the flag to ignore the vulnerability report and update the dependency. 
Signed-off-by: Carlo Costino --- .github/workflows/checks.yml | 4 ++- poetry.lock | 48 +++++++++++++++++------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index d9912761b..402404b29 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -86,9 +86,11 @@ jobs: - uses: ./.github/actions/setup-project - name: Create requirements.txt run: poetry export --without-hashes --format=requirements.txt > requirements.txt - - uses: pypa/gh-action-pip-audit@v1.0.6 + - uses: pypa/gh-action-pip-audit@v1.0.8 with: inputs: requirements.txt + ignore-vulns: | + GHSA-w3h3-4rj7-4ph4 static-scan: runs-on: ubuntu-latest diff --git a/poetry.lock b/poetry.lock index df632df80..b0b1e95c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -319,33 +319,33 @@ files = [ [[package]] name = "black" -version = "24.3.0" +version = "24.4.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = 
"black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, ] [package.dependencies] @@ -2071,7 +2071,6 @@ files = [ {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = 
"sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, @@ -2489,7 +2488,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From cd00d14c1fdb8349d21806ae1e0fc4549dbbb73a Mon Sep 17 00:00:00 2001 From: Carlo Costino Date: Wed, 17 Apr 2024 18:02:21 -0400 Subject: [PATCH 33/40] Update gunicorn to latest release This changeset updates the gunicorn dependency to the latest release to address a recent CVE. 
Signed-off-by: Carlo Costino --- .github/workflows/checks.yml | 2 -- poetry.lock | 15 ++++++++------- pyproject.toml | 2 +- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 402404b29..ca05cde57 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -89,8 +89,6 @@ jobs: - uses: pypa/gh-action-pip-audit@v1.0.8 with: inputs: requirements.txt - ignore-vulns: | - GHSA-w3h3-4rj7-4ph4 static-scan: runs-on: ubuntu-latest diff --git a/poetry.lock b/poetry.lock index b0b1e95c4..56be37dda 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1635,23 +1635,24 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "21.2.0" +version = "22.0.0" description = "WSGI HTTP Server for UNIX" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, - {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, ] [package.dependencies] -eventlet = {version = ">=0.24.1", optional = true, markers = "extra == \"eventlet\""} +eventlet = {version = ">=0.24.1,<0.36.0 || >0.36.0", optional = true, markers = "extra == \"eventlet\""} packaging = "*" [package.extras] -eventlet = ["eventlet (>=0.24.1)"] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] tornado = ["tornado (>=0.2)"] [[package]] @@ -4800,4 +4801,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = 
"dcbbd4dd7bd9336e8c13f171c9dfe7e54194254c87d10711b1cd7748d38ce19c" +content-hash = "ac4a9cfb1ee9b5d8824385113cc825e55aefa8ad599649bde17b0333ed304dcd" diff --git a/pyproject.toml b/pyproject.toml index 47b6f1d58..c22cc6933 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ flask-marshmallow = "==0.14.0" flask-migrate = "==4.0.7" flask-redis = "==0.4.0" flask-sqlalchemy = "==3.0.5" -gunicorn = {version = "==21.2.0", extras = ["eventlet"]} +gunicorn = {version = "==22.0.0", extras = ["eventlet"]} iso8601 = "==2.1.0" jsonschema = {version = "==4.20.0", extras = ["format"]} lxml = "==5.2.1" From 9ec75c57c837c4077059e4dc0126b5e9f790a88c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 18 Apr 2024 08:49:14 -0700 Subject: [PATCH 34/40] remove check-db-notification-fails task from schedule --- app/config.py | 5 ----- poetry.lock | 2 ++ 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/app/config.py b/app/config.py index 232dc7ac8..809a71ebe 100644 --- a/app/config.py +++ b/app/config.py @@ -199,11 +199,6 @@ class Config(object): "schedule": timedelta(minutes=66), "options": {"queue": QueueNames.PERIODIC}, }, - "check-db-notification-fails": { - "task": "check-db-notification-fails", - "schedule": crontab(minute="18, 48"), - "options": {"queue": QueueNames.PERIODIC}, - }, "check-job-status": { "task": "check-job-status", "schedule": crontab(), diff --git a/poetry.lock b/poetry.lock index 56be37dda..f64aebdac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2072,6 +2072,7 @@ files = [ {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = 
"sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, @@ -2489,6 +2490,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From d014dd7c57d3fc690f4de72f210c0fe3b4034b65 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 16:13:51 +0000 Subject: [PATCH 35/40] Bump aiohttp from 3.9.3 to 3.9.4 Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.9.3 to 3.9.4. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.9.3...v3.9.4) --- updated-dependencies: - dependency-name: aiohttp dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 156 ++++++++++++++++++++++++++-------------------------- 1 file changed, 78 insertions(+), 78 deletions(-) diff --git a/poetry.lock b/poetry.lock index f64aebdac..bc810f891 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,87 +2,87 @@ [[package]] name = "aiohttp" -version = "3.9.3" +version = "3.9.4" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, - {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, - {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, - {file = 
"aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, - {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, - {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, - {file = 
"aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, - {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, - {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, - {file = 
"aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, - {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, - {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, - 
{file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, - {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = 
"sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, - {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, - {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = 
"aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, ] [package.dependencies] @@ -2490,7 +2490,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -3532,6 +3531,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, From 6281bdd3e3000a8285c9b5e5363c0876a5b027fa 
Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 18 Apr 2024 13:33:17 -0700 Subject: [PATCH 36/40] make email_address check case insensitive --- app/dao/users_dao.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index 048c7ea22..d5ea2dde7 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -54,7 +54,8 @@ def get_login_gov_user(login_uuid, email_address): return user # Remove this 1 July 2025, all users should have login.gov uuids by now - user = User.query.filter_by(email_address=email_address).first() + user = User.query.filter(User.email_address.ilike(email_address)).first() + print(f"USER IS {user.email_address}") if user: save_user_attribute(user, {"login_uuid": login_uuid}) return user From 751e8ab077de656805ecc7d2d07b9ab0729611c9 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 19 Apr 2024 09:14:36 -0700 Subject: [PATCH 37/40] more debug --- app/service/rest.py | 11 +++++++++-- app/service_invite/rest.py | 11 ++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index 953c83bb7..71dbda45f 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -102,7 +102,7 @@ from app.service.service_senders_schema import ( ) from app.service.utils import get_guest_list_objects from app.user.users_schema import post_set_permissions_schema -from app.utils import get_prev_next_pagination_links +from app.utils import get_prev_next_pagination_links, hilite service_blueprint = Blueprint("service", __name__) @@ -314,7 +314,9 @@ def get_users_for_service(service_id): def add_user_to_service(service_id, user_id): service = dao_fetch_service_by_id(service_id) user = get_user_by_id(user_id=user_id) - + # TODO REMOVE DEBUG + print(hilite(f"GOING TO ADD {user.name} to service {service.name}")) + # END DEBUG if user in service.users: error = "User id: {} already part of service id: 
{}".format(user_id, service_id) raise InvalidRequest(error, status_code=400) @@ -322,6 +324,7 @@ def add_user_to_service(service_id, user_id): data = request.get_json() validate(data, post_set_permissions_schema) + permissions = [ Permission(service_id=service_id, user_id=user_id, permission=p["permission"]) for p in data["permissions"] @@ -329,6 +332,10 @@ def add_user_to_service(service_id, user_id): folder_permissions = data.get("folder_permissions", []) dao_add_user_to_service(service, user, permissions, folder_permissions) + # TODO REMOVE DEBUG + print(hilite(f"ADDED {user.name} to service {service.name}")) + # END DEBUG + data = service_schema.dump(service) return jsonify(data=data), 201 diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 264fb4a4b..72dc943c9 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -2,6 +2,7 @@ import json import os from datetime import datetime +from app.utils import hilite from flask import Blueprint, current_app, jsonify, request from itsdangerous import BadData, SignatureExpired from notifications_utils.url_safe_token import check_token, generate_token @@ -32,6 +33,10 @@ register_errors(service_invite) def _create_service_invite(invited_user, invite_link_host): + # TODO REMOVE DEBUG + print(hilite("ENTER _create_service_invite")) + # END DEBUG + template_id = current_app.config["INVITATION_EMAIL_TEMPLATE_ID"] template = dao_get_template_by_id(template_id) @@ -85,11 +90,15 @@ def _create_service_invite(invited_user, invite_link_host): # This is for the login.gov service invite on the # "Set Up Your Profile" path. 
+ redis_key = f"service-invite-{invited_user.email_address}" redis_store.set( - f"service-invite-{invited_user.email_address}", + redis_key, json.dumps(data), ex=3600 * 24, ) + # TODO REMOVE DEBUG + print(hilite(f"Save this data {data} with this redis_key {redis_key}")) + # END DEBUG send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY) From 1ae239cdb849460372064fb5694165a83f5a655d Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 19 Apr 2024 09:27:58 -0700 Subject: [PATCH 38/40] fix flake8 --- app/service/rest.py | 1 - app/service_invite/rest.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index 71dbda45f..ce5083073 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -324,7 +324,6 @@ def add_user_to_service(service_id, user_id): data = request.get_json() validate(data, post_set_permissions_schema) - permissions = [ Permission(service_id=service_id, user_id=user_id, permission=p["permission"]) for p in data["permissions"] diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 72dc943c9..2c666b92f 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -2,7 +2,6 @@ import json import os from datetime import datetime -from app.utils import hilite from flask import Blueprint, current_app, jsonify, request from itsdangerous import BadData, SignatureExpired from notifications_utils.url_safe_token import check_token, generate_token @@ -26,6 +25,7 @@ from app.notifications.process_notifications import ( send_notification_to_queue, ) from app.schemas import invited_user_schema +from app.utils import hilite service_invite = Blueprint("service_invite", __name__) From 075ac1f3d385fa4e15c0f325492ef9d65cd1e145 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 19 Apr 2024 10:12:27 -0700 Subject: [PATCH 39/40] use raw_get and raw_set for better debug of redis --- app/service_invite/rest.py | 4 +++- 1 
file changed, 3 insertions(+), 1 deletion(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 2c666b92f..81dcb98e2 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -91,13 +91,15 @@ def _create_service_invite(invited_user, invite_link_host): # This is for the login.gov service invite on the # "Set Up Your Profile" path. redis_key = f"service-invite-{invited_user.email_address}" - redis_store.set( + redis_store.raw_set( redis_key, json.dumps(data), ex=3600 * 24, ) # TODO REMOVE DEBUG print(hilite(f"Save this data {data} with this redis_key {redis_key}")) + did_we_save_it = redis_store.raw_get(redis_key) + print(hilite(f"Did we save the data successfully? {did_we_save_it}")) # END DEBUG send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY) From c02f32a263ec22b19af2ae5eec2aa286cca9195a Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 19 Apr 2024 10:33:26 -0700 Subject: [PATCH 40/40] use raw_set and raw_get, fix tests --- tests/app/service_invite/test_service_invite_rest.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py index f36ad4ce5..e736a3042 100644 --- a/tests/app/service_invite/test_service_invite_rest.py +++ b/tests/app/service_invite/test_service_invite_rest.py @@ -31,6 +31,9 @@ def test_create_invited_user( extra_args, expected_start_of_invite_url, ): + mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") + mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") email_address = "invited_user@service.gov.uk" invite_from = sample_service.users[0] @@ -92,6 +95,9 @@ def test_create_invited_user( def test_create_invited_user_without_auth_type( admin_request, sample_service, mocker, invitation_email_template ): + + 
mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") email_address = "invited_user@service.gov.uk" invite_from = sample_service.users[0] @@ -213,6 +219,9 @@ def test_resend_expired_invite( invitation_email_template, mocker, ): + + mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") url = f"/service/{sample_expired_user.service_id}/invite/{sample_expired_user.id}/resend" mock_send = mocker.patch("app.service_invite.rest.send_notification_to_queue") mock_persist = mocker.patch("app.service_invite.rest.persist_notification")