diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index d9912761b..ca05cde57 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -86,7 +86,7 @@ jobs: - uses: ./.github/actions/setup-project - name: Create requirements.txt run: poetry export --without-hashes --format=requirements.txt > requirements.txt - - uses: pypa/gh-action-pip-audit@v1.0.6 + - uses: pypa/gh-action-pip-audit@v1.0.8 with: inputs: requirements.txt diff --git a/.github/workflows/deploy-demo.yml b/.github/workflows/deploy-demo.yml index 06f3f8091..945540b19 100644 --- a/.github/workflows/deploy-demo.yml +++ b/.github/workflows/deploy-demo.yml @@ -74,6 +74,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-demo --command "flask command update-templates" + - name: Check for changes to egress config id: changed-egress-config uses: tj-actions/changed-files@v41 diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml index fb0257ddc..20d452b4a 100644 --- a/.github/workflows/deploy-prod.yml +++ b/.github/workflows/deploy-prod.yml @@ -78,6 +78,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-production --command "flask command update-templates" + - name: Check for changes to egress config id: 
changed-egress-config uses: tj-actions/changed-files@v41 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 049c49b36..104fa1521 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -79,6 +79,16 @@ jobs: --var NOTIFY_E2E_TEST_PASSWORD="$NOTIFY_E2E_TEST_PASSWORD" --var LOGIN_DOT_GOV_REGISTRATION_URL="$LOGIN_DOT_GOV_REGISTRATION_URL" + - name: Check for changes to templates.json + id: changed-templates + uses: tj-actions/changed-files@v41 + with: + files: | + app/config_files/templates.json + - name: Update templates + if: steps.changed-templates.outputs.any_changed == 'true' + run: cf run-task notify-api-staging --command "flask command update-templates" + - name: Check for changes to egress config id: changed-egress-config uses: tj-actions/changed-files@v41 diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 1742a310c..2c4d31d8c 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,4 +1,3 @@ -import os from datetime import datetime, timedelta from flask import current_app @@ -6,7 +5,7 @@ from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTick from sqlalchemy import between from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery, redis_store, zendesk_client +from app import notify_celery, zendesk_client from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, process_incomplete_jobs, @@ -24,16 +23,12 @@ from app.dao.jobs_dao import ( find_jobs_with_missing_rows, find_missing_row_for_job, ) -from app.dao.notifications_dao import ( - dao_get_failed_notification_count, - notifications_not_yet_sent, -) +from app.dao.notifications_dao import notifications_not_yet_sent from app.dao.services_dao import ( dao_find_services_sending_to_tv_numbers, dao_find_services_with_high_failure_rates, ) from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago -from 
app.delivery.send_to_providers import provider_to_use from app.enums import JobStatus, NotificationType from app.models import Job from app.notifications.process_notifications import send_notification_to_queue @@ -92,82 +87,6 @@ def expire_or_delete_invitations(): raise -# TODO THIS IS ACTUALLY DEPRECATED, WE ARE REMOVING PHONE NUMBERS FROM THE DB -# SO THERE WILL BE NO REASON TO KEEP TRACK OF THIS COUNT -@notify_celery.task(name="check-db-notification-fails") -def check_db_notification_fails(): - """ - We are going to use redis to keep track of the previous fail count. - - If the number of fails is more than 100% of the limit, we want to send an alert every time this - runs, because it is urgent to fix it. - - If the number is more than 25%, 50% or 75% of the limit, we only want to send an alert - on a breach. I.e., if the last number was at 23% and the current number is 27%, send an email. - But if the last number was 26% and the current is 27%, don't. - """ - last_value = redis_store.get("LAST_DB_NOTIFICATION_COUNT") - if not last_value: - last_value = 0 - else: - last_value = int(last_value.decode("utf-8")) - - failed_count = dao_get_failed_notification_count() - if failed_count > last_value: - redis_store.set("LAST_DB_NOTIFICATION_COUNT", failed_count) - message = "" - curr_env = os.getenv("ENVIRONMENT") - if failed_count >= MAX_NOTIFICATION_FAILS: - message = f"We are over 100% in the db for failed notifications on {curr_env}" - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.9 - and last_value < MAX_NOTIFICATION_FAILS * 0.9 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 90% in the db for failed notifications on {curr_env}", - ) - - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.75 - and last_value < MAX_NOTIFICATION_FAILS * 0.75 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 75% in the db for failed notifications on {curr_env}", - ) - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.5 - and 
last_value < MAX_NOTIFICATION_FAILS * 0.5 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 50% in the db for failed notifications on {curr_env}", - ) - elif ( - failed_count >= MAX_NOTIFICATION_FAILS * 0.25 - and last_value < MAX_NOTIFICATION_FAILS * 0.25 - ): - message = ( - "tts-notify-alerts@gsa.gov", - f"We crossed above 25% in the db for failed notifications on {curr_env}", - ) - # suppress any spam coming from development tier - if message and curr_env != "development": - provider = provider_to_use(NotificationType.EMAIL, False) - from_address = '"{}" <{}@{}>'.format( - "Failed Notification Count Alert", - "test_sender", - current_app.config["NOTIFY_EMAIL_DOMAIN"], - ) - provider.send_email( - from_address, - "tts-notify-alerts@gsa.gov", - "DB Notification Failures Level Breached", - body=str(message), - ) - - @notify_celery.task(name="check-job-status") def check_job_status(): """ diff --git a/app/commands.py b/app/commands.py index 725e7ee99..1a445731f 100644 --- a/app/commands.py +++ b/app/commands.py @@ -721,8 +721,8 @@ def validate_mobile(ctx, param, value): # noqa @click.option("-s", "--state", default="active") @click.option("-d", "--admin", default=False, type=bool) def create_test_user(name, email, mobile_number, password, auth_type, state, admin): - if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test"]: - current_app.logger.error("Can only be run in development") + if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test", "staging"]: + current_app.logger.error("Can only be run in development, test, staging") return data = { diff --git a/app/config.py b/app/config.py index 232dc7ac8..809a71ebe 100644 --- a/app/config.py +++ b/app/config.py @@ -199,11 +199,6 @@ class Config(object): "schedule": timedelta(minutes=66), "options": {"queue": QueueNames.PERIODIC}, }, - "check-db-notification-fails": { - "task": "check-db-notification-fails", - "schedule": crontab(minute="18, 48"), - "options": {"queue": 
QueueNames.PERIODIC}, - }, "check-job-status": { "task": "check-job-status", "schedule": crontab(), diff --git a/app/config_files/templates.json b/app/config_files/templates.json index ebc379755..a37bebaf0 100644 --- a/app/config_files/templates.json +++ b/app/config_files/templates.json @@ -34,7 +34,7 @@ "name": "Notify SMS verify code", "type": "sms", "subject": "", - "content": ["((verify_code)) is your Notify.gov authentication code"] + "content": ["((verify_code)) is your Notify.gov authentication code."] }, { "id": "474e9242-823b-4f99-813d-ed392e7f1201", diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index 048c7ea22..2f83e3bc2 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -54,7 +54,8 @@ def get_login_gov_user(login_uuid, email_address): return user # Remove this 1 July 2025, all users should have login.gov uuids by now - user = User.query.filter_by(email_address=email_address).first() + user = User.query.filter(User.email_address.ilike(email_address)).first() + if user: save_user_attribute(user, {"login_uuid": login_uuid}) return user diff --git a/app/service/rest.py b/app/service/rest.py index 953c83bb7..ce5083073 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -102,7 +102,7 @@ from app.service.service_senders_schema import ( ) from app.service.utils import get_guest_list_objects from app.user.users_schema import post_set_permissions_schema -from app.utils import get_prev_next_pagination_links +from app.utils import get_prev_next_pagination_links, hilite service_blueprint = Blueprint("service", __name__) @@ -314,7 +314,9 @@ def get_users_for_service(service_id): def add_user_to_service(service_id, user_id): service = dao_fetch_service_by_id(service_id) user = get_user_by_id(user_id=user_id) - + # TODO REMOVE DEBUG + print(hilite(f"GOING TO ADD {user.name} to service {service.name}")) + # END DEBUG if user in service.users: error = "User id: {} already part of service id: {}".format(user_id, service_id) raise 
InvalidRequest(error, status_code=400) @@ -329,6 +331,10 @@ def add_user_to_service(service_id, user_id): folder_permissions = data.get("folder_permissions", []) dao_add_user_to_service(service, user, permissions, folder_permissions) + # TODO REMOVE DEBUG + print(hilite(f"ADDED {user.name} to service {service.name}")) + # END DEBUG + data = service_schema.dump(service) return jsonify(data=data), 201 diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 264fb4a4b..81dcb98e2 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -25,6 +25,7 @@ from app.notifications.process_notifications import ( send_notification_to_queue, ) from app.schemas import invited_user_schema +from app.utils import hilite service_invite = Blueprint("service_invite", __name__) @@ -32,6 +33,10 @@ register_errors(service_invite) def _create_service_invite(invited_user, invite_link_host): + # TODO REMOVE DEBUG + print(hilite("ENTER _create_service_invite")) + # END DEBUG + template_id = current_app.config["INVITATION_EMAIL_TEMPLATE_ID"] template = dao_get_template_by_id(template_id) @@ -85,11 +90,17 @@ def _create_service_invite(invited_user, invite_link_host): # This is for the login.gov service invite on the # "Set Up Your Profile" path. - redis_store.set( - f"service-invite-{invited_user.email_address}", + redis_key = f"service-invite-{invited_user.email_address}" + redis_store.raw_set( + redis_key, json.dumps(data), ex=3600 * 24, ) + # TODO REMOVE DEBUG + print(hilite(f"Save this data {data} with this redis_key {redis_key}")) + did_we_save_it = redis_store.raw_get(redis_key) + print(hilite(f"Did we save the data successfully? 
{did_we_save_it}")) + # END DEBUG send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY) diff --git a/docs/all.md b/docs/all.md index 2e98b84d1..0ad78ae2b 100644 --- a/docs/all.md +++ b/docs/all.md @@ -531,6 +531,16 @@ cf run-task CLOUD-GOV-APP --command "flask command update-templates" --name YOUR [Here's more documentation](https://docs.cloudfoundry.org/devguide/using-tasks.html) about Cloud Foundry tasks. +# Commonly run commands + +(Note: to obtain the CLOUD_GOV_APP name, run `cf apps` and find the name of the app for the tier you are targeting) + +To promote a user to platform admin: +cf run-task --command "flask command promote-user-to-platform-admin --user-email-address=" + +To update templates: +cf run-task --command "flask command update-templates" + # Commands for test loading the local dev database All commands use the `-g` or `--generate` to determine how many instances to load to the db. The `-g` or `--generate` option is required and will always defult to 1. An example: `flask command add-test-uses-to-db -g 6` will generate 6 random users and insert them into the db. 
diff --git a/poetry.lock b/poetry.lock index b6434016a..4aabcfa22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,87 +2,87 @@ [[package]] name = "aiohttp" -version = "3.9.3" +version = "3.9.4" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, - {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, - {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, - {file = 
"aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, - {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, - {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, - {file = 
"aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, - {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, - {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, - {file = 
"aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, - {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, - {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, - {file = 
"aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, - {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, - {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, - {file = 
"aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = 
"aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = 
"aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = 
"aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = 
"aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, ] [package.dependencies] @@ -204,17 
+204,17 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "awscli" -version = "1.32.79" +version = "1.32.83" description = "Universal Command Line Environment for AWS." optional = false python-versions = ">=3.8" files = [ - {file = "awscli-1.32.79-py3-none-any.whl", hash = "sha256:0d74c5aac7531094ec99cf9d15fe571b8bf1c7a8e08e5a9b611d283d1ad8fd84"}, - {file = "awscli-1.32.79.tar.gz", hash = "sha256:865179b663fafabd774128644ae102dfcfea751211d3054a336eea956cf43b22"}, + {file = "awscli-1.32.83-py3-none-any.whl", hash = "sha256:2fa897df5f1f150fa1d1c146b8acaf11963356dd9efcd6d201a1c77ad898b2ad"}, + {file = "awscli-1.32.83.tar.gz", hash = "sha256:c7e480ee911df228f98b284fb4d01e2bd1fe13a18998aecb4525f3a1993eabba"}, ] [package.dependencies] -botocore = "1.34.79" +botocore = "1.34.83" colorama = ">=0.2.5,<0.4.5" docutils = ">=0.10,<0.17" PyYAML = ">=3.10,<6.1" @@ -319,33 +319,33 @@ files = [ [[package]] name = "black" -version = "24.3.0" +version = "24.4.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = 
"black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = 
"black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, ] [package.dependencies] @@ -403,17 +403,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.79" +version = "1.34.83" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.79-py3-none-any.whl", hash = "sha256:265b0b4865e8c07e27abb32a31d2bd9129bb009b1d89ca0783776ec084886123"}, - {file = "boto3-1.34.79.tar.gz", hash = "sha256:139dd2d94eaa0e3213ff37ba7cf4cb2e3823269178fe8f3e33c965f680a9ddde"}, + {file = "boto3-1.34.83-py3-none-any.whl", hash = "sha256:33cf93f6de5176f1188c923f4de1ae149ed723b89ed12e434f2b2f628491769e"}, + {file = "boto3-1.34.83.tar.gz", hash = "sha256:9733ce811bd82feab506ad9309e375a79cabe8c6149061971c17754ce8997551"}, ] [package.dependencies] -botocore = ">=1.34.79,<1.35.0" +botocore = ">=1.34.83,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -422,13 +422,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.79" +version = "1.34.83" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.79-py3-none-any.whl", hash = "sha256:a42a014d3dbaa9ef123810592af69f9e55b456c5be3ac9efc037325685519e83"}, - {file = "botocore-1.34.79.tar.gz", hash = "sha256:6b59b0f7de219d383a2a633f6718c2600642ebcb707749dc6c67a6a436474b7a"}, + {file = "botocore-1.34.83-py3-none-any.whl", hash = "sha256:0a3fbbe018416aeefa8978454fb0b8129adbaf556647b72269bf02e4bf1f4161"}, + {file = "botocore-1.34.83.tar.gz", hash = "sha256:0f302aa76283d4df62b4fbb6d3d20115c1a8957fc02171257fc93904d69d5636"}, ] [package.dependencies] @@ -1634,23 +1634,24 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "21.2.0" +version = "22.0.0" description = "WSGI HTTP Server for UNIX" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, - {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, ] [package.dependencies] -eventlet = {version = ">=0.24.1", optional = true, markers = "extra == \"eventlet\""} +eventlet = {version = ">=0.24.1,<0.36.0 || >0.36.0", optional = true, markers = "extra == \"eventlet\""} packaging = "*" [package.extras] -eventlet = ["eventlet (>=0.24.1)"] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] tornado = ["tornado (>=0.2)"] [[package]] @@ -1707,13 +1708,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications 
(IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -2381,13 +2382,13 @@ files = [ [[package]] name = "moto" -version = "5.0.3" +version = "5.0.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.3-py2.py3-none-any.whl", hash = "sha256:261d312d1d69c2afccb450a0566666d7b75d76ed6a7d00aac278a9633b073ff0"}, - {file = "moto-5.0.3.tar.gz", hash = "sha256:070ac2edf89ad7aee28534481ce68e2f344c8a6a8fefec5427eea0d599bfdbdb"}, + {file = "moto-5.0.5-py2.py3-none-any.whl", hash = "sha256:4ecdd4084491a2f25f7a7925416dcf07eee0031ce724957439a32ef764b22874"}, + {file = "moto-5.0.5.tar.gz", hash = "sha256:2eaca2df7758f6868df420bf0725cd0b93d98709606f1fb8b2343b5bdc822d91"}, ] [package.dependencies] @@ -2402,24 +2403,25 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", 
"openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] cognitoidp = ["joserfc (>=0.9.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.4)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.4)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", 
"py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.4)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.4)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.4)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] [[package]] @@ -2669,7 +2671,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "0.4.5" +version = "0.4.6" description = "" optional = false python-versions = "^3.12.2" @@ -2680,8 +2682,8 @@ develop = false async-timeout = "^4.0.2" bleach = "^6.1.0" blinker = "^1.7.0" -boto3 = "^1.34.77" -botocore = "^1.34.79" +boto3 = "^1.34.83" +botocore = "^1.34.83" cachetools = "^5.3.0" certifi = "^2024.2.2" cffi = "^1.16.0" @@ -2692,7 +2694,7 @@ flask = "^3.0.3" flask-redis = "^0.4.0" geojson = "^3.0.1" govuk-bank-holidays = "^0.14" -idna = "^3.6" +idna = "^3.7" itsdangerous = "^2.1.2" jinja2 = "^3.1.3" jmespath = "^1.0.1" @@ -2713,15 +2715,15 @@ s3transfer = "^0.10.1" shapely = "^2.0.1" six = "^1.16.0" smartypants = "^2.0.1" 
-urllib3 = "^2.0.7" +urllib3 = "^2.2.1" webencodings = "^0.5.1" werkzeug = "^3.0.1" [package.source] type = "git" url = "https://github.com/GSA/notifications-utils.git" -reference = "0b13705" -resolved_reference = "0b1370509ec0223b8f93fb35ba93a09fc51daef5" +reference = "d20efc2" +resolved_reference = "d20efc29d68ecbb55ef964db890d17426cf34a0f" [[package]] name = "numpy" @@ -4276,6 +4278,34 @@ postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +description = "Various utility functions for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg 
(>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + [[package]] name = "stevedore" version = "5.2.0" @@ -4768,4 +4798,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "95411eea4439c5a4e7c73de0512401e2d4d3a658b440cdb2985f2d53831b0ecb" +content-hash = "4a4f784f065a9d8e661352635ce7cc72f5d74f017c1261cbc85a582f8042b335" diff --git a/pyproject.toml b/pyproject.toml index 8288be2ae..bac372212 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ flask-marshmallow = "==1.2.1" flask-migrate = "==4.0.7" flask-redis = "==0.4.0" flask-sqlalchemy = "==3.0.5" -gunicorn = {version = "==21.2.0", extras = ["eventlet"]} +gunicorn = {version = "==22.0.0", extras = ["eventlet"]} iso8601 = "==2.1.0" jsonschema = {version = "==4.21.1", extras = ["format"]} lxml = "==5.2.1" @@ -40,7 +40,7 @@ marshmallow = "==3.21.1" marshmallow-sqlalchemy = "==1.0.0" newrelic = "*" notifications-python-client = "==9.0.0" -notifications-utils = {git = "https://github.com/GSA/notifications-utils.git",rev = "0b13705"} +notifications-utils = {git = "https://github.com/GSA/notifications-utils.git",rev = "d20efc2"} oscrypto = "==1.3.0" packaging = "==23.2" poetry-dotenv-plugin = "==0.2.0" @@ -48,7 +48,7 @@ psycopg2-binary = "==2.9.9" pyjwt = "==2.8.0" python-dotenv = "==1.0.0" sqlalchemy = "==1.4.40" -werkzeug = "^3.0.1" +werkzeug = "^3.0.2" faker = "^24.4.0" @@ -64,7 +64,7 @@ freezegun = "^1.4.0" honcho = "*" isort = "^5.13.2" jinja2-cli = {version = "==0.8.2", extras = ["yaml"]} -moto = "==5.0.3" +moto = "==5.0.5" pip-audit = "*" pre-commit = "^3.6.0" pytest = "^8.1.1" @@ -74,6 +74,8 @@ pytest-cov = "^5.0.0" pytest-xdist = "^3.5.0" radon = "^6.0.1" requests-mock = "^1.11.0" +setuptools = "^69.0.3" +sqlalchemy-utils = "^0.41.2" vulture = "^2.10" diff --git 
a/terraform/README.md b/terraform/README.md index 40ab78a19..1d75967f0 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -1,33 +1,62 @@ # Terraform -This directory holds the terraform modules for maintaining your complete persistent infrastructure. +This directory holds the Terraform modules for maintaining Notify.gov's API infrastructure. You might want to: +* [read about the directory structure](#structure), or +* [get set up to develop HCL code](#retrieving-existing-bucket-credentials). -Prerequisite: install the `jq` JSON processor: `brew install jq` +The Admin app repo [has its own terraform directory](https://github.com/GSA/notifications-admin/tree/main/terraform) but a lot of the below instructions apply to both apps. + +## Retrieving existing bucket credentials + +:green_book: New developers start here! + +Assuming [initial setup](#initial-setup) is complete — which it should be if Notify.gov is online — Terraform state is stored in a shared remote backend. If you are going to be writing Terraform for any of our deployment environments you'll need to hook up to this backend. (You don't need to do this if you are just writing code for the `development` module, because it stores state locally on your laptop.) + +1. Enter the bootstrap module with `cd bootstrap` +1. Run `./import.sh` to import the bucket containing remote terraform state into your local state +1. Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) + +### Use bootstrap credentials + +1. Run `./run.sh show -json`. +1. In the output, locate `access_key_id` and `secret_access_key` within the `bucket_creds` resource. These values are secret, so don't share them with anyone or copy them to anywhere online. +1. Add the following to `~/.aws/credentials`: + ``` + [notify-terraform-backend] + aws_access_key_id = + aws_secret_access_key = + ``` +1. Check which AWS profile you are using with `aws configure list`. 
If needed, use `export AWS_PROFILE=notify-terraform-backend` to change to the profile and credentials you just added. + +These credentials will allow Terraform to access the AWS/Cloud.gov bucket in which developers share Terraform state files. Now you are ready to develop Terraform using the [Workflow for deployed environments](#workflow-for-deployed-environments). ## Initial setup -1. Manually run the bootstrap module following instructions under `Terraform State Credentials` +These instructions were used for deploying the project for the first time, years ago. We should not have to perform these steps again. They are provided here for reference. + +1. Manually run the bootstrap module following instructions under [Terraform State Credentials](#terraform-state-credentials) 1. Setup CI/CD Pipeline to run Terraform - 1. Copy bootstrap credentials to your CI/CD secrets using the instructions in the base README - 1. Create a cloud.gov SpaceDeployer by following the instructions under `SpaceDeployers` - 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README + 1. Copy bootstrap credentials to your CI/CD secrets using the instructions in the base README + 1. Create a cloud.gov SpaceDeployer by following the instructions under [SpaceDeployers](#spacedeployers) + 1. Copy SpaceDeployer credentials to your CI/CD secrets using the instructions in the base README 1. Manually Running Terraform - 1. Follow instructions under `Set up a new environment` to create your infrastructure + 1. Follow instructions under [Workflow for deployed environments](#workflow-for-deployed-environments) to create your infrastructure -## Terraform State Credentials +### Terraform state credentials -The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. +The bootstrap module is used to create an s3 bucket for later terraform runs to store their state in. 
(If the bucket is already created, you should [Use bootstrap credentials](#use-bootstrap-credentials)) -### Bootstrapping the state storage s3 buckets for the first time +#### Bootstrapping the state storage s3 buckets for the first time -1. Run `terraform init` +1. Within the `bootstrap` directory, run `terraform init` 1. Run `./run.sh plan` to verify that the changes are what you expect -1. Run `./run.sh apply` to set up the bucket and retrieve credentials -1. Follow instructions under `Use bootstrap credentials` +1. Run `./run.sh apply` to set up the bucket +1. Follow instructions under [Use bootstrap credentials](#use-bootstrap-credentials) 1. Ensure that `import.sh` includes a line and correct IDs for any resources created 1. Run `./teardown_creds.sh` to remove the space deployer account used to create the s3 bucket +1. Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` -### To make changes to the bootstrap module +#### To make changes to the bootstrap module *This should not be necessary in most cases* @@ -38,22 +67,6 @@ The bootstrap module is used to create an s3 bucket for later terraform runs to 1. Make your changes 1. Continue from step 2 of the boostrapping instructions -### Retrieving existing bucket credentials - -1. Run `./run.sh show` -1. Follow instructions under `Use bootstrap credentials` - -#### Use bootstrap credentials - -1. Add the following to `~/.aws/credentials` - ``` - [notify-terraform-backend] - aws_access_key_id = - aws_secret_access_key = - ``` - -1. Copy `bucket` from `bucket_credentials` output to the backend block of `staging/providers.tf` and `production/providers.tf` - ## SpaceDeployers A [SpaceDeployer](https://cloud.gov/docs/services/cloud-gov-service-account/) account is required to run terraform or @@ -61,46 +74,85 @@ deploy the application from the CI/CD pipeline. 
Create a new account by running: `./create_service_account.sh -s -u ` -## Set up a new environment manually +SpaceDeployers are also needed to run Terraform locally — they fill user and password input variables (via `deployers` within `main.tf`) that some of our Terraform modules require when they start running. Using a SpaceDeployer account locally is covered in [the next section](#workflow-for-deployed-environments). -The below steps rely on you first configuring access to the Terraform state in s3 as described in [Terraform State Credentials](#terraform-state-credentials). +## Workflow for deployed environments -1. `cd` to the environment you are working in +These are the steps for developing Terraform code for our deployed environment modules (`sandbox`, `demo`, `staging` and `production`) locally on your laptop. Or for setting up a new deployment environment, or otherwise for running Terraform manually in any module that uses remote state. You don't need to do all this to run code in the `development` module, because it is not a deployed environment and it does not use remote state. -1. Set up a SpaceDeployer +> [!CAUTION] +> There is one risky step below (`apply`) which is safe only in the `sandbox` environment and **should not** be run in any other deployed environment. + +These steps assume shared [Terraform state credentials](#terraform-state-credentials) exist in s3, and that you are [Using those credentials](#use-bootstrap-credentials). + +1. `cd` to the environment you plan to work in. When developing new features/resources, try out your code in `sandbox`. Only once the code is proven should you copy-and-paste it to each higher environment. + +1. Run `cf spaces` and, from the output, copy the space name for the environment you are working in, such as `notify-sandbox`. + +1. Next you will set up a SpaceDeployer. 
Prepare to fill in these values: + * `` will be the string you copied from the prior step + * `` can be anything, although we recommend something that communicates the purpose of the deployer. For example: "circleci-deployer" for the credentials CircleCI uses to deploy the application, or "sandbox-" for credentials to run terraform manually. + + Put those two values into this command: ```bash - # create a space deployer service instance that can log in with just a username and password - # the value of < SPACE_NAME > should be `staging` or `prod` depending on where you are working - # the value for < ACCOUNT_NAME > can be anything, although we recommend - # something that communicates the purpose of the deployer - # for example: circleci-deployer for the credentials CircleCI uses to - # deploy the application or -terraform for credentials to run terraform manually ./create_service_account.sh -s -u > secrets.auto.tfvars ``` - The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. Read more in the [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/). + The script will output the `username` (as `cf_user`) and `password` (as `cf_password`) for your ``. The [cloud.gov service account documentation](https://cloud.gov/docs/services/cloud-gov-service-account/) has more information. - The easiest way to use this script is to redirect the output directly to the `secrets.auto.tfvars` file it needs to be used in + The command uses the redirection operator (`>`) to write that output to the `secrets.auto.tfvars` file. Terraform will find the username and password there, and use them as input variables. -1. Run terraform from your new environment directory with +1. While still in an environment directory, initialize Terraform: ```bash terraform init + ``` + + If this command fails, you may need to run `terraform init -upgrade` to make sure new module versions are picked up. 
Or, `terraform init -migrate-state` to bump the remote backend. + +1. Then, run Terraform in a non-destructive way: + ```bash terraform plan ``` - If the `terraform init` command fails, you may need to run `terraform init -upgrade` to make sure new module versions are picked up. + This will show you any pending changes that Terraform is ready to make. -1. Apply changes with `terraform apply`. + :pencil: Now is the time to write any HCL code you are planning to write, re-running `terraform plan` to confirm that the code works as you develop. Keep in mind that any changes to the codebase that you commit will be run by the CI/CD pipeline. -1. Remove the space deployer service instance if it doesn't need to be used again, such as when manually running terraform once. +1. **Only if it is safe to do so**, apply your changes. + + :skull: Applying changes in the wrong directory can mess up a deployed environment that people are relying on + + Double-check what directory you are in, like with the `pwd` command. You should probably only apply while in the `sandbox` directory / environment. + + Once you are sure it is safe, run: + ```bash + terraform apply + ``` + + This command *will deploy your changes* to the cloud. This is a healthy part of testing your code in the sandbox, or if you are creating a new environment (a new directory). **Do not** apply in environments that people are relying upon. + +1. Remove the space deployer service instance when you are done manually running Terraform. ```bash # and have the same values as used above. ./destroy_service_account.sh -s -u ``` + Optionally, you can also `rm secrets.auto.tfvars` + ## Structure -Each environment has its own module, which relies on a shared module for everything except the providers code and environment specific variables and settings. +The `terraform` directory contains sub-directories (`staging`, `production`, etc.) named for deployment environments. 
Each of these is a *module*, which is just Terraform's word for a directory with some .tf files in it. Each module governs the infrastructure of the environment for which it is named. This directory structure forms "[bulkheads](https://blog.gruntwork.io/how-to-manage-terraform-state-28f5697e68fa)" which isolate Terraform commands to a single environment, limiting accidental damage. + +The `development` module is rather different from the other environment modules. While the other environments can be used to create (or destroy) cloud resources, the development module mostly just sets up access to pre-existing resources needed for local software development. + +The `bootstrap` directory is not an environment module. Instead, it sets up infrastructure needed to deploy Terraform in any of the environments. If you are new to the project, [this is where you should start](#retrieving-existing-bucket-credentials). + +Similarly, `shared` is not an environment. It is a module that lends code to all the environments. Please note that changes to the `shared` codebase will be applied to all environments the next time CI/CD (or a user) runs Terraform in that environment. + +> [!WARNING] +> Editing `shared` code is risky because it will be applied to production + +Files within these directories look like this: ``` - bootstrap/ @@ -130,3 +182,16 @@ In the bootstrap module: - `run.sh` Helper script to set up a space deployer and run terraform. The terraform action (`show`/`plan`/`apply`/`destroy`) is passed as an argument - `teardown_creds.sh` Helper script to remove the space deployer setup as part of `run.sh` - `import.sh` Helper script to create a new local state file in case terraform changes are needed + +## Troubleshooting + +### Expired token + +``` +The token expired, was revoked, or the token ID is incorrect. Please log back in to re-authenticate. 
+``` +You need to re-authenticate with the Cloud Foundry CLI +``` +cf login -a api.fr.cloud.gov --sso +``` +You may also need to log in again to the Cloud.gov website. diff --git a/terraform/bootstrap/main.tf b/terraform/bootstrap/main.tf index 625cb8093..2394f4ab8 100644 --- a/terraform/bootstrap/main.tf +++ b/terraform/bootstrap/main.tf @@ -14,7 +14,3 @@ resource "cloudfoundry_service_key" "bucket_creds" { name = "${local.s3_service_name}-access" service_instance = module.s3.bucket_id } - -output "bucket_credentials" { - value = cloudfoundry_service_key.bucket_creds.credentials -} diff --git a/test_csv_files/multiple_sms.csv b/test_csv_files/multiple_sms.csv index 2ecad9140..3253e0ae1 100644 --- a/test_csv_files/multiple_sms.csv +++ b/test_csv_files/multiple_sms.csv @@ -1,11 +1,11 @@ PhoneNumber,Name -+441234123121,chris -+441234123122,chris -+441234123123,chris -+441234123124,chris -+441234123125,chris -+441234123126,chris -+441234123127,chris -+441234123128,chris -+441234123129,chris -+441234123120,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris ++14254147755,chris diff --git a/test_csv_files/sms.csv b/test_csv_files/sms.csv index 728639972..2227cbfe6 100644 --- a/test_csv_files/sms.csv +++ b/test_csv_files/sms.csv @@ -1,2 +1,2 @@ PHONE NUMBER, IGNORE THIS COLUMN -+441234123123, nope ++14254147755, nope diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 94b586a3a..1652700f0 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -8,7 +8,6 @@ from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTick from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( - check_db_notification_fails, check_for_missing_rows_in_completed_jobs, 
check_for_services_with_high_failure_rates_or_sending_to_tv_numbers, check_job_status, @@ -49,53 +48,6 @@ def test_should_call_expire_or_delete_invotations_on_expire_or_delete_invitation ) -def test_should_check_db_notification_fails_task_over_100_percent( - notify_db_session, mocker -): - mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_dao.return_value = 100000 - check_db_notification_fails() - assert mock_provider.call_count == 1 - - -def test_should_check_db_notification_fails_task_less_than_25_percent( - notify_db_session, mocker -): - mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_redis = mocker.patch("app.celery.scheduled_tasks.redis_store") - mock_redis.get.return_value = 0 - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_dao.return_value = 10 - check_db_notification_fails() - assert mock_provider.call_count == 0 - - -def test_should_check_db_notification_fails_task_over_50_percent( - notify_db_session, mocker -): - # This tests that we only send an alert the 1st time we cross over 50%. We don't want - # to be sending the same alert every hour, especially as it might be quite normal for the db - # fails to be at 25 or 50 for long periods of time. 
- mock_dao = mocker.patch( - "app.celery.scheduled_tasks.dao_get_failed_notification_count" - ) - mock_provider = mocker.patch("app.celery.scheduled_tasks.provider_to_use") - mock_redis = mocker.patch("app.celery.scheduled_tasks.redis_store") - mock_dao.return_value = 5001 - mock_redis.get.return_value = "0".encode("utf-8") - check_db_notification_fails() - assert mock_provider.call_count == 1 - - mock_redis.get.return_value = "5001".encode("utf-8") - check_db_notification_fails() - assert mock_provider.call_count == 1 - - def test_should_update_scheduled_jobs_and_put_on_queue(mocker, sample_template): mocked = mocker.patch("app.celery.tasks.process_job.apply_async") diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 063770bfc..7b1463d2c 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -100,14 +100,14 @@ def test_should_process_sms_job(sample_job, mocker): s3.get_job_and_metadata_from_s3.assert_called_once_with( service_id=str(sample_job.service.id), job_id=str(sample_job.id) ) - assert encryption.encrypt.call_args[0][0]["to"] == "+441234123123" + assert encryption.encrypt.call_args[0][0]["to"] == "+14254147755" assert encryption.encrypt.call_args[0][0]["template"] == str(sample_job.template.id) assert ( encryption.encrypt.call_args[0][0]["template_version"] == sample_job.template.version ) assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "phonenumber": "+441234123123" + "phonenumber": "+14254147755" } assert encryption.encrypt.call_args[0][0]["row_number"] == 0 tasks.save_sms.apply_async.assert_called_once_with( @@ -279,7 +279,7 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mock service_id=str(sample_job_with_placeholdered_template.service.id), job_id=str(sample_job_with_placeholdered_template.id), ) - assert encryption.encrypt.call_args[0][0]["to"] == "+441234123120" + assert encryption.encrypt.call_args[0][0]["to"] == "+14254147755" assert 
encryption.encrypt.call_args[0][0]["template"] == str( sample_job_with_placeholdered_template.template.id ) @@ -288,7 +288,7 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mock == sample_job_with_placeholdered_template.template.version ) # noqa assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "phonenumber": "+441234123120", + "phonenumber": "+14254147755", "name": "chris", } assert tasks.save_sms.apply_async.call_count == 10 @@ -397,7 +397,7 @@ def test_should_send_template_to_correct_sms_task_and_persist( ): notification = _notification_json( sample_template_with_placeholders, - to="+447234123123", + to="+14254147755", personalisation={"name": "Jo"}, ) @@ -558,7 +558,7 @@ def test_should_not_save_email_if_restricted_service_and_invalid_email_address( def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker): notification = _notification_json( sample_job.template, - to="+447234123123", + to="+14254147755", job_id=sample_job.id, row_number=2, ) @@ -813,7 +813,7 @@ def test_should_use_email_template_and_persist_without_personalisation( def test_save_sms_should_go_to_retry_queue_if_database_errors(sample_template, mocker): - notification = _notification_json(sample_template, "+447234123123") + notification = _notification_json(sample_template, "+14254147755") expected_exception = SQLAlchemyError() @@ -1017,7 +1017,7 @@ def test_send_inbound_sms_to_service_post_https_request_to_service( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1063,7 +1063,7 @@ def test_send_inbound_sms_to_service_does_not_sent_request_when_inbound_api_does inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some 
content", ) @@ -1084,7 +1084,7 @@ def test_send_inbound_sms_to_service_retries_if_request_returns_500( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1109,7 +1109,7 @@ def test_send_inbound_sms_to_service_retries_if_request_throws_unknown( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1134,7 +1134,7 @@ def test_send_inbound_sms_to_service_does_not_retries_if_request_returns_404( inbound_sms = create_inbound_sms( service=sample_service, notify_number="0751421", - user_number="447700900111", + user_number="+14254147755", provider_date=datetime(2017, 6, 20), content="Here is some content", ) @@ -1429,7 +1429,7 @@ def test_save_api_email_or_sms(mocker, sample_service, notification_type): data.update({"to": "jane.citizen@example.com"}) expected_queue = QueueNames.SEND_EMAIL else: - data.update({"to": "+447700900855"}) + data.update({"to": "+14254147755"}) expected_queue = QueueNames.SEND_SMS encrypted = encryption.encrypt(data) @@ -1483,7 +1483,7 @@ def test_save_api_email_dont_retry_if_notification_already_exists( data.update({"to": "jane.citizen@example.com"}) expected_queue = QueueNames.SEND_EMAIL else: - data.update({"to": "+447700900855"}) + data.update({"to": "+14254147755"}) expected_queue = QueueNames.SEND_SMS encrypted = encryption.encrypt(data) @@ -1576,7 +1576,7 @@ def test_save_tasks_use_cached_service_and_template( NotificationType.SMS, save_api_sms, QueueNames.SEND_SMS, - "+447700900855", + "+14254147755", ), ( NotificationType.EMAIL, diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 52198071a..160c96f97 100644 --- 
a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -76,7 +76,7 @@ def test_persist_notification_creates_and_save_to_db( notification = persist_notification( template_id=sample_template.id, template_version=sample_template.version, - recipient="+447111111111", + recipient="+14254147755", service=sample_template.service, personalisation={}, notification_type=NotificationType.SMS, @@ -120,7 +120,7 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ persist_notification( template_id=None, template_version=None, - recipient="+447111111111", + recipient="+14254147755", service=sample_api_key.service, personalisation=None, notification_type=NotificationType.SMS, @@ -178,7 +178,7 @@ def test_persist_notification_cache_is_not_incremented_on_failure_to_create_noti persist_notification( template_id=None, template_version=None, - recipient="+447111111111", + recipient="+14254147755", service=sample_api_key.service, personalisation=None, notification_type=NotificationType.SMS, @@ -321,9 +321,9 @@ def test_simulated_recipient(notify_api, to_address, notification_type, expected @pytest.mark.parametrize( "recipient, expected_international, expected_prefix, expected_units", [ - ("+447900900123", True, "44", 1), # UK - ("+73122345678", True, "7", 1), # Russia - ("+360623400400", True, "36", 1), # Hungary + # ("+447900900123", True, "44", 1), # UK + # ("+73122345678", True, "7", 1), # Russia + # ("+360623400400", True, "36", 1), # Hungary ("2028675309", False, "1", 1), ], # USA ) @@ -382,7 +382,7 @@ def test_persist_notification_with_international_info_does_not_store_for_email( @pytest.mark.parametrize( "recipient, expected_recipient_normalised", [ - ("+4407900900123", "+447900900123"), + # ("+4407900900123", "+447900900123"), ("202-867-5309", "+12028675309"), ("1 202-867-5309", "+12028675309"), ("+1 (202) 867-5309", "+12028675309"), diff --git 
a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index 7dcb8dd1d..42d96c93d 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -165,7 +165,7 @@ def test_service_can_send_to_recipient_passes(key_type, notify_db_session): "user_number, recipient_number", [ ["+12028675309", "202-867-5309"], - ["+447513332413", "+44 (07513) 332413"], + # ["+447513332413", "+44 (07513) 332413"], ], ) def test_service_can_send_to_recipient_passes_with_non_normalized_number( @@ -569,6 +569,9 @@ def test_check_rate_limiting_validates_api_rate_limit_and_daily_limit( @pytest.mark.parametrize("key_type", [KeyType.TEST, KeyType.NORMAL]) +@pytest.mark.skip( + "We currently don't support international numbers, our validation fails before here" +) def test_validate_and_format_recipient_fails_when_international_number_and_service_does_not_allow_int_sms( key_type, notify_db_session, @@ -588,6 +591,7 @@ def test_validate_and_format_recipient_fails_when_international_number_and_servi @pytest.mark.parametrize("key_type", [KeyType.TEST, KeyType.NORMAL]) +@pytest.mark.skip("We currently don't support international numbers") def test_validate_and_format_recipient_succeeds_with_international_numbers_if_service_does_allow_int_sms( key_type, sample_service_full_permissions ): diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index d85cb939a..b1bd27988 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -89,7 +89,7 @@ def test_should_reject_bad_phone_numbers(notify_api, sample_template, mocker): @pytest.mark.parametrize( "template_type, to", [ - (TemplateType.SMS, "+447700900855"), + (TemplateType.SMS, "+14254147755"), (TemplateType.EMAIL, "ok@ok.com"), ], ) @@ -257,7 +257,7 @@ def 
test_should_not_send_notification_for_archived_template( sample_template.archived = True dao_update_template(sample_template) json_data = json.dumps( - {"to": "+447700900855", "template": sample_template.id} + {"to": "+14254147755", "template": sample_template.id} ) auth_header = create_service_authorization_header( service_id=sample_template.service_id @@ -276,7 +276,7 @@ def test_should_not_send_notification_for_archived_template( @pytest.mark.parametrize( "template_type, to", [ - (TemplateType.SMS, "+447700900855"), + (TemplateType.SMS, "+16618675309"), (TemplateType.EMAIL, "not-someone-we-trust@email-address.com"), ], ) @@ -1230,6 +1230,7 @@ def test_should_allow_store_original_number_on_sms_notification( assert "1" == notifications[0].to +@pytest.mark.skip("We don't support international at moment") def test_should_not_allow_sending_to_international_number_without_international_permission( client, sample_template, mocker ): @@ -1254,6 +1255,7 @@ def test_should_not_allow_sending_to_international_number_without_international_ assert error_json["message"] == "Cannot send to international mobile numbers" +@pytest.mark.skip("We don't support international at the moment") def test_should_allow_sending_to_international_number_with_international_permission( client, sample_service_full_permissions, mocker ): diff --git a/tests/app/service/send_notification/test_send_one_off_notification.py b/tests/app/service/send_notification/test_send_one_off_notification.py index 231b42be0..000e22005 100644 --- a/tests/app/service/send_notification/test_send_one_off_notification.py +++ b/tests/app/service/send_notification/test_send_one_off_notification.py @@ -98,6 +98,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_sms( ) +@pytest.mark.skip("We currently don't support international") def test_send_one_off_notification_calls_persist_correctly_for_international_sms( persist_mock, celery_mock, notify_db_session ): diff --git a/tests/app/service/test_rest.py 
b/tests/app/service/test_rest.py index d1691c847..5535f814b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -65,10 +65,20 @@ def test_get_service_list(client, service_factory): response = client.get("/service", headers=[auth_header]) assert response.status_code == 200 json_resp = json.loads(response.get_data(as_text=True)) - assert len(json_resp["data"]) == 3 - assert json_resp["data"][0]["name"] == "one" - assert json_resp["data"][1]["name"] == "two" - assert json_resp["data"][2]["name"] == "three" + + found_service_one = False + found_service_two = False + found_service_three = False + for item in json_resp["data"]: + if item["name"] == "one": + found_service_one = True + elif item["name"] == "two": + found_service_two = True + elif item["name"] == "three": + found_service_three = True + assert found_service_one is True + assert found_service_two is True + assert found_service_three is True def test_get_service_list_with_only_active_flag(client, service_factory): @@ -1262,7 +1272,7 @@ def test_add_existing_user_to_another_service_with_all_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) # they must exist in db first save_model_user(user_to_add, validated_email_access=True) @@ -1332,7 +1342,7 @@ def test_add_existing_user_to_another_service_with_send_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1382,7 +1392,7 @@ def test_add_existing_user_to_another_service_with_manage_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1433,7 +1443,7 @@ def 
test_add_existing_user_to_another_service_with_folder_permissions( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1474,7 +1484,7 @@ def test_add_existing_user_to_another_service_with_manage_api_keys( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) @@ -1514,7 +1524,7 @@ def test_add_existing_user_to_non_existing_service_returns404( name="Invited User", email_address="invited@digital.fake.gov", password="password", - mobile_number="+4477123456", + mobile_number="+14254147755", ) save_model_user(user_to_add, validated_email_access=True) diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py index f36ad4ce5..e736a3042 100644 --- a/tests/app/service_invite/test_service_invite_rest.py +++ b/tests/app/service_invite/test_service_invite_rest.py @@ -31,6 +31,9 @@ def test_create_invited_user( extra_args, expected_start_of_invite_url, ): + mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") + mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") email_address = "invited_user@service.gov.uk" invite_from = sample_service.users[0] @@ -92,6 +95,9 @@ def test_create_invited_user( def test_create_invited_user_without_auth_type( admin_request, sample_service, mocker, invitation_email_template ): + + mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") email_address = "invited_user@service.gov.uk" invite_from = sample_service.users[0] @@ -213,6 +219,9 @@ def 
test_resend_expired_invite( invitation_email_template, mocker, ): + + mocker.patch("app.service_invite.rest.redis_store.raw_set") + mocker.patch("app.service_invite.rest.redis_store.raw_get") url = f"/service/{sample_expired_user.service_id}/invite/{sample_expired_user.id}/resend" mock_send = mocker.patch("app.service_invite.rest.send_notification_to_queue") mock_persist = mocker.patch("app.service_invite.rest.persist_notification") diff --git a/tests/app/test_model.py b/tests/app/test_model.py index bbd670412..aab74fac8 100644 --- a/tests/app/test_model.py +++ b/tests/app/test_model.py @@ -39,7 +39,7 @@ from tests.app.db import ( ) -@pytest.mark.parametrize("mobile_number", ["+447700900855", "+12348675309"]) +@pytest.mark.parametrize("mobile_number", ["+14254147755", "+12348675309"]) def test_should_build_service_guest_list_from_mobile_number(mobile_number): service_guest_list = ServiceGuestList.from_string( "service_id", diff --git a/tests/app/test_schemas.py b/tests/app/test_schemas.py index 55be8a6bb..151e319fb 100644 --- a/tests/app/test_schemas.py +++ b/tests/app/test_schemas.py @@ -60,7 +60,7 @@ def test_notification_schema_has_correct_status(sample_notification, schema_name [ ("name", "New User"), ("email_address", "newuser@mail.com"), - ("mobile_number", "+4407700900460"), + ("mobile_number", "+14254147755"), ], ) def test_user_update_schema_accepts_valid_attribute_pairs(user_attribute, user_value): diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 8ba087dcc..a388d264e 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -237,7 +237,7 @@ def test_cannot_create_user_with_empty_strings(admin_request, notify_db_session) [ ("name", "New User"), ("email_address", "newuser@mail.com"), - ("mobile_number", "+4407700900460"), + ("mobile_number", "+14254147755"), ], ) def test_post_user_attribute(admin_request, sample_user, user_attribute, user_value): @@ -273,13 +273,13 @@ def 
test_post_user_attribute(admin_request, sample_user, user_attribute, user_va ), ( "mobile_number", - "+4407700900460", + "+14254147755", dict( api_key_id=None, key_type=KeyType.NORMAL, notification_type=NotificationType.SMS, personalisation={}, - recipient="+4407700900460", + recipient="+14254147755", reply_to_text="testing", service=mock.ANY, template_id=uuid.UUID("8a31520f-4751-4789-8ea1-fe54496725eb"), @@ -315,6 +315,7 @@ def test_post_user_attribute_with_updated_by( mock_persist_notification.assert_not_called() +@pytest.mark.skip("We don't support international at the moment") def test_post_user_attribute_with_updated_by_sends_notification_to_international_from_number( admin_request, mocker, sample_user, team_member_mobile_edit_template ): diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index 74d90aaaf..26eb085a4 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -200,10 +200,10 @@ def test_send_user_sms_code(client, sample_user, sms_code_template, mocker): """ notify_service = dao_fetch_service_by_id(current_app.config["NOTIFY_SERVICE_ID"]) - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") auth_header = create_admin_authorization_header() mocked = mocker.patch("app.user.rest.create_secret_code", return_value="11111") mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") @@ -241,11 +241,11 @@ def test_send_user_code_for_sms_with_optional_to_field( Tests POST endpoint /user//sms-code with optional to field """ - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - 
mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") - to_number = "+447119876757" + mocker.patch("app.user.rest.redis_store.raw_set") + to_number = "+14254147755" mocked = mocker.patch("app.user.rest.create_secret_code", return_value="11111") mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") auth_header = create_admin_authorization_header() @@ -468,10 +468,10 @@ def test_send_user_email_code( deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") sample_user.auth_type = auth_type - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") admin_request.post( "user.send_user_2fa_code", @@ -581,13 +581,14 @@ def test_user_verify_email_code_fails_if_code_already_used( assert sample_user.current_session_id is None +@pytest.mark.skip("We don't support international at the moment") def test_send_user_2fa_code_sends_from_number_for_international_numbers( client, sample_user, mocker, sms_code_template ): - mock_redis_get = mocker.patch("app.celery.scheduled_tasks.redis_store.raw_get") + mock_redis_get = mocker.patch("app.user.rest.redis_store.raw_get") mock_redis_get.return_value = "foo" - mocker.patch("app.celery.scheduled_tasks.redis_store.raw_set") + mocker.patch("app.user.rest.redis_store.raw_set") sample_user.mobile_number = "+601117224412" auth_header = create_admin_authorization_header() diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index e9399808d..13cb579e3 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -41,7 +41,7 @@ def test_post_sms_notification_returns_201( ): mocked = 
mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, } @@ -92,7 +92,7 @@ def test_post_sms_notification_uses_inbound_number_as_sender( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), "personalisation": {" Name": "Jo"}, } @@ -125,7 +125,7 @@ def test_post_sms_notification_uses_inbound_number_reply_to_as_sender( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), "personalisation": {" Name": "Jo"}, } @@ -156,7 +156,7 @@ def test_post_sms_notification_returns_201_with_sms_sender_id( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, "sms_sender_id": str(sms_sender.id), @@ -188,7 +188,7 @@ def test_post_sms_notification_uses_sms_sender_id_reply_to( ) mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, "sms_sender_id": str(sms_sender.id), @@ -294,7 +294,7 @@ def test_should_cache_template_and_service_in_redis(mocker, client, sample_templ mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(sample_template.id), } @@ -373,7 +373,7 @@ def test_should_return_template_if_found_in_redis(mocker, client, sample_templat 
@pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -402,7 +402,7 @@ def test_post_notification_returns_400_and_missing_template( @pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -432,7 +432,7 @@ def test_post_notification_returns_401_and_well_formed_auth_error( @pytest.mark.parametrize( "notification_type, key_send_to, send_to", [ - (NotificationType.SMS, "phone_number", "+447700900855"), + (NotificationType.SMS, "phone_number", "+12028675309"), (NotificationType.EMAIL, "email_address", "sample@email.com"), ], ) @@ -529,7 +529,6 @@ def test_post_email_notification_returns_201( ("simulate-delivered-2@notifications.service.gov.uk", NotificationType.EMAIL), ("simulate-delivered-3@notifications.service.gov.uk", NotificationType.EMAIL), ("+14254147167", NotificationType.SMS), - ("+14254147755", NotificationType.SMS), ], ) def test_should_not_persist_or_send_notification_if_simulated_recipient( @@ -652,6 +651,7 @@ def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded( assert not deliver_mock.called +@pytest.mark.skip("We don't support international at the moment") def test_post_sms_notification_returns_400_if_not_allowed_to_send_int_sms( client, notify_db_session, @@ -689,7 +689,7 @@ def test_post_sms_notification_with_archived_reply_to_id_returns_400( ) mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": sample_template.id, "sms_sender_id": archived_sender.id, } @@ -781,7 +781,7 @@ def 
test_post_sms_notification_returns_400_if_number_not_in_guest_list( create_api_key(service=service, key_type=KeyType.TEAM) data = { - "phone_number": "+327700900855", + "phone_number": "+16615555555", "template_id": template.id, } auth_header = create_service_authorization_header( @@ -806,6 +806,7 @@ def test_post_sms_notification_returns_400_if_number_not_in_guest_list( ] +@pytest.mark.skip("We don't support international at the moment") def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms( sample_service, sample_template, @@ -832,7 +833,7 @@ def test_post_sms_should_persist_supplied_sms_number( ): mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") data = { - "phone_number": "+(44) 77009-00855", + "phone_number": "+16615555555", "template_id": str(sample_template_with_placeholders.id), "personalisation": {" Name": "Jo"}, } @@ -888,7 +889,7 @@ def test_post_notification_with_wrong_type_of_sender( template = sample_template form_label = "email_reply_to_id" data = { - "phone_number": "+447700900855", + "phone_number": "+12028675309", "template_id": str(template.id), form_label: fake_uuid, } @@ -1204,7 +1205,7 @@ def test_post_notification_returns_201_when_content_type_is_missing_but_payload_ if notification_type == NotificationType.EMAIL: valid_json.update({"email_address": sample_service.users[0].email_address}) else: - valid_json.update({"phone_number": "+447700900855"}) + valid_json.update({"phone_number": "+12028675309"}) response = client.post( path=f"/v2/notifications/{notification_type}", data=json.dumps(valid_json), @@ -1274,7 +1275,7 @@ def test_post_notifications_saves_email_or_sms_to_queue( ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( @@ -1343,7 +1344,7 @@ def 
test_post_notifications_saves_email_or_sms_normally_if_saving_to_queue_fails ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( @@ -1405,7 +1406,7 @@ def test_post_notifications_doesnt_use_save_queue_for_test_notifications( ( data.update({"email_address": "joe.citizen@example.com"}) if notification_type == NotificationType.EMAIL - else data.update({"phone_number": "+447700900855"}) + else data.update({"phone_number": "+12028675309"}) ) response = client.post( path=f"/v2/notifications/{notification_type}", diff --git a/tests/conftest.py b/tests/conftest.py index 7f0c2150b..4d9b60150 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ import pytest from alembic.command import upgrade from alembic.config import Config from flask import Flask +from sqlalchemy_utils import create_database, database_exists, drop_database from app import create_app from app.dao.provider_details_dao import get_provider_details_by_identifier @@ -52,9 +53,10 @@ def _notify_db(notify_api): """ with notify_api.app_context() as app_context: db = app_context.app.extensions["sqlalchemy"] - assert ( - "test_notification_api" in db.engine.url.database - ), "dont run tests against main db" + + # Check if test_notification_api exists, if not, create + if not database_exists(db.engine.url): + create_database(db.engine.url) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) ALEMBIC_CONFIG = os.path.join(BASE_DIR, "migrations") @@ -70,6 +72,9 @@ def _notify_db(notify_api): yield db db.session.remove() + # Check if test_notification_api exists, if so, drop + if database_exists(db.engine.url): + drop_database(db.engine.url) db.engine.dispose()