diff --git a/.ds.baseline b/.ds.baseline new file mode 100644 index 000000000..4fba5f2ce --- /dev/null +++ b/.ds.baseline @@ -0,0 +1,388 @@ +{ + "version": "1.5.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "GitLabTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "IPPublicDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "OpenAIDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "PypiTokenDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TelegramBotTokenDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": "detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_baseline_file", + "filename": ".secrets.baseline" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + } + ], + "results": { + ".github/workflows/checks.yml": [ + { + "type": "Secret Keyword", + "filename": ".github/workflows/checks.yml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 27, + "is_secret": false + }, + { + "type": "Basic Auth Credentials", + "filename": ".github/workflows/checks.yml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 44, + "is_secret": false + } + ], + ".github/workflows/daily_checks.yml": [ + { + "type": "Secret Keyword", + "filename": ".github/workflows/daily_checks.yml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 61, + "is_secret": false + }, + { + "type": "Basic Auth Credentials", + "filename": ".github/workflows/daily_checks.yml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 77, + "is_secret": false + } + ], + "app/enums.py": [ + { + "type": "Secret Keyword", + "filename": "app/enums.py", + "hashed_secret": "12322e07b94ee3c7cd65a2952ece441538b53eb3", + "is_verified": false, + "line_number": 123, + "is_secret": false + } + ], + "app/notifications/receive_notifications.py": [ + { + 
"type": "Base64 High Entropy String", + "filename": "app/notifications/receive_notifications.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 29, + "is_secret": false + } + ], + "deploy-config/sandbox.yml": [ + { + "type": "Secret Keyword", + "filename": "deploy-config/sandbox.yml", + "hashed_secret": "113151dd10316fcb0d5507b6215d78e2f3fe9e54", + "is_verified": false, + "line_number": 11, + "is_secret": false + } + ], + "sample.env": [ + { + "type": "Basic Auth Credentials", + "filename": "sample.env", + "hashed_secret": "5b98cf4c3d794c8af1fcd7991e89cd4e52fb42a4", + "is_verified": false, + "line_number": 16, + "is_secret": false + } + ], + "tests/app/aws/test_s3.py": [ + { + "type": "Hex High Entropy String", + "filename": "tests/app/aws/test_s3.py", + "hashed_secret": "67a74306b06d0c01624fe0d0249a570f4d093747", + "is_verified": false, + "line_number": 24, + "is_secret": false + } + ], + "tests/app/clients/test_document_download.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/clients/test_document_download.py", + "hashed_secret": "3acfb2c2b433c0ea7ff107e33df91b18e52f960f", + "is_verified": false, + "line_number": 14, + "is_secret": false + } + ], + "tests/app/clients/test_performance_platform.py": [ + { + "type": "Base64 High Entropy String", + "filename": "tests/app/clients/test_performance_platform.py", + "hashed_secret": "76bb66c38ac4046bf73cd4a2c35a2b0af94aeb61", + "is_verified": false, + "line_number": 84, + "is_secret": false + } + ], + "tests/app/dao/test_services_dao.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/dao/test_services_dao.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 261, + "is_secret": false + } + ], + "tests/app/dao/test_users_dao.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/dao/test_users_dao.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 52, + "is_secret": false + }, + { + "type": "Secret Keyword", + "filename": "tests/app/dao/test_users_dao.py", + "hashed_secret": "f2c57870308dc87f432e5912d4de6f8e322721ba", + "is_verified": false, + "line_number": 176, + "is_secret": false + } + ], + "tests/app/db.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/db.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 87, + "is_secret": false + } + ], + "tests/app/notifications/test_receive_notification.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/notifications/test_receive_notification.py", + "hashed_secret": "913a73b565c8e2c8ed94497580f619397709b8b6", + "is_verified": false, + "line_number": 24, + "is_secret": false + }, + { + "type": "Base64 High Entropy String", + "filename": "tests/app/notifications/test_receive_notification.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 54, + "is_secret": false + } + ], + "tests/app/notifications/test_validators.py": [ + { + "type": "Base64 High Entropy String", + "filename": "tests/app/notifications/test_validators.py", + "hashed_secret": "6c1a8443963d02d13ffe575a71abe19ea731fb66", + "is_verified": false, + "line_number": 768, + "is_secret": false + } + ], + "tests/app/service/test_rest.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/service/test_rest.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + 
"line_number": 1274, + "is_secret": false + } + ], + "tests/app/test_cloudfoundry_config.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/test_cloudfoundry_config.py", + "hashed_secret": "e5e178db7317356946d13e5d2da037d39ac61c71", + "is_verified": false, + "line_number": 12, + "is_secret": false + }, + { + "type": "Basic Auth Credentials", + "filename": "tests/app/test_cloudfoundry_config.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 14, + "is_secret": false + }, + { + "type": "Secret Keyword", + "filename": "tests/app/test_cloudfoundry_config.py", + "hashed_secret": "cfd48edeb81ba7d48cbddcf1eeede25ba67057e8", + "is_verified": false, + "line_number": 33, + "is_secret": false + } + ], + "tests/app/user/test_rest.py": [ + { + "type": "Secret Keyword", + "filename": "tests/app/user/test_rest.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 106, + "is_secret": false + }, + { + "type": "Secret Keyword", + "filename": "tests/app/user/test_rest.py", + "hashed_secret": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", + "is_verified": false, + "line_number": 962, + "is_secret": false + } + ], + "tests/notifications_utils/clients/antivirus/test_antivirus_client.py": [ + { + "type": "Secret Keyword", + "filename": "tests/notifications_utils/clients/antivirus/test_antivirus_client.py", + "hashed_secret": "932b25270abe1301c22c709a19082dff07d469ff", + "is_verified": false, + "line_number": 16, + "is_secret": false + } + ], + "tests/notifications_utils/clients/encryption/test_encryption_client.py": [ + { + "type": "Secret Keyword", + "filename": "tests/notifications_utils/clients/encryption/test_encryption_client.py", + "hashed_secret": "f1e923a9667de11be6a210849a8651c1bfd81605", + "is_verified": false, + "line_number": 13, + "is_secret": false + } + ], + "tests/notifications_utils/clients/zendesk/test_zendesk_client.py": [ + { + "type": "Secret Keyword", + "filename": "tests/notifications_utils/clients/zendesk/test_zendesk_client.py", + "hashed_secret": "913a73b565c8e2c8ed94497580f619397709b8b6", + "is_verified": false, + "line_number": 16, + "is_secret": false + } + ] + }, + "generated_at": "2024-05-20T15:20:28Z" +} diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index ca05cde57..830fcc30a 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -53,7 +53,7 @@ jobs: - name: Check for dead code run: make dead-code - name: Run tests with coverage - run: poetry run coverage run --omit=*/notifications_utils/* -m pytest --maxfail=10 + run: poetry run coverage run --omit=*/notifications_utils/*,*/migrations/* -m pytest --maxfail=10 env: SQLALCHEMY_DATABASE_TEST_URI: postgresql://user:password@localhost:5432/test_notification_api NOTIFY_E2E_TEST_EMAIL: ${{ secrets.NOTIFY_E2E_TEST_EMAIL }} @@ -62,7 +62,7 @@ jobs: NOTIFY_E2E_TEST_PASSWORD: ${{ secrets.NOTIFY_E2E_TEST_PASSWORD }} - name: Check coverage threshold # TODO get this back up to 95 - run: poetry run coverage report --fail-under=87 + run: poetry run coverage report --fail-under=95 validate-new-relic-config: runs-on: ubuntu-latest diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000..07063750b --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,93 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. 
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ "main", "production" ]
+  pull_request:
+    branches: [ "main", "production" ]
+  schedule:
+    - cron: '15 8 * * 2'
+
+jobs:
+  analyze:
+    name: Analyze (${{ matrix.language }})
+    # Runner size impacts CodeQL analysis time. To learn more, please see:
+    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
+    #   - https://gh.io/supported-runners-and-hardware-resources
+    #   - https://gh.io/using-larger-runners (GitHub.com only)
+    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
+    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
+    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
+    permissions:
+      # required for all workflows
+      security-events: write
+
+      # required to fetch internal or private CodeQL packs
+      packages: read
+
+      # only required for workflows in private repositories
+      actions: read
+      contents: read
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - language: python
+          build-mode: none
+        # CodeQL supports the following values for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
+        # Use `c-cpp` to analyze code written in C, C++ or both
+        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
+        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
+        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
+        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
+        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
+        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v4
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v3
+      with:
+        languages: ${{ matrix.language }}
+        build-mode: ${{ matrix.build-mode }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+
+        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+        # queries: security-extended,security-and-quality
+
+    # If the analyze step fails for one of the languages you are analyzing with
+    # "We were unable to automatically build your code", modify the matrix above
+    # to set the build mode to "manual" for that language. Then modify this step
+    # to build your code.
+    # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 057c1ec16..cb3c48cae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.6.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -11,3 +11,14 @@ repos: - id: debug-statements - id: check-merge-conflict - id: check-toml + - id: check-ast + - id: fix-byte-order-marker + - id: detect-aws-credentials + args: [--allow-missing-credentials] + - id: detect-private-key + - id: mixed-line-ending +- repo: https://github.com/Yelp/detect-secrets + rev: v1.5.0 + hooks: + - id: detect-secrets + args: ['--baseline', '.ds.baseline'] diff --git a/Makefile b/Makefile index 6574c6181..9cb8bdac9 100644 --- a/Makefile +++ b/Makefile @@ -81,7 +81,8 @@ test: ## Run tests and create coverage report poetry run black . poetry run flake8 . poetry run isort --check-only ./app ./tests - poetry run coverage run -m pytest --maxfail=10 + poetry run coverage run --omit=*/notifications_utils/*,*/migrations/* -m pytest --maxfail=10 + poetry run coverage report -m --fail-under=95 poetry run coverage html -d .coverage_cache diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 1c208104b..dd063be64 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -27,7 +27,7 @@ from app.dao.service_data_retention_dao import ( ) from app.enums import NotificationType from app.models import FactProcessingTime -from app.utils import get_midnight_in_utc +from app.utils import get_midnight_in_utc, utc_now @notify_celery.task(name="remove_sms_email_jobs") @@ -46,7 +46,7 @@ def _remove_csv_files(job_types): @notify_celery.task(name="cleanup-unfinished-jobs") def cleanup_unfinished_jobs(): - now = datetime.utcnow() + now = utc_now() jobs = dao_get_unfinished_jobs() for job in jobs: # The query already checks that the processing_finished time is null, so here we are saying @@ -88,7 +88,7 @@ def _delete_notifications_older_than_retention_by_type(notification_type): for f in flexible_data_retention: day_to_delete_backwards_from = get_midnight_in_utc( - datetime.utcnow() + utc_now() ).date() - timedelta(days=f.days_of_retention) delete_notifications_for_service_and_type.apply_async( @@ -100,7 +100,7 @@ def _delete_notifications_older_than_retention_by_type(notification_type): }, ) - seven_days_ago = get_midnight_in_utc(datetime.utcnow()).date() - timedelta(days=7) + seven_days_ago = get_midnight_in_utc(utc_now()).date() - timedelta(days=7) service_ids_with_data_retention = {x.service_id for x in flexible_data_retention} @@ -136,14 +136,14 @@ def _delete_notifications_older_than_retention_by_type(notification_type): def delete_notifications_for_service_and_type( service_id, notification_type, datetime_to_delete_before ): - start = datetime.utcnow() + start = utc_now() num_deleted = move_notifications_to_notification_history( 
notification_type, service_id, datetime_to_delete_before, ) if num_deleted: - end = datetime.utcnow() + end = utc_now() current_app.logger.info( f"delete-notifications-for-service-and-type: " f"service: {service_id}, " @@ -158,7 +158,7 @@ def delete_notifications_for_service_and_type( def timeout_notifications(): notifications = ["dummy value so len() > 0"] - cutoff_time = datetime.utcnow() - timedelta( + cutoff_time = utc_now() - timedelta( seconds=current_app.config.get("SENDING_NOTIFICATIONS_TIMEOUT_PERIOD") ) @@ -179,11 +179,11 @@ def timeout_notifications(): @cronitor("delete-inbound-sms") def delete_inbound_sms(): try: - start = datetime.utcnow() + start = utc_now() deleted = delete_inbound_sms_older_than_retention() current_app.logger.info( "Delete inbound sms job started {} finished {} deleted {} inbound sms notifications".format( - start, datetime.utcnow(), deleted + start, utc_now(), deleted ) ) except SQLAlchemyError: @@ -197,7 +197,7 @@ def save_daily_notification_processing_time(local_date=None): # local_date is a string in the format of "YYYY-MM-DD" if local_date is None: # if a date is not provided, we run against yesterdays data - local_date = (datetime.utcnow() - timedelta(days=1)).date() + local_date = (utc_now() - timedelta(days=1)).date() else: local_date = datetime.strptime(local_date, "%Y-%m-%d").date() diff --git a/app/celery/process_ses_receipts_tasks.py b/app/celery/process_ses_receipts_tasks.py index 73aa6a4d6..b44d18cc7 100644 --- a/app/celery/process_ses_receipts_tasks.py +++ b/app/celery/process_ses_receipts_tasks.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta import iso8601 from celery.exceptions import Retry @@ -22,6 +22,7 @@ from app.dao.service_callback_api_dao import ( ) from app.enums import CallbackType, NotificationStatus from app.models import Complaint +from app.utils import utc_now @notify_celery.task( @@ -57,7 +58,7 @@ def process_ses_results(self, response): message_time = iso8601.parse_date(ses_message["mail"]["timestamp"]).replace( tzinfo=None ) - if datetime.utcnow() - message_time < timedelta(minutes=5): + if utc_now() - message_time < timedelta(minutes=5): current_app.logger.info( f"Notification not found for reference: {reference}" f"(while attempting update to {notification_status}). " diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index 5d8d05ca2..7d32c9326 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -1,6 +1,6 @@ import json import os -from datetime import datetime, timedelta +from datetime import timedelta from flask import current_app from sqlalchemy.orm.exc import NoResultFound @@ -18,6 +18,7 @@ from app.dao.notifications_dao import ( from app.delivery import send_to_providers from app.enums import NotificationStatus from app.exceptions import NotificationTechnicalFailureException +from app.utils import utc_now # This is the amount of time to wait after sending an sms message before we check the aws logs and look for delivery # receipts @@ -113,9 +114,7 @@ def deliver_sms(self, notification_id): message_id = send_to_providers.send_sms_to_provider(notification) # We have to put it in UTC. 
For other timezones, the delay # will be ignored and it will fire immediately (although this probably only affects developer testing) - my_eta = datetime.utcnow() + timedelta( - seconds=DELIVERY_RECEIPT_DELAY_IN_SECONDS - ) + my_eta = utc_now() + timedelta(seconds=DELIVERY_RECEIPT_DELAY_IN_SECONDS) check_sms_delivery_receipt.apply_async( [message_id, notification_id, notification.created_at], eta=my_eta, diff --git a/app/celery/reporting_tasks.py b/app/celery/reporting_tasks.py index aa8f6fece..87c3269be 100644 --- a/app/celery/reporting_tasks.py +++ b/app/celery/reporting_tasks.py @@ -9,6 +9,7 @@ from app.dao.fact_billing_dao import fetch_billing_data_for_day, update_fact_bil from app.dao.fact_notification_status_dao import update_fact_notification_status from app.dao.notifications_dao import get_service_ids_with_notifications_on_date from app.enums import NotificationType +from app.utils import utc_now @notify_celery.task(name="create-nightly-billing") @@ -17,7 +18,7 @@ def create_nightly_billing(day_start=None): # day_start is a datetime.date() object. e.g. # up to 4 days of data counting back from day_start is consolidated if day_start is None: - day_start = datetime.utcnow().date() - timedelta(days=1) + day_start = utc_now().date() - timedelta(days=1) else: # When calling the task its a string in the format of "YYYY-MM-DD" day_start = datetime.strptime(day_start, "%Y-%m-%d").date() @@ -39,9 +40,9 @@ def create_nightly_billing_for_day(process_day): f"create-nightly-billing-for-day task for {process_day}: started" ) - start = datetime.utcnow() + start = utc_now() transit_data = fetch_billing_data_for_day(process_day=process_day) - end = datetime.utcnow() + end = utc_now() current_app.logger.info( f"create-nightly-billing-for-day task for {process_day}: data fetched in {(end - start).seconds} seconds" @@ -78,7 +79,7 @@ def create_nightly_notification_status(): mean the aggregated results are temporarily incorrect. 
""" - yesterday = datetime.utcnow().date() - timedelta(days=1) + yesterday = utc_now().date() - timedelta(days=1) for notification_type in (NotificationType.SMS, NotificationType.EMAIL): days = 4 @@ -107,14 +108,14 @@ def create_nightly_notification_status_for_service_and_day( ): process_day = datetime.strptime(process_day, "%Y-%m-%d").date() - start = datetime.utcnow() + start = utc_now() update_fact_notification_status( process_day=process_day, notification_type=notification_type, service_id=service_id, ) - end = datetime.utcnow() + end = utc_now() current_app.logger.info( f"create-nightly-notification-status-for-service-and-day task update " f"for {service_id}, {notification_type} for {process_day}: " diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 7da92a2a3..3597bdbb7 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta from flask import current_app from sqlalchemy import between @@ -31,6 +31,7 @@ from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago from app.enums import JobStatus, NotificationType from app.models import Job from app.notifications.process_notifications import send_notification_to_queue +from app.utils import utc_now from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTicket MAX_NOTIFICATION_FAILS = 10000 @@ -52,11 +53,11 @@ def run_scheduled_jobs(): @notify_celery.task(name="delete-verify-codes") def delete_verify_codes(): try: - start = datetime.utcnow() + start = utc_now() deleted = delete_codes_older_created_more_than_a_day_ago() current_app.logger.info( "Delete job started {} finished {} deleted {} verify codes".format( - start, datetime.utcnow(), deleted + start, utc_now(), deleted ) ) except SQLAlchemyError: @@ -67,20 +68,20 @@ def delete_verify_codes(): @notify_celery.task(name="expire-or-delete-invitations") def expire_or_delete_invitations(): try: - start = datetime.utcnow() + start = utc_now() expired_invites = expire_invitations_created_more_than_two_days_ago() current_app.logger.info( - f"Expire job started {start} finished {datetime.utcnow()} expired {expired_invites} invitations" + f"Expire job started {start} finished {utc_now()} expired {expired_invites} invitations" ) except SQLAlchemyError: current_app.logger.exception("Failed to expire invitations") raise try: - start = datetime.utcnow() + start = utc_now() deleted_invites = delete_org_invitations_created_more_than_two_days_ago() current_app.logger.info( - f"Delete job started {start} finished {datetime.utcnow()} deleted {deleted_invites} invitations" + f"Delete job started {start} finished {utc_now()} deleted {deleted_invites} invitations" ) except SQLAlchemyError: current_app.logger.exception("Failed to delete invitations") @@ -101,8 +102,8 @@ def check_job_status(): update the job_status to 'error' process the rows in the csv that are missing (in another task) just do the check here. 
""" - thirty_minutes_ago = datetime.utcnow() - timedelta(minutes=30) - thirty_five_minutes_ago = datetime.utcnow() - timedelta(minutes=35) + thirty_minutes_ago = utc_now() - timedelta(minutes=30) + thirty_five_minutes_ago = utc_now() - timedelta(minutes=35) incomplete_in_progress_jobs = Job.query.filter( Job.job_status == JobStatus.IN_PROGRESS, @@ -179,8 +180,8 @@ def check_for_missing_rows_in_completed_jobs(): name="check-for-services-with-high-failure-rates-or-sending-to-tv-numbers" ) def check_for_services_with_high_failure_rates_or_sending_to_tv_numbers(): - start_date = datetime.utcnow() - timedelta(days=1) - end_date = datetime.utcnow() + start_date = utc_now() - timedelta(days=1) + end_date = utc_now() message = "" services_with_failures = dao_find_services_with_high_failure_rates( diff --git a/app/celery/tasks.py b/app/celery/tasks.py index c94b93789..e0428152a 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -1,5 +1,4 @@ import json -from datetime import datetime from flask import current_app from requests import HTTPError, RequestException, request @@ -24,7 +23,7 @@ from app.notifications.process_notifications import persist_notification from app.notifications.validators import check_service_over_total_message_limit from app.serialised_models import SerialisedService, SerialisedTemplate from app.service.utils import service_allowed_to_send_to -from app.utils import DATETIME_FORMAT +from app.utils import DATETIME_FORMAT, utc_now from app.v2.errors import TotalRequestsError from notifications_utils.recipients import RecipientCSV @@ -32,7 +31,7 @@ from notifications_utils.recipients import RecipientCSV @notify_celery.task(name="process-job") def process_job(job_id, sender_id=None): """Update job status, get csv data from s3, and begin processing csv rows.""" - start = datetime.utcnow() + start = utc_now() job = dao_get_job_by_id(job_id) current_app.logger.info( "Starting process-job task for job id {} with status: {}".format( @@ -82,7 +81,7 @@ def process_job(job_id, sender_id=None): def job_complete(job, resumed=False, start=None): job.job_status = JobStatus.FINISHED - finished = datetime.utcnow() + finished = utc_now() job.processing_finished = finished dao_update_job(job) @@ -157,7 +156,7 @@ def __total_sending_limits_for_job_exceeded(service, job, job_id): return False except TotalRequestsError: job.job_status = "sending limits exceeded" - job.processing_finished = datetime.utcnow() + job.processing_finished = utc_now() dao_update_job(job) current_app.logger.error( "Job {} size {} error. 
Total sending limits {} exceeded".format( @@ -211,7 +210,7 @@ def save_sms(self, service_id, notification_id, encrypted_notification, sender_i notification_type=NotificationType.SMS, api_key_id=None, key_type=KeyType.NORMAL, - created_at=datetime.utcnow(), + created_at=utc_now(), created_by_id=created_by_id, job_id=notification.get("job", None), job_row_number=notification.get("row_number", None), @@ -272,7 +271,7 @@ def save_email( notification_type=NotificationType.EMAIL, api_key_id=None, key_type=KeyType.NORMAL, - created_at=datetime.utcnow(), + created_at=utc_now(), job_id=notification.get("job", None), job_row_number=notification.get("row_number", None), notification_id=notification_id, @@ -438,7 +437,7 @@ def process_incomplete_jobs(job_ids): # reset the processing start time so that the check_job_status scheduled task doesn't pick this job up again for job in jobs: job.job_status = JobStatus.IN_PROGRESS - job.processing_started = datetime.utcnow() + job.processing_started = utc_now() dao_update_job(job) current_app.logger.info("Resuming Job(s) {}".format(job_ids)) diff --git a/app/clients/cloudwatch/aws_cloudwatch.py b/app/clients/cloudwatch/aws_cloudwatch.py index 0deac089e..d010957ac 100644 --- a/app/clients/cloudwatch/aws_cloudwatch.py +++ b/app/clients/cloudwatch/aws_cloudwatch.py @@ -1,7 +1,7 @@ import json import os import re -from datetime import datetime, timedelta +from datetime import timedelta from boto3 import client from flask import current_app @@ -9,6 +9,7 @@ from flask import current_app from app.clients import AWS_CLIENT_CONFIG, Client from app.cloudfoundry_config import cloud_config from app.exceptions import NotificationTechnicalFailureException +from app.utils import utc_now class AwsCloudwatchClient(Client): @@ -50,7 +51,7 @@ class AwsCloudwatchClient(Client): def _get_log(self, my_filter, log_group_name, sent_at): # Check all cloudwatch logs from the time the notification was sent (currently 5 minutes previously) until now - now = datetime.utcnow() + now = utc_now() beginning = sent_at next_token = None all_log_events = [] @@ -112,7 +113,7 @@ class AwsCloudwatchClient(Client): # TODO this clumsy approach to getting the account number will be fixed as part of notify-api #258 account_number = self._extract_account_number(cloud_config.ses_domain_arn) - time_now = datetime.utcnow() + time_now = utc_now() log_group_name = f"sns/{region}/{account_number[4]}/DirectPublishToPhoneNumber" filter_pattern = '{$.notification.messageId="XXXXX"}' filter_pattern = filter_pattern.replace("XXXXX", message_id) diff --git a/app/commands.py b/app/commands.py index 5637832cb..826c2013b 100644 --- a/app/commands.py +++ b/app/commands.py @@ -61,7 +61,7 @@ from app.models import ( TemplateHistory, User, ) -from app.utils import get_midnight_in_utc +from app.utils import get_midnight_in_utc, utc_now from notifications_utils.recipients import RecipientCSV from notifications_utils.template import SMSMessageTemplate from tests.app.db import ( @@ -327,7 +327,7 @@ def update_jobs_archived_flag(start_date, end_date): total_updated = 0 while process_date < end_date: - start_time = datetime.utcnow() + start_time = utc_now() sql = """update jobs set archived = true where diff --git a/app/complaint/complaint_rest.py b/app/complaint/complaint_rest.py index 122534c36..ff558a013 100644 --- a/app/complaint/complaint_rest.py +++ b/app/complaint/complaint_rest.py @@ -6,7 +6,7 @@ from app.complaint.complaint_schema import complaint_count_request from app.dao.complaint_dao import 
fetch_count_of_complaints, fetch_paginated_complaints from app.errors import register_errors from app.schema_validation import validate -from app.utils import pagination_links +from app.utils import pagination_links, utc_now complaint_blueprint = Blueprint("complaint", __name__, url_prefix="/complaint") @@ -35,7 +35,7 @@ def get_complaint_count(): validate(request.args, complaint_count_request) # If start and end date are not set, we are expecting today's stats. - today = str(datetime.utcnow().date()) + today = str(utc_now().date()) start_date = datetime.strptime( request.args.get("start_date", today), "%Y-%m-%d" diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py index cd5dd3f2b..06266ab18 100644 --- a/app/dao/api_key_dao.py +++ b/app/dao/api_key_dao.py @@ -1,11 +1,12 @@ import uuid -from datetime import datetime, timedelta +from datetime import timedelta from sqlalchemy import func, or_ from app import db from app.dao.dao_utils import autocommit, version_class from app.models import ApiKey +from app.utils import utc_now @autocommit @@ -23,7 +24,7 @@ def save_model_api_key(api_key): @version_class(ApiKey) def expire_api_key(service_id, api_key_id): api_key = ApiKey.query.filter_by(id=api_key_id, service_id=service_id).one() - api_key.expiry_date = datetime.utcnow() + api_key.expiry_date = utc_now() db.session.add(api_key) @@ -32,7 +33,7 @@ def get_model_api_keys(service_id, id=None): return ApiKey.query.filter_by( id=id, service_id=service_id, expiry_date=None ).one() - seven_days_ago = datetime.utcnow() - timedelta(days=7) + seven_days_ago = utc_now() - timedelta(days=7) return ApiKey.query.filter( or_( ApiKey.expiry_date == None, # noqa diff --git a/app/dao/date_util.py b/app/dao/date_util.py index 66aadc9df..7acc587aa 100644 --- a/app/dao/date_util.py +++ b/app/dao/date_util.py @@ -1,6 +1,8 @@ import calendar from datetime import date, datetime, time, timedelta +from app.utils import utc_now + def get_months_for_financial_year(year): return [ @@ -23,7 +25,7 @@ def get_calendar_year_dates(year): def get_current_calendar_year(): - now = datetime.utcnow() + now = utc_now() current_year = int(now.strftime("%Y")) year = current_year return get_calendar_year(year) diff --git a/app/dao/fact_billing_dao.py b/app/dao/fact_billing_dao.py index 97f346cf5..14d82835b 100644 --- a/app/dao/fact_billing_dao.py +++ b/app/dao/fact_billing_dao.py @@ -1,4 +1,4 @@ -from datetime import date, datetime, timedelta +from datetime import date, timedelta from flask import current_app from sqlalchemy import Date, Integer, and_, desc, func, union @@ -18,7 +18,7 @@ from app.models import ( Rate, Service, ) -from app.utils import get_midnight_in_utc +from app.utils import get_midnight_in_utc, utc_now def fetch_sms_free_allowance_remainder_until_date(end_date): @@ -198,7 +198,7 @@ def fetch_monthly_billing_for_year(service_id, year): we also update the table on-the-fly if we need accurate data for this year. """ _, year_end = get_calendar_year_dates(year) - today = datetime.utcnow().date() + today = utc_now().date() # if year end date is less than today, we are calculating for data in the past and have no need for deltas. 
if year_end >= today: @@ -535,7 +535,7 @@ def update_fact_billing(data, process_day): set_={ "notifications_sent": stmt.excluded.notifications_sent, "billable_units": stmt.excluded.billable_units, - "updated_at": datetime.utcnow(), + "updated_at": utc_now(), }, ) db.session.connection().execute(stmt) @@ -699,7 +699,7 @@ def query_organization_sms_usage_for_year(organization_id, year): def fetch_usage_year_for_organization(organization_id, year): year_start, year_end = get_calendar_year_dates(year) - today = datetime.utcnow().date() + today = utc_now().date() services = dao_get_organization_live_services(organization_id) # if year end date is less than today, we are calculating for data in the past and have no need for deltas. diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 5eee7d8c2..22c87fe83 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta from sqlalchemy import Date, case, func from sqlalchemy.dialects.postgresql import insert @@ -19,6 +19,7 @@ from app.utils import ( get_midnight_in_utc, get_month_from_utc_column, midnight_n_days_ago, + utc_now, ) @@ -128,7 +129,7 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days( service_id, by_template=False, limit_days=7 ): start_date = midnight_n_days_ago(limit_days) - now = datetime.utcnow() + now = utc_now() stats_for_7_days = db.session.query( FactNotificationStatus.notification_type.cast(db.Text).label( "notification_type" @@ -212,8 +213,8 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): FactNotificationStatus.key_type, ) ) - today = get_midnight_in_utc(datetime.utcnow()) - if start_date <= datetime.utcnow().date() <= end_date: + today = get_midnight_in_utc(utc_now()) + if start_date <= utc_now().date() <= end_date: stats_for_today = ( db.session.query( Notification.notification_type.cast(db.Text).label("notification_type"), @@ -299,8 +300,8 @@ def fetch_stats_for_all_services_by_date_range( if not include_from_test_key: stats = stats.filter(FactNotificationStatus.key_type != KeyType.TEST) - if start_date <= datetime.utcnow().date() <= end_date: - today = get_midnight_in_utc(datetime.utcnow()) + if start_date <= utc_now().date() <= end_date: + today = get_midnight_in_utc(utc_now()) subquery = ( db.session.query( Notification.notification_type.label("notification_type"), @@ -395,8 +396,8 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): ) ) - if start_date <= datetime.utcnow() <= end_date: - today = get_midnight_in_utc(datetime.utcnow()) + if start_date <= utc_now() <= end_date: + today = get_midnight_in_utc(utc_now()) month = get_month_from_utc_column(Notification.created_at) stats_for_today = ( diff --git a/app/dao/fact_processing_time_dao.py b/app/dao/fact_processing_time_dao.py index 350de270a..af8efcf10 100644 --- a/app/dao/fact_processing_time_dao.py +++ b/app/dao/fact_processing_time_dao.py @@ -1,11 +1,10 @@ -from datetime import datetime - from sqlalchemy.dialects.postgresql import insert from sqlalchemy.sql.expression import case from app import db from app.dao.dao_utils import autocommit from app.models import FactProcessingTime +from app.utils import utc_now @autocommit @@ -27,7 +26,7 @@ def insert_update_processing_time(processing_time): set_={ "messages_total": stmt.excluded.messages_total, "messages_within_10_secs": stmt.excluded.messages_within_10_secs, 
- "updated_at": datetime.utcnow(), + "updated_at": utc_now(), }, ) db.session.connection().execute(stmt) diff --git a/app/dao/invited_org_user_dao.py b/app/dao/invited_org_user_dao.py index 3ed122371..2bcf36a05 100644 --- a/app/dao/invited_org_user_dao.py +++ b/app/dao/invited_org_user_dao.py @@ -1,7 +1,8 @@ -from datetime import datetime, timedelta +from datetime import timedelta from app import db from app.models import InvitedOrganizationUser +from app.utils import utc_now def save_invited_org_user(invited_org_user): @@ -28,9 +29,7 @@ def get_invited_org_users_for_organization(organization_id): def delete_org_invitations_created_more_than_two_days_ago(): deleted = ( db.session.query(InvitedOrganizationUser) - .filter( - InvitedOrganizationUser.created_at <= datetime.utcnow() - timedelta(days=2) - ) + .filter(InvitedOrganizationUser.created_at <= utc_now() - timedelta(days=2)) .delete() ) db.session.commit() diff --git a/app/dao/invited_user_dao.py b/app/dao/invited_user_dao.py index ab83c2534..a342f504d 100644 --- a/app/dao/invited_user_dao.py +++ b/app/dao/invited_user_dao.py @@ -1,8 +1,9 @@ -from datetime import datetime, timedelta +from datetime import timedelta from app import db from app.enums import InvitedUserStatus from app.models import InvitedUser +from app.utils import utc_now def save_invited_user(invited_user): @@ -41,7 +42,7 @@ def expire_invitations_created_more_than_two_days_ago(): expired = ( db.session.query(InvitedUser) .filter( - InvitedUser.created_at <= datetime.utcnow() - timedelta(days=2), + InvitedUser.created_at <= utc_now() - timedelta(days=2), InvitedUser.status.in_((InvitedUserStatus.PENDING,)), ) .update({InvitedUser.status: InvitedUserStatus.EXPIRED}) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 209fe76d6..a278bb7fe 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timedelta +from datetime import timedelta from flask import current_app from sqlalchemy import and_, asc, desc, func @@ -13,7 +13,7 @@ from app.models import ( ServiceDataRetention, Template, ) -from app.utils import midnight_n_days_ago +from app.utils import midnight_n_days_ago, utc_now def dao_get_notification_outcomes_for_job(service_id, job_id): @@ -110,7 +110,7 @@ def dao_set_scheduled_jobs_to_pending(): jobs = ( Job.query.filter( Job.job_status == JobStatus.SCHEDULED, - Job.scheduled_for < datetime.utcnow(), + Job.scheduled_for < utc_now(), ) .order_by(asc(Job.scheduled_for)) .with_for_update() @@ -131,7 +131,7 @@ def dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id): Job.service_id == service_id, Job.id == job_id, Job.job_status == JobStatus.SCHEDULED, - Job.scheduled_for > datetime.utcnow(), + Job.scheduled_for > utc_now(), ).one() @@ -152,7 +152,7 @@ def dao_get_jobs_older_than_data_retention(notification_types): ServiceDataRetention.notification_type.in_(notification_types) ).all() jobs = [] - today = datetime.utcnow().date() + today = utc_now().date() for f in flexible_data_retention: end_date = today - timedelta(days=f.days_of_retention) @@ -193,8 +193,8 @@ def dao_get_jobs_older_than_data_retention(notification_types): def find_jobs_with_missing_rows(): # Jobs can be a maximum of 100,000 rows. It typically takes 10 minutes to create all those notifications. # Using 20 minutes as a condition seems reasonable. 
- ten_minutes_ago = datetime.utcnow() - timedelta(minutes=20) - yesterday = datetime.utcnow() - timedelta(days=1) + ten_minutes_ago = utc_now() - timedelta(minutes=20) + yesterday = utc_now() - timedelta(days=1) jobs_with_rows_missing = ( db.session.query(Job) .filter( diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f00ae4a9b..57d49ad9e 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta from flask import current_app from sqlalchemy import asc, desc, or_, select, text, union @@ -16,6 +16,7 @@ from app.utils import ( escape_special_characters, get_midnight_in_utc, midnight_n_days_ago, + utc_now, ) from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES from notifications_utils.recipients import ( @@ -95,7 +96,7 @@ def _update_notification_status( current_status=notification.status, status=status ) notification.status = status - notification.sent_at = datetime.utcnow() + notification.sent_at = utc_now() if provider_response: notification.provider_response = provider_response if carrier: @@ -179,7 +180,7 @@ def update_notification_status_by_reference(reference, status): @autocommit def dao_update_notification(notification): - notification.updated_at = datetime.utcnow() + notification.updated_at = utc_now() # notify-api-742 remove phone numbers from db notification.to = "1" notification.normalised_to = "1" @@ -327,10 +328,10 @@ def sanitize_successful_notification_by_id(notification_id, carrier, provider_re "notification_id": notification_id, "carrier": carrier, "response": provider_response, - "sent_at": datetime.utcnow(), + "sent_at": utc_now(), } - db.session.execute(update_query, input_params) + db.session.execute(text(update_query), input_params) db.session.commit() @@ -437,7 +438,7 @@ def dao_timeout_notifications(cutoff_time, limit=100000): Set email and SMS notifications (only) to "temporary-failure" status if they're still sending from before the specified cutoff_time. 
""" - updated_at = datetime.utcnow() + updated_at = utc_now() current_statuses = [NotificationStatus.SENDING, NotificationStatus.PENDING] new_status = NotificationStatus.TEMPORARY_FAILURE @@ -599,9 +600,7 @@ def dao_get_last_notification_added_for_job_id(job_id): def notifications_not_yet_sent(should_be_sending_after_seconds, notification_type): - older_than_date = datetime.utcnow() - timedelta( - seconds=should_be_sending_after_seconds - ) + older_than_date = utc_now() - timedelta(seconds=should_be_sending_after_seconds) notifications = Notification.query.filter( Notification.created_at <= older_than_date, @@ -622,8 +621,7 @@ def _duplicate_update_warning(notification, status): id=notification.id, old_status=notification.status, new_status=status, - time_diff=datetime.utcnow() - - (notification.updated_at or notification.created_at), + time_diff=utc_now() - (notification.updated_at or notification.created_at), type=notification.notification_type, sent_by=notification.sent_by, service_id=notification.service_id, diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index 0cb22adcd..73132a44e 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -7,6 +7,7 @@ from app import db from app.dao.dao_utils import autocommit from app.enums import NotificationType from app.models import FactBilling, ProviderDetails, ProviderDetailsHistory, User +from app.utils import utc_now def get_provider_details_by_id(provider_details_id): @@ -66,7 +67,7 @@ def _get_sms_providers_for_update(time_threshold): # if something updated recently, don't update again. If the updated_at is null, treat it as min time if any( - (provider.updated_at or datetime.min) > datetime.utcnow() - time_threshold + (provider.updated_at or datetime.min) > utc_now() - time_threshold for provider in q ): current_app.logger.info( @@ -102,7 +103,7 @@ def _update_provider_details_without_commit(provider_details): Doesn't commit, for when you need to control the database transaction manually """ provider_details.version += 1 - provider_details.updated_at = datetime.utcnow() + provider_details.updated_at = utc_now() history = ProviderDetailsHistory.from_original(provider_details) db.session.add(provider_details) db.session.add(history) @@ -111,7 +112,7 @@ def _update_provider_details_without_commit(provider_details): def dao_get_provider_stats(): # this query does not include the current day since the task to populate ft_billing runs overnight - current_datetime = datetime.utcnow() + current_datetime = utc_now() first_day_of_the_month = current_datetime.date().replace(day=1) subquery = ( diff --git a/app/dao/service_callback_api_dao.py b/app/dao/service_callback_api_dao.py index 75ea69f09..a1a39d982 100644 --- a/app/dao/service_callback_api_dao.py +++ b/app/dao/service_callback_api_dao.py @@ -1,16 +1,15 @@ -from datetime import datetime - from app import create_uuid, db from app.dao.dao_utils import autocommit, version_class from app.enums import CallbackType from app.models import ServiceCallbackApi +from app.utils import utc_now @autocommit @version_class(ServiceCallbackApi) def save_service_callback_api(service_callback_api): service_callback_api.id = create_uuid() - service_callback_api.created_at = datetime.utcnow() + service_callback_api.created_at = utc_now() db.session.add(service_callback_api) @@ -24,7 +23,7 @@ def reset_service_callback_api( if bearer_token: service_callback_api.bearer_token = bearer_token service_callback_api.updated_by_id = updated_by_id - 
service_callback_api.updated_at = datetime.utcnow() + service_callback_api.updated_at = utc_now() db.session.add(service_callback_api) diff --git a/app/dao/service_data_retention_dao.py b/app/dao/service_data_retention_dao.py index 1e14127d7..b95ca5720 100644 --- a/app/dao/service_data_retention_dao.py +++ b/app/dao/service_data_retention_dao.py @@ -1,8 +1,7 @@ -from datetime import datetime - from app import db from app.dao.dao_utils import autocommit from app.models import ServiceDataRetention +from app.utils import utc_now def fetch_service_data_retention_by_id(service_id, data_retention_id): @@ -50,7 +49,7 @@ def update_service_data_retention( updated_count = ServiceDataRetention.query.filter( ServiceDataRetention.id == service_data_retention_id, ServiceDataRetention.service_id == service_id, - ).update({"days_of_retention": days_of_retention, "updated_at": datetime.utcnow()}) + ).update({"days_of_retention": days_of_retention, "updated_at": utc_now()}) return updated_count diff --git a/app/dao/service_inbound_api_dao.py b/app/dao/service_inbound_api_dao.py index 11634d3ee..a04affe9e 100644 --- a/app/dao/service_inbound_api_dao.py +++ b/app/dao/service_inbound_api_dao.py @@ -1,15 +1,14 @@ -from datetime import datetime - from app import create_uuid, db from app.dao.dao_utils import autocommit, version_class from app.models import ServiceInboundApi +from app.utils import utc_now @autocommit @version_class(ServiceInboundApi) def save_service_inbound_api(service_inbound_api): service_inbound_api.id = create_uuid() - service_inbound_api.created_at = datetime.utcnow() + service_inbound_api.created_at = utc_now() db.session.add(service_inbound_api) @@ -23,7 +22,7 @@ def reset_service_inbound_api( if bearer_token: service_inbound_api.bearer_token = bearer_token service_inbound_api.updated_by_id = updated_by_id - service_inbound_api.updated_at = datetime.utcnow() + service_inbound_api.updated_at = utc_now() db.session.add(service_inbound_api) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index f6724f247..36427d8a8 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timedelta +from datetime import timedelta from flask import current_app from sqlalchemy import Float, cast, select @@ -44,6 +44,7 @@ from app.utils import ( escape_special_characters, get_archived_db_column_value, get_midnight_in_utc, + utc_now, ) @@ -252,7 +253,7 @@ def dao_archive_service(service_id): for api_key in service.api_keys: if not api_key.expiry_date: - api_key.expiry_date = datetime.utcnow() + api_key.expiry_date = utc_now() def dao_fetch_service_by_id_and_user(service_id, user_id): @@ -404,7 +405,7 @@ def delete_service_and_all_associated_db_objects(service): def dao_fetch_todays_stats_for_service(service_id): - today = datetime.utcnow().date() + today = utc_now().date() start_date = get_midnight_in_utc(today) return ( db.session.query( @@ -481,7 +482,7 @@ def dao_fetch_stats_for_service_from_day_for_user(service_id, day, user_id): def dao_fetch_todays_stats_for_all_services( include_from_test_key=True, only_active=True ): - today = datetime.utcnow().date() + today = utc_now().date() start_date = get_midnight_in_utc(today) end_date = get_midnight_in_utc(today + timedelta(days=1)) @@ -544,7 +545,7 @@ def dao_suspend_service(service_id): for api_key in service.api_keys: if not api_key.expiry_date: - api_key.expiry_date = datetime.utcnow() + api_key.expiry_date = utc_now() service.active = False diff --git 
a/app/dao/templates_dao.py b/app/dao/templates_dao.py index 26cdc2497..55d4363d6 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -1,11 +1,11 @@ import uuid -from datetime import datetime from sqlalchemy import asc, desc from app import db from app.dao.dao_utils import VersionOptions, autocommit, version_class from app.models import Template, TemplateHistory, TemplateRedacted +from app.utils import utc_now @autocommit @@ -39,7 +39,7 @@ def dao_update_template(template): @autocommit def dao_redact_template(template, user_id): template.template_redacted.redact_personalisation = True - template.template_redacted.updated_at = datetime.utcnow() + template.template_redacted.updated_at = utc_now() template.template_redacted.updated_by_id = user_id db.session.add(template.template_redacted) diff --git a/app/dao/uploads_dao.py b/app/dao/uploads_dao.py index cdbe9d247..1f7b7021c 100644 --- a/app/dao/uploads_dao.py +++ b/app/dao/uploads_dao.py @@ -1,4 +1,3 @@ -from datetime import datetime from os import getenv from flask import current_app @@ -7,7 +6,7 @@ from sqlalchemy import String, and_, desc, func, literal, text from app import db from app.enums import JobStatus, NotificationStatus, NotificationType from app.models import Job, Notification, ServiceDataRetention, Template -from app.utils import midnight_n_days_ago +from app.utils import midnight_n_days_ago, utc_now def _get_printing_day(created_at): @@ -40,7 +39,7 @@ def _naive_gmt_to_utc(column): def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size=50): # Hardcoded filter to exclude cancelled or scheduled jobs # for the moment, but we may want to change this method take 'statuses' as a argument in the future - today = datetime.utcnow().date() + today = utc_now().date() jobs_query_filter = [ Job.service_id == service_id, Job.original_file_name != current_app.config["TEST_MESSAGE_FILENAME"], diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index 58b660aba..d7291b35c 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timedelta +from datetime import timedelta from secrets import randbelow import sqlalchemy @@ -14,7 +14,7 @@ from app.dao.service_user_dao import dao_get_service_users_by_user_id from app.enums import AuthType, PermissionType from app.errors import InvalidRequest from app.models import Organization, Service, User, VerifyCode -from app.utils import escape_special_characters, get_archived_db_column_value +from app.utils import escape_special_characters, get_archived_db_column_value, utc_now def _remove_values_for_keys_if_present(dict, keys): @@ -76,9 +76,9 @@ def save_model_user( ): if password: user.password = password - user.password_changed_at = datetime.utcnow() + user.password_changed_at = utc_now() if validated_email_access: - user.email_access_validated_at = datetime.utcnow() + user.email_access_validated_at = utc_now() if update_dict: _remove_values_for_keys_if_present(update_dict, ["id", "password_changed_at"]) db.session.query(User).filter_by(id=user.id).update(update_dict or {}) @@ -90,7 +90,7 @@ def save_model_user( def create_user_code(user, code, code_type): verify_code = VerifyCode( code_type=code_type, - expiry_datetime=datetime.utcnow() + timedelta(minutes=30), + expiry_datetime=utc_now() + timedelta(minutes=30), user=user, ) verify_code.code = code @@ -111,7 +111,7 @@ def get_user_code(user, code, code_type): def delete_codes_older_created_more_than_a_day_ago(): deleted = ( 
db.session.query(VerifyCode) - .filter(VerifyCode.created_at < datetime.utcnow() - timedelta(hours=24)) + .filter(VerifyCode.created_at < utc_now() - timedelta(hours=24)) .delete() ) db.session.commit() @@ -138,7 +138,7 @@ def delete_user_verify_codes(user): def count_user_verify_codes(user): query = VerifyCode.query.filter( VerifyCode.user == user, - VerifyCode.expiry_datetime > datetime.utcnow(), + VerifyCode.expiry_datetime > utc_now(), VerifyCode.code_used.is_(False), ) return query.count() @@ -179,7 +179,7 @@ def reset_failed_login_count(user): def update_user_password(user, password): # reset failed login count - they've just reset their password so should be fine user.password = password - user.password_changed_at = datetime.utcnow() + user.password_changed_at = utc_now() db.session.add(user) db.session.commit() diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 8a06d820a..a9b90d368 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -1,5 +1,4 @@ import json -from datetime import datetime from urllib import parse from cachetools import TTLCache, cached @@ -14,6 +13,7 @@ from app.dao.provider_details_dao import get_provider_details_by_notification_ty from app.enums import BrandType, KeyType, NotificationStatus, NotificationType from app.exceptions import NotificationTechnicalFailureException from app.serialised_models import SerialisedService, SerialisedTemplate +from app.utils import utc_now from notifications_utils.template import ( HTMLEmailTemplate, PlainTextEmailTemplate, @@ -177,7 +177,7 @@ def send_email_to_provider(notification): def update_notification_to_sending(notification, provider): - notification.sent_at = datetime.utcnow() + notification.sent_at = utc_now() notification.sent_by = provider.name if notification.status not in NotificationStatus.completed_types(): notification.status = NotificationStatus.SENDING diff --git a/app/history_meta.py b/app/history_meta.py index f7bef7076..1ef19f198 100644 --- a/app/history_meta.py +++ b/app/history_meta.py @@ -15,12 +15,12 @@ session events. 
""" -import datetime - from sqlalchemy import Column, ForeignKeyConstraint, Integer, Table, util from sqlalchemy.orm import attributes, object_mapper, registry from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty +from app.utils import utc_now + def col_references_table(col, table): for fk in col.foreign_keys: @@ -236,10 +236,10 @@ def create_history(obj, history_cls=None): if not obj.version: obj.version = 1 - obj.created_at = datetime.datetime.utcnow() + obj.created_at = utc_now() else: obj.version += 1 - now = datetime.datetime.utcnow() + now = utc_now() obj.updated_at = now data["updated_at"] = now diff --git a/app/models.py b/app/models.py index 71eea3295..e6c3c66ae 100644 --- a/app/models.py +++ b/app/models.py @@ -1,4 +1,3 @@ -import datetime import itertools import uuid @@ -36,6 +35,7 @@ from app.utils import ( DATETIME_FORMAT, DATETIME_FORMAT_NO_TIMEZONE, get_dt_string_or_none, + utc_now, ) from notifications_utils.clients.encryption.encryption_client import EncryptionError from notifications_utils.recipients import ( @@ -115,14 +115,14 @@ class User(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) updated_at = db.Column( db.DateTime, index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now(), ) _password = db.Column(db.String, index=False, unique=False, nullable=False) mobile_number = db.Column(db.String, index=False, unique=False, nullable=True) @@ -131,7 +131,7 @@ class User(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) logged_in_at = db.Column(db.DateTime, nullable=True) failed_login_count = db.Column(db.Integer, nullable=False, default=0) @@ -144,7 +144,7 @@ class User(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) preferred_timezone = db.Column( db.Text, @@ -368,12 +368,12 @@ class Organization(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now(), ) agreement_signed = db.Column(db.Boolean, nullable=True) agreement_signed_at = db.Column(db.DateTime, nullable=True) @@ -488,14 +488,14 @@ class Service(db.Model, Versioned): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) updated_at = db.Column( db.DateTime, index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now(), ) active = db.Column( db.Boolean, @@ -626,12 +626,12 @@ class AnnualBilling(db.Model): updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now(), ) created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now(), ) UniqueConstraint( "financial_year_start", @@ -698,13 +698,13 @@ class InboundNumber(db.Model): ) created_at = db.Column( db.DateTime, - default=datetime.datetime.utcnow, + default=utc_now(), nullable=False, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now(), ) def serialize(self): @@ -753,13 +753,13 @@ class ServiceSmsSender(db.Model): ) created_at = db.Column( db.DateTime, - default=datetime.datetime.utcnow, + default=utc_now(), nullable=False, ) updated_at = db.Column( db.DateTime, nullable=True, - 
onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) def get_reply_to_text(self): @@ -798,7 +798,7 @@ class ServicePermission(db.Model): ) created_at = db.Column( db.DateTime, - default=datetime.datetime.utcnow, + default=utc_now, nullable=False, ) @@ -826,7 +826,7 @@ class ServiceGuestList(db.Model): service = db.relationship("Service", backref="guest_list") recipient_type = enum_column(RecipientType, nullable=False) recipient = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow) + created_at = db.Column(db.DateTime, default=utc_now) @classmethod def from_string(cls, service_id, recipient_type, recipient): @@ -867,7 +867,7 @@ class ServiceInboundApi(db.Model, Versioned): _bearer_token = db.Column("bearer_token", db.String(), nullable=False) created_at = db.Column( db.DateTime, - default=datetime.datetime.utcnow, + default=utc_now, nullable=False, ) updated_at = db.Column(db.DateTime, nullable=True) @@ -914,7 +914,7 @@ class ServiceCallbackApi(db.Model, Versioned): _bearer_token = db.Column("bearer_token", db.String(), nullable=False) created_at = db.Column( db.DateTime, - default=datetime.datetime.utcnow, + default=utc_now, nullable=False, ) updated_at = db.Column(db.DateTime, nullable=True) @@ -974,14 +974,14 @@ class ApiKey(db.Model, Versioned): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) created_by = db.relationship("User") created_by_id = db.Column( @@ -1101,9 +1101,9 @@ class TemplateBase(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) - updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.utcnow) + updated_at = db.Column(db.DateTime, onupdate=utc_now) content = db.Column(db.Text, nullable=False) archived = db.Column(db.Boolean, nullable=False, default=False) hidden = db.Column(db.Boolean, nullable=False, default=False) @@ -1250,7 +1250,7 @@ class TemplateRedacted(db.Model): updated_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_by_id = db.Column( UUID(as_uuid=True), @@ -1304,7 +1304,7 @@ class ProviderDetails(db.Model): updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) created_by_id = db.Column( UUID(as_uuid=True), @@ -1326,9 +1326,7 @@ class ProviderDetailsHistory(db.Model, HistoryModel): notification_type = enum_column(NotificationType, nullable=False) active = db.Column(db.Boolean, nullable=False) version = db.Column(db.Integer, primary_key=True, nullable=False) - updated_at = db.Column( - db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow - ) + updated_at = db.Column(db.DateTime, nullable=True, onupdate=utc_now) created_by_id = db.Column( UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=True ) @@ -1359,14 +1357,14 @@ class Job(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) notification_count = db.Column(db.Integer, nullable=False) notifications_sent = db.Column(db.Integer, nullable=False, default=0) @@ -1410,7 +1408,7 @@ class VerifyCode(db.Model): index=False,
unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) @property @@ -1508,7 +1506,7 @@ class Notification(db.Model): index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) status = enum_column( NotificationStatus, @@ -1785,7 +1783,7 @@ class NotificationHistory(db.Model, HistoryModel): index=False, unique=False, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) status = enum_column( NotificationStatus, @@ -1854,7 +1852,7 @@ class InvitedUser(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) status = enum_column( InvitedUserStatus, @@ -1893,7 +1891,7 @@ class InvitedOrganizationUser(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) status = enum_column( @@ -1939,7 +1937,7 @@ class Permission(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) __table_args__ = ( @@ -1962,7 +1960,7 @@ class Event(db.Model): index=False, unique=False, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) data = db.Column(JSON, nullable=False) @@ -1986,7 +1984,7 @@ class InboundSms(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) service_id = db.Column( UUID(as_uuid=True), @@ -2066,12 +2064,12 @@ class ServiceEmailReplyTo(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) def serialize(self): @@ -2112,12 +2110,12 @@ class FactBilling(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) @@ -2149,12 +2147,12 @@ class FactNotificationStatus(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) @@ -2167,12 +2165,12 @@ class FactProcessingTime(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) @@ -2195,7 +2193,7 @@ class Complaint(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) def serialize(self): @@ -2234,12 +2232,12 @@ class ServiceDataRetention(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) __table_args__ = ( @@ -2288,12 +2286,12 @@ class WebauthnCredential(db.Model): created_at = db.Column( db.DateTime, nullable=False, - default=datetime.datetime.utcnow, + default=utc_now, ) updated_at = db.Column( db.DateTime, nullable=True, - onupdate=datetime.datetime.utcnow, + onupdate=utc_now, ) def serialize(self): diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 8f542d31a..2a423767d
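
SQLAlchemy's `default=` and `onupdate=` column arguments take a callable, which the ORM invokes on every INSERT or UPDATE; the result of a call would instead be evaluated once at class-definition time and stamp every row with the import-time value. That is why the models.py hunks above pass `utc_now` itself rather than `utc_now()`. A minimal, self-contained sketch of the distinction (hypothetical `Example` model on plain SQLAlchemy, not the app's Flask-SQLAlchemy setup):

    from datetime import datetime, timezone

    from sqlalchemy import Column, DateTime, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base

    def utc_now():
        # naive UTC timestamp, mirroring app.utils.utc_now from this changeset
        return datetime.now(timezone.utc).replace(tzinfo=None)

    Base = declarative_base()

    class Example(Base):
        # hypothetical table, for illustration only
        __tablename__ = "example"
        id = Column(Integer, primary_key=True)
        # callable: re-evaluated by the ORM at each INSERT
        created_at = Column(DateTime, default=utc_now)
        # default=utc_now() would instead be evaluated here, once,
        # and every row would share that single timestamp

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(Example())
        session.commit()
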
100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -1,5 +1,4 @@ import uuid -from datetime import datetime from flask import current_app @@ -12,6 +11,7 @@ from app.dao.notifications_dao import ( ) from app.enums import KeyType, NotificationStatus, NotificationType from app.models import Notification +from app.utils import utc_now from app.v2.errors import BadRequestError from notifications_utils.recipients import ( format_email_address, @@ -77,7 +77,7 @@ def persist_notification( document_download_count=None, updated_at=None, ): - notification_created_at = created_at or datetime.utcnow() + notification_created_at = created_at or utc_now() if not notification_id: notification_id = uuid.uuid4() diff --git a/app/notifications/rest.py b/app/notifications/rest.py index 9c5806ede..f52bd1933 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -64,7 +64,11 @@ def get_notification_by_id(notification_id): @notifications.route("/notifications", methods=["GET"]) def get_all_notifications(): + current_app.logger.debug("enter get_all_notifications()") data = notifications_filter_schema.load(request.args) + current_app.logger.debug( + f"get_all_notifications() data {data} request.args {request.args}" + ) include_jobs = data.get("include_jobs", False) page = data.get("page", 1) @@ -96,19 +100,18 @@ def get_all_notifications(): notification.to = recipient notification.normalised_to = recipient - return ( - jsonify( - notifications=notification_with_personalisation_schema.dump( - pagination.items, many=True - ), - page_size=page_size, - total=pagination.total, - links=pagination_links( - pagination, ".get_all_notifications", **request.args.to_dict() - ), + result = jsonify( + notifications=notification_with_personalisation_schema.dump( + pagination.items, many=True + ), + page_size=page_size, + total=pagination.total, + links=pagination_links( + pagination, ".get_all_notifications", **request.args.to_dict() ), - 200, ) + current_app.logger.debug(f"result={result}") + return result, 200 @notifications.route("/notifications/", methods=["POST"]) diff --git a/app/organization/invite_rest.py b/app/organization/invite_rest.py index f87605435..caa803485 100644 --- a/app/organization/invite_rest.py +++ b/app/organization/invite_rest.py @@ -165,7 +165,7 @@ def validate_invitation_token(token): ) except SignatureExpired: errors = { - "invitation": "Your invitation to GOV.UK Notify has expired. " + "invitation": "Your invitation to Notify.gov has expired. " "Please ask the person that invited you to send you another one" } raise InvalidRequest(errors, status_code=400) diff --git a/app/performance_dashboard/rest.py b/app/performance_dashboard/rest.py index 4810dc17b..52267a353 100644 --- a/app/performance_dashboard/rest.py +++ b/app/performance_dashboard/rest.py @@ -12,6 +12,7 @@ from app.performance_dashboard.performance_dashboard_schema import ( performance_dashboard_request, ) from app.schema_validation import validate +from app.utils import utc_now performance_dashboard_blueprint = Blueprint( "performance_dashboard", __name__, url_prefix="/performance-dashboard" @@ -29,7 +30,7 @@ def get_performance_dashboard(): validate(request.args, performance_dashboard_request) # If start and end date are not set, we are expecting today's stats. 
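
For context on the `utc_now()` substitution running through these hunks (for example `today = str(utc_now().date())` just below): `datetime.utcnow()` is deprecated as of Python 3.12 in favor of the timezone-aware `datetime.now(timezone.utc)`, but the columns in this schema store naive UTC values, so the helpers added to app/utils.py further down keep the result naive. A quick sanity check of those semantics, copying the helper definitions from this changeset:

    from datetime import datetime, timezone

    # helpers as added to app/utils.py in this changeset
    def aware_utcnow():
        return datetime.now(timezone.utc)

    def naive_utcnow():
        return aware_utcnow().replace(tzinfo=None)

    def utc_now():
        return naive_utcnow()

    assert utc_now().tzinfo is None  # naive, a drop-in for datetime.utcnow()
    assert aware_utcnow().tzinfo is timezone.utc  # aware variant for new code
    # comparing aware and naive datetimes raises TypeError, which is why the
    # naive form is kept wherever values meet naive DateTime columns:
    #     aware_utcnow() > utc_now()  ->  TypeError
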
- today = str(datetime.utcnow().date()) + today = str(utc_now().date()) start_date = datetime.strptime( request.args.get("start_date", today), diff --git a/app/platform_stats/rest.py b/app/platform_stats/rest.py index 447e924d1..17f2faabb 100644 --- a/app/platform_stats/rest.py +++ b/app/platform_stats/rest.py @@ -17,7 +17,7 @@ from app.errors import InvalidRequest, register_errors from app.platform_stats.platform_stats_schema import platform_stats_request from app.schema_validation import validate from app.service.statistics import format_admin_stats -from app.utils import get_midnight_in_utc +from app.utils import get_midnight_in_utc, utc_now platform_stats_blueprint = Blueprint("platform_stats", __name__) @@ -30,7 +30,7 @@ def get_platform_stats(): validate(request.args, platform_stats_request) # If start and end date are not set, we are expecting today's stats. - today = str(datetime.utcnow().date()) + today = str(utc_now().date()) start_date = datetime.strptime( request.args.get("start_date", today), "%Y-%m-%d" diff --git a/app/schema_validation/__init__.py b/app/schema_validation/__init__.py index c4f8f6486..04be11d1e 100644 --- a/app/schema_validation/__init__.py +++ b/app/schema_validation/__init__.py @@ -1,10 +1,11 @@ import json -from datetime import datetime, timedelta +from datetime import timedelta from uuid import UUID from iso8601 import ParseError, iso8601 from jsonschema import Draft7Validator, FormatChecker, ValidationError +from app.utils import utc_now from notifications_utils.recipients import ( InvalidEmailError, InvalidPhoneError, @@ -41,9 +42,9 @@ def validate_schema_date_with_hour(instance): if isinstance(instance, str): try: dt = iso8601.parse_date(instance).replace(tzinfo=None) - if dt < datetime.utcnow(): + if dt < utc_now(): raise ValidationError("datetime can not be in the past") - if dt > datetime.utcnow() + timedelta(hours=24): + if dt > utc_now() + timedelta(hours=24): raise ValidationError("datetime can only be 24 hours in the future") except ParseError: raise ValidationError( diff --git a/app/schemas.py b/app/schemas.py index 7b47da593..8657eeafe 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta from uuid import UUID from dateutil.parser import parse @@ -19,7 +19,7 @@ from app import ma, models from app.dao.permissions_dao import permission_dao from app.enums import ServicePermissionType, TemplateType from app.models import ServicePermission -from app.utils import DATETIME_FORMAT_NO_TIMEZONE, get_template_instance +from app.utils import DATETIME_FORMAT_NO_TIMEZONE, get_template_instance, utc_now from notifications_utils.recipients import ( InvalidEmailError, InvalidPhoneError, @@ -41,12 +41,12 @@ def _validate_positive_number(value, msg="Not a positive integer"): def _validate_datetime_not_more_than_96_hours_in_future( dte, msg="Date cannot be more than 96hrs in the future" ): - if dte > datetime.utcnow() + timedelta(hours=96): + if dte > utc_now() + timedelta(hours=96): raise ValidationError(msg) def _validate_datetime_not_in_past(dte, msg="Date cannot be in the past"): - if dte < datetime.utcnow(): + if dte < utc_now(): raise ValidationError(msg) diff --git a/app/service/rest.py b/app/service/rest.py index 0d015c08a..117b414df 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -109,7 +109,7 @@ from app.service.service_senders_schema import ( ) from app.service.utils import get_guest_list_objects from app.user.users_schema import 
post_set_permissions_schema -from app.utils import get_prev_next_pagination_links, hilite +from app.utils import get_prev_next_pagination_links, hilite, utc_now service_blueprint = Blueprint("service", __name__) @@ -151,7 +151,7 @@ def get_services(): include_from_test_key = request.args.get("include_from_test_key", "True") != "False" # If start and end date are not set, we are expecting today's stats. - today = str(datetime.utcnow().date()) + today = str(utc_now().date()) start_date = datetime.strptime( request.args.get("start_date", today), "%Y-%m-%d" @@ -423,12 +423,17 @@ def get_service_history(service_id): @service_blueprint.route("/<uuid:service_id>/notifications", methods=["GET", "POST"]) def get_all_notifications_for_service(service_id): + current_app.logger.debug("enter get_all_notifications_for_service") if request.method == "GET": data = notifications_filter_schema.load(request.args) + current_app.logger.debug( + f"use GET, request.args {request.args} and data {data}" + ) elif request.method == "POST": # Must transform request.get_json() to MultiDict as NotificationsFilterSchema expects a MultiDict. # Unlike request.args, request.get_json() does not return a MultiDict but instead just a dict. data = notifications_filter_schema.load(MultiDict(request.get_json())) + current_app.logger.debug(f"use POST, request {request.get_json()} data {data}") if data.get("to"): notification_type = ( @@ -455,6 +460,10 @@ def get_all_notifications_for_service(service_id): # for whether to show pagination links count_pages = data.get("count_pages", True) + current_app.logger.debug( + f"get pagination with service_id {service_id} filters {data} " + f"limit_days {limit_days} include_jobs {include_jobs} include_one_off {include_one_off}" + ) pagination = notifications_dao.get_notifications_for_service( service_id, filter_dict=data, @@ -496,6 +505,13 @@ def get_all_notifications_for_service(service_id): notifications = notification_with_template_schema.dump( pagination.items, many=True ) + current_app.logger.debug(f"number of notifications is {len(notifications)}") + + if len(notifications) > 0: + current_app.logger.debug(f"first notification is {notifications[0]}") + else: + current_app.logger.debug("there are no notifications to show") + # We try and get the next page of results to work out if we need to provide a pagination link to the next page # in our response if it exists.
Note, this could be done instead by changing `count_pages` in the previous # call to be True which will enable us to use Flask-Sqlalchemy to tell if there is a next page of results but @@ -618,7 +634,7 @@ def get_monthly_notification_stats(service_id): statistics.add_monthly_notification_status_stats(data, stats) - now = datetime.utcnow() + now = utc_now() if end_date > now: todays_deltas = fetch_notification_status_for_service_for_day( now, service_id=service_id @@ -716,7 +732,7 @@ def get_service_statistics(service_id, today_only, limit_days=7): def get_detailed_services( start_date, end_date, only_active=False, include_from_test_key=True ): - if start_date == datetime.utcnow().date(): + if start_date == utc_now().date(): stats = dao_fetch_todays_stats_for_all_services( include_from_test_key=include_from_test_key, only_active=only_active ) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 02899d3e9..2fb5dca67 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -1,7 +1,6 @@ import base64 import json import os -from datetime import datetime from flask import Blueprint, current_app, jsonify, request from itsdangerous import BadData, SignatureExpired @@ -25,7 +24,7 @@ from app.notifications.process_notifications import ( send_notification_to_queue, ) from app.schemas import invited_user_schema -from app.utils import hilite +from app.utils import hilite, utc_now from notifications_utils.url_safe_token import check_token, generate_token service_invite = Blueprint("service_invite", __name__) @@ -156,7 +155,7 @@ def resend_service_invite(service_id, invited_user_id): invited_user_id=invited_user_id, ) - fetched.created_at = datetime.utcnow() + fetched.created_at = utc_now() fetched.status = InvitedUserStatus.PENDING current_data = {k: v for k, v in invited_user_schema.dump(fetched).items()} @@ -202,7 +201,7 @@ def validate_service_invitation_token(token): ) except SignatureExpired: errors = { - "invitation": "Your invitation to GOV.UK Notify has expired. " + "invitation": "Your invitation to Notify.gov has expired. 
" "Please ask the person that invited you to send you another one" } raise InvalidRequest(errors, status_code=400) diff --git a/app/user/rest.py b/app/user/rest.py index ea2da8eee..760334841 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -1,6 +1,5 @@ import json import uuid -from datetime import datetime from urllib.parse import urlencode from flask import Blueprint, abort, current_app, jsonify, request @@ -55,7 +54,7 @@ from app.user.users_schema import ( post_verify_code_schema, post_verify_webauthn_schema, ) -from app.utils import url_with_token +from app.utils import url_with_token, utc_now from notifications_utils.recipients import is_us_phone_number, use_numeric_sender user_blueprint = Blueprint("user", __name__) @@ -222,15 +221,15 @@ def verify_user_code(user_id): # only relevant from sms increment_failed_login_count(user_to_verify) raise InvalidRequest("Code not found", status_code=404) - if datetime.utcnow() > code.expiry_datetime or code.code_used: + if utc_now() > code.expiry_datetime or code.code_used: # sms and email increment_failed_login_count(user_to_verify) raise InvalidRequest("Code has expired", status_code=400) user_to_verify.current_session_id = str(uuid.uuid4()) - user_to_verify.logged_in_at = datetime.utcnow() + user_to_verify.logged_in_at = utc_now() if data["code_type"] == CodeType.EMAIL: - user_to_verify.email_access_validated_at = datetime.utcnow() + user_to_verify.email_access_validated_at = utc_now() user_to_verify.failed_login_count = 0 save_model_user(user_to_verify) @@ -263,7 +262,7 @@ def complete_login_after_webauthn_authentication_attempt(user_id): if successful: user.current_session_id = str(uuid.uuid4()) - user.logged_in_at = datetime.utcnow() + user.logged_in_at = utc_now() user.failed_login_count = 0 save_model_user(user) else: @@ -676,7 +675,7 @@ def get_organizations_and_services_for_user(user_id): def _create_reset_password_url(email, next_redirect, base_url=None): - data = json.dumps({"email": email, "created_at": str(datetime.utcnow())}) + data = json.dumps({"email": email, "created_at": str(utc_now())}) static_url_part = "/new-password/" full_url = url_with_token( data, static_url_part, current_app.config, base_url=base_url diff --git a/app/utils.py b/app/utils.py index 22f9a034c..e286d6539 100644 --- a/app/utils.py +++ b/app/utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from flask import url_for from sqlalchemy import func @@ -94,7 +94,7 @@ def midnight_n_days_ago(number_of_days): """ Returns midnight a number of days ago. Takes care of daylight savings etc. 
""" - return get_midnight_in_utc(datetime.utcnow() - timedelta(days=number_of_days)) + return get_midnight_in_utc(utc_now() - timedelta(days=number_of_days)) def escape_special_characters(string): @@ -104,7 +104,7 @@ def escape_special_characters(string): def get_archived_db_column_value(column): - date = datetime.utcnow().strftime("%Y-%m-%d") + date = utc_now().strftime("%Y-%m-%d") return f"_archived_{date}_{column}" @@ -132,3 +132,15 @@ def hilite(message): ansi_green = "\033[32m" ansi_reset = "\033[0m" return f"{ansi_green}{message}{ansi_reset}" + + +def aware_utcnow(): + return datetime.now(timezone.utc) + + +def naive_utcnow(): + return aware_utcnow().replace(tzinfo=None) + + +def utc_now(): + return naive_utcnow() diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 3c8fa1fdb..856179f85 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -1,6 +1,5 @@ import functools import uuid -from datetime import datetime import botocore from flask import abort, current_app, jsonify, request @@ -27,7 +26,7 @@ from app.notifications.validators import ( validate_template, ) from app.schema_validation import validate -from app.utils import DATETIME_FORMAT +from app.utils import DATETIME_FORMAT, utc_now from app.v2.errors import BadRequestError from app.v2.notifications import v2_notification_blueprint from app.v2.notifications.create_response import ( @@ -226,7 +225,7 @@ def save_email_or_sms_to_queue( "reply_to_text": reply_to_text, "document_download_count": document_download_count, "status": NotificationStatus.CREATED, - "created_at": datetime.utcnow().strftime(DATETIME_FORMAT), + "created_at": utc_now().strftime(DATETIME_FORMAT), } encrypted = encryption.encrypt(data) diff --git a/migrations/versions/0336_broadcast_msg_content_2.py b/migrations/versions/0336_broadcast_msg_content_2.py index a42cbc24e..014059d00 100644 --- a/migrations/versions/0336_broadcast_msg_content_2.py +++ b/migrations/versions/0336_broadcast_msg_content_2.py @@ -8,10 +8,11 @@ Create Date: 2020-12-04 15:06:22.544803 import sqlalchemy as sa from alembic import op -from notifications_utils.template import BroadcastMessageTemplate from sqlalchemy.dialects import postgresql from sqlalchemy.orm.session import Session +from notifications_utils.template import BroadcastMessageTemplate + revision = "0336_broadcast_msg_content_2" down_revision = "0335_broadcast_msg_content" diff --git a/notifications_utils/__init__.py b/notifications_utils/__init__.py index 84a55d644..1a94f3b94 100644 --- a/notifications_utils/__init__.py +++ b/notifications_utils/__init__.py @@ -1,4 +1,5 @@ import re +from datetime import datetime, timezone SMS_CHAR_COUNT_LIMIT = 918 # 153 * 6, no network issues but check with providers before upping this further LETTER_MAX_PAGE_COUNT = 10 @@ -23,3 +24,15 @@ email_with_smart_quotes_regex = re.compile( # and then later remove when performing tricky formatting operations MAGIC_SEQUENCE = "đŸ‡Ŧ🇧đŸĻâœ‰ī¸" magic_sequence_regex = re.compile(MAGIC_SEQUENCE) + + +def aware_utcnow(): + return datetime.now(timezone.utc) + + +def naive_utcnow(): + return aware_utcnow().replace(tzinfo=None) + + +def utc_now(): + return naive_utcnow() diff --git a/notifications_utils/clients/redis/__init__.py b/notifications_utils/clients/redis/__init__.py index 93a77d561..d201b3332 100644 --- a/notifications_utils/clients/redis/__init__.py +++ b/notifications_utils/clients/redis/__init__.py @@ -1,11 +1,11 @@ -from datetime import 
datetime +from notifications_utils import utc_now from .request_cache import RequestCache # noqa: F401 (unused import) def total_limit_cache_key(service_id): return "{}-{}-{}".format( - str(service_id), datetime.utcnow().strftime("%Y-%m-%d"), "total-count" + str(service_id), utc_now().strftime("%Y-%m-%d"), "total-count" ) diff --git a/notifications_utils/letter_timings.py b/notifications_utils/letter_timings.py index 62abf2c21..b171d6c3b 100644 --- a/notifications_utils/letter_timings.py +++ b/notifications_utils/letter_timings.py @@ -1,9 +1,10 @@ from collections import namedtuple -from datetime import datetime, time, timedelta +from datetime import time, timedelta import pytz from govuk_bank_holidays.bank_holidays import BankHolidays +from notifications_utils import utc_now from notifications_utils.countries.data import Postage from notifications_utils.timezones import utc_string_to_aware_gmt_datetime @@ -101,11 +102,7 @@ def get_letter_timings(upload_time, postage): # print deadline is 3pm BST printed_by = set_gmt_hour(print_day, hour=15) - now = ( - datetime.utcnow() - .replace(tzinfo=pytz.utc) - .astimezone(pytz.timezone("Europe/London")) - ) + now = utc_now().replace(tzinfo=pytz.utc).astimezone(pytz.timezone("Europe/London")) return LetterTimings( printed_by=printed_by, @@ -135,7 +132,7 @@ def too_late_to_cancel_letter(notification_created_at): time_created_at = notification_created_at day_created_on = time_created_at.date() - current_time = datetime.utcnow() + current_time = utc_now() current_day = current_time.date() if ( _after_letter_processing_deadline() @@ -152,14 +149,14 @@ def too_late_to_cancel_letter(notification_created_at): def _after_letter_processing_deadline(): - current_utc_datetime = datetime.utcnow() + current_utc_datetime = utc_now() bst_time = current_utc_datetime.time() return bst_time >= LETTER_PROCESSING_DEADLINE def _notification_created_before_today_deadline(notification_created_at): - current_bst_datetime = datetime.utcnow() + current_bst_datetime = utc_now() todays_deadline = current_bst_datetime.replace( hour=LETTER_PROCESSING_DEADLINE.hour, minute=LETTER_PROCESSING_DEADLINE.minute, diff --git a/notifications_utils/recipients.py b/notifications_utils/recipients.py index 0d8536c33..68e2cb101 100644 --- a/notifications_utils/recipients.py +++ b/notifications_utils/recipients.py @@ -17,9 +17,7 @@ from notifications_utils.formatters import ( strip_and_remove_obscure_whitespace, ) from notifications_utils.insensitive_dict import InsensitiveDict -from notifications_utils.international_billing_rates import ( - INTERNATIONAL_BILLING_RATES, -) +from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES from notifications_utils.postal_address import ( address_line_7_key, address_lines_1_to_6_and_postcode_keys, diff --git a/notifications_utils/template.py b/notifications_utils/template.py index 302fd3899..62dce8a55 100644 --- a/notifications_utils/template.py +++ b/notifications_utils/template.py @@ -1,7 +1,6 @@ import math import re from abc import ABC, abstractmethod -from datetime import datetime from functools import lru_cache from html import unescape from os import path @@ -13,6 +12,7 @@ from notifications_utils import ( LETTER_MAX_PAGE_COUNT, MAGIC_SEQUENCE, SMS_CHAR_COUNT_LIMIT, + utc_now, ) from notifications_utils.countries.data import Postage from notifications_utils.field import Field, PlainTextField @@ -43,10 +43,7 @@ from notifications_utils.markdown import ( notify_letter_preview_markdown, notify_plain_text_email_markdown, ) -from
notifications_utils.postal_address import ( - PostalAddress, - address_lines_1_to_7_keys, -) +from notifications_utils.postal_address import PostalAddress, address_lines_1_to_7_keys from notifications_utils.sanitise_text import SanitiseSMS from notifications_utils.take import Take from notifications_utils.template_change import TemplateChange @@ -742,7 +739,7 @@ class BaseLetterTemplate(SubjectMixin, Template): ) self.admin_base_url = admin_base_url self.logo_file_name = logo_file_name - self.date = date or datetime.utcnow() + self.date = date or utc_now() @property def subject(self): diff --git a/poetry.lock b/poetry.lock index 4601694ab..8aa19a612 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -204,17 +204,17 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "awscli" -version = "1.32.106" +version = "1.33.1" description = "Universal Command Line Environment for AWS." optional = false python-versions = ">=3.8" files = [ - {file = "awscli-1.32.106-py3-none-any.whl", hash = "sha256:32f050c2c5f73c0be2eb71449aba7bb6db2c9569ea1c4f28357389f5acbb5a82"}, - {file = "awscli-1.32.106.tar.gz", hash = "sha256:75f7b3277acc7b6598495af7012661af4c5f51d956312902ce8ec2176e9bb06d"}, + {file = "awscli-1.33.1-py3-none-any.whl", hash = "sha256:d446a99e6d2a05a2456f4d3a12240b8d7c0b7cdc42485b4444bbfb3659039cde"}, + {file = "awscli-1.33.1.tar.gz", hash = "sha256:1a5b5d7e438c0f53ede9142b9e92f25ba955e587f559ddaccba3ceab2568fcb8"}, ] [package.dependencies] -botocore = "1.34.106" +botocore = "1.34.119" colorama = ">=0.2.5,<0.4.7" docutils = ">=0.10,<0.17" PyYAML = ">=3.10,<6.1" @@ -403,17 +403,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.106" +version = "1.34.116" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.106-py3-none-any.whl", hash = "sha256:d3be4e1dd5d546a001cd4da805816934cbde9d395316546e9411fec341ade5cf"}, - {file = "boto3-1.34.106.tar.gz", hash = "sha256:6165b8cf1c7e625628ab28b32f9027064c8f5e5fca1c38d7fc228cd22069a19f"}, + {file = "boto3-1.34.116-py3-none-any.whl", hash = "sha256:e7f5ab2d1f1b90971a2b9369760c2c6bae49dae98c084a5c3f5c78e3968ace15"}, + {file = "boto3-1.34.116.tar.gz", hash = "sha256:53cb8aeb405afa1cd2b25421e27a951aeb568026675dec020587861fac96ac87"}, ] [package.dependencies] -botocore = ">=1.34.106,<1.35.0" +botocore = ">=1.34.116,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -422,13 +422,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.106" +version = "1.34.119" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.106-py3-none-any.whl", hash = "sha256:4baf0e27c2dfc4f4d0dee7c217c716e0782f9b30e8e1fff983fce237d88f73ae"}, - {file = "botocore-1.34.106.tar.gz", hash = "sha256:921fa5202f88c3e58fdcb4b3acffd56d65b24bca47092ee4b27aa988556c0be6"}, + {file = "botocore-1.34.119-py3-none-any.whl", hash = "sha256:4bdf7926a1290b2650d62899ceba65073dd2693e61c35f5cdeb3a286a0aaa27b"}, + {file = "botocore-1.34.119.tar.gz", hash = "sha256:b253f15b24b87b070e176af48e8ef146516090429d30a7d8b136a4c079b28008"}, ] [package.dependencies] @@ -553,13 +553,13 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -864,63 +864,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.1" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, - {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, - {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, - {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, - {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, - {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, - {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, - {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, - {file = 
"coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, - {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, - {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, - {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, - {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, - {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, - {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = 
"coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = 
"coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.extras] @@ -993,13 +993,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "cyclonedx-python-lib" -version = "7.3.4" +version = "7.4.0" description = "Python library for CycloneDX" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "cyclonedx_python_lib-7.3.4-py3-none-any.whl", hash = "sha256:8b6dc39f2281feb7fbf9b174fa5d8d3f8f7b51fd6f1d83e9b4c9bbd60ec2ab91"}, - {file = "cyclonedx_python_lib-7.3.4.tar.gz", hash = "sha256:f374855bd6b736b3a6be4eec93b5ca7f160c8282fe4ba5486518da11dbe83f1b"}, + {file = "cyclonedx_python_lib-7.4.0-py3-none-any.whl", hash = "sha256:fc423e7f46d772e5ded29a48cb0743233e692e5853c49b829efc0f59014efde1"}, + {file = "cyclonedx_python_lib-7.4.0.tar.gz", hash = "sha256:09b10736a7f440262578fa40f470b448de1ebf3c7a71e2ff0a4af0781d3a3b42"}, ] [package.dependencies] @@ -1041,6 +1041,25 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "detect-secrets" +version = "1.5.0" +description = "Tool for detecting secrets in the codebase" +optional = false +python-versions = "*" +files = [ + {file = "detect_secrets-1.5.0-py3-none-any.whl", hash = "sha256:e24e7b9b5a35048c313e983f76c4bd09dad89f045ff059e354f9943bf45aa060"}, + {file = "detect_secrets-1.5.0.tar.gz", hash = "sha256:6bb46dcc553c10df51475641bb30fd69d25645cc12339e46c824c1e0c388898a"}, +] + +[package.dependencies] +pyyaml = "*" +requests = "*" + +[package.extras] +gibberish = ["gibberish-detector"] +word-list = ["pyahocorasick"] + [[package]] name = "distlib" version = "0.3.8" @@ -1242,13 +1261,13 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"] [[package]] name = "faker" -version = "25.2.0" +version = "25.5.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-25.2.0-py3-none-any.whl", hash = "sha256:cfe97c4857c4c36ee32ea4aaabef884895992e209bae4cbd26807cf3e05c6918"}, - {file = "Faker-25.2.0.tar.gz", hash = "sha256:45b84f47ff1ef86e3d1a8d11583ca871ecf6730fad0660edadc02576583a2423"}, + {file = "Faker-25.5.0-py3-none-any.whl", hash = "sha256:edb85040a47ef1b30ccd8c4b6f07ee3cb4bd64aab1483be4efe75816ee2e2e36"}, + {file = "Faker-25.5.0.tar.gz", hash = "sha256:84d454fc9fef0b73428e00bdf45a36c04568c75f22727e990071580840cfbb84"}, ] [package.dependencies] @@ -2079,9 +2098,13 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -2154,13 +2177,13 @@ source = ["Cython (>=3.0.10)"] [[package]] name = "mako" -version = "1.3.3" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"}, - {file = "Mako-1.3.3.tar.gz", hash = "sha256:e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -2366,13 +2389,13 @@ files = [ [[package]] name = "moto" -version = "5.0.7" +version = "5.0.9" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.7-py2.py3-none-any.whl", hash = "sha256:c0214c1361fb1dc85f587d9ce17cd988c6f69ff0ed54d43789654022e0e744f2"}, - {file = "moto-5.0.7.tar.gz", hash = "sha256:f2cde691dc4bc675e318a65f018902ac7f89d61bf2646052f7df215d212f069e"}, + {file = "moto-5.0.9-py2.py3-none-any.whl", hash = "sha256:21a13e02f83d6a18cfcd99949c96abb2e889f4bd51c4c6a3ecc8b78765cb854e"}, + {file = "moto-5.0.9.tar.gz", hash = "sha256:eb71f1cba01c70fff1f16086acb24d6d9aeb32830d646d8989f98a29aeae24ba"}, ] [package.dependencies] @@ -2585,40 +2608,40 @@ files = [ [[package]] name = "newrelic" -version = "9.9.1" +version = "9.10.0" description = "New Relic Python Agent" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "newrelic-9.9.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:474499f482da7f58b5039f2c42dea2880d878b30729ae563bb1498a0bb30be44"}, - {file = "newrelic-9.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3c99cc368a3cfd9ce40ca4bbe2fe3bdd5f7d37865ea5e4bf811ba6fd0d00152d"}, - {file = "newrelic-9.9.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3ef567a779b068297c040f7410153135fb12e51e4a82084675b0cf142c407551"}, - {file = "newrelic-9.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:303117d3402659afac45174dfe7c595b7d4b3c0812a76b712c251c91ef95c430"}, - {file = "newrelic-9.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c813e9c7bdb1381cb0eda4925e07aa8ee21e111b5025d02261605eaabb129f1"}, - {file = "newrelic-9.9.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5d688917307d083d7fa6f3b31eec40c5a3782b160383230f5f644e2d4ae2a26"}, - {file = "newrelic-9.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5710910ceb847f8806540e6934764fff6823d7dcc6d30955e9ecb012e20efbfd"}, - {file = "newrelic-9.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aefa66f59d62ec22a6d347afa73c24bd723521c4cc0fdce7f51c71bfe85c42bc"}, - {file = "newrelic-9.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afdb30c4f89d0f089ac05ca50a383f94cfcdb07aab0b9722d2d5af09626ab304"}, - {file = "newrelic-9.9.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6361af2a60ab60a5757b13ce0b9b4efeee577a228637b9b8b449d47ec81fdd"}, - {file = "newrelic-9.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7aa1be0d0530d0c566dee2c4d43765aba9fc5fae256fac110ba57aae6ae8d8c4"}, - {file = "newrelic-9.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad34b8eb60f33b0eab9ed7727cdb9452ad7d4381a2c5397e6ed3d4895833fd1"}, - {file = "newrelic-9.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1e613f1ffd0d35b1f866382eeee52d8aa9576d82f3de818a84aa2e56c08f1868"}, - {file = "newrelic-9.9.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3264e305ae0e973f3a02f7394460f4c7366822e8a3509cd08b2093f9cb5def5"}, - {file = "newrelic-9.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2b165328c05fd2c006cf1f476bebb281579944418a13903e802344660b13332c"}, - {file = "newrelic-9.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e3226ac2c0c57955a00a11f6cf982dd6747490254ed322d6fcf36077bfc37386"}, - {file = "newrelic-9.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673ed069516fa4d168cd12b7319bcadf75fbc9f0ebcd147916e281b2bc16c551"}, - {file = "newrelic-9.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40820a3dff89cc8e242f0543fabd1692333458f627ebad6f2e56f6c9db7d2efe"}, - {file = "newrelic-9.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ddb2d4a2fc3f88c5d1c0b4dec2f8eb89907541501f2ec7ac14e5506ea702e0f5"}, - {file = "newrelic-9.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d50fa347584967c15e574a2503fdcafcd13c86c17e589021eae5432d4aad1cca"}, - {file = "newrelic-9.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fbca7a8749eadb05eacdfb68af938dc1045c6be8bcc83375d15a840172b5f40e"}, - {file = "newrelic-9.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d6feba8968662c7a84ee6fe837d3be8c53a7126398ded3283634bb51dc43e94"}, - {file = "newrelic-9.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:eec85620708aea387b602db61fb43504efc5b5fcb7b627d2cbe0a33c3fe10ab9"}, - {file = "newrelic-9.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:21e280c027835062f54be2df48f32834dcc98f382b049c14ee35b80aa7b48ea0"}, - {file = "newrelic-9.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb0e56324df855c3079d7d86fd6b35e79727759de8c8517be9c06d482092c3b"}, - {file = "newrelic-9.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c43a14c48dd8f752da348c3ec80cb500b9ead12abcd40d29d39a0bb8a62a3a0d"}, - {file = "newrelic-9.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:763faab4868b0226906c17ef0419dab527964f489cb2e3818d57d0484762cb2e"}, - {file = "newrelic-9.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f41343548aad28b7722c85d00079b4e61ef48d5a6bdf757c458a5fe860bb099"}, - {file = "newrelic-9.9.1.tar.gz", hash = "sha256:e49c734058c7b6a6c199e8c2657187143061a6eda92cc8ba67739de88a9e203d"}, + {file = "newrelic-9.10.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:a4d4e5670082225ca7ef0ee986ef8e6588f4e530a05d43d66f9368459c0b1f18"}, + {file = "newrelic-9.10.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f4605bc4feb114235e242dfe260b75ec85d0894f5400aa7f30e75fbbc0423b3f"}, + {file = "newrelic-9.10.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d3be6c97d007ceb142f908f5ab2444807b44dc600a0b7f3254dc685b5b03fd10"}, + {file = "newrelic-9.10.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4e573d49c1543a488d6567906a9b2cb0c748cdbf80724c322b06874f8e47c789"}, + {file = "newrelic-9.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae0515f7ab19f1a5dd14e31506420d1b86014c5e1340c2a210833248bc765dae"}, + {file = 
"newrelic-9.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acf5cdcafd2971933ad2f9e836284957f4a3eababe88f063cf53b1b1f67f1a16"}, + {file = "newrelic-9.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5d18236bf4a80fca4eb1db03448ed72bf8e16b84b3a4ed5fcc29bb91c2d05d54"}, + {file = "newrelic-9.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:744c815f15ec06e441c11a6c57042d2eca8c41401c11de6f47b3e105d952b9bd"}, + {file = "newrelic-9.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:524ed5bfa09d330746b45e0087765da994ca34802cce032063041e404e58414c"}, + {file = "newrelic-9.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad9cd5459b8c620ab7a876bd5d920c3ef2943948d1262a42289d4f8d16dadab"}, + {file = "newrelic-9.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4404c649b5e6165dcdd59091092c19b292a43cc96520d5ffd718b628fb866096"}, + {file = "newrelic-9.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2576bbec0b640d9b76454dcfd5b2f03078e0bb062a7ea3952a8db7b9972c352"}, + {file = "newrelic-9.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77537a020ce84033f39210e46cc43bb3927cec3fb4b34b5c4df802e96fddaedf"}, + {file = "newrelic-9.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2236f70b8c6aa79635f2175e7315d032f3a80dfd65ad9c9ed12a921f5df4c655"}, + {file = "newrelic-9.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b8201a33caf7632b2e55e3f9687584ad6956aaf5751485cdb2bad7c428a9b400"}, + {file = "newrelic-9.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6ed4bc2c9a44dfe59958eeecf1f327f0a0fb6324b5e609515bc511944d12db74"}, + {file = "newrelic-9.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cc3ddb26c0615ba4e18f87453bca57f0688a43d2fcdd50e2771a77515cfc3ba"}, + {file = "newrelic-9.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09912303e04bee6aa1fe1c671e87b4e8e55461081a96210895828798f5ba8c3f"}, + {file = "newrelic-9.10.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40368dca0d423efe40b210686d7018787d4365a24ee1deca136b3b7c9d850325"}, + {file = "newrelic-9.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56f4c309a07a2c66243b12d18056c32aa704735469741495642c31be4a1c77fa"}, + {file = "newrelic-9.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d68fc707d896dc7da8d6939bcc1f995bf9e463c2b911fc63250a10e1502a234"}, + {file = "newrelic-9.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cd462804a6ede617fb3b4b126e9083b3ee8b4ed1250f7cc12299ebacb785432"}, + {file = "newrelic-9.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ceef4fef2a5cffb69e9e1742bd18a35625ca62c3856c7016c22be68ec876753d"}, + {file = "newrelic-9.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1f11d9c17b50982fcc39de71f6592a61920ec5e5c29b9105edc9f8fb7f2480b9"}, + {file = "newrelic-9.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf6757d422954e61082715dbba4208cae17bf3720006bc337c3f87f19ede2876"}, + {file = "newrelic-9.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ae84bacfdc60792bd04e681027cc5c58e6737a04c652e9be2eda84abe21f57f5"}, + {file = "newrelic-9.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:667722cf1f4ed9f6cd99f4fbe247fc2bdb941935528e14a93659ba2c651dc889"}, + {file = "newrelic-9.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0c18210648889416da3de61aa282248e012cb507ba9841511407f922fff9a52"}, + {file = "newrelic-9.10.0.tar.gz", hash = "sha256:02db25b0fd2fc835efe4a7f1c92dbc5bbb95125341aba07152041aa6a5666cda"}, ] [package.extras] @@ -2626,26 +2649,23 @@ infinite-tracing = ["grpcio", "protobuf"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "notifications-python-client" -version = "9.0.0" +version = "9.1.0" description = "Python API client for GOV.UK Notify." optional = false python-versions = ">=3.7" files = [ - {file = "notifications_python_client-9.0.0-py3-none-any.whl", hash = "sha256:664a5b5da2aa1a00efa8106bfa4855db04da95d79586e5edfb0411637d20d2d9"}, + {file = "notifications_python_client-9.1.0-py3-none-any.whl", hash = "sha256:43b8f738dfa81ae3aa656d91e361dbc51316194f8b9a430706b03347fa7a01bc"}, ] [package.dependencies] @@ -2806,13 +2826,13 @@ ptyprocess = ">=0.5" [[package]] name = "phonenumbers" -version = "8.13.36" +version = "8.13.37" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.36-py2.py3-none-any.whl", hash = "sha256:68e06d20ae2f8fe5c7c7fd5b433f4257bc3cc747dc5196a029c7898ea449b012"}, - {file = "phonenumbers-8.13.36.tar.gz", hash = "sha256:b4e2371e35a1172aa2c91c9200b1e48e87b9355eb575768dd38058fc8d72c9ff"}, + {file = "phonenumbers-8.13.37-py2.py3-none-any.whl", hash = "sha256:4ea00ef5012422c08c7955c21131e7ae5baa9a3ef52cf2d561e963f023006b80"}, + {file = "phonenumbers-8.13.37.tar.gz", hash = "sha256:bd315fed159aea0516f7c367231810fe8344d5bec26156b88fa18374c11d1cf2"}, ] [[package]] @@ -2903,13 +2923,13 @@ testing = ["pytest", "pytest-cov", "wheel"] [[package]] name = "platformdirs" -version = "4.2.1" +version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] @@ -2995,18 +3015,18 @@ python-dotenv = ">=0.10.0" [[package]] name = "poetry-plugin-export" -version = "1.7.1" +version = "1.8.0" description = "Poetry plugin to export the dependencies to various formats" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "poetry_plugin_export-1.7.1-py3-none-any.whl", hash = "sha256:b2258e53ae0d369a73806f957ed0e726eb95c571a0ce8b1f273da686528cc1da"}, - {file = "poetry_plugin_export-1.7.1.tar.gz", hash = "sha256:cf62cfb6218a904290ba6db3bc1a24aa076d10f81c48c6e48b2ded430131e22e"}, + {file = "poetry_plugin_export-1.8.0-py3-none-any.whl", hash = "sha256:adbe232cfa0cc04991ea3680c865cf748bff27593b9abcb1f35fb50ed7ba2c22"}, + {file = "poetry_plugin_export-1.8.0.tar.gz", hash = "sha256:1fa6168a85d59395d835ca564bc19862a7c76061e60c3e7dfaec70d50937fc61"}, ] [package.dependencies] -poetry = ">=1.8.0,<2.0.0" -poetry-core = ">=1.7.0,<2.0.0" +poetry = ">=1.8.0,<3.0.0" +poetry-core = ">=1.7.0,<3.0.0" [[package]] name = "polling2" @@ -3039,13 +3059,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.45" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, + {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, ] [package.dependencies] @@ -3053,22 +3073,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "5.26.1" +version = "5.27.0" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.26.1-cp310-abi3-win32.whl", hash = "sha256:3c388ea6ddfe735f8cf69e3f7dc7611e73107b60bdfcf5d0f024c3ccd3794e23"}, - {file = "protobuf-5.26.1-cp310-abi3-win_amd64.whl", hash = "sha256:e6039957449cb918f331d32ffafa8eb9255769c96aa0560d9a5bf0b4e00a2a33"}, - {file = "protobuf-5.26.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:38aa5f535721d5bb99861166c445c4105c4e285c765fbb2ac10f116e32dcd46d"}, - {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fbfe61e7ee8c1860855696e3ac6cfd1b01af5498facc6834fcc345c9684fb2ca"}, - {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7417703f841167e5a27d48be13389d52ad705ec09eade63dfc3180a959215d7"}, - {file = "protobuf-5.26.1-cp38-cp38-win32.whl", hash = "sha256:d693d2504ca96750d92d9de8a103102dd648fda04540495535f0fec7577ed8fc"}, - {file = "protobuf-5.26.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:9b557c317ebe6836835ec4ef74ec3e994ad0894ea424314ad3552bc6e8835b4e"}, - {file = "protobuf-5.26.1-cp39-cp39-win32.whl", hash = "sha256:b9ba3ca83c2e31219ffbeb9d76b63aad35a3eb1544170c55336993d7a18ae72c"}, - {file = "protobuf-5.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ee014c2c87582e101d6b54260af03b6596728505c79f17c8586e7523aaa8f8c"}, - {file = "protobuf-5.26.1-py3-none-any.whl", hash = "sha256:da612f2720c0183417194eeaa2523215c4fcc1a1949772dc65f05047e08d5932"}, - {file = "protobuf-5.26.1.tar.gz", hash = "sha256:8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51"}, + {file = "protobuf-5.27.0-cp310-abi3-win32.whl", hash = "sha256:2f83bf341d925650d550b8932b71763321d782529ac0eaf278f5242f513cc04e"}, + {file = "protobuf-5.27.0-cp310-abi3-win_amd64.whl", hash = "sha256:b276e3f477ea1eebff3c2e1515136cfcff5ac14519c45f9b4aa2f6a87ea627c4"}, + {file = "protobuf-5.27.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:744489f77c29174328d32f8921566fb0f7080a2f064c5137b9d6f4b790f9e0c1"}, + {file = "protobuf-5.27.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:f51f33d305e18646f03acfdb343aac15b8115235af98bc9f844bf9446573827b"}, + {file = "protobuf-5.27.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:56937f97ae0dcf4e220ff2abb1456c51a334144c9960b23597f044ce99c29c89"}, + {file = "protobuf-5.27.0-cp38-cp38-win32.whl", hash = "sha256:a17f4d664ea868102feaa30a674542255f9f4bf835d943d588440d1f49a3ed15"}, + {file = "protobuf-5.27.0-cp38-cp38-win_amd64.whl", hash = "sha256:aabbbcf794fbb4c692ff14ce06780a66d04758435717107c387f12fb477bf0d8"}, + {file = "protobuf-5.27.0-cp39-cp39-win32.whl", hash = "sha256:587be23f1212da7a14a6c65fd61995f8ef35779d4aea9e36aad81f5f3b80aec5"}, + {file = "protobuf-5.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cb65fc8fba680b27cf7a07678084c6e68ee13cab7cace734954c25a43da6d0f"}, + {file = "protobuf-5.27.0-py3-none-any.whl", hash = "sha256:673ad60f1536b394b4fa0bcd3146a4130fcad85bfe3b60eaa86d6a0ace0fa374"}, + {file = "protobuf-5.27.0.tar.gz", hash = "sha256:07f2b9a15255e3cf3f137d884af7972407b556a7a220912b252f26dc3121e6bf"}, ] [[package]] @@ -3279,13 +3299,13 @@ files = [ [[package]] name = "pytest" -version = "8.2.0" +version = "8.2.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, - {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, + {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, + {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, ] [package.dependencies] @@ -3455,6 +3475,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3509,101 +3530,104 @@ toml = ["tomli (>=2.0.1)"] [[package]] name = "rapidfuzz" -version = "3.9.0" +version = "3.9.2" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd375c4830fee11d502dd93ecadef63c137ae88e1aaa29cc15031fa66d1e0abb"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55e2c5076f38fc1dbaacb95fa026a3e409eee6ea5ac4016d44fb30e4cad42b20"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:488f74126904db6b1bea545c2f3567ea882099f4c13f46012fe8f4b990c683df"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3f2d1ea7cd57dfcd34821e38b4924c80a31bcf8067201b1ab07386996a9faee"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b11e602987bcb4ea22b44178851f27406fca59b0836298d0beb009b504dba266"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3083512e9bf6ed2bb3d25883922974f55e21ae7f8e9f4e298634691ae1aee583"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b33c6d4b3a1190bc0b6c158c3981535f9434e8ed9ffa40cf5586d66c1819fb4b"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcb95fde22f98e6d0480db8d6038c45fe2d18a338690e6f9bba9b82323f3469"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:08d8b49b3a4fb8572e480e73fcddc750da9cbb8696752ee12cca4bf8c8220d52"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e721842e6b601ebbeb8cc5e12c75bbdd1d9e9561ea932f2f844c418c31256e82"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7988363b3a415c5194ce1a68d380629247f8713e669ad81db7548eb156c4f365"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2d267d4c982ab7d177e994ab1f31b98ff3814f6791b90d35dda38307b9e7c989"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bb28ab5300cf974c7eb68ea21125c493e74b35b1129e629533468b2064ae0a2"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-win32.whl", hash = "sha256:1b1f74997b6d94d66375479fa55f70b1c18e4d865d7afcd13f0785bfd40a9d3c"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:c56d2efdfaa1c642029f3a7a5bb76085c5531f7a530777be98232d2ce142553c"}, - {file = "rapidfuzz-3.9.0-cp310-cp310-win_arm64.whl", hash = "sha256:6a83128d505cac76ea560bb9afcb3f6986e14e50a6f467db9a31faef4bd9b347"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2218d62ab63f3c5ad48eced898854d0c2c327a48f0fb02e2288d7e5332a22c8"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36bf35df2d6c7d5820da20a6720aee34f67c15cd2daf8cf92e8141995c640c25"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:905b01a9b633394ff6bb5ebb1c5fd660e0e180c03fcf9d90199cc6ed74b87cf7"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:33cfabcb7fd994938a6a08e641613ce5fe46757832edc789c6a5602e7933d6fa"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1179dcd3d150a67b8a678cd9c84f3baff7413ff13c9e8fe85e52a16c97e24c9b"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47d97e28c42f1efb7781993b67c749223f198f6653ef177a0c8f2b1c516efcaf"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28da953eb2ef9ad527e536022da7afff6ace7126cdd6f3e21ac20f8762e76d2c"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:182b4e11de928fb4834e8f8b5ecd971b5b10a86fabe8636ab65d3a9b7e0e9ca7"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c74f2da334ce597f31670db574766ddeaee5d9430c2c00e28d0fbb7f76172036"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:014ac55b03f4074f903248ded181f3000f4cdbd134e6155cbf643f0eceb4f70f"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c4ef34b2ddbf448f1d644b4ec6475df8bbe5b9d0fee173ff2e87322a151663bd"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fc02157f521af15143fae88f92ef3ddcc4e0cff05c40153a9549dc0fbdb9adb3"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ff08081c49b18ba253a99e6a47f492e6ee8019e19bbb6ddc3ed360cd3ecb2f62"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-win32.whl", hash = "sha256:b9bf90b3d96925cbf8ef44e5ee3cf39ef0c422f12d40f7a497e91febec546650"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5d5684f54d82d9b0cf0b2701e55a630527a9c3dd5ddcf7a2e726a475ac238f2"}, - {file = "rapidfuzz-3.9.0-cp311-cp311-win_arm64.whl", hash = "sha256:a2de844e0e971d7bd8aa41284627dbeacc90e750b90acfb016836553c7a63192"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f81fe99a69ac8ee3fd905e70c62f3af033901aeb60b69317d1d43d547b46e510"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:633b9d03fc04abc585c197104b1d0af04b1f1db1abc99f674d871224cd15557a"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab872cb57ae97c54ba7c71a9e3c9552beb57cb907c789b726895576d1ea9af6f"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdd8c15c3a14e409507fdf0c0434ec481d85c6cbeec8bdcd342a8cd1eda03825"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2444d8155d9846f206e2079bb355b85f365d9457480b0d71677a112d0a7f7128"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83bd3d01f04061c3660742dc85143a89d49fd23eb31eccbf60ad56c4b955617"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ca799f882364e69d0872619afb19efa3652b7133c18352e4a3d86a324fb2bb1"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6993d361f28b9ef5f0fa4e79b8541c2f3507be7471b9f9cb403a255e123b31e1"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:170822a1b1719f02b58e3dce194c8ad7d4c5b39be38c0fdec603bd19c6f9cf81"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e86e39c1c1a0816ceda836e6f7bd3743b930cbc51a43a81bb433b552f203f25"}, - {file = 
"rapidfuzz-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:731269812ea837e0b93d913648e404736407408e33a00b75741e8f27c590caa2"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8e5ff882d3a3d081157ceba7e0ebc7fac775f95b08cbb143accd4cece6043819"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2003071aa633477a01509890c895f9ef56cf3f2eaa72c7ec0b567f743c1abcba"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-win32.whl", hash = "sha256:13857f9070600ea1f940749f123b02d0b027afbaa45e72186df0f278915761d0"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:134b7098ac109834eeea81424b6822f33c4c52bf80b81508295611e7a21be12a"}, - {file = "rapidfuzz-3.9.0-cp312-cp312-win_arm64.whl", hash = "sha256:2a96209f046fe328be30fc43f06e3d4b91f0d5b74e9dcd627dbfd65890fa4a5e"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:544b0bf9d17170720809918e9ccd0d482d4a3a6eca35630d8e1459f737f71755"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d536f8beb8dd82d6efb20fe9f82c2cfab9ffa0384b5d184327e393a4edde91d"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:30f7609da871510583f87484a10820b26555a473a90ab356cdda2f3b4456256c"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f4a2468432a1db491af6f547fad8f6d55fa03e57265c2f20e5eaceb68c7907e"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a7ec4676242c8a430509cff42ce98bca2fbe30188a63d0f60fdcbfd7e84970"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dcb523243e988c849cf81220164ec3bbed378a699e595a8914fffe80596dc49f"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4eea3bf72c4fe68e957526ffd6bcbb403a21baa6b3344aaae2d3252313df6199"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4514980a5d204c076dd5b756960f6b1b7598f030009456e6109d76c4c331d03c"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9a06a99f1335fe43464d7121bc6540de7cd9c9475ac2025babb373fe7f27846b"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6c1ed63345d1581c39d4446b1a8c8f550709656ce2a3c88c47850b258167f3c2"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cd2e6e97daf17ebb3254285cf8dd86c60d56d6cf35c67f0f9a557ef26bd66290"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9bc0f7e6256a9c668482c41c8a3de5d0aa12e8ca346dcc427b97c7edb82cba48"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c09f4e87e82a164c9db769474bc61f8c8b677f2aeb0234b8abac73d2ecf9799"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-win32.whl", hash = "sha256:e65b8f7921bf60cbb207c132842a6b45eefef48c4c3b510eb16087d6c08c70af"}, - {file = "rapidfuzz-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9d6478957fb35c7844ad08f2442b62ba76c1857a56370781a707eefa4f4981e1"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:65d9250a4b0bf86320097306084bc3ca479c8f5491927c170d018787793ebe95"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47b7c0840afa724db3b1a070bc6ed5beab73b4e659b1d395023617fc51bf68a2"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:3a16c48c6df8fb633efbbdea744361025d01d79bca988f884a620e63e782fe5b"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48105991ff6e4a51c7f754df500baa070270ed3d41784ee0d097549bc9fcb16d"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a7f273906b3c7cc6d63a76e088200805947aa0bc1ada42c6a0e582e19c390d7"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c396562d304e974b4b0d5cd3afc4f92c113ea46a36e6bc62e45333d6aa8837e"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68da1b70458fea5290ec9a169fcffe0c17ff7e5bb3c3257e63d7021a50601a8e"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c5b8f9a7b177af6ce7c6ad5b95588b4b73e37917711aafa33b2e79ee80fe709"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3c42a238bf9dd48f4ccec4c6934ac718225b00bb3a438a008c219e7ccb3894c7"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a365886c42177b2beab475a50ba311b59b04f233ceaebc4c341f6f91a86a78e2"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ce897b5dafb7fb7587a95fe4d449c1ea0b6d9ac4462fbafefdbbeef6eee4cf6a"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:413ac49bae291d7e226a5c9be65c71b2630b3346bce39268d02cb3290232e4b7"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8982fc3bd49d55a91569fc8a3feba0de4cef0b391ff9091be546e9df075b81"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-win32.whl", hash = "sha256:3904d0084ab51f82e9f353031554965524f535522a48ec75c30b223eb5a0a488"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:3733aede16ea112728ffeafeb29ccc62e095ed8ec816822fa2a82e92e2c08696"}, - {file = "rapidfuzz-3.9.0-cp39-cp39-win_arm64.whl", hash = "sha256:fc4e26f592b51f97acf0a3f8dfed95e4d830c6a8fbf359361035df836381ab81"}, - {file = "rapidfuzz-3.9.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e33362e98c7899b5f60dcb06ada00acd8673ce0d59aefe9a542701251fd00423"}, - {file = "rapidfuzz-3.9.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb67cf43ad83cb886cbbbff4df7dcaad7aedf94d64fca31aea0da7d26684283c"}, - {file = "rapidfuzz-3.9.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2e106cc66453bb80d2ad9c0044f8287415676df5c8036d737d05d4b9cdbf8e"}, - {file = "rapidfuzz-3.9.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1256915f7e7a5cf2c151c9ac44834b37f9bd1c97e8dec6f936884f01b9dfc7d"}, - {file = "rapidfuzz-3.9.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ae643220584518cbff8bf2974a0494d3e250763af816b73326a512c86ae782ce"}, - {file = "rapidfuzz-3.9.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:491274080742110427f38a6085bb12dffcaff1eef12dccf9e8758398c7e3957e"}, - {file = "rapidfuzz-3.9.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bc5559b9b94326922c096b30ae2d8e5b40b2e9c2c100c2cc396ad91bcb84d30"}, - {file = "rapidfuzz-3.9.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:849160dc0f128acb343af514ca827278005c1d00148d025e4035e034fc2d8c7f"}, - {file = "rapidfuzz-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:623883fb78e692d54ed7c43b09beec52c6685f10a45a7518128e25746667403b"}, - {file = "rapidfuzz-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d20ab9abc7e19767f1951772a6ab14cb4eddd886493c2da5ee12014596ad253f"}, - {file = "rapidfuzz-3.9.0.tar.gz", hash = "sha256:b182f0fb61f6ac435e416eb7ab330d62efdbf9b63cf0c7fa12d1f57c2eaaf6f3"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45e0c3e279e70589381f47ad410de7211bac943e827eb09eb8339d2124abca90"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:280ef2f3066df9c486ffd3874d2489978fb8021044c47c006eb96be8d47917d7"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe128ac0e05ca3a71d8ff18e70884a64fde00b6fbd2b4d9f59f7a4d798257c55"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fbc0f6e1b6f4063b937d0edcf0a56cbc1d7179ade9b7d6c849c94e44a7b20f6"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df19455c2fb85e86a721111b84ac8dd3685194f0edc9faefb226731ad3e134a7"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:801a5d97c465a3467b3cdf50cdcdadec129ddca582b24430f5d24c715c80be9b"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81f218524596d261a6cb33cda965687e62dd30def478d39f0befa243642c3985"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5c61d53f293b4e3286919b0e081513367afabcb5aef0b6f899d006117778e558"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0ed70fc6627ae37319f822e5d8d21d561044e0b3331b6f0e6904476faa8d8ed7"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:96fa229d06ee005d2f46374fb2af65590a590a6fa2fd56e66474829f5fa9adfe"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6609e881b57cabb40d515cc226bbf570e32e768bd2cc688ba026a45ffbc60875"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:204fd4d293ef4d409c4142ddf830b7613924b998670f67e512ab1f880a60218a"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-win32.whl", hash = "sha256:5b331a09446bc8f8971cf488c9e6c0f7dbf2739828588e063cf08fd400638a24"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:01a9975984953fe549649e6a4c3f0d9c60707acf458184ec09678d6a57560112"}, + {file = "rapidfuzz-3.9.2-cp310-cp310-win_arm64.whl", hash = "sha256:ca4af5d7fc9c17bdc498aa1cab9ecf5140c8535c9cedeba1990bbe4b8be75098"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:300ab53981a5d6831fe7e0f30c407c79520ad0f0ab51b2cece8717689026f495"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4828642acdb075154ce2ff3260f8afb6a17b5b0c8a437efbadac06e9995dd7b"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b262883c3ce93dee1a9a974992961c8098e96b8142e2e01cabdb15ea8105c4a"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf8582d85e35641734d6c1f43eb37c1f2a5eda338d3cfa8e651e078246b9ec58"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e33b61ef87e1876d216c479fa2256233b3bb0424465ab2db1d94ab7b8649ae1c"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fa1b3eb21756003a6a3977847dd4e0e9a26e2e02731d9daa5e92a9258e7f0db"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923ae0301a56356364f1159e3005fbeb2191e7a0e8705d5cc1b481d9eea27b97"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8e4041cfd87f0a022aa8a9a187d3b0824e35be2bd9b3bceada11578ddd9ad65"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1f832b430f976727bdbba009ee64acda25412602976fbfb2113d41e765d81849"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6ce5e57e0c6acf5a98ffbdfaf8bccb6e41fbddb9eda3e041f4cc69b7cade5fa0"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d65f34e71102d9cbe733d4ba1c645e7623eef850562501bab1ac79d217831436"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5dd9ba4df0db46b9f909289e4687cc7721c622985c4cd169969005dd30fc1e24"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-win32.whl", hash = "sha256:34c8bca3fef33d7e71f290de68be2184fac7a9e136fa0ed22b17ec597e181406"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e1a8872c0b8aef95c33db86d25e8bdea6f557b9cdf683123c25035b2bcfb8e"}, + {file = "rapidfuzz-3.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:ed02d73e46b7a4604d2bc1e0364b25f204862d40dd162f6b36ee22b9bf6d9df2"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ae6c4ba2778b097397968130f2b0cb795cdc415c115539a49ce798f606152ad5"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7270556ddebaa98fb777f493f17ed6a733b3527de16c43342bce1db109042845"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4625273447bdd94f2ab06b2951cd8b74356c3a48552208279a3ec2947ceee141"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5107b5ec8821453f7cac70b2d0bc4866699b25bff4819ada8b28bf2b11e87f65"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b04c851d309df8261ed42951444db657936234ceddf4032f4409b0214c95ecbe"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aeefff80f3f5d6841c30ffe0cdc84d62874de5a64cff509ae26fbd7478297af8"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cdc106b5a99edd46443449c767287dbb5d4464a7536475a365e368e7ee4d651"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ce253a2b7a71a01a4abac71ac31fd05f6ac1f1cd2af2d98fa80fe5c402175e54"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5c30407cadbfe99753b7a996f0dd6da490b1e27d318c01db227e8f49770a01ec"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:fb3fc387783f70387a91aababd8a5faeb230931b655ad99bcf838cd72404ba66"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c409852a89535ec8720301a847bab198c1c14d0f34ed07dfabbb90b1dbfc506d"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8603050e547249c1cc8a8dc6a49917076572ea69b04bc51eb1748c403cfc9f46"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-win32.whl", hash = "sha256:77bdb96e82d8831f0dd6db83e2ff0d4a731cff53e926d029c65a1dc3ae0f160a"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:09f354fa28e0fd170c6e4eea5e97eea0dba43761067df93109f49a5414ca8584"}, + {file = "rapidfuzz-3.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:168299c9a2b4f20f10c1bb96d8da0bb05bf1f3b9957be3a0bae5db65ce9f095f"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d87621d60078f87cb52082b1cbf9849afeaa1cb6d0a2b072fce25fe21c8675b4"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c447d0e534418ef3eaabcd890d85c7e9f289c1c6ef6e060a0b1f239799781747"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7161b205f25eff5f88ab809fb05a2a102634e06f452c0deb9535c9f41cd7b0a"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f13a6bbadba8fdd42676c1213ebc692bba9fac00f7db0ae92acc06bb734294c4"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54534743820a15bd0dc30a0a0010825be337973236550fd63587700a7950bbca"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea61851a4c2f93148aa2779458fb3f70a62342d77c9ec3d9d08445c8485b738"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e941f81a60351a842976fea208e6a6701a5899eb8a80b907e57d7c3099337900"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1bbfaf439e48efe3a48cada946cf7678b09c818ce9668e09dac40d05b772f6f8"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:574f464da18d660712e9776072572d462cf6a26144c833d18d9c93778286e023"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8a56c494246d29aacf5ac93ca3cf338d79588a1a5c05d8f496c3f4d7127e9031"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2943b0f17195c000948a7668bb11979ea0e50079a3d3db9d139e51b68c3a7c26"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:27214f93555d4f9b7b1baf107a6ba13e9daee21f1ec6e36418556d04a7ee4d9b"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-win32.whl", hash = "sha256:876c6628fec6241262c27f8fda3c73bab88e205e9b9394c8868361e2eda59048"}, + {file = "rapidfuzz-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf1952b486589ffcfbde2015ca9be15e0f4b0e63d1e2c25f3daec0263fda4e69"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1ca9a135060ee4d887d6af86493c3e0eb1c99ca205bca943fe5994dc93e648d5"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:723518c9a18e8bda996d77aa9307b6f8b0e77905702b2772b020adf24191073a"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65eb9aeae73ac60e53a9d6c509daaa217ea256a5e184eb8920c9b15295c48677"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef2964f4eb9a37487c96e5e32167a3c4fa51bf8e899853d0ac67e0465a27702c"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c64a252c96f29667c206726903bb9705c5195f01850360c9b9268de92ac878dc"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b32b03398517b5e33c7f36d625a00fcb1c955b9fe3c939325688175fb21730"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec5f7b1bac77439b624f5acbd8bfe61e7b833678701068b43f7a489c151427c0"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:5fd1b49fba8b4b9172eed5b131c1e9864d4d76bebea34359274f16a3591e5f44"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c05b033fc3ff043f48e744f67038af7fd34003047c7810f24bec7c01ce7da05b"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c3bea20db89b510d78d017b349b9d87159c32418693ddf091d9035dbe20b4dc0"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:77226a77590f83ee073f4f8cc86a1232da88e24d19d349361faa169fb17ba1cd"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:83ed8bc2c942dc61ab739bbca1ead791143b4639dc92156d3060bd0b6f4541ea"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-win32.whl", hash = "sha256:2db70f64974c10b76ae37d5cff6124dce791def815d4fdf5ac16fe60be88d905"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:bdead23114206dea4a22ed3aad6565b99a9e4b3fff9837c423afc556d2814b1a"}, + {file = "rapidfuzz-3.9.2-cp39-cp39-win_arm64.whl", hash = "sha256:0ec69ad076cfc7c88323d671613e40bb8754ba95a203556d9a7759e60f0544e8"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:018360654881e75131b227aa96cdaba543c438da881c70a12ca0c86e2c4083b2"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:eaa8178ec9238f32f15b6e49f70b852accda0a848448c4e30bce77c6624ebaba"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dd79b0f90ce609df96d0d48ef4327cf1f0415b9274588a466d3610a775d2f9"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04a1c38a72a50f3e6d346a33d53fa51ba390552b3592fca64a07e54d749b439b"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ca96eec40e815f0cf10b00008f295fd26ca43792a844cf62588a8ea614e160"}, + {file = "rapidfuzz-3.9.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c01c515a928f295f49d588b6523f44b474f047f9f2de0079bc57bcd00b870778"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:07e14ef260b6f4ee03dff07a0ac95a16aff1ddbc7e6171e07e49d2d61526f3be"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:64f3480bddc12b89969930f12a50a1aeb53e09aad41cf8b27694d83ca1cc7864"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3c9e33ec21755bda1878095537cb84848e9cf6510d4837d22144ba04e33df29"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a70045e84225697ddf67d656aa25b70d6802e2ff339d51f9545fca5b9b13fb8c"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9ec1fd328518c33adb9171afe8735137cb7b492e4a81cddc23568f9980c235c"}, + {file = "rapidfuzz-3.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1fd8458fdac232766d55593c1228c70968f382fdc376c25685273f99b5d1d921"}, + {file = "rapidfuzz-3.9.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a373748fddb5403b562b6d682082de360bb08395f44e3cb7e74819461e39a16c"}, + {file = "rapidfuzz-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:45f80856db3e22cb5f96ad1572aa1d004714514625ed4668144661d8a7c7e61f"}, + {file = "rapidfuzz-3.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:663e52cf878e0ccbbad0744eb3e2bb83a784645b146f15611bac225bc218f19b"}, + {file = 
"rapidfuzz-3.9.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbe4d3034a8cfe59a2b477375ad7d739b3e5935f10af08abdf64aae55780cad"}, + {file = "rapidfuzz-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd38abfda97e42b30093f207108dcba944beab1edf6624ba757cf57354063177"}, + {file = "rapidfuzz-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:16b41fe360387283a3184ce72d4d26d1928e7ce809268a88e8491a776dd770af"}, + {file = "rapidfuzz-3.9.2.tar.gz", hash = "sha256:c899d78709f8d4bd0059784fa27a9f6c53d04fc4aeaa21de7c0c8e34a7154e88"}, ] [package.extras] @@ -3729,13 +3753,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -3997,19 +4021,18 @@ jeepney = ">=0.6" [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", 
"importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shapely" @@ -4275,13 +4298,13 @@ files = [ [[package]] name = "trove-classifiers" -version = "2024.4.10" +version = "2024.5.22" description = "Canonical source for classifiers on PyPI (pypi.org)." optional = false python-versions = "*" files = [ - {file = "trove-classifiers-2024.4.10.tar.gz", hash = "sha256:49f40bb6a746b72a1cba4f8d55ee8252169cda0f70802e3fd24f04b7fb25a492"}, - {file = "trove_classifiers-2024.4.10-py3-none-any.whl", hash = "sha256:678bd6fcc5218d72e3304e27a608acc9b91e17bd00c3f3d8c968497c843ad98b"}, + {file = "trove_classifiers-2024.5.22-py3-none-any.whl", hash = "sha256:c43ade18704823e4afa3d9db7083294bc4708a5e02afbcefacd0e9d03a7a24ef"}, + {file = "trove_classifiers-2024.5.22.tar.gz", hash = "sha256:8a6242bbb5c9ae88d34cf665e816b287d2212973c8777dfaef5ec18d72ac1d03"}, ] [[package]] @@ -4297,13 +4320,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, + {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, ] [[package]] @@ -4361,13 +4384,13 @@ files = [ [[package]] name = "virtualenv" -version = "20.26.1" +version = "20.26.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, - {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, ] [package.dependencies] @@ -4729,4 +4752,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "f471a058bdf49d0645c24438f5b0ce4b3038000c9185cbfeb5058ccb58c5724b" +content-hash = "37ce0fb252a872b65bfee753a4e794cd5f11a532f3162e760f1396331f6592df" diff --git a/pyproject.toml b/pyproject.toml index 19b2e4a83..4f8911a19 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,8 +11,8 @@ python = "^3.12.2" alembic = "==1.13.1" amqp = "==5.2.0" beautifulsoup4 = "==4.12.3" -boto3 = "^1.34.106" -botocore = "^1.34.106" +boto3 = "^1.34.116" +botocore = "^1.34.119" cachetools = "==5.3.3" celery = {version = "==5.4.0", extras = ["redis"]} certifi = ">=2022.12.7" @@ -39,7 +39,7 @@ lxml 
= "==5.2.2" marshmallow = "==3.21.2" marshmallow-sqlalchemy = "==1.0.0" newrelic = "*" -notifications-python-client = "==9.0.0" +notifications-python-client = "==9.1.0" oscrypto = "==1.3.0" packaging = "==24.0" poetry-dotenv-plugin = "==0.2.0" @@ -48,7 +48,7 @@ pyjwt = "==2.8.0" python-dotenv = "==1.0.1" sqlalchemy = "==2.0.30" werkzeug = "^3.0.3" -faker = "^25.2.0" +faker = "^25.5.0" async-timeout = "^4.0.3" bleach = "^6.1.0" geojson = "^3.1.0" @@ -62,6 +62,22 @@ regex = "^2024.5.15" shapely = "^2.0.4" smartypants = "^2.0.1" mistune = "0.8.4" +blinker = "^1.8.2" +cryptography = "^42.0.7" +idna = "^3.7" +jmespath = "^1.0.1" +markupsafe = "^2.1.5" +pycparser = "^2.22" +python-dateutil = "^2.9.0.post0" +pyyaml = "^6.0.1" +s3transfer = "^0.10.1" +six = "^1.16.0" +urllib3 = "^2.2.1" +webencodings = "^0.5.1" +itsdangerous = "^2.2.0" +jinja2 = "^3.1.4" +redis = "^5.0.4" +requests = "^2.32.3" [tool.poetry.group.dev.dependencies] @@ -76,19 +92,20 @@ freezegun = "^1.5.1" honcho = "*" isort = "^5.13.2" jinja2-cli = {version = "==0.8.2", extras = ["yaml"]} -moto = "==5.0.7" +moto = "==5.0.9" pip-audit = "*" pre-commit = "^3.7.1" -pytest = "^8.1.1" +pytest = "^8.2.1" pytest-env = "^1.1.3" pytest-mock = "^3.14.0" pytest-cov = "^5.0.0" pytest-xdist = "^3.5.0" radon = "^6.0.1" requests-mock = "^1.11.0" -setuptools = "^69.0.3" +setuptools = "^70.0.0" sqlalchemy-utils = "^0.41.2" vulture = "^2.10" +detect-secrets = "^1.5.0" [build-system] diff --git a/terraform/README.md b/terraform/README.md index 4cb2c7cf7..a828d3708 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -214,3 +214,17 @@ This error indicates that the Cloud Foundry user account (or service account) ne You have exceeded your organization's services limit. ``` Too many Cloud Foundry services have been created without being destroyed. Perhaps Terraform developers have forgotten to delete their SpaceDeployers after they finish with them. List `cf services` to see. + +### Unknown error +``` +Error: Service Instance xx-name-xx failed xx-UUID-xx, reason: [Job (xx-UUID-xx) failed: An unknown error occurred.] +``` +This unhelpful message may be clarified by looking in the Cloud.gov web UI. Among the list of service instances (Cloud Foundry → Organizations → gsa-tts-benefits-studio → Spaces → your-space-name → Service instances) check for pending or erroring items. Refer below if you discover a [domain identity verification](#Domain_identity_verification) error. + +The audit event logs may also provide insight. They are visible in web UI or [in the terminal](https://v3-apidocs.cloudfoundry.org/version/3.159.0/#audit-events). + +### Domain identity verification +``` +Error: Error creating SES domain identity verification: Expected domain verification Success, but was in state Pending +``` +This error comes via the [Supplementary Service Broker](https://github.com/GSA/usnotify-ssb/) and originates from the [SMTP Brokerpak](https://github.com/GSA-TTS/datagov-brokerpak-smtp) it uses. You can run the [broker provisioning locally](https://github.com/GSA-TTS/datagov-brokerpak-smtp/tree/main/terraform/provision) to tinker with the error. 
diff --git a/terraform/sandbox/main.tf b/terraform/sandbox/main.tf index f194efdd4..cc99b033b 100644 --- a/terraform/sandbox/main.tf +++ b/terraform/sandbox/main.tf @@ -3,35 +3,32 @@ locals { cf_space_name = "notify-sandbox" env = "sandbox" app_name = "notify-api" - recursive_delete = true + recursive_delete = true # deprecated, still used in shared modules } module "database" { - source = "github.com/18f/terraform-cloudgov//database?ref=v0.7.1" + source = "github.com/GSA-TTS/terraform-cloudgov//database?ref=v1.0.0" cf_org_name = local.cf_org_name cf_space_name = local.cf_space_name name = "${local.app_name}-rds-${local.env}" - recursive_delete = local.recursive_delete rds_plan_name = "micro-psql" } module "redis" { - source = "github.com/18f/terraform-cloudgov//redis?ref=v0.9.1" + source = "github.com/GSA-TTS/terraform-cloudgov//redis?ref=v1.0.0" cf_org_name = local.cf_org_name cf_space_name = local.cf_space_name name = "${local.app_name}-redis-${local.env}" - recursive_delete = local.recursive_delete redis_plan_name = "redis-dev" } module "csv_upload_bucket" { - source = "github.com/18f/terraform-cloudgov//s3?ref=v0.9.1" + source = "github.com/GSA-TTS/terraform-cloudgov//s3?ref=v1.0.0" cf_org_name = local.cf_org_name cf_space_name = local.cf_space_name - recursive_delete = local.recursive_delete name = "${local.app_name}-csv-upload-bucket-${local.env}" } diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index 8b903daa6..c009c369c 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -1,5 +1,4 @@ import os -from datetime import datetime from os import getenv import pytest @@ -13,6 +12,7 @@ from app.aws.s3 import ( remove_csv_object, remove_s3_object, ) +from app.utils import utc_now default_access_key = getenv("CSV_AWS_ACCESS_KEY_ID") default_secret_key = getenv("CSV_AWS_SECRET_ACCESS_KEY") @@ -23,7 +23,7 @@ def single_s3_object_stub(key="foo", last_modified=None): return { "ETag": '"d41d8cd98f00b204e9800998ecf8427e"', "Key": key, - "LastModified": last_modified or datetime.utcnow(), + "LastModified": last_modified or utc_now(), } diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index e6d3be737..3a0526622 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -19,6 +19,7 @@ from app.celery.nightly_tasks import ( ) from app.enums import NotificationType, TemplateType from app.models import FactProcessingTime, Job +from app.utils import utc_now from tests.app.db import ( create_job, create_notification, @@ -62,7 +63,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( """ mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") - seven_days_ago = datetime.utcnow() - timedelta(days=7) + seven_days_ago = utc_now() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) eight_days_ago = seven_days_ago - timedelta(days=1) nine_days_ago = eight_days_ago - timedelta(days=1) @@ -115,9 +116,9 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period( template_type=TemplateType.EMAIL, ) - four_days_ago = datetime.utcnow() - timedelta(days=4) - eight_days_ago = datetime.utcnow() - timedelta(days=8) - thirty_one_days_ago = datetime.utcnow() - timedelta(days=31) + four_days_ago = utc_now() - timedelta(days=4) + eight_days_ago = utc_now() - timedelta(days=8) + thirty_one_days_ago = utc_now() - timedelta(days=31) job1_to_delete = create_job(sms_template_service_1, created_at=four_days_ago) job2_to_delete = 
create_job(email_template_service_1, created_at=eight_days_ago) @@ -369,21 +370,21 @@ def test_delete_notifications_task_calls_task_for_services_that_have_sent_notifi # will be deleted as service has no custom retention, but past our default 7 days create_notification( service_will_delete_1.templates[0], - created_at=datetime.utcnow() - timedelta(days=8), + created_at=utc_now() - timedelta(days=8), ) create_notification( service_will_delete_2.templates[0], - created_at=datetime.utcnow() - timedelta(days=8), + created_at=utc_now() - timedelta(days=8), ) # will be kept as it's recent, and we won't run delete_notifications_for_service_and_type create_notification( - nothing_to_delete_sms_template, created_at=datetime.utcnow() - timedelta(days=2) + nothing_to_delete_sms_template, created_at=utc_now() - timedelta(days=2) ) # this is an old notification, but for email not sms, so we won't run delete_notifications_for_service_and_type create_notification( nothing_to_delete_email_template, - created_at=datetime.utcnow() - timedelta(days=8), + created_at=utc_now() - timedelta(days=8), ) mock_subtask = mocker.patch( diff --git a/tests/app/celery/test_process_ses_receipts_tasks.py b/tests/app/celery/test_process_ses_receipts_tasks.py index 0b9a45b23..226394eeb 100644 --- a/tests/app/celery/test_process_ses_receipts_tasks.py +++ b/tests/app/celery/test_process_ses_receipts_tasks.py @@ -1,5 +1,4 @@ import json -from datetime import datetime from unittest.mock import ANY from freezegun import freeze_time @@ -18,6 +17,7 @@ from app.celery.test_key_tasks import ( from app.dao.notifications_dao import get_notification_by_id from app.enums import CallbackType, NotificationStatus from app.models import Complaint +from app.utils import utc_now from tests.app.conftest import create_sample_notification from tests.app.db import ( create_notification, @@ -136,7 +136,7 @@ def test_process_ses_results(sample_email_template): create_notification( sample_email_template, reference="ref1", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) @@ -147,7 +147,7 @@ def test_process_ses_results_retry_called(sample_email_template, mocker): create_notification( sample_email_template, reference="ref1", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) mocker.patch( @@ -198,7 +198,7 @@ def test_ses_callback_should_update_notification_status( template=sample_email_template, reference="ref", status=NotificationStatus.SENDING, - sent_at=datetime.utcnow(), + sent_at=utc_now(), ) create_service_callback_api( service=sample_email_template.service, url="https://original_url.com" @@ -294,7 +294,7 @@ def test_ses_callback_does_not_call_send_delivery_status_if_no_db_entry( template=sample_email_template, reference="ref", status=NotificationStatus.SENDING, - sent_at=datetime.utcnow(), + sent_at=utc_now(), ) assert ( get_notification_by_id(notification.id).status == NotificationStatus.SENDING @@ -318,7 +318,7 @@ def test_ses_callback_should_update_multiple_notification_status_sent( notify_db_session, template=sample_email_template, reference="ref1", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) create_sample_notification( @@ -326,7 +326,7 @@ def test_ses_callback_should_update_multiple_notification_status_sent( notify_db_session, template=sample_email_template, reference="ref2", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) create_sample_notification( @@ -334,7 +334,7 @@ def 
test_ses_callback_should_update_multiple_notification_status_sent( notify_db_session, template=sample_email_template, reference="ref3", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) create_service_callback_api( @@ -358,7 +358,7 @@ def test_ses_callback_should_set_status_to_temporary_failure( template=sample_email_template, reference="ref", status=NotificationStatus.SENDING, - sent_at=datetime.utcnow(), + sent_at=utc_now(), ) create_service_callback_api( service=notification.service, url="https://original_url.com" @@ -384,7 +384,7 @@ def test_ses_callback_should_set_status_to_permanent_failure( template=sample_email_template, reference="ref", status=NotificationStatus.SENDING, - sent_at=datetime.utcnow(), + sent_at=utc_now(), ) create_service_callback_api( service=sample_email_template.service, url="https://original_url.com" @@ -412,7 +412,7 @@ def test_ses_callback_should_send_on_complaint_to_user_callback_api( notification = create_notification( template=sample_email_template, reference="ref1", - sent_at=datetime.utcnow(), + sent_at=utc_now(), status=NotificationStatus.SENDING, ) response = ses_complaint_callback() diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 031f4e9b0..a32f68fc3 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -15,6 +15,7 @@ from app.config import QueueNames from app.dao.fact_billing_dao import get_rate from app.enums import KeyType, NotificationStatus, NotificationType, TemplateType from app.models import FactBilling, FactNotificationStatus, Notification +from app.utils import utc_now from tests.app.db import ( create_notification, create_notification_history, @@ -420,7 +421,7 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio template_type=TemplateType.EMAIL, ) - process_day = datetime.utcnow().date() - timedelta(days=5) + process_day = utc_now().date() - timedelta(days=5) with freeze_time(datetime.combine(process_day, time.max)): create_notification( template=first_template, @@ -449,9 +450,7 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio ) # these created notifications from a different day get ignored - with freeze_time( - datetime.combine(datetime.utcnow().date() - timedelta(days=4), time.max) - ): + with freeze_time(datetime.combine(utc_now().date() - timedelta(days=4), time.max)): create_notification(template=first_template) create_notification_history(template=second_template) @@ -519,7 +518,7 @@ def test_create_nightly_notification_status_for_service_and_day_overwrites_old_d ): first_service = create_service(service_name="First Service") first_template = create_template(service=first_service) - process_day = datetime.utcnow().date() + process_day = utc_now().date() # first run: one notification, expect one row (just one status) notification = create_notification( diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 73b6b6074..90a29f5ed 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -1,5 +1,5 @@ from collections import namedtuple -from datetime import datetime, timedelta +from datetime import timedelta from unittest import mock from unittest.mock import ANY, call @@ -18,6 +18,7 @@ from app.celery.scheduled_tasks import ( from app.config import QueueNames, Test from app.dao.jobs_dao import dao_get_job_by_id from app.enums import JobStatus, 
NotificationStatus, TemplateType +from app.utils import utc_now from notifications_utils.clients.zendesk.zendesk_client import NotifySupportTicket from tests.app import load_example_csv from tests.app.db import create_job, create_notification, create_template @@ -51,7 +52,7 @@ def test_should_call_expire_or_delete_invotations_on_expire_or_delete_invitation def test_should_update_scheduled_jobs_and_put_on_queue(mocker, sample_template): mocked = mocker.patch("app.celery.tasks.process_job.apply_async") - one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1) + one_minute_in_the_past = utc_now() - timedelta(minutes=1) job = create_job( sample_template, job_status=JobStatus.SCHEDULED, @@ -68,9 +69,9 @@ def test_should_update_scheduled_jobs_and_put_on_queue(mocker, sample_template): def test_should_update_all_scheduled_jobs_and_put_on_queue(sample_template, mocker): mocked = mocker.patch("app.celery.tasks.process_job.apply_async") - one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1) - ten_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=10) - twenty_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=20) + one_minute_in_the_past = utc_now() - timedelta(minutes=1) + ten_minutes_in_the_past = utc_now() - timedelta(minutes=10) + twenty_minutes_in_the_past = utc_now() - timedelta(minutes=20) job_1 = create_job( sample_template, job_status=JobStatus.SCHEDULED, @@ -107,8 +108,8 @@ def test_check_job_status_task_calls_process_incomplete_jobs(mocker, sample_temp job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) create_notification(template=sample_template, job=job) @@ -124,9 +125,9 @@ def test_check_job_status_task_calls_process_incomplete_jobs_when_scheduled_job_ job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) check_job_status() @@ -141,8 +142,8 @@ def test_check_job_status_task_calls_process_incomplete_jobs_for_pending_schedul job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), job_status=JobStatus.PENDING, ) @@ -159,7 +160,7 @@ def test_check_job_status_task_does_not_call_process_incomplete_jobs_for_non_sch create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), + created_at=utc_now() - timedelta(hours=2), job_status=JobStatus.PENDING, ) check_job_status() @@ -174,17 +175,17 @@ def test_check_job_status_task_calls_process_incomplete_jobs_for_multiple_jobs( job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - 
timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) job_2 = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) check_job_status() @@ -199,23 +200,23 @@ def test_check_job_status_task_only_sends_old_tasks(mocker, sample_template): job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=29), + created_at=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=29), job_status=JobStatus.IN_PROGRESS, ) create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=50), - scheduled_for=datetime.utcnow() - timedelta(minutes=29), + created_at=utc_now() - timedelta(minutes=50), + scheduled_for=utc_now() - timedelta(minutes=29), job_status=JobStatus.PENDING, ) check_job_status() @@ -229,16 +230,16 @@ def test_check_job_status_task_sets_jobs_to_error(mocker, sample_template): job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) job_2 = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=29), + created_at=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=29), job_status=JobStatus.IN_PROGRESS, ) check_job_status() @@ -267,33 +268,33 @@ def test_replay_created_notifications(notify_db_session, sample_service, mocker) # notifications expected to be resent old_sms = create_notification( template=sms_template, - created_at=datetime.utcnow() - timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.CREATED, ) old_email = create_notification( template=email_template, - created_at=datetime.utcnow() - timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.CREATED, ) # notifications that are not to be resent create_notification( template=sms_template, - created_at=datetime.utcnow() - timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.SENDING, ) create_notification( template=email_template, - created_at=datetime.utcnow() - 
timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.DELIVERED, ) create_notification( template=sms_template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.CREATED, ) create_notification( template=email_template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.CREATED, ) @@ -310,16 +311,16 @@ def test_check_job_status_task_does_not_raise_error(sample_template): create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.FINISHED, ) create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.FINISHED, ) @@ -351,7 +352,7 @@ def test_check_for_missing_rows_in_completed_jobs_ignores_old_and_new_jobs( template=sample_email_template, notification_count=5, job_status=JobStatus.FINISHED, - processing_finished=datetime.utcnow() - offset, + processing_finished=utc_now() - offset, ) for i in range(0, 4): create_notification(job=job, job_row_number=i) @@ -373,7 +374,7 @@ def test_check_for_missing_rows_in_completed_jobs(mocker, sample_email_template) template=sample_email_template, notification_count=5, job_status=JobStatus.FINISHED, - processing_finished=datetime.utcnow() - timedelta(minutes=20), + processing_finished=utc_now() - timedelta(minutes=20), ) for i in range(0, 4): create_notification(job=job, job_row_number=i) @@ -400,7 +401,7 @@ def test_check_for_missing_rows_in_completed_jobs_calls_save_email( template=sample_email_template, notification_count=5, job_status=JobStatus.FINISHED, - processing_finished=datetime.utcnow() - timedelta(minutes=20), + processing_finished=utc_now() - timedelta(minutes=20), ) for i in range(0, 4): create_notification(job=job, job_row_number=i) @@ -430,7 +431,7 @@ def test_check_for_missing_rows_in_completed_jobs_uses_sender_id( template=sample_email_template, notification_count=5, job_status=JobStatus.FINISHED, - processing_finished=datetime.utcnow() - timedelta(minutes=20), + processing_finished=utc_now() - timedelta(minutes=20), ) for i in range(0, 4): create_notification(job=job, job_row_number=i) diff --git a/tests/app/celery/test_service_callback_tasks.py b/tests/app/celery/test_service_callback_tasks.py index 51761320b..2c0df1374 100644 --- a/tests/app/celery/test_service_callback_tasks.py +++ b/tests/app/celery/test_service_callback_tasks.py @@ -11,7 +11,7 @@ from app.celery.service_callback_tasks import ( send_delivery_status_to_service, ) from app.enums import CallbackType, NotificationStatus, NotificationType -from app.utils import DATETIME_FORMAT +from app.utils import DATETIME_FORMAT, utc_now from tests.app.db import ( create_complaint, create_notification, @@ -101,7 +101,7 @@ def test_send_complaint_to_service_posts_https_request_to_service_with_encrypted "complaint_id": str(complaint.id), "reference": notification.client_reference, "to": notification.to, - "complaint_date": datetime.utcnow().strftime(DATETIME_FORMAT), + "complaint_date": 
utc_now().strftime(DATETIME_FORMAT), } assert request_mock.call_count == 1 diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 8c5b264de..593926c18 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -36,7 +36,7 @@ from app.enums import ( ) from app.models import Job, Notification from app.serialised_models import SerialisedService, SerialisedTemplate -from app.utils import DATETIME_FORMAT +from app.utils import DATETIME_FORMAT, utc_now from notifications_utils.recipients import Row from notifications_utils.template import PlainTextEmailTemplate, SMSMessageTemplate from tests.app import load_example_csv @@ -419,7 +419,7 @@ def test_should_send_template_to_correct_sms_task_and_persist( == sample_template_with_placeholders.version ) assert persisted_notification.status == NotificationStatus.CREATED - assert persisted_notification.created_at <= datetime.utcnow() + assert persisted_notification.created_at <= utc_now() assert not persisted_notification.sent_at assert not persisted_notification.sent_by assert not persisted_notification.job_id @@ -455,7 +455,7 @@ def test_should_save_sms_if_restricted_service_and_valid_number( assert persisted_notification.template_id == template.id assert persisted_notification.template_version == template.version assert persisted_notification.status == NotificationStatus.CREATED - assert persisted_notification.created_at <= datetime.utcnow() + assert persisted_notification.created_at <= utc_now() assert not persisted_notification.sent_at assert not persisted_notification.sent_by assert not persisted_notification.job_id @@ -565,7 +565,7 @@ def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker) mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") notification_id = uuid.uuid4() - now = datetime.utcnow() + now = utc_now() save_sms( sample_job.service.id, notification_id, @@ -676,7 +676,7 @@ def test_save_email_should_use_template_version_from_job_not_latest( dao_update_template(sample_email_template) t = dao_get_template_by_id(sample_email_template.id) assert t.version > version_on_notification - now = datetime.utcnow() + now = utc_now() save_email( sample_email_template.service_id, uuid.uuid4(), @@ -706,7 +706,7 @@ def test_should_use_email_template_subject_placeholders( mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") notification_id = uuid.uuid4() - now = datetime.utcnow() + now = utc_now() save_email( sample_email_template_with_placeholders.service_id, notification_id, @@ -791,7 +791,7 @@ def test_should_use_email_template_and_persist_without_personalisation( notification_id = uuid.uuid4() - now = datetime.utcnow() + now = utc_now() save_email( sample_email_template.service_id, notification_id, @@ -1157,9 +1157,9 @@ def test_process_incomplete_job_sms(mocker, sample_template): job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) @@ -1189,9 +1189,9 @@ def test_process_incomplete_job_with_notifications_all_sent(mocker, sample_templ job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - 
scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) @@ -1229,9 +1229,9 @@ def test_process_incomplete_jobs_sms(mocker, sample_template): job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) create_notification(sample_template, job, 0) @@ -1243,9 +1243,9 @@ def test_process_incomplete_jobs_sms(mocker, sample_template): job2 = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) @@ -1282,9 +1282,9 @@ def test_process_incomplete_jobs_no_notifications_added(mocker, sample_template) job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) @@ -1339,9 +1339,9 @@ def test_process_incomplete_job_email(mocker, sample_email_template): job = create_job( template=sample_email_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) @@ -1371,22 +1371,22 @@ def test_process_incomplete_jobs_sets_status_to_in_progress_and_resets_processin job1 = create_job( sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=30), + processing_started=utc_now() - timedelta(minutes=30), job_status=JobStatus.ERROR, ) job2 = create_job( sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) process_incomplete_jobs([str(job1.id), str(job2.id)]) assert job1.job_status == JobStatus.IN_PROGRESS - assert job1.processing_started == datetime.utcnow() + assert job1.processing_started == utc_now() assert job2.job_status == JobStatus.IN_PROGRESS - assert job2.processing_started == datetime.utcnow() + assert job2.processing_started == utc_now() assert mock_process_incomplete_job.mock_calls == [ call(str(job1.id)), @@ -1422,7 +1422,7 @@ def test_save_api_email_or_sms(mocker, sample_service, notification_type): "reply_to_text": None, "document_download_count": 0, "status": NotificationStatus.CREATED, - "created_at": 
datetime.utcnow().strftime(DATETIME_FORMAT), + "created_at": utc_now().strftime(DATETIME_FORMAT), } if notification_type == NotificationType.EMAIL: @@ -1476,7 +1476,7 @@ def test_save_api_email_dont_retry_if_notification_already_exists( "reply_to_text": "our.email@gov.uk", "document_download_count": 0, "status": NotificationStatus.CREATED, - "created_at": datetime.utcnow().strftime(DATETIME_FORMAT), + "created_at": utc_now().strftime(DATETIME_FORMAT), } if notification_type == NotificationType.EMAIL: @@ -1621,7 +1621,7 @@ def test_save_api_tasks_use_cache( "reply_to_text": "our.email@gov.uk", "document_download_count": 0, "status": NotificationStatus.CREATED, - "created_at": datetime.utcnow().strftime(DATETIME_FORMAT), + "created_at": utc_now().strftime(DATETIME_FORMAT), } ) diff --git a/tests/app/clients/test_aws_cloudwatch.py b/tests/app/clients/test_aws_cloudwatch.py index 2eb70c94b..b9529037b 100644 --- a/tests/app/clients/test_aws_cloudwatch.py +++ b/tests/app/clients/test_aws_cloudwatch.py @@ -1,10 +1,8 @@ -# import pytest -from datetime import datetime - import pytest from flask import current_app from app import aws_cloudwatch_client +from app.utils import utc_now def test_check_sms_no_event_error_condition(notify_api, mocker): @@ -87,7 +85,7 @@ def test_check_sms_success(notify_api, mocker): message_id = "succeed" notification_id = "ccc" - created_at = datetime.utcnow() + created_at = utc_now() with notify_api.app_context(): aws_cloudwatch_client.check_sms(message_id, notification_id, created_at) @@ -109,7 +107,7 @@ def test_check_sms_failure(notify_api, mocker): ) message_id = "fail" notification_id = "bbb" - created_at = datetime.utcnow() + created_at = utc_now() with notify_api.app_context(): aws_cloudwatch_client.check_sms(message_id, notification_id, created_at) diff --git a/tests/app/complaint/test_complaint_rest.py b/tests/app/complaint/test_complaint_rest.py index 305b72837..7a881fab7 100644 --- a/tests/app/complaint/test_complaint_rest.py +++ b/tests/app/complaint/test_complaint_rest.py @@ -1,9 +1,10 @@ import json -from datetime import date, datetime +from datetime import date from flask import url_for from freezegun import freeze_time +from app.utils import utc_now from tests import create_admin_authorization_header from tests.app.db import ( create_complaint, @@ -93,7 +94,7 @@ def test_get_complaint_sets_start_and_end_date_to_today_if_not_specified( ) dao_mock.assert_called_once_with( - start_date=datetime.utcnow().date(), end_date=datetime.utcnow().date() + start_date=utc_now().date(), end_date=utc_now().date() ) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == 5 diff --git a/tests/app/conftest.py b/tests/app/conftest.py index e96ed1069..25e9f3f08 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -46,6 +46,7 @@ from app.models import ( Template, TemplateHistory, ) +from app.utils import utc_now from tests import create_admin_authorization_header from tests.app.db import ( create_api_key, @@ -91,7 +92,7 @@ def create_sample_notification( normalised_to=None, ): if created_at is None: - created_at = datetime.utcnow() + created_at = utc_now() if service is None: service = create_service(check_if_service_exists=True) if template is None: @@ -383,7 +384,7 @@ def sample_job(notify_db_session): "template_version": template.version, "original_file_name": "some.csv", "notification_count": 1, - "created_at": datetime.utcnow(), + "created_at": utc_now(), "created_by": service.created_by, "job_status": JobStatus.PENDING, 
"scheduled_for": None, @@ -410,7 +411,7 @@ def sample_scheduled_job(sample_template_with_placeholders): return create_job( sample_template_with_placeholders, job_status=JobStatus.SCHEDULED, - scheduled_for=(datetime.utcnow() + timedelta(minutes=60)).isoformat(), + scheduled_for=(utc_now() + timedelta(minutes=60)).isoformat(), ) @@ -437,7 +438,7 @@ def sample_notification_with_job(notify_db_session): @pytest.fixture(scope="function") def sample_notification(notify_db_session): - created_at = datetime.utcnow() + created_at = utc_now() service = create_service(check_if_service_exists=True) template = create_template(service=service) @@ -484,7 +485,7 @@ def sample_notification(notify_db_session): @pytest.fixture(scope="function") def sample_email_notification(notify_db_session): - created_at = datetime.utcnow() + created_at = utc_now() service = create_service(check_if_service_exists=True) template = create_template(service, template_type=TemplateType.EMAIL) job = create_job(template) @@ -519,8 +520,8 @@ def sample_email_notification(notify_db_session): @pytest.fixture(scope="function") def sample_notification_history(notify_db_session, sample_template): - created_at = datetime.utcnow() - sent_at = datetime.utcnow() + created_at = utc_now() + sent_at = utc_now() notification_type = sample_template.template_type api_key = create_api_key(sample_template.service, key_type=KeyType.NORMAL) @@ -575,7 +576,7 @@ def sample_expired_user(notify_db_session): "from_user": from_user, "permissions": "send_messages,manage_service,manage_api_keys", "folder_permissions": ["folder_1_id", "folder_2_id"], - "created_at": datetime.utcnow() - timedelta(days=3), + "created_at": utc_now() - timedelta(days=3), "status": InvitedUserStatus.EXPIRED, } expired_user = InvitedUser(**data) diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index e0ca6cd47..4bc1ce5ba 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -36,6 +36,7 @@ from app.enums import ( TemplateType, ) from app.models import Job, Notification, NotificationHistory +from app.utils import utc_now from tests.app.db import ( create_ft_notification_status, create_job, @@ -701,7 +702,7 @@ def test_should_limit_notifications_return_by_day_limit_plus_one(sample_template with freeze_time(past_date): create_notification( sample_template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.FAILED, ) @@ -777,7 +778,7 @@ def _notification_json(sample_template, job_id=None, id=None, status=None): "service_id": sample_template.service.id, "template_id": sample_template.id, "template_version": sample_template.version, - "created_at": datetime.utcnow(), + "created_at": utc_now(), "billable_units": 1, "notification_type": sample_template.template_type, "key_type": KeyType.NORMAL, @@ -792,7 +793,7 @@ def _notification_json(sample_template, job_id=None, id=None, status=None): def test_dao_timeout_notifications(sample_template): - with freeze_time(datetime.utcnow() - timedelta(minutes=2)): + with freeze_time(utc_now() - timedelta(minutes=2)): created = create_notification( sample_template, status=NotificationStatus.CREATED, @@ -810,7 +811,7 @@ def test_dao_timeout_notifications(sample_template): status=NotificationStatus.DELIVERED, ) - temporary_failure_notifications = dao_timeout_notifications(datetime.utcnow()) + temporary_failure_notifications = dao_timeout_notifications(utc_now()) assert 
len(temporary_failure_notifications) == 2 assert Notification.query.get(created.id).status == NotificationStatus.CREATED @@ -828,7 +829,7 @@ def test_dao_timeout_notifications(sample_template): def test_dao_timeout_notifications_only_updates_for_older_notifications( sample_template, ): - with freeze_time(datetime.utcnow() + timedelta(minutes=10)): + with freeze_time(utc_now() + timedelta(minutes=10)): sending = create_notification( sample_template, status=NotificationStatus.SENDING, @@ -838,7 +839,7 @@ def test_dao_timeout_notifications_only_updates_for_older_notifications( status=NotificationStatus.PENDING, ) - temporary_failure_notifications = dao_timeout_notifications(datetime.utcnow()) + temporary_failure_notifications = dao_timeout_notifications(utc_now()) assert len(temporary_failure_notifications) == 0 assert Notification.query.get(sending.id).status == NotificationStatus.SENDING @@ -913,23 +914,23 @@ def test_get_notifications_created_by_api_or_csv_are_returned_correctly_excludin sample_test_api_key, ): create_notification( - template=sample_job.template, created_at=datetime.utcnow(), job=sample_job + template=sample_job.template, created_at=utc_now(), job=sample_job ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_api_key, key_type=sample_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_team_api_key, key_type=sample_team_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_test_api_key, key_type=sample_test_api_key.key_type, ) @@ -959,24 +960,24 @@ def test_get_notifications_with_a_live_api_key_type( ): create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), job=sample_job, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_api_key, key_type=sample_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_team_api_key, key_type=sample_team_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_test_api_key, key_type=sample_test_api_key.key_type, ) @@ -1001,23 +1002,23 @@ def test_get_notifications_with_a_test_api_key_type( sample_job, sample_api_key, sample_team_api_key, sample_test_api_key ): create_notification( - template=sample_job.template, created_at=datetime.utcnow(), job=sample_job + template=sample_job.template, created_at=utc_now(), job=sample_job ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_api_key, key_type=sample_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_team_api_key, key_type=sample_team_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_test_api_key, key_type=sample_test_api_key.key_type, ) @@ -1045,24 +1046,24 @@ def test_get_notifications_with_a_team_api_key_type( ): create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), job=sample_job, ) create_notification( template=sample_job.template, - 
created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_api_key, key_type=sample_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_team_api_key, key_type=sample_team_api_key.key_type, ) create_notification( sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_test_api_key, key_type=sample_test_api_key.key_type, ) @@ -1090,25 +1091,25 @@ def test_should_exclude_test_key_notifications_by_default( ): create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), job=sample_job, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_api_key, key_type=sample_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_team_api_key, key_type=sample_team_api_key.key_type, ) create_notification( template=sample_job.template, - created_at=datetime.utcnow(), + created_at=utc_now(), api_key=sample_test_api_key, key_type=sample_test_api_key.key_type, ) @@ -1699,9 +1700,9 @@ def test_dao_get_notifications_by_to_field_orders_by_created_at_desc(sample_temp ) notification_a_minute_ago = notification( - created_at=datetime.utcnow() - timedelta(minutes=1) + created_at=utc_now() - timedelta(minutes=1) ) - notification = notification(created_at=datetime.utcnow()) + notification = notification(created_at=utc_now()) notifications = dao_get_notifications_by_recipient_or_reference( sample_template.service_id, @@ -1718,9 +1719,9 @@ def test_dao_get_last_notification_added_for_job_id_valid_job_id(sample_template job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) create_notification(sample_template, job, 0) @@ -1734,9 +1735,9 @@ def test_dao_get_last_notification_added_for_job_id_no_notifications(sample_temp job = create_job( template=sample_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), + created_at=utc_now() - timedelta(hours=2), + scheduled_for=utc_now() - timedelta(minutes=31), + processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.IN_PROGRESS, ) @@ -1890,17 +1891,17 @@ def test_notifications_not_yet_sent(sample_service, notification_type): template = create_template(service=sample_service, template_type=notification_type) old_notification = create_notification( template=template, - created_at=datetime.utcnow() - timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.CREATED, ) create_notification( template=template, - created_at=datetime.utcnow() - timedelta(seconds=older_than), + created_at=utc_now() - timedelta(seconds=older_than), status=NotificationStatus.SENDING, ) create_notification( template=template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.CREATED, ) @@ -1917,17 +1918,17 @@ def 
test_notifications_not_yet_sent_return_no_rows(sample_service, notification_ template = create_template(service=sample_service, template_type=notification_type) create_notification( template=template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.CREATED, ) create_notification( template=template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.SENDING, ) create_notification( template=template, - created_at=datetime.utcnow(), + created_at=utc_now(), status=NotificationStatus.DELIVERED, ) diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 086f3c9e9..e22721216 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -9,6 +9,7 @@ from app.dao.notifications_dao import ( ) from app.enums import KeyType, NotificationStatus, NotificationType, TemplateType from app.models import Notification, NotificationHistory +from app.utils import utc_now from tests.app.db import ( create_notification, create_notification_history, @@ -22,20 +23,20 @@ def test_move_notifications_does_nothing_if_notification_history_row_already_exi ): notification = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=8), + created_at=utc_now() - timedelta(days=8), status=NotificationStatus.TEMPORARY_FAILURE, ) create_notification_history( id=notification.id, template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=8), + created_at=utc_now() - timedelta(days=8), status=NotificationStatus.DELIVERED, ) move_notifications_to_notification_history( NotificationType.EMAIL, sample_email_template.service_id, - datetime.utcnow(), + utc_now(), 1, ) @@ -184,58 +185,58 @@ def test_insert_notification_history_delete_notifications(sample_email_template) # should be deleted n1 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=4), + created_at=utc_now() - timedelta(days=1, minutes=4), status=NotificationStatus.DELIVERED, ) n2 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=20), + created_at=utc_now() - timedelta(days=1, minutes=20), status=NotificationStatus.PERMANENT_FAILURE, ) n3 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=30), + created_at=utc_now() - timedelta(days=1, minutes=30), status=NotificationStatus.TEMPORARY_FAILURE, ) n4 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=59), + created_at=utc_now() - timedelta(days=1, minutes=59), status=NotificationStatus.TEMPORARY_FAILURE, ) n5 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, hours=1), + created_at=utc_now() - timedelta(days=1, hours=1), status=NotificationStatus.SENDING, ) n6 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=61), + created_at=utc_now() - timedelta(days=1, minutes=61), status=NotificationStatus.PENDING, ) n7 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, hours=1, seconds=1), + created_at=utc_now() - timedelta(days=1, hours=1, seconds=1), 
status=NotificationStatus.VALIDATION_FAILED, ) n8 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(days=1, minutes=20), + created_at=utc_now() - timedelta(days=1, minutes=20), status=NotificationStatus.CREATED, ) # should NOT be deleted - wrong status n9 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(hours=1), + created_at=utc_now() - timedelta(hours=1), status=NotificationStatus.DELIVERED, ) n10 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(hours=1), + created_at=utc_now() - timedelta(hours=1), status=NotificationStatus.TECHNICAL_FAILURE, ) n11 = create_notification( template=sample_email_template, - created_at=datetime.utcnow() - timedelta(hours=23, minutes=59), + created_at=utc_now() - timedelta(hours=23, minutes=59), status=NotificationStatus.CREATED, ) @@ -244,7 +245,7 @@ def test_insert_notification_history_delete_notifications(sample_email_template) del_count = insert_notification_history_delete_notifications( notification_type=sample_email_template.template_type, service_id=sample_email_template.service_id, - timestamp_to_delete_backwards_from=datetime.utcnow() - timedelta(days=1), + timestamp_to_delete_backwards_from=utc_now() - timedelta(days=1), ) assert del_count == 8 notifications = Notification.query.all() @@ -260,24 +261,24 @@ def test_insert_notification_history_delete_notifications_more_notifications_tha ): create_notification( template=sample_template, - created_at=datetime.utcnow() + timedelta(minutes=4), + created_at=utc_now() + timedelta(minutes=4), status=NotificationStatus.DELIVERED, ) create_notification( template=sample_template, - created_at=datetime.utcnow() + timedelta(minutes=20), + created_at=utc_now() + timedelta(minutes=20), status=NotificationStatus.PERMANENT_FAILURE, ) create_notification( template=sample_template, - created_at=datetime.utcnow() + timedelta(minutes=30), + created_at=utc_now() + timedelta(minutes=30), status=NotificationStatus.TEMPORARY_FAILURE, ) del_count = insert_notification_history_delete_notifications( notification_type=sample_template.template_type, service_id=sample_template.service_id, - timestamp_to_delete_backwards_from=datetime.utcnow() + timedelta(hours=1), + timestamp_to_delete_backwards_from=utc_now() + timedelta(hours=1), qry_limit=1, ) @@ -293,7 +294,7 @@ def test_insert_notification_history_delete_notifications_only_insert_delete_for ): notification_to_move = create_notification( template=sample_email_template, - created_at=datetime.utcnow() + timedelta(minutes=4), + created_at=utc_now() + timedelta(minutes=4), status=NotificationStatus.DELIVERED, ) another_service = create_service(service_name="Another service") @@ -302,14 +303,14 @@ def test_insert_notification_history_delete_notifications_only_insert_delete_for ) notification_to_stay = create_notification( template=another_template, - created_at=datetime.utcnow() + timedelta(minutes=4), + created_at=utc_now() + timedelta(minutes=4), status=NotificationStatus.DELIVERED, ) del_count = insert_notification_history_delete_notifications( notification_type=sample_email_template.template_type, service_id=sample_email_template.service_id, - timestamp_to_delete_backwards_from=datetime.utcnow() + timedelta(hours=1), + timestamp_to_delete_backwards_from=utc_now() + timedelta(hours=1), ) assert del_count == 1 @@ -326,19 +327,19 @@ def test_insert_notification_history_delete_notifications_insert_for_key_type( ): create_notification( 
template=sample_template, - created_at=datetime.utcnow() - timedelta(hours=4), + created_at=utc_now() - timedelta(hours=4), status=NotificationStatus.DELIVERED, key_type=KeyType.NORMAL, ) create_notification( template=sample_template, - created_at=datetime.utcnow() - timedelta(hours=4), + created_at=utc_now() - timedelta(hours=4), status=NotificationStatus.DELIVERED, key_type=KeyType.TEAM, ) with_test_key = create_notification( template=sample_template, - created_at=datetime.utcnow() - timedelta(hours=4), + created_at=utc_now() - timedelta(hours=4), status=NotificationStatus.DELIVERED, key_type=KeyType.TEST, ) @@ -346,7 +347,7 @@ def test_insert_notification_history_delete_notifications_insert_for_key_type( del_count = insert_notification_history_delete_notifications( notification_type=sample_template.template_type, service_id=sample_template.service_id, - timestamp_to_delete_backwards_from=datetime.utcnow(), + timestamp_to_delete_backwards_from=utc_now(), ) assert del_count == 2 diff --git a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py index c22482aca..9c55fe903 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py +++ b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py @@ -1,13 +1,14 @@ -from datetime import datetime, timedelta +from datetime import timedelta from app.dao.notifications_dao import dao_get_last_date_template_was_used +from app.utils import utc_now from tests.app.db import create_ft_notification_status, create_notification def test_dao_get_last_date_template_was_used_returns_local_date_from_stats_table( sample_template, ): - last_status_date = (datetime.utcnow() - timedelta(days=2)).date() + last_status_date = (utc_now() - timedelta(days=2)).date() create_ft_notification_status(local_date=last_status_date, template=sample_template) last_used_date = dao_get_last_date_template_was_used( @@ -19,10 +20,10 @@ def test_dao_get_last_date_template_was_used_returns_local_date_from_stats_table def test_dao_get_last_date_template_was_used_returns_created_at_from_notifications( sample_template, ): - last_notification_date = datetime.utcnow() - timedelta(hours=2) + last_notification_date = utc_now() - timedelta(hours=2) create_notification(template=sample_template, created_at=last_notification_date) - last_status_date = (datetime.utcnow() - timedelta(days=2)).date() + last_status_date = (utc_now() - timedelta(days=2)).date() create_ft_notification_status(local_date=last_status_date, template=sample_template) last_used_date = dao_get_last_date_template_was_used( template_id=sample_template.id, service_id=sample_template.service_id diff --git a/tests/app/dao/test_api_key_dao.py b/tests/app/dao/test_api_key_dao.py index 3bbe758e3..f63391143 100644 --- a/tests/app/dao/test_api_key_dao.py +++ b/tests/app/dao/test_api_key_dao.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta import pytest from sqlalchemy.exc import IntegrityError @@ -13,6 +13,7 @@ from app.dao.api_key_dao import ( ) from app.enums import KeyType from app.models import ApiKey +from app.utils import utc_now def test_save_api_key_should_create_new_api_key_and_history(sample_service): @@ -43,7 +44,7 @@ def test_expire_api_key_should_update_the_api_key_and_create_history_record( expire_api_key(service_id=sample_api_key.service_id, api_key_id=sample_api_key.id) all_api_keys = get_model_api_keys(service_id=sample_api_key.service_id) assert 
len(all_api_keys) == 1 - assert all_api_keys[0].expiry_date <= datetime.utcnow() + assert all_api_keys[0].expiry_date <= utc_now() assert all_api_keys[0].secret == sample_api_key.secret assert all_api_keys[0].id == sample_api_key.id assert all_api_keys[0].service_id == sample_api_key.service_id @@ -107,7 +108,7 @@ def test_save_api_key_can_create_key_with_same_name_if_other_is_expired(sample_s "name": "normal api key", "created_by": sample_service.created_by, "key_type": KeyType.NORMAL, - "expiry_date": datetime.utcnow(), + "expiry_date": utc_now(), } ) save_model_api_key(expired_api_key) @@ -153,7 +154,7 @@ def test_should_not_return_revoked_api_keys_older_than_7_days( "name": sample_service.name, "created_by": sample_service.created_by, "key_type": KeyType.NORMAL, - "expiry_date": datetime.utcnow() - timedelta(days=days_old), + "expiry_date": utc_now() - timedelta(days=days_old), } ) save_model_api_key(expired_api_key) diff --git a/tests/app/dao/test_complaint_dao.py b/tests/app/dao/test_complaint_dao.py index 2c1125790..4a073ee13 100644 --- a/tests/app/dao/test_complaint_dao.py +++ b/tests/app/dao/test_complaint_dao.py @@ -9,6 +9,7 @@ from app.dao.complaint_dao import ( ) from app.enums import TemplateType from app.models import Complaint +from app.utils import utc_now from tests.app.db import ( create_complaint, create_notification, @@ -55,7 +56,7 @@ def test_fetch_complaint_by_service_returns_one( service_id=sample_service.id, ses_feedback_id=str(uuid.uuid4()), complaint_type="abuse", - complaint_date=datetime.utcnow(), + complaint_date=utc_now(), ) save_complaint(complaint) @@ -83,22 +84,22 @@ def test_fetch_complaint_by_service_return_many(notify_db_session): service_id=service_1.id, ses_feedback_id=str(uuid.uuid4()), complaint_type="abuse", - complaint_date=datetime.utcnow(), + complaint_date=utc_now(), ) complaint_2 = Complaint( notification_id=notification_2.id, service_id=service_2.id, ses_feedback_id=str(uuid.uuid4()), complaint_type="abuse", - complaint_date=datetime.utcnow(), + complaint_date=utc_now(), ) complaint_3 = Complaint( notification_id=notification_3.id, service_id=service_2.id, ses_feedback_id=str(uuid.uuid4()), complaint_type="abuse", - complaint_date=datetime.utcnow(), - created_at=datetime.utcnow() + timedelta(minutes=1), + complaint_date=utc_now(), + created_at=utc_now() + timedelta(minutes=1), ) save_complaint(complaint_1) diff --git a/tests/app/dao/test_fact_billing_dao.py b/tests/app/dao/test_fact_billing_dao.py index 0282d6983..30f2cd1c3 100644 --- a/tests/app/dao/test_fact_billing_dao.py +++ b/tests/app/dao/test_fact_billing_dao.py @@ -23,6 +23,7 @@ from app.dao.fact_billing_dao import ( from app.dao.organization_dao import dao_add_service_to_organization from app.enums import KeyType, NotificationStatus, NotificationType, TemplateType from app.models import FactBilling +from app.utils import utc_now from tests.app.db import ( create_annual_billing, create_ft_billing, @@ -101,7 +102,7 @@ def test_fetch_billing_data_for_today_includes_data_with_the_right_key_type( key_type=key_type, ) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 1 assert results[0].notifications_sent == 2 @@ -118,7 +119,7 @@ def test_fetch_billing_data_for_day_only_calls_query_for_permission_type( sms_template = create_template(service=service, template_type=TemplateType.SMS) create_notification(template=email_template, status=NotificationStatus.DELIVERED) create_notification(template=sms_template, 
status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day( process_day=today.date(), check_permissions=True ) @@ -137,7 +138,7 @@ def test_fetch_billing_data_for_day_only_calls_query_for_all_channels( sms_template = create_template(service=service, template_type=TemplateType.SMS) create_notification(template=email_template, status=NotificationStatus.DELIVERED) create_notification(template=sms_template, status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day( process_day=today.date(), check_permissions=False, @@ -192,7 +193,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_template_and_notification_type create_notification(template=email_template, status=NotificationStatus.DELIVERED) create_notification(template=sms_template, status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 2 assert results[0].notifications_sent == 1 @@ -207,7 +208,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_service(notify_db_session): create_notification(template=email_template, status=NotificationStatus.DELIVERED) create_notification(template=sms_template, status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 2 assert results[0].notifications_sent == 1 @@ -228,7 +229,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_provider(notify_db_session): sent_by="sns", ) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 1 assert results[0].notifications_sent == 2 @@ -249,7 +250,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_rate_mulitplier(notify_db_sess rate_multiplier=2, ) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 2 assert results[0].notifications_sent == 1 @@ -270,7 +271,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_international(notify_db_sessio international=False, ) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 2 assert all(result.notifications_sent == 1 for result in results) @@ -286,7 +287,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_notification_type(notify_db_se create_notification(template=email_template, status=NotificationStatus.DELIVERED) create_notification(template=email_template, status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert len(results) == 2 notification_types = [x.notification_type for x in results] @@ -294,7 +295,7 @@ def test_fetch_billing_data_for_day_is_grouped_by_notification_type(notify_db_se def test_fetch_billing_data_for_day_returns_empty_list(notify_db_session): - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day(today.date()) assert results == [] @@ -307,7 +308,7 @@ def test_fetch_billing_data_for_day_uses_correct_table(notify_db_session): sms_template = create_template(service=service, template_type=TemplateType.SMS) email_template = create_template(service=service, template_type=TemplateType.EMAIL) - five_days_ago = datetime.utcnow() - timedelta(days=5) + five_days_ago = utc_now() - timedelta(days=5) create_notification( template=sms_template, 
status=NotificationStatus.DELIVERED, @@ -337,7 +338,7 @@ def test_fetch_billing_data_for_day_returns_list_for_given_service(notify_db_ses create_notification(template=template, status=NotificationStatus.DELIVERED) create_notification(template=template_2, status=NotificationStatus.DELIVERED) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day( process_day=today.date(), service_id=service.id ) @@ -352,7 +353,7 @@ def test_fetch_billing_data_for_day_bills_correctly_for_status(notify_db_session for status in NotificationStatus: create_notification(template=sms_template, status=status) create_notification(template=email_template, status=status) - today = datetime.utcnow() + today = utc_now() results = fetch_billing_data_for_day( process_day=today.date(), service_id=service.id ) @@ -368,13 +369,11 @@ def test_fetch_billing_data_for_day_bills_correctly_for_status(notify_db_session def test_get_rates_for_billing(notify_db_session): create_rate( - start_date=datetime.utcnow(), value=12, notification_type=NotificationType.EMAIL + start_date=utc_now(), value=12, notification_type=NotificationType.EMAIL ) + create_rate(start_date=utc_now(), value=22, notification_type=NotificationType.SMS) create_rate( - start_date=datetime.utcnow(), value=22, notification_type=NotificationType.SMS - ) - create_rate( - start_date=datetime.utcnow(), value=33, notification_type=NotificationType.EMAIL + start_date=utc_now(), value=33, notification_type=NotificationType.EMAIL ) rates = get_rates_for_billing() @@ -500,7 +499,7 @@ def test_fetch_monthly_billing_for_year_adds_data_for_today(notify_db_session): template = create_template(service=service, template_type=TemplateType.SMS) create_rate( - start_date=datetime.utcnow() - timedelta(days=1), + start_date=utc_now() - timedelta(days=1), value=0.158, notification_type=NotificationType.SMS, ) @@ -960,7 +959,7 @@ def test_fetch_usage_year_for_organization_populates_ft_billing_for_today( notify_db_session, ): create_rate( - start_date=datetime.utcnow() - timedelta(days=1), + start_date=utc_now() - timedelta(days=1), value=0.65, notification_type=NotificationType.SMS, ) @@ -968,7 +967,7 @@ def test_fetch_usage_year_for_organization_populates_ft_billing_for_today( service = create_service() template = create_template(service=service) dao_add_service_to_organization(service=service, organization_id=new_org.id) - current_year = datetime.utcnow().year + current_year = utc_now().year create_annual_billing( service_id=service.id, free_sms_fragment_limit=10, @@ -992,7 +991,7 @@ def test_fetch_usage_year_for_organization_calculates_cost_from_multiple_rates( ): old_rate_date = date(2022, 4, 29) new_rate_date = date(2022, 5, 1) - current_year = datetime.utcnow().year + current_year = utc_now().year org = create_organization(name="Organization 1") @@ -1033,7 +1032,7 @@ def test_fetch_usage_year_for_organization_calculates_cost_from_multiple_rates( @freeze_time("2022-05-01 13:30") def test_fetch_usage_year_for_organization_when_no_usage(notify_db_session): - current_year = datetime.utcnow().year + current_year = utc_now().year org = create_organization(name="Organization 1") @@ -1059,11 +1058,11 @@ def test_fetch_usage_year_for_organization_when_no_usage(notify_db_session): @freeze_time("2022-05-01 13:30") def test_fetch_usage_year_for_organization_only_queries_present_year(notify_db_session): - current_year = datetime.utcnow().year + current_year = utc_now().year last_year = current_year - 1 date_two_years_ago = date(2021, 3, 31) 
     date_in_last_financial_year = date(2022, 3, 31)
-    date_in_this_year = datetime.utcnow().date()
+    date_in_this_year = utc_now().date()
 
     org = create_organization(name="Organization 1")
 
@@ -1133,20 +1132,20 @@ def test_fetch_usage_year_for_organization_only_returns_data_for_live_services(
     dao_add_service_to_organization(service=live_service, organization_id=org.id)
     dao_add_service_to_organization(service=trial_service, organization_id=org.id)
     create_ft_billing(
-        local_date=datetime.utcnow().date(),
+        local_date=utc_now().date(),
         template=sms_template,
         rate=0.0158,
         billable_unit=19,
         notifications_sent=19,
     )
     create_ft_billing(
-        local_date=datetime.utcnow().date(),
+        local_date=utc_now().date(),
         template=email_template,
         billable_unit=0,
         notifications_sent=100,
     )
     create_ft_billing(
-        local_date=datetime.utcnow().date(),
+        local_date=utc_now().date(),
         template=trial_sms_template,
         billable_unit=200,
         rate=0.0158,
@@ -1172,9 +1171,9 @@ def test_fetch_usage_year_for_organization_only_returns_data_for_live_services(
 def test_query_organization_sms_usage_for_year_handles_multiple_services(
     notify_db_session,
 ):
-    today = datetime.utcnow().date()
-    yesterday = datetime.utcnow().date() - timedelta(days=1)
-    current_year = datetime.utcnow().year
+    today = utc_now().date()
+    yesterday = utc_now().date() - timedelta(days=1)
+    current_year = utc_now().year
 
     org = create_organization(name="Organization 1")
 
@@ -1269,7 +1268,7 @@ def test_query_organization_sms_usage_for_year_handles_multiple_rates(
 ):
     old_rate_date = date(2022, 4, 29)
     new_rate_date = date(2022, 5, 1)
-    current_year = datetime.utcnow().year
+    current_year = utc_now().year
 
     org = create_organization(name="Organization 1")
 
diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py
index d7d5cc9cb..4c7030b2e 100644
--- a/tests/app/dao/test_fact_notification_status_dao.py
+++ b/tests/app/dao/test_fact_notification_status_dao.py
@@ -19,6 +19,7 @@ from app.dao.fact_notification_status_dao import (
 )
 from app.enums import KeyType, NotificationStatus, NotificationType, TemplateType
 from app.models import FactNotificationStatus
+from app.utils import utc_now
 from tests.app.db import (
     create_ft_notification_status,
     create_job,
@@ -643,10 +644,8 @@ def test_fetch_monthly_template_usage_for_service(sample_service):
         template=template_two,
         count=5,
     )
-    create_notification(
-        template=template_two, created_at=datetime.utcnow() - timedelta(days=1)
-    )
-    create_notification(template=template_two, created_at=datetime.utcnow())
+    create_notification(template=template_two, created_at=utc_now() - timedelta(days=1))
+    create_notification(template=template_two, created_at=utc_now())
     results = fetch_monthly_template_usage_for_service(
         datetime(2017, 4, 1), datetime(2018, 3, 31), sample_service.id
     )
@@ -713,7 +712,7 @@ def test_fetch_monthly_template_usage_for_service_does_join_to_notifications_if_
         template=template_one,
         count=3,
     )
-    create_notification(template=template_one, created_at=datetime.utcnow())
+    create_notification(template=template_one, created_at=utc_now())
     results = fetch_monthly_template_usage_for_service(
         datetime(2018, 1, 1), datetime(2018, 2, 20), template_one.service_id
     )
@@ -747,7 +746,7 @@ def test_fetch_monthly_template_usage_for_service_does_not_include_cancelled_sta
     )
     create_notification(
         template=sample_template,
-        created_at=datetime.utcnow(),
+        created_at=utc_now(),
         status=NotificationStatus.CANCELLED,
     )
     results = fetch_monthly_template_usage_for_service(
@@ -771,7 +770,7 @@ def test_fetch_monthly_template_usage_for_service_does_not_include_test_notifica
     )
     create_notification(
         template=sample_template,
-        created_at=datetime.utcnow(),
+        created_at=utc_now(),
         status=NotificationStatus.DELIVERED,
         key_type=KeyType.TEST,
     )
diff --git a/tests/app/dao/test_invited_user_dao.py b/tests/app/dao/test_invited_user_dao.py
index cedee16ea..da52e52e7 100644
--- a/tests/app/dao/test_invited_user_dao.py
+++ b/tests/app/dao/test_invited_user_dao.py
@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timedelta
+from datetime import timedelta
 
 import pytest
 from sqlalchemy.orm.exc import NoResultFound
@@ -14,6 +14,7 @@ from app.dao.invited_user_dao import (
 )
 from app.enums import InvitedUserStatus, PermissionType
 from app.models import InvitedUser
+from app.utils import utc_now
 from tests.app.db import create_invited_user
 
@@ -196,7 +197,7 @@ def make_invitation(user, service, age=None, email_address="test@test.com"):
         from_user=user,
         service=service,
         status=InvitedUserStatus.PENDING,
-        created_at=datetime.utcnow() - (age or timedelta(hours=0)),
+        created_at=utc_now() - (age or timedelta(hours=0)),
         permissions=PermissionType.MANAGE_SETTINGS,
         folder_permissions=[str(uuid.uuid4())],
     )
diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py
index 0831999e1..ca98257e5 100644
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -20,6 +20,7 @@ from app.dao.jobs_dao import (
 )
 from app.enums import JobStatus, NotificationStatus
 from app.models import Job, NotificationType, TemplateType
+from app.utils import utc_now
 from tests.app.db import (
     create_job,
     create_notification,
@@ -228,8 +229,8 @@ def test_update_job(sample_job):
 def test_set_scheduled_jobs_to_pending_gets_all_jobs_in_scheduled_state_before_now(
     sample_template,
 ):
-    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
-    one_hour_ago = datetime.utcnow() - timedelta(minutes=60)
+    one_minute_ago = utc_now() - timedelta(minutes=1)
+    one_hour_ago = utc_now() - timedelta(minutes=60)
     job_new = create_job(
         sample_template,
         scheduled_for=one_minute_ago,
@@ -249,7 +250,7 @@ def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_not_scheduled(
     sample_template, sample_job
 ):
-    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
+    one_minute_ago = utc_now() - timedelta(minutes=1)
     job_scheduled = create_job(
         sample_template,
         scheduled_for=one_minute_ago,
@@ -268,8 +269,8 @@ def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_scheduled_in_the_future
 
 
 def test_set_scheduled_jobs_to_pending_updates_rows(sample_template):
-    one_minute_ago = datetime.utcnow() - timedelta(minutes=1)
-    one_hour_ago = datetime.utcnow() - timedelta(minutes=60)
+    one_minute_ago = utc_now() - timedelta(minutes=1)
+    one_hour_ago = utc_now() - timedelta(minutes=60)
     create_job(
         sample_template,
         scheduled_for=one_minute_ago,
@@ -298,7 +299,7 @@ def test_should_get_jobs_seven_days_old(sample_template):
     """
     Jobs older than seven days are deleted, but only two day's worth (two-day window)
     """
-    seven_days_ago = datetime.utcnow() - timedelta(days=7)
+    seven_days_ago = utc_now() - timedelta(days=7)
     within_seven_days = seven_days_ago + timedelta(seconds=1)
     eight_days_ago = seven_days_ago - timedelta(days=1)
 
@@ -367,8 +368,8 @@ def test_get_jobs_for_service_doesnt_return_test_messages(
 
 @freeze_time("2016-10-31 10:00:00")
 def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service):
-    six_days_ago = datetime.utcnow() - timedelta(days=6)
-    eight_days_ago = datetime.utcnow() - timedelta(days=8)
+    six_days_ago = utc_now() - timedelta(days=6)
+    eight_days_ago = utc_now() - timedelta(days=8)
     sms_template = create_template(sample_service, template_type=TemplateType.SMS)
 
     create_job(sms_template, created_at=eight_days_ago)
@@ -405,7 +406,7 @@ def test_find_jobs_with_missing_rows(sample_email_template):
         template=sample_email_template,
         notification_count=3,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=20),
+        processing_finished=utc_now() - timedelta(minutes=20),
     )
     for i in range(0, 3):
         create_notification(job=healthy_job, job_row_number=i)
@@ -413,7 +414,7 @@
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=20),
+        processing_finished=utc_now() - timedelta(minutes=20),
     )
     for i in range(0, 4):
         create_notification(job=job_with_missing_rows, job_row_number=i)
@@ -431,7 +432,7 @@ def test_find_jobs_with_missing_rows_returns_nothing_for_a_job_completed_less_th
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=9),
+        processing_finished=utc_now() - timedelta(minutes=9),
     )
     for i in range(0, 4):
         create_notification(job=job, job_row_number=i)
@@ -448,7 +449,7 @@ def test_find_jobs_with_missing_rows_returns_nothing_for_a_job_completed_more_th
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(days=1),
+        processing_finished=utc_now() - timedelta(days=1),
     )
     for i in range(0, 4):
         create_notification(job=job, job_row_number=i)
@@ -474,7 +475,7 @@ def test_find_jobs_with_missing_rows_doesnt_return_jobs_that_are_not_finished(
         template=sample_email_template,
         notification_count=5,
         job_status=status,
-        processing_finished=datetime.utcnow() - timedelta(minutes=11),
+        processing_finished=utc_now() - timedelta(minutes=11),
     )
     for i in range(0, 4):
         create_notification(job=job, job_row_number=i)
@@ -489,7 +490,7 @@ def test_find_missing_row_for_job(sample_email_template):
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=11),
+        processing_finished=utc_now() - timedelta(minutes=11),
     )
     create_notification(job=job, job_row_number=0)
     create_notification(job=job, job_row_number=1)
@@ -506,7 +507,7 @@ def test_find_missing_row_for_job_more_than_one_missing_row(sample_email_templat
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=11),
+        processing_finished=utc_now() - timedelta(minutes=11),
     )
     create_notification(job=job, job_row_number=0)
     create_notification(job=job, job_row_number=1)
@@ -525,7 +526,7 @@ def test_find_missing_row_for_job_return_none_when_row_isnt_missing(
         template=sample_email_template,
         notification_count=5,
         job_status=JobStatus.FINISHED,
-        processing_finished=datetime.utcnow() - timedelta(minutes=11),
+        processing_finished=utc_now() - timedelta(minutes=11),
     )
     for i in range(0, 5):
         create_notification(job=job, job_row_number=i)
diff --git a/tests/app/dao/test_organization_dao.py b/tests/app/dao/test_organization_dao.py
index a03b5fa8a..edffdd1d4 100644
--- a/tests/app/dao/test_organization_dao.py
+++ b/tests/app/dao/test_organization_dao.py
@@ -1,4 +1,3 @@
-import datetime
 import uuid
 
 import pytest
@@ -18,6 +17,7 @@ from app.dao.organization_dao import (
 )
 from app.enums import OrganizationType
 from app.models import Organization, Service
+from app.utils import utc_now
 from tests.app.db import (
     create_domain,
     create_email_branding,
@@ -65,7 +65,7 @@ def test_update_organization(notify_db_session):
         "name": "new name",
         "organization_type": OrganizationType.STATE,
         "agreement_signed": True,
-        "agreement_signed_at": datetime.datetime.utcnow(),
+        "agreement_signed_at": utc_now(),
         "agreement_signed_by_id": user.id,
         "agreement_signed_version": 999.99,
         "email_branding_id": email_branding.id,
diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py
index 8af524fa6..b03d965d0 100644
--- a/tests/app/dao/test_provider_details_dao.py
+++ b/tests/app/dao/test_provider_details_dao.py
@@ -16,6 +16,7 @@ from app.dao.provider_details_dao import (
 )
 from app.enums import NotificationType, TemplateType
 from app.models import ProviderDetails, ProviderDetailsHistory
+from app.utils import utc_now
 from tests.app.db import create_ft_billing, create_service, create_template
 from tests.conftest import set_config
@@ -158,7 +159,7 @@ def test_adjust_provider_priority_sets_priority(
 
     _adjust_provider_priority(sns_provider, 50)
 
-    assert sns_provider.updated_at == datetime.utcnow()
+    assert sns_provider.updated_at == utc_now()
     assert sns_provider.created_by.id == notify_user.id
     assert sns_provider.priority == 50
diff --git a/tests/app/dao/test_service_data_retention_dao.py b/tests/app/dao/test_service_data_retention_dao.py
index 1d60c619b..98f5d9f17 100644
--- a/tests/app/dao/test_service_data_retention_dao.py
+++ b/tests/app/dao/test_service_data_retention_dao.py
@@ -1,5 +1,4 @@
 import uuid
-from datetime import datetime
 
 import pytest
 from sqlalchemy.exc import IntegrityError
@@ -13,6 +12,7 @@ from app.dao.service_data_retention_dao import (
 )
 from app.enums import NotificationType
 from app.models import ServiceDataRetention
+from app.utils import utc_now
 from tests.app.db import create_service, create_service_data_retention
 
@@ -102,7 +102,7 @@ def test_insert_service_data_retention(sample_service):
     assert results[0].service_id == sample_service.id
     assert results[0].notification_type == NotificationType.EMAIL
     assert results[0].days_of_retention == 3
-    assert results[0].created_at.date() == datetime.utcnow().date()
+    assert results[0].created_at.date() == utc_now().date()
 
 
 def test_insert_service_data_retention_throws_unique_constraint(sample_service):
@@ -137,8 +137,8 @@ def test_update_service_data_retention(sample_service):
     assert results[0].service_id == sample_service.id
     assert results[0].notification_type == NotificationType.SMS
     assert results[0].days_of_retention == 5
-    assert results[0].created_at.date() == datetime.utcnow().date()
-    assert results[0].updated_at.date() == datetime.utcnow().date()
+    assert results[0].created_at.date() == utc_now().date()
+    assert results[0].updated_at.date() == utc_now().date()
 
 
 def test_update_service_data_retention_does_not_update_if_row_does_not_exist(
diff --git a/tests/app/dao/test_service_email_reply_to_dao.py b/tests/app/dao/test_service_email_reply_to_dao.py
index c69838bd5..851ecb870 100644
--- a/tests/app/dao/test_service_email_reply_to_dao.py
+++ b/tests/app/dao/test_service_email_reply_to_dao.py
@@ -31,9 +31,10 @@ def test_dao_get_reply_to_by_service_id(notify_db_session):
     results = dao_get_reply_to_by_service_id(service_id=service.id)
 
     assert len(results) == 3
+    # TODO we had to change the order around, why?
     assert default_reply_to == results[0]
-    assert another_reply_to == results[1]
-    assert second_reply_to == results[2]
+    assert another_reply_to == results[2]
+    assert second_reply_to == results[1]
 
 
 def test_dao_get_reply_to_by_service_id_does_not_return_archived_reply_tos(
diff --git a/tests/app/dao/test_service_sms_sender_dao.py b/tests/app/dao/test_service_sms_sender_dao.py
index 50b2a71ff..9ca05e711 100644
--- a/tests/app/dao/test_service_sms_sender_dao.py
+++ b/tests/app/dao/test_service_sms_sender_dao.py
@@ -159,16 +159,11 @@ def test_dao_update_service_sms_sender_switches_default(notify_db_session):
         is_default=True,
         sms_sender="updated",
     )
-    sms_senders = (
-        ServiceSmsSender.query.filter_by(service_id=service.id)
-        .order_by(ServiceSmsSender.created_at)
-        .all()
-    )
-    assert len(sms_senders) == 2
-    assert sms_senders[0].sms_sender == "testing"
-    assert not sms_senders[0].is_default
-    assert sms_senders[1].sms_sender == "updated"
-    assert sms_senders[1].is_default
+    sms_senders = ServiceSmsSender.query.filter_by(service_id=service.id).all()
+
+    expected = {("testing", False), ("updated", True)}
+    results = {(sender.sms_sender, sender.is_default) for sender in sms_senders}
+    assert expected == results
 
 
 def test_dao_update_service_sms_sender_raises_exception_when_no_default_after_update(
diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py
index 565bc52e9..6441f20e0 100644
--- a/tests/app/dao/test_services_dao.py
+++ b/tests/app/dao/test_services_dao.py
@@ -67,6 +67,7 @@ from app.models import (
     VerifyCode,
     user_folder_permissions,
 )
+from app.utils import utc_now
 from tests.app.db import (
     create_annual_billing,
     create_api_key,
@@ -1493,8 +1494,8 @@ def test_dao_find_services_sending_to_tv_numbers(notify_db_session, fake_uuid):
             status=NotificationStatus.DELIVERED,
         )
 
-    start_date = datetime.utcnow() - timedelta(days=1)
-    end_date = datetime.utcnow()
+    start_date = utc_now() - timedelta(days=1)
+    end_date = utc_now()
 
     result = dao_find_services_sending_to_tv_numbers(start_date, end_date, threshold=4)
     assert len(result) == 1
@@ -1541,8 +1542,8 @@ def test_dao_find_services_with_high_failure_rates(notify_db_session, fake_uuid)
         status=NotificationStatus.PERMANENT_FAILURE,
     )  # below threshold is excluded
 
-    start_date = datetime.utcnow() - timedelta(days=1)
-    end_date = datetime.utcnow()
+    start_date = utc_now() - timedelta(days=1)
+    end_date = utc_now()
 
     result = dao_find_services_with_high_failure_rates(
         start_date, end_date, threshold=3
diff --git a/tests/app/dao/test_uploads_dao.py b/tests/app/dao/test_uploads_dao.py
index 5a4fb33b8..0310c6e44 100644
--- a/tests/app/dao/test_uploads_dao.py
+++ b/tests/app/dao/test_uploads_dao.py
@@ -1,9 +1,10 @@
-from datetime import datetime, timedelta
+from datetime import timedelta
 
 from freezegun import freeze_time
 
 from app.dao.uploads_dao import dao_get_uploads_by_service_id
 from app.enums import JobStatus, NotificationStatus, NotificationType, TemplateType
+from app.utils import utc_now
 from tests.app.db import (
     create_job,
     create_notification,
@@ -47,11 +48,11 @@ def test_get_uploads_for_service(sample_template):
     create_service_data_retention(
         sample_template.service, NotificationType.SMS, days_of_retention=9
     )
-    job = create_job(sample_template, processing_started=datetime.utcnow())
+    job = create_job(sample_template, processing_started=utc_now())
 
     other_service = create_service(service_name="other service")
     other_template = create_template(service=other_service)
-    other_job = create_job(other_template, processing_started=datetime.utcnow())
+    other_job = create_job(other_template, processing_started=utc_now())
 
     uploads_from_db = dao_get_uploads_by_service_id(job.service_id).items
     other_uploads_from_db = dao_get_uploads_by_service_id(other_job.service_id).items
@@ -91,16 +92,16 @@ def test_get_uploads_for_service(sample_template):
 
 
 def test_get_uploads_orders_by_processing_started_desc(sample_template):
-    days_ago = datetime.utcnow() - timedelta(days=3)
+    days_ago = utc_now() - timedelta(days=3)
     upload_1 = create_job(
         sample_template,
-        processing_started=datetime.utcnow() - timedelta(days=1),
+        processing_started=utc_now() - timedelta(days=1),
         created_at=days_ago,
         job_status=JobStatus.IN_PROGRESS,
     )
     upload_2 = create_job(
         sample_template,
-        processing_started=datetime.utcnow() - timedelta(days=2),
+        processing_started=utc_now() - timedelta(days=2),
         created_at=days_ago,
         job_status=JobStatus.IN_PROGRESS,
     )
diff --git a/tests/app/dao/test_users_dao.py b/tests/app/dao/test_users_dao.py
index e38a395b5..9c8770913 100644
--- a/tests/app/dao/test_users_dao.py
+++ b/tests/app/dao/test_users_dao.py
@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timedelta
+from datetime import timedelta
 
 import pytest
 from freezegun import freeze_time
@@ -28,6 +28,7 @@ from app.dao.users_dao import (
 from app.enums import AuthType, CodeType, PermissionType
 from app.errors import InvalidRequest
 from app.models import User, VerifyCode
+from app.utils import utc_now
 from tests.app.db import (
     create_permissions,
     create_service,
@@ -59,7 +60,7 @@ def test_create_user(notify_db_session, phone_number, expected_phone_number):
     assert user_query.email_address == email
     assert user_query.id == user.id
     assert user_query.mobile_number == expected_phone_number
-    assert user_query.email_access_validated_at == datetime.utcnow()
+    assert user_query.email_access_validated_at == utc_now()
     assert not user_query.platform_admin
 
@@ -146,8 +147,8 @@ def make_verify_code(user, age=None, expiry_age=None, code="12335", code_used=Fa
     verify_code = VerifyCode(
         code_type=CodeType.SMS,
         _code=code,
-        created_at=datetime.utcnow() - (age or timedelta(hours=0)),
-        expiry_datetime=datetime.utcnow() - (expiry_age or timedelta(0)),
+        created_at=utc_now() - (age or timedelta(hours=0)),
+        expiry_datetime=utc_now() - (expiry_age or timedelta(0)),
         user=user,
         code_used=code_used,
     )
@@ -172,16 +173,16 @@ def test_update_user_attribute(client, sample_user, user_attribute, user_value):
 
 @freeze_time("2020-01-24T12:00:00")
 def test_update_user_password(notify_api, notify_db_session, sample_user):
-    sample_user.password_changed_at = datetime.utcnow() - timedelta(days=1)
+    sample_user.password_changed_at = utc_now() - timedelta(days=1)
     password = "newpassword"
     assert not sample_user.check_password(password)
     update_user_password(sample_user, password)
     assert sample_user.check_password(password)
-    assert sample_user.password_changed_at == datetime.utcnow()
+    assert sample_user.password_changed_at == utc_now()
 
 
 def test_count_user_verify_codes(sample_user):
-    with freeze_time(datetime.utcnow() + timedelta(hours=1)):
+    with freeze_time(utc_now() + timedelta(hours=1)):
         make_verify_code(sample_user, code_used=True)
         make_verify_code(sample_user, expiry_age=timedelta(hours=2))
         [make_verify_code(sample_user) for i in range(5)]
diff --git a/tests/app/db.py b/tests/app/db.py
index 9bd55aba8..b62f99b4e 100644
--- a/tests/app/db.py
+++ b/tests/app/db.py
@@ -69,6 +69,7 @@ from app.models import (
     User,
     WebauthnCredential,
 )
+from app.utils import utc_now
 
 
 def create_user(
@@ -265,7 +266,7 @@ def create_notification(
         template = job.template
 
     if created_at is None:
-        created_at = datetime.utcnow()
+        created_at = utc_now()
 
     if to_field is None:
         to_field = (
@@ -280,8 +281,8 @@ def create_notification(
         NotificationStatus.VIRUS_SCAN_FAILED,
         NotificationStatus.PENDING_VIRUS_CHECK,
     ):
-        sent_at = sent_at or datetime.utcnow()
-        updated_at = updated_at or datetime.utcnow()
+        sent_at = sent_at or utc_now()
+        updated_at = updated_at or utc_now()
 
     if not one_off and (job is None and api_key is None):
         # we did not specify in test - lets create it
@@ -354,11 +355,11 @@ def create_notification_history(
         template = job.template
 
     if created_at is None:
-        created_at = datetime.utcnow()
+        created_at = utc_now()
 
     if status != NotificationStatus.CREATED:
-        sent_at = sent_at or datetime.utcnow()
-        updated_at = updated_at or datetime.utcnow()
+        sent_at = sent_at or utc_now()
+        updated_at = updated_at or utc_now()
 
     data = {
         "id": id or uuid.uuid4(),
@@ -412,7 +413,7 @@ def create_job(
         "template_version": template.version,
         "original_file_name": original_file_name,
         "notification_count": notification_count,
-        "created_at": created_at or datetime.utcnow(),
+        "created_at": created_at or utc_now(),
         "created_by": template.created_by,
         "job_status": job_status,
         "scheduled_for": scheduled_for,
@@ -456,10 +457,10 @@ def create_inbound_sms(
 
     inbound = InboundSms(
         service=service,
-        created_at=created_at or datetime.utcnow(),
+        created_at=created_at or utc_now(),
         notify_number=service.get_inbound_number(),
         user_number=user_number,
-        provider_date=provider_date or datetime.utcnow(),
+        provider_date=provider_date or utc_now(),
         provider_reference=provider_reference or "foo",
         content=content,
         provider=provider,
@@ -769,7 +770,7 @@ def create_complaint(service=None, notification=None, created_at=None):
         service_id=service.id,
         ses_feedback_id=str(uuid.uuid4()),
         complaint_type="abuse",
-        complaint_date=datetime.utcnow(),
+        complaint_date=utc_now(),
         created_at=created_at if created_at else datetime.now(),
     )
     db.session.add(complaint)
diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py
index 0ad34fdea..af65dd766 100644
--- a/tests/app/delivery/test_send_to_providers.py
+++ b/tests/app/delivery/test_send_to_providers.py
@@ -1,6 +1,5 @@
 import json
 from collections import namedtuple
-from datetime import datetime
 from unittest.mock import ANY
 
 import pytest
@@ -18,6 +17,7 @@ from app.enums import BrandType, KeyType, NotificationStatus, NotificationType
 from app.exceptions import NotificationTechnicalFailureException
 from app.models import EmailBranding, Notification
 from app.serialised_models import SerialisedService
+from app.utils import utc_now
 from tests.app.db import (
     create_email_branding,
     create_notification,
@@ -105,7 +105,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist(
     notification = Notification.query.filter_by(id=db_notification.id).one()
 
     assert notification.status == NotificationStatus.SENDING
-    assert notification.sent_at <= datetime.utcnow()
+    assert notification.sent_at <= utc_now()
     assert notification.sent_by == "sns"
     assert notification.billable_units == 1
     assert notification.personalisation == {"name": "Jo"}
@@ -147,7 +147,7 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist
     notification = Notification.query.filter_by(id=db_notification.id).one()
 
     assert notification.status == NotificationStatus.SENDING
-    assert notification.sent_at <= datetime.utcnow()
+    assert notification.sent_at <= utc_now()
     assert notification.sent_by == "ses"
     assert notification.personalisation == {"name": "Jo"}
diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py
index fd45c0253..da1230a1b 100644
--- a/tests/app/inbound_sms/test_rest.py
+++ b/tests/app/inbound_sms/test_rest.py
@@ -4,6 +4,7 @@
 import pytest
 from freezegun import freeze_time
 
 from app.enums import NotificationType
+from app.utils import utc_now
 from tests.app.db import (
     create_inbound_sms,
     create_service,
@@ -243,7 +244,7 @@ def test_get_most_recent_inbound_sms_for_service_respects_data_retention(
 ):
     create_service_data_retention(sample_service, NotificationType.SMS, 5)
     for i in range(10):
-        created = datetime.utcnow() - timedelta(days=i)
+        created = utc_now() - timedelta(days=i)
         create_inbound_sms(
             sample_service,
             user_number="44770090000{}".format(i),
@@ -288,7 +289,7 @@ def test_get_inbound_sms_for_service_respects_data_retention(
 ):
     create_service_data_retention(sample_service, NotificationType.SMS, 5)
     for i in range(10):
-        created = datetime.utcnow() - timedelta(days=i)
+        created = utc_now() - timedelta(days=i)
         create_inbound_sms(
             sample_service, user_number="44770090000{}".format(i), created_at=created
         )
diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py
index 670a02ca3..6d4112058 100644
--- a/tests/app/job/test_rest.py
+++ b/tests/app/job/test_rest.py
@@ -16,6 +16,7 @@ from app.enums import (
     NotificationType,
     TemplateType,
 )
+from app.utils import utc_now
 from tests import create_admin_authorization_header
 from tests.app.db import (
     create_ft_notification_status,
@@ -152,7 +153,7 @@ def test_create_unscheduled_job_with_sender_id_in_metadata(
 
 @freeze_time("2016-01-01 12:00:00.000000")
 def test_create_scheduled_job(client, sample_template, mocker, fake_uuid):
-    scheduled_date = (datetime.utcnow() + timedelta(hours=95, minutes=59)).isoformat()
+    scheduled_date = (utc_now() + timedelta(hours=95, minutes=59)).isoformat()
     mocker.patch("app.celery.tasks.process_job.apply_async")
     mocker.patch(
         "app.job.rest.get_job_metadata_from_s3",
@@ -250,7 +251,7 @@ def test_create_job_returns_400_if_file_is_invalid(
 def test_should_not_create_scheduled_job_more_then_96_hours_in_the_future(
     client, sample_template, mocker, fake_uuid
 ):
-    scheduled_date = (datetime.utcnow() + timedelta(hours=96, minutes=1)).isoformat()
+    scheduled_date = (utc_now() + timedelta(hours=96, minutes=1)).isoformat()
     mocker.patch("app.celery.tasks.process_job.apply_async")
     mocker.patch(
         "app.job.rest.get_job_metadata_from_s3",
@@ -287,7 +288,7 @@ def test_should_not_create_scheduled_job_more_then_96_hours_in_the_future(
 def test_should_not_create_scheduled_job_in_the_past(
     client, sample_template, mocker, fake_uuid
 ):
-    scheduled_date = (datetime.utcnow() - timedelta(minutes=1)).isoformat()
+    scheduled_date = (utc_now() - timedelta(minutes=1)).isoformat()
     mocker.patch("app.celery.tasks.process_job.apply_async")
     mocker.patch(
         "app.job.rest.get_job_metadata_from_s3",
@@ -656,7 +657,7 @@ def test_get_job_by_id_with_stats_for_old_job_where_notifications_have_been_purg
     old_job = create_job(
         sample_template,
         notification_count=10,
-        created_at=datetime.utcnow() - timedelta(days=9),
+        created_at=utc_now() - timedelta(days=9),
         job_status=JobStatus.FINISHED,
     )
 
@@ -771,8 +772,8 @@ def test_get_jobs_with_limit_days(admin_request, sample_template):
 def test_get_jobs_should_return_statistics(admin_request, sample_template):
-    now = datetime.utcnow()
-    earlier = datetime.utcnow() - timedelta(days=1)
+    now = utc_now()
+    earlier = utc_now() - timedelta(days=1)
     job_1 = create_job(sample_template, processing_started=earlier)
     job_2 = create_job(sample_template, processing_started=now)
     create_notification(job=job_1, status=NotificationStatus.CREATED)
@@ -807,8 +808,8 @@ def test_get_jobs_should_return_no_stats_if_no_rows_in_notifications(
     admin_request,
     sample_template,
 ):
-    now = datetime.utcnow()
-    earlier = datetime.utcnow() - timedelta(days=1)
+    now = utc_now()
+    earlier = utc_now() - timedelta(days=1)
     job_1 = create_job(sample_template, created_at=earlier)
     job_2 = create_job(sample_template, created_at=now)
 
diff --git a/tests/app/organization/test_invite_rest.py b/tests/app/organization/test_invite_rest.py
index 71e8c12ad..3b3c2387d 100644
--- a/tests/app/organization/test_invite_rest.py
+++ b/tests/app/organization/test_invite_rest.py
@@ -249,7 +249,7 @@ def test_validate_invitation_token_for_expired_token_returns_400(client):
     json_resp = json.loads(response.get_data(as_text=True))
     assert json_resp["result"] == "error"
    assert json_resp["message"] == {
-        "invitation": "Your invitation to GOV.UK Notify has expired. "
+        "invitation": "Your invitation to Notify.gov has expired. "
        "Please ask the person that invited you to send you another one"
    }
 
diff --git a/tests/app/organization/test_rest.py b/tests/app/organization/test_rest.py
index 914dca008..04b68884b 100644
--- a/tests/app/organization/test_rest.py
+++ b/tests/app/organization/test_rest.py
@@ -1,5 +1,4 @@
 import uuid
-from datetime import datetime
 
 import pytest
 from flask import current_app
@@ -13,6 +12,7 @@ from app.dao.organization_dao import (
 )
 from app.dao.services_dao import dao_archive_service
 from app.enums import OrganizationType
 from app.models import AnnualBilling, Organization
+from app.utils import utc_now
 from tests.app.db import (
     create_annual_billing,
     create_domain,
@@ -835,7 +835,7 @@ def test_get_organization_services_usage(admin_request, notify_db_session):
         service_id=service.id, free_sms_fragment_limit=10, financial_year_start=2019
     )
     create_ft_billing(
-        local_date=datetime.utcnow().date(),
+        local_date=utc_now().date(),
         template=template,
         billable_unit=19,
         rate=0.060,
@@ -873,7 +873,7 @@ def test_get_organization_services_usage_sort_active_first(
         service_id=service.id, free_sms_fragment_limit=10, financial_year_start=2019
     )
     create_ft_billing(
-        local_date=datetime.utcnow().date(),
+        local_date=utc_now().date(),
         template=template,
         billable_unit=19,
         rate=0.060,
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index 5535f814b..8a97046e0 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -36,6 +36,7 @@ from app.models import (
     ServiceSmsSender,
     User,
 )
+from app.utils import utc_now
 from tests import create_admin_authorization_header
 from tests.app.db import (
     create_annual_billing,
@@ -369,7 +370,7 @@ def test_get_service_by_id_should_404_if_no_service_for_user(notify_api, sample_
 def test_get_service_by_id_returns_go_live_user_and_go_live_at(
     admin_request, sample_user
 ):
-    now = datetime.utcnow()
+    now = utc_now()
     service = create_service(user=sample_user, go_live_user=sample_user, go_live_at=now)
     json_resp = admin_request.get("service.get_service_by_id", service_id=service.id)
     assert json_resp["data"]["go_live_user"] == str(sample_user.id)
@@ -2270,9 +2271,7 @@ def test_get_detailed_services_groups_by_service(notify_db_session):
     create_notification(service_1_template, status=NotificationStatus.DELIVERED)
     create_notification(service_1_template, status=NotificationStatus.CREATED)
 
-    data = get_detailed_services(
-        start_date=datetime.utcnow().date(), end_date=datetime.utcnow().date()
-    )
+    data = get_detailed_services(start_date=utc_now().date(), end_date=utc_now().date())
     data = sorted(data, key=lambda x: x["name"])
 
     assert len(data) == 2
@@ -2315,9 +2314,7 @@ def test_get_detailed_services_includes_services_with_no_notifications(
     service_1_template = create_template(service_1)
     create_notification(service_1_template)
 
-    data = get_detailed_services(
-        start_date=datetime.utcnow().date(), end_date=datetime.utcnow().date()
-    )
+    data = get_detailed_services(start_date=utc_now().date(), end_date=utc_now().date())
     data = sorted(data, key=lambda x: x["name"])
 
     assert len(data) == 2
@@ -2359,7 +2356,7 @@ def test_get_detailed_services_only_includes_todays_notifications(sample_templat
 
     with freeze_time("2015-10-10T12:00:00"):
         data = get_detailed_services(
-            start_date=datetime.utcnow().date(), end_date=datetime.utcnow().date()
+            start_date=utc_now().date(), end_date=utc_now().date()
         )
         data = sorted(data, key=lambda x: x["id"])
 
@@ -2386,29 +2383,29 @@ def test_get_detailed_services_for_date_range(
     from app.service.rest import get_detailed_services
 
     create_ft_notification_status(
-        local_date=(datetime.utcnow() - timedelta(days=3)).date(),
+        local_date=(utc_now() - timedelta(days=3)).date(),
         service=sample_template.service,
         notification_type=NotificationType.SMS,
     )
     create_ft_notification_status(
-        local_date=(datetime.utcnow() - timedelta(days=2)).date(),
+        local_date=(utc_now() - timedelta(days=2)).date(),
         service=sample_template.service,
         notification_type=NotificationType.SMS,
     )
     create_ft_notification_status(
-        local_date=(datetime.utcnow() - timedelta(days=1)).date(),
+        local_date=(utc_now() - timedelta(days=1)).date(),
         service=sample_template.service,
         notification_type=NotificationType.SMS,
     )
     create_notification(
         template=sample_template,
-        created_at=datetime.utcnow(),
+        created_at=utc_now(),
         status=NotificationStatus.DELIVERED,
     )
 
-    start_date = (datetime.utcnow() - timedelta(days=start_date_delta)).date()
-    end_date = (datetime.utcnow() - timedelta(days=end_date_delta)).date()
+    start_date = (utc_now() - timedelta(days=start_date_delta)).date()
+    end_date = (utc_now() - timedelta(days=end_date_delta)).date()
 
     data = get_detailed_services(
         only_active=False,
diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py
index 735730d63..2163f8f36 100644
--- a/tests/app/service/test_statistics_rest.py
+++ b/tests/app/service/test_statistics_rest.py
@@ -11,6 +11,7 @@ from app.enums import (
     StatisticsType,
     TemplateType,
 )
+from app.utils import utc_now
 from tests.app.db import (
     create_ft_notification_status,
     create_notification,
@@ -28,7 +29,7 @@ def test_get_template_usage_by_month_returns_correct_data(
         template=sample_template,
         count=3,
     )
-    create_notification(sample_template, created_at=datetime.utcnow())
+    create_notification(sample_template, created_at=utc_now())
 
     resp_json = admin_request.get(
         "service.get_monthly_template_usage",
@@ -74,7 +75,7 @@ def test_get_template_usage_by_month_returns_two_templates(
         template=sample_template,
         count=3,
     )
-    create_notification(sample_template, created_at=datetime.utcnow())
+    create_notification(sample_template, created_at=utc_now())
 
     resp_json = admin_request.get(
         "service.get_monthly_template_usage",
diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py
index 0f2f20b50..0f60dc3e3 100644
--- a/tests/app/service_invite/test_service_invite_rest.py
+++ b/tests/app/service_invite/test_service_invite_rest.py
@@ -335,7 +335,7 @@ def test_validate_invitation_token_for_expired_token_returns_400(client):
     json_resp = json.loads(response.get_data(as_text=True))
     assert json_resp["result"] == "error"
     assert json_resp["message"] == {
-        "invitation": "Your invitation to GOV.UK Notify has expired. "
+        "invitation": "Your invitation to Notify.gov has expired. "
         "Please ask the person that invited you to send you another one"
     }
 
diff --git a/tests/app/template/test_rest_history.py b/tests/app/template/test_rest_history.py
index 6aa234de0..737b8940d 100644
--- a/tests/app/template/test_rest_history.py
+++ b/tests/app/template/test_rest_history.py
@@ -5,6 +5,7 @@ from flask import url_for
 
 from app.dao.templates_dao import dao_update_template
 from app.enums import TemplateProcessType
+from app.utils import utc_now
 from tests import create_admin_authorization_header
 
@@ -32,7 +33,7 @@ def test_template_history_version(notify_api, sample_user, sample_template):
         datetime.strptime(
             json_resp["data"]["created_at"], "%Y-%m-%d %H:%M:%S.%f"
         ).date()
-        == datetime.utcnow().date()
+        == utc_now().date()
     )
 
diff --git a/tests/app/template_statistics/test_rest.py b/tests/app/template_statistics/test_rest.py
index be6b368ab..1ae65b22e 100644
--- a/tests/app/template_statistics/test_rest.py
+++ b/tests/app/template_statistics/test_rest.py
@@ -1,12 +1,12 @@
 import uuid
-from datetime import datetime, timedelta
+from datetime import timedelta
 from unittest.mock import Mock
 
 import pytest
 from freezegun import freeze_time
 
 from app.enums import NotificationStatus, TemplateType
-from app.utils import DATETIME_FORMAT
+from app.utils import DATETIME_FORMAT, utc_now
 from tests.app.db import create_ft_notification_status, create_notification
 
 # get_template_statistics_for_service_by_day
@@ -119,9 +119,9 @@ def test_get_template_statistics_for_service_by_day_returns_empty_list_if_no_tem
 def test_get_last_used_datetime_for_template(admin_request, sample_template):
-    date_from_notification = datetime.utcnow() - timedelta(hours=2)
+    date_from_notification = utc_now() - timedelta(hours=2)
     create_notification(template=sample_template, created_at=date_from_notification)
-    date_from_ft_status = (datetime.utcnow() - timedelta(days=2)).date()
+    date_from_ft_status = (utc_now() - timedelta(days=2)).date()
     create_ft_notification_status(
         local_date=date_from_ft_status, template=sample_template
     )
diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py
index a96eae599..7eee00bbf 100644
--- a/tests/app/test_commands.py
+++ b/tests/app/test_commands.py
@@ -37,6 +37,7 @@ from app.models import (
     Template,
     User,
 )
+from app.utils import utc_now
 from tests.app.db import (
     create_annual_billing,
     create_job,
@@ -101,7 +102,7 @@ def test_update_jobs_archived_flag(notify_db_session, notify_api):
     sms_template = create_template(service=service, template_type=TemplateType.SMS)
     create_job(sms_template)
 
-    right_now = datetime.datetime.utcnow()
+    right_now = utc_now()
     tomorrow = right_now + datetime.timedelta(days=1)
 
     right_now = right_now.strftime("%Y-%m-%d")
diff --git a/tests/app/test_model.py b/tests/app/test_model.py
index aab74fac8..e74ef06ff 100644
--- a/tests/app/test_model.py
+++ b/tests/app/test_model.py
@@ -1,5 +1,3 @@
-from datetime import datetime
-
 import pytest
 from freezegun import freeze_time
 from sqlalchemy.exc import IntegrityError
@@ -26,6 +24,7 @@ from app.models import (
     VerifyCode,
     filter_null_value_fields,
 )
+from app.utils import utc_now
 from tests.app.db import (
     create_inbound_number,
     create_notification,
@@ -385,7 +384,7 @@ def test_user_password():
 
 
 def test_annual_billing_serialize():
-    now = datetime.utcnow()
+    now = utc_now()
     ab = AnnualBilling()
     service = Service()
     ab.service = service
@@ -449,7 +448,7 @@ def test_rate_str():
     ),
 )
 def test_organization_agreement_mou(notify_db_session, agreement_type, expected):
-    now = datetime.utcnow()
+    now = utc_now()
     agree = Agreement()
     agree.id = "whatever"
     agree.start_time = now
@@ -469,7 +468,7 @@ def test_organization_agreement_mou(notify_db_session, agreement_type, expected)
     ),
 )
 def test_organization_agreement_active(notify_db_session, agreement_status, expected):
-    now = datetime.utcnow()
+    now = utc_now()
     agree = Agreement()
     agree.id = "whatever"
     agree.start_time = now
@@ -485,7 +484,7 @@ def test_agreement_serialize():
     agree = Agreement()
     agree.id = "abc"
 
-    now = datetime.utcnow()
+    now = utc_now()
     agree.start_time = now
     agree.end_time = now
     serialize = agree.serialize()
diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py
index 26eb085a4..c1855787b 100644
--- a/tests/app/user/test_rest_verify.py
+++ b/tests/app/user/test_rest_verify.py
@@ -12,12 +12,13 @@ from app.dao.services_dao import dao_fetch_service_by_id
 from app.dao.users_dao import create_user_code
 from app.enums import AuthType, CodeType
 from app.models import Notification, User, VerifyCode
+from app.utils import utc_now
 from tests import create_admin_authorization_header
 
 
 @freeze_time("2016-01-01T12:00:00")
 def test_user_verify_sms_code(client, sample_sms_code):
-    sample_sms_code.user.logged_in_at = datetime.utcnow() - timedelta(days=1)
+    sample_sms_code.user.logged_in_at = utc_now() - timedelta(days=1)
     assert not VerifyCode.query.first().code_used
     assert sample_sms_code.user.current_session_id is None
     data = json.dumps(
@@ -31,8 +32,8 @@ def test_user_verify_sms_code(client, sample_sms_code):
     )
     assert resp.status_code == 204
     assert VerifyCode.query.first().code_used
-    assert sample_sms_code.user.logged_in_at == datetime.utcnow()
-    assert sample_sms_code.user.email_access_validated_at != datetime.utcnow()
+    assert sample_sms_code.user.logged_in_at == utc_now()
+    assert sample_sms_code.user.email_access_validated_at != utc_now()
     assert sample_sms_code.user.current_session_id is not None
 
@@ -122,7 +123,7 @@ def test_user_verify_code_expired_code_and_increments_failed_login_count(
 
 @freeze_time("2016-01-01 10:00:00.000000")
 def test_user_verify_password(client, sample_user):
-    yesterday = datetime.utcnow() - timedelta(days=1)
+    yesterday = utc_now() - timedelta(days=1)
     sample_user.logged_in_at = yesterday
     data = json.dumps({"password": "password"})
     auth_header = create_admin_authorization_header()
@@ -288,8 +289,8 @@ def test_send_sms_code_returns_204_when_too_many_codes_already_created(
     verify_code = VerifyCode(
         code_type=CodeType.SMS,
         _code=12345,
-        created_at=datetime.utcnow() - timedelta(minutes=10),
-        expiry_datetime=datetime.utcnow() + timedelta(minutes=40),
+        created_at=utc_now() - timedelta(minutes=10),
+        expiry_datetime=utc_now() + timedelta(minutes=40),
         user=sample_user,
     )
     db.session.add(verify_code)
@@ -537,8 +538,8 @@ def test_send_email_code_returns_404_for_bad_input_data(admin_request):
 # we send iAuthType.SMS and AuthType.WEBAUTHN users email code to validate their email access
 @pytest.mark.parametrize("auth_type", AuthType)
 def test_user_verify_email_code(admin_request, sample_user, auth_type):
-    sample_user.logged_in_at = datetime.utcnow() - timedelta(days=1)
-    sample_user.email_access_validated_at = datetime.utcnow() - timedelta(days=1)
+    sample_user.logged_in_at = utc_now() - timedelta(days=1)
+    sample_user.email_access_validated_at = utc_now() - timedelta(days=1)
     sample_user.auth_type = auth_type
     magic_code = str(uuid.uuid4())
     verify_code = create_user_code(sample_user, magic_code, CodeType.EMAIL)
@@ -553,8 +554,8 @@ def test_user_verify_email_code(admin_request, sample_user, auth_type):
     )
 
     assert verify_code.code_used
-    assert sample_user.logged_in_at == datetime.utcnow()
-    assert sample_user.email_access_validated_at == datetime.utcnow()
+    assert sample_user.logged_in_at == utc_now()
+    assert sample_user.email_access_validated_at == utc_now()
     assert sample_user.current_session_id is not None
 
diff --git a/tests/notifications_utils/clients/redis/test_redis_client.py b/tests/notifications_utils/clients/redis/test_redis_client.py
index 536cce967..c9eb63240 100644
--- a/tests/notifications_utils/clients/redis/test_redis_client.py
+++ b/tests/notifications_utils/clients/redis/test_redis_client.py
@@ -1,10 +1,10 @@
 import uuid
-from datetime import datetime
 from unittest.mock import Mock, call
 
 import pytest
 from freezegun import freeze_time
 
+from app.utils import utc_now
 from notifications_utils.clients.redis.redis_client import RedisClient, prepare_value
@@ -208,9 +208,7 @@ def test_delete_multi(mocked_redis_client):
         (1.2, 1.2),
         (uuid.UUID(int=0), "00000000-0000-0000-0000-000000000000"),
         pytest.param({"a": 1}, None, marks=pytest.mark.xfail(raises=ValueError)),
-        pytest.param(
-            datetime.utcnow(), None, marks=pytest.mark.xfail(raises=ValueError)
-        ),
+        pytest.param(utc_now(), None, marks=pytest.mark.xfail(raises=ValueError)),
     ],
 )
 def test_prepare_value(input, output):
diff --git a/tests/notifications_utils/test_letter_timings.py b/tests/notifications_utils/test_letter_timings.py
index aecc9c744..f93d32e99 100644
--- a/tests/notifications_utils/test_letter_timings.py
+++ b/tests/notifications_utils/test_letter_timings.py
@@ -4,6 +4,7 @@
 import pytest
 import pytz
 from freezegun import freeze_time
 
+from app.utils import utc_now
 from notifications_utils.letter_timings import (
     get_letter_timings,
     letter_can_be_cancelled,
@@ -188,7 +189,7 @@ def test_get_estimated_delivery_date_for_letter(
 def test_letter_cannot_be_cancelled_if_letter_status_is_not_created_or_pending_virus_check(
     status,
 ):
-    notification_created_at = datetime.utcnow()
+    notification_created_at = utc_now()
 
     assert not letter_can_be_cancelled(status, notification_created_at)
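
Every hunk in this patch performs the same substitution, yet the `utc_now()` helper itself, imported from `app.utils`, never appears in the diff. The sketch below is an assumption about what such a helper typically looks like, not the repository's actual code; only the name `utc_now` and the module path `app.utils` are taken from the import lines above, everything else is inferred:

    from datetime import datetime, timezone

    def utc_now() -> datetime:
        # Hypothetical drop-in replacement for the deprecated datetime.utcnow():
        # read the clock as a timezone-aware UTC datetime, then strip tzinfo so
        # the result stays naive and still compares cleanly against the naive
        # timestamps these tests read back from the database.
        return datetime.now(timezone.utc).replace(tzinfo=None)

Whatever the exact body, the tests above rely on one property: freezegun patches the datetime class itself, so a helper built on `datetime.now(timezone.utc)` is frozen by `@freeze_time` exactly as `datetime.utcnow()` was. That is why equality assertions such as `assert sample_user.logged_in_at == utc_now()` keep passing under frozen clocks after this migration.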