From 2e7e6e81fcbae85f58b6c98ff1a100a0bf60fc47 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 9 Aug 2024 09:11:28 -0700 Subject: [PATCH 01/41] Need to remove priority logic --- app/config.py | 2 - app/dao/provider_details_dao.py | 22 +----- app/notifications/rest.py | 6 +- app/provider_details/rest.py | 1 - app/service/send_notification.py | 6 +- app/v2/notifications/post_notifications.py | 6 +- tests/app/dao/test_provider_details_dao.py | 71 ------------------- tests/app/provider_details/test_rest.py | 19 ----- .../test_send_notification.py | 43 ----------- .../test_send_one_off_notification.py | 18 ----- tests/app/test_config.py | 3 +- 11 files changed, 5 insertions(+), 192 deletions(-) diff --git a/app/config.py b/app/config.py index 65ef6b2d3..75cf0e783 100644 --- a/app/config.py +++ b/app/config.py @@ -11,7 +11,6 @@ from app.cloudfoundry_config import cloud_config class QueueNames(object): PERIODIC = "periodic-tasks" - PRIORITY = "priority-tasks" DATABASE = "database-tasks" SEND_SMS = "send-sms-tasks" CHECK_SMS = "check-sms_tasks" @@ -30,7 +29,6 @@ class QueueNames(object): @staticmethod def all_queues(): return [ - QueueNames.PRIORITY, QueueNames.PERIODIC, QueueNames.DATABASE, QueueNames.SEND_SMS, diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index 73132a44e..9b6b3b726 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -33,20 +33,6 @@ def dao_get_provider_versions(provider_id): ) -def _adjust_provider_priority(provider, new_priority): - current_app.logger.info( - f"Adjusting provider priority - {provider.identifier} going from {provider.priority} to {new_priority}" - ) - provider.priority = new_priority - - # Automatic update so set as notify user - provider.created_by_id = current_app.config["NOTIFY_USER_ID"] - - # update without commit so that both rows can be changed without ending the transaction - # and releasing the for_update lock - _update_provider_details_without_commit(provider) - - def _get_sms_providers_for_update(time_threshold): """ Returns a list of providers, while holding a for_update lock on the provider details table, guaranteeing that those @@ -86,11 +72,7 @@ def get_provider_details_by_notification_type( if supports_international: filters.append(ProviderDetails.supports_international == supports_international) - return ( - ProviderDetails.query.filter(*filters) - .order_by(asc(ProviderDetails.priority)) - .all() - ) + return ProviderDetails.query.filter(*filters).all() @autocommit @@ -135,7 +117,6 @@ def dao_get_provider_stats(): ProviderDetails.id, ProviderDetails.display_name, ProviderDetails.identifier, - ProviderDetails.priority, ProviderDetails.notification_type, ProviderDetails.active, ProviderDetails.updated_at, @@ -149,7 +130,6 @@ def dao_get_provider_stats(): .outerjoin(User, ProviderDetails.created_by_id == User.id) .order_by( ProviderDetails.notification_type, - ProviderDetails.priority, ) .all() ) diff --git a/app/notifications/rest.py b/app/notifications/rest.py index f52bd1933..af1cd3ca4 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -168,11 +168,7 @@ def send_notification(notification_type): reply_to_text=template.reply_to_text, ) if not simulated: - queue_name = ( - QueueNames.PRIORITY - if template.process_type == TemplateProcessType.PRIORITY - else None - ) + queue_name = None send_notification_to_queue(notification=notification_model, queue=queue_name) else: diff --git a/app/provider_details/rest.py b/app/provider_details/rest.py index 9cc9f714a..3a7e62332 100644 --- a/app/provider_details/rest.py +++ b/app/provider_details/rest.py @@ -23,7 +23,6 @@ def get_providers(): "id": row.id, "display_name": row.display_name, "identifier": row.identifier, - "priority": row.priority, "notification_type": row.notification_type, "active": row.active, "updated_at": row.updated_at, diff --git a/app/service/send_notification.py b/app/service/send_notification.py index 4459ded3c..62628b1c4 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -80,11 +80,7 @@ def send_one_off_notification(service_id, post_data): client_reference=client_reference, ) - queue_name = ( - QueueNames.PRIORITY - if template.process_type == TemplateProcessType.PRIORITY - else None - ) + queue_name = None send_notification_to_queue( notification=notification, diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 856179f85..eb1457d0a 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -176,11 +176,7 @@ def process_sms_or_email_notification( ) if not simulated: - queue_name = ( - QueueNames.PRIORITY - if template_process_type == TemplateProcessType.PRIORITY - else None - ) + queue_name = None send_notification_to_queue_detached( key_type=api_user.key_type, notification_type=notification_type, diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index b03d965d0..5a6f5c218 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -6,7 +6,6 @@ from sqlalchemy.sql import desc from app import notification_provider_clients from app.dao.provider_details_dao import ( - _adjust_provider_priority, _get_sms_providers_for_update, dao_get_provider_stats, dao_update_provider_details, @@ -33,9 +32,6 @@ def set_primary_sms_provider(identifier): get_alternative_sms_provider(identifier) ) - primary_provider.priority = 10 - secondary_provider.priority = 20 - dao_update_provider_details(primary_provider) dao_update_provider_details(secondary_provider) @@ -55,18 +51,6 @@ def test_can_get_sms_international_providers(notify_db_session): assert all(prov.supports_international for prov in sms_providers) -def test_can_get_sms_providers_in_order_of_priority(notify_db_session): - providers = get_provider_details_by_notification_type(NotificationType.SMS, False) - priorities = [provider.priority for provider in providers] - assert priorities == sorted(priorities) - - -def test_can_get_email_providers_in_order_of_priority(notify_db_session): - providers = get_provider_details_by_notification_type(NotificationType.EMAIL) - - assert providers[0].identifier == "ses" - - def test_can_get_email_providers(notify_db_session): assert len(get_provider_details_by_notification_type(NotificationType.EMAIL)) == 1 types = [ @@ -146,61 +130,6 @@ def test_get_alternative_sms_provider_fails_if_unrecognised(): get_alternative_sms_provider("ses") -@freeze_time("2016-01-01 00:30") -def test_adjust_provider_priority_sets_priority( - restore_provider_details, - notify_user, - sns_provider, -): - # need to update these manually to avoid triggering the `onupdate` clause of the updated_at column - ProviderDetails.query.filter(ProviderDetails.identifier == "sns").update( - {"updated_at": datetime.min} - ) - - _adjust_provider_priority(sns_provider, 50) - - assert sns_provider.updated_at == utc_now() - assert sns_provider.created_by.id == notify_user.id - assert sns_provider.priority == 50 - - -@freeze_time("2016-01-01 00:30") -def test_adjust_provider_priority_adds_history( - restore_provider_details, - notify_user, - sns_provider, -): - # need to update these manually to avoid triggering the `onupdate` clause of the updated_at column - ProviderDetails.query.filter(ProviderDetails.identifier == "sns").update( - {"updated_at": datetime.min} - ) - - old_provider_history_rows = ( - ProviderDetailsHistory.query.filter( - ProviderDetailsHistory.id == sns_provider.id - ) - .order_by(desc(ProviderDetailsHistory.version)) - .all() - ) - - _adjust_provider_priority(sns_provider, 50) - - updated_provider_history_rows = ( - ProviderDetailsHistory.query.filter( - ProviderDetailsHistory.id == sns_provider.id - ) - .order_by(desc(ProviderDetailsHistory.version)) - .all() - ) - - assert len(updated_provider_history_rows) - len(old_provider_history_rows) == 1 - assert ( - updated_provider_history_rows[0].version - old_provider_history_rows[0].version - == 1 - ) - assert updated_provider_history_rows[0].priority == 50 - - @freeze_time("2016-01-01 01:00") def test_get_sms_providers_for_update_returns_providers(restore_provider_details): ProviderDetails.query.filter(ProviderDetails.identifier == "sns").update( diff --git a/tests/app/provider_details/test_rest.py b/tests/app/provider_details/test_rest.py index b0f67a5b6..5deb88bd8 100644 --- a/tests/app/provider_details/test_rest.py +++ b/tests/app/provider_details/test_rest.py @@ -42,7 +42,6 @@ def test_get_provider_contains_correct_fields(client, sample_template): "created_by_name", "display_name", "identifier", - "priority", "notification_type", "active", "updated_at", @@ -53,24 +52,6 @@ def test_get_provider_contains_correct_fields(client, sample_template): assert allowed_keys == set(json_resp[0].keys()) -def test_should_be_able_to_update_priority(client, restore_provider_details): - provider = ProviderDetails.query.first() - - update_resp = client.post( - "/provider-details/{}".format(provider.id), - headers=[ - ("Content-Type", "application/json"), - create_admin_authorization_header(), - ], - data=json.dumps({"priority": 5}), - ) - assert update_resp.status_code == 200 - update_json = json.loads(update_resp.get_data(as_text=True))["provider_details"] - assert update_json["identifier"] == provider.identifier - assert update_json["priority"] == 5 - assert provider.priority == 5 - - def test_should_be_able_to_update_status(client, restore_provider_details): provider = ProviderDetails.query.first() diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index 036c5bac8..464077bf0 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -1113,49 +1113,6 @@ def test_create_template_raises_invalid_request_when_content_too_large( } -@pytest.mark.parametrize( - "notification_type,send_to", - [ - (NotificationType.SMS, "2028675309"), - ( - NotificationType.EMAIL, - "sample@email.com", - ), - ], -) -def test_send_notification_uses_priority_queue_when_template_is_marked_as_priority( - client, - sample_service, - mocker, - notification_type, - send_to, -): - sample = create_template( - sample_service, - template_type=notification_type, - process_type=TemplateProcessType.PRIORITY, - ) - mocked = mocker.patch( - f"app.celery.provider_tasks.deliver_{notification_type}.apply_async" - ) - - data = {"to": send_to, "template": str(sample.id)} - - auth_header = create_service_authorization_header(service_id=sample.service_id) - - response = client.post( - path=f"/notifications/{notification_type}", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - response_data = json.loads(response.data)["data"] - notification_id = response_data["notification"]["id"] - - assert response.status_code == 201 - mocked.assert_called_once_with([notification_id], queue="priority-tasks") - - @pytest.mark.parametrize( "notification_type, send_to", [ diff --git a/tests/app/service/send_notification/test_send_one_off_notification.py b/tests/app/service/send_notification/test_send_one_off_notification.py index 9983515c7..889a3e0c1 100644 --- a/tests/app/service/send_notification/test_send_one_off_notification.py +++ b/tests/app/service/send_notification/test_send_one_off_notification.py @@ -161,24 +161,6 @@ def test_send_one_off_notification_calls_persist_correctly_for_email( ) -def test_send_one_off_notification_honors_priority( - notify_db_session, persist_mock, celery_mock -): - service = create_service() - template = create_template(service=service) - template.process_type = TemplateProcessType.PRIORITY - - post_data = { - "template_id": str(template.id), - "to": "202-867-5309", - "created_by": str(service.created_by_id), - } - - send_one_off_notification(service.id, post_data) - - assert celery_mock.call_args[1]["queue"] == QueueNames.PRIORITY - - def test_send_one_off_notification_raises_if_invalid_recipient(notify_db_session): service = create_service() template = create_template(service=service) diff --git a/tests/app/test_config.py b/tests/app/test_config.py index 2d9591be8..46a061ddd 100644 --- a/tests/app/test_config.py +++ b/tests/app/test_config.py @@ -4,10 +4,9 @@ from app.config import QueueNames def test_queue_names_all_queues_correct(): # Need to ensure that all_queues() only returns queue names used in API queues = QueueNames.all_queues() - assert len(queues) == 15 + assert len(queues) == 14 assert set( [ - QueueNames.PRIORITY, QueueNames.PERIODIC, QueueNames.DATABASE, QueueNames.SEND_SMS, From 8444b7669094ab9d4528eb63bdcb03312403dd3a Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 9 Aug 2024 09:18:58 -0700 Subject: [PATCH 02/41] fix flake8 --- app/dao/provider_details_dao.py | 2 +- app/notifications/rest.py | 3 +-- app/service/send_notification.py | 3 +-- app/v2/notifications/post_notifications.py | 2 +- tests/app/dao/test_provider_details_dao.py | 2 -- tests/app/service/send_notification/test_send_notification.py | 2 +- .../send_notification/test_send_one_off_notification.py | 2 -- 7 files changed, 5 insertions(+), 11 deletions(-) diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index 9b6b3b726..b0ab48d09 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -1,7 +1,7 @@ from datetime import datetime from flask import current_app -from sqlalchemy import asc, desc, func +from sqlalchemy import desc, func from app import db from app.dao.dao_utils import autocommit diff --git a/app/notifications/rest.py b/app/notifications/rest.py index af1cd3ca4..43224f0e7 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -2,9 +2,8 @@ from flask import Blueprint, current_app, jsonify, request from app import api_user, authenticated_service from app.aws.s3 import get_personalisation_from_s3, get_phone_number_from_s3 -from app.config import QueueNames from app.dao import notifications_dao -from app.enums import KeyType, NotificationType, TemplateProcessType +from app.enums import KeyType, NotificationType from app.errors import InvalidRequest, register_errors from app.notifications.process_notifications import ( persist_notification, diff --git a/app/service/send_notification.py b/app/service/send_notification.py index 62628b1c4..6e29c0e59 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -1,12 +1,11 @@ from sqlalchemy.orm.exc import NoResultFound -from app.config import QueueNames from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id from app.dao.services_dao import dao_fetch_service_by_id from app.dao.templates_dao import dao_get_template_by_id_and_service_id from app.dao.users_dao import get_user_by_id -from app.enums import KeyType, NotificationType, TemplateProcessType +from app.enums import KeyType, NotificationType from app.errors import BadRequestError from app.notifications.process_notifications import ( persist_notification, diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index eb1457d0a..b006b57f5 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -8,7 +8,7 @@ from app import api_user, authenticated_service, document_download_client, encry from app.celery.tasks import save_api_email, save_api_sms from app.clients.document_download import DocumentDownloadError from app.config import QueueNames -from app.enums import KeyType, NotificationStatus, NotificationType, TemplateProcessType +from app.enums import KeyType, NotificationStatus, NotificationType from app.models import Notification from app.notifications.process_notifications import ( persist_notification, diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 5a6f5c218..fd8f4a43d 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta import pytest from freezegun import freeze_time -from sqlalchemy.sql import desc from app import notification_provider_clients from app.dao.provider_details_dao import ( @@ -15,7 +14,6 @@ from app.dao.provider_details_dao import ( ) from app.enums import NotificationType, TemplateType from app.models import ProviderDetails, ProviderDetailsHistory -from app.utils import utc_now from tests.app.db import create_ft_billing, create_service, create_template from tests.conftest import set_config diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index 464077bf0..dcd6cc8e7 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -11,7 +11,7 @@ from app.dao import notifications_dao from app.dao.api_key_dao import save_model_api_key from app.dao.services_dao import dao_update_service from app.dao.templates_dao import dao_get_all_templates_for_service, dao_update_template -from app.enums import KeyType, NotificationType, TemplateProcessType, TemplateType +from app.enums import KeyType, NotificationType, TemplateType from app.errors import InvalidRequest, RateLimitError from app.models import ApiKey, Notification, NotificationHistory, Template from app.service.send_notification import send_one_off_notification diff --git a/tests/app/service/send_notification/test_send_one_off_notification.py b/tests/app/service/send_notification/test_send_one_off_notification.py index 889a3e0c1..78ab0977e 100644 --- a/tests/app/service/send_notification/test_send_one_off_notification.py +++ b/tests/app/service/send_notification/test_send_one_off_notification.py @@ -3,14 +3,12 @@ from unittest.mock import Mock import pytest -from app.config import QueueNames from app.dao.service_guest_list_dao import dao_add_and_commit_guest_list_contacts from app.enums import ( KeyType, NotificationType, RecipientType, ServicePermissionType, - TemplateProcessType, TemplateType, ) from app.errors import BadRequestError From 36e5614d4a53fceeb41632d0c92a46caa6f467e8 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 9 Aug 2024 09:28:50 -0700 Subject: [PATCH 03/41] fix flake8 --- app/v2/notifications/post_notifications.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index b006b57f5..a5ad17646 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -85,7 +85,6 @@ def process_sms_or_email_notification( notification_type, template, template_with_content, - template_process_type, service, reply_to_text=None, ): From 5ffab276ceb2e97f865a73341f136ab16dc10e06 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 12 Aug 2024 10:22:52 -0700 Subject: [PATCH 04/41] remove unneeded govuk dependencies --- notifications_utils/letter_timings.py | 177 ----------- poetry.lock | 16 +- pyproject.toml | 1 - .../test_letter_timings.py | 277 ------------------ 4 files changed, 1 insertion(+), 470 deletions(-) delete mode 100644 notifications_utils/letter_timings.py delete mode 100644 tests/notifications_utils/test_letter_timings.py diff --git a/notifications_utils/letter_timings.py b/notifications_utils/letter_timings.py deleted file mode 100644 index b171d6c3b..000000000 --- a/notifications_utils/letter_timings.py +++ /dev/null @@ -1,177 +0,0 @@ -from collections import namedtuple -from datetime import time, timedelta - -import pytz -from govuk_bank_holidays.bank_holidays import BankHolidays - -from app.utils import utc_now -from notifications_utils.countries.data import Postage -from notifications_utils.timezones import utc_string_to_aware_gmt_datetime - -LETTER_PROCESSING_DEADLINE = time(17, 30) -CANCELLABLE_JOB_LETTER_STATUSES = [ - "created", - "cancelled", - "virus-scan-failed", - "validation-failed", - "technical-failure", - "pending-virus-check", -] - - -non_working_days_dvla = BankHolidays( - use_cached_holidays=True, - weekend=(5, 6), -) -non_working_days_royal_mail = BankHolidays( - use_cached_holidays=True, - weekend=(6,), # Only Sunday (day 6 of the week) is a non-working day -) - - -def set_gmt_hour(day, hour): - return ( - day.astimezone(pytz.timezone("Europe/London")) - .replace(hour=hour, minute=0) - .astimezone(pytz.utc) - ) - - -def get_next_work_day(date, non_working_days): - next_day = date + timedelta(days=1) - if non_working_days.is_work_day( - date=next_day.date(), - division=BankHolidays.ENGLAND_AND_WALES, - ): - return next_day - return get_next_work_day(next_day, non_working_days) - - -def get_next_dvla_working_day(date): - """ - Printing takes place monday to friday, excluding bank holidays - """ - return get_next_work_day(date, non_working_days=non_working_days_dvla) - - -def get_next_royal_mail_working_day(date): - """ - Royal mail deliver letters on monday to saturday - """ - return get_next_work_day(date, non_working_days=non_working_days_royal_mail) - - -def get_delivery_day(date, *, days_to_deliver): - next_day = get_next_royal_mail_working_day(date) - if days_to_deliver == 1: - return next_day - return get_delivery_day(next_day, days_to_deliver=(days_to_deliver - 1)) - - -def get_min_and_max_days_in_transit(postage): - return { - # first class post is printed earlier in the day, so will - # actually transit on the printing day, and be delivered the next - # day, so effectively spends no full days in transit - "first": (0, 0), - "second": (1, 2), - Postage.EUROPE: (3, 5), - Postage.REST_OF_WORLD: (5, 7), - }[postage] - - -def get_earliest_and_latest_delivery(print_day, postage): - for days_to_transit in get_min_and_max_days_in_transit(postage): - yield get_delivery_day(print_day, days_to_deliver=1 + days_to_transit) - - -def get_letter_timings(upload_time, postage): - LetterTimings = namedtuple( - "LetterTimings", "printed_by, is_printed, earliest_delivery, latest_delivery" - ) - - # shift anything after 5:30pm to the next day - processing_day = utc_string_to_aware_gmt_datetime(upload_time) + timedelta( - hours=6, minutes=30 - ) - print_day = get_next_dvla_working_day(processing_day) - - earliest_delivery, latest_delivery = get_earliest_and_latest_delivery( - print_day, postage - ) - - # print deadline is 3pm BST - printed_by = set_gmt_hour(print_day, hour=15) - now = utc_now().replace(tzinfo=pytz.utc).astimezone(pytz.timezone("Europe/London")) - - return LetterTimings( - printed_by=printed_by, - is_printed=(now > printed_by), - earliest_delivery=set_gmt_hour(earliest_delivery, hour=16), - latest_delivery=set_gmt_hour(latest_delivery, hour=16), - ) - - -def letter_can_be_cancelled(notification_status, notification_created_at): - """ - If letter does not have status of created or pending-virus-check - => can't be cancelled (it has already been processed) - - If it's after 5.30pm local time and the notification was created today before 5.30pm local time - => can't be cancelled (it will already be zipped up to be sent) - """ - if notification_status not in ("created", "pending-virus-check"): - return False - - if too_late_to_cancel_letter(notification_created_at): - return False - return True - - -def too_late_to_cancel_letter(notification_created_at): - time_created_at = notification_created_at - day_created_on = time_created_at.date() - - current_time = utc_now() - current_day = current_time.date() - if ( - _after_letter_processing_deadline() - and _notification_created_before_today_deadline(notification_created_at) - ): - return True - if ( - _notification_created_before_that_day_deadline(notification_created_at) - and day_created_on < current_day - ): - return True - if (current_day - day_created_on).days > 1: - return True - - -def _after_letter_processing_deadline(): - current_utc_datetime = utc_now() - bst_time = current_utc_datetime.time() - - return bst_time >= LETTER_PROCESSING_DEADLINE - - -def _notification_created_before_today_deadline(notification_created_at): - current_bst_datetime = utc_now() - todays_deadline = current_bst_datetime.replace( - hour=LETTER_PROCESSING_DEADLINE.hour, - minute=LETTER_PROCESSING_DEADLINE.minute, - ) - - notification_created_at_in_bst = notification_created_at - - return notification_created_at_in_bst <= todays_deadline - - -def _notification_created_before_that_day_deadline(notification_created_at): - notification_created_at_bst_datetime = notification_created_at - created_at_day_deadline = notification_created_at_bst_datetime.replace( - hour=LETTER_PROCESSING_DEADLINE.hour, - minute=LETTER_PROCESSING_DEADLINE.minute, - ) - - return notification_created_at_bst_datetime <= created_at_day_deadline diff --git a/poetry.lock b/poetry.lock index cc661a9a1..baab16245 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1562,20 +1562,6 @@ files = [ {file = "geojson-3.1.0.tar.gz", hash = "sha256:58a7fa40727ea058efc28b0e9ff0099eadf6d0965e04690830208d3ef571adac"}, ] -[[package]] -name = "govuk-bank-holidays" -version = "0.14" -description = "Tool to load UK bank holidays from GOV.UK" -optional = false -python-versions = ">=3.6" -files = [ - {file = "govuk-bank-holidays-0.14.tar.gz", hash = "sha256:ce85102423b72908957d25981f616494729686515d5d66c09a1d35a354ce20a6"}, - {file = "govuk_bank_holidays-0.14-py3-none-any.whl", hash = "sha256:da485c4a40c6c874c925916e492e3f20b807cffba7eed5f07fb69327aef6b10b"}, -] - -[package.dependencies] -requests = "*" - [[package]] name = "greenlet" version = "3.0.3" @@ -4739,4 +4725,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "89b99841ebd4bd735104048c8f8f0a35bfedb700a7a5648ecabe713816e25579" +content-hash = "8fa243a938720113dd0999656594d7d3660a1e6202a4f26b9e9abe25d19b0cc0" diff --git a/pyproject.toml b/pyproject.toml index 9f9b39ce7..219f98ee6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,6 @@ faker = "^26.0.0" async-timeout = "^4.0.3" bleach = "^6.1.0" geojson = "^3.1.0" -govuk-bank-holidays = "^0.14" numpy = "^1.26.4" ordered-set = "^4.1.0" phonenumbers = "^8.13.42" diff --git a/tests/notifications_utils/test_letter_timings.py b/tests/notifications_utils/test_letter_timings.py deleted file mode 100644 index f93d32e99..000000000 --- a/tests/notifications_utils/test_letter_timings.py +++ /dev/null @@ -1,277 +0,0 @@ -from datetime import datetime - -import pytest -import pytz -from freezegun import freeze_time - -from app.utils import utc_now -from notifications_utils.letter_timings import ( - get_letter_timings, - letter_can_be_cancelled, -) - - -@freeze_time("2017-07-14 13:59:59") # Friday, before print deadline (3PM EST) -@pytest.mark.parametrize( - ( - "upload_time", - "expected_print_time", - "is_printed", - "first_class", - "expected_earliest", - "expected_latest", - ), - [ - # EST - # ================================================================== - # First thing Monday - ( - "Monday 2017-07-10 00:00:01", - "Tuesday 2017-07-11 15:00", - True, - "Wednesday 2017-07-12 16:00", - "Thursday 2017-07-13 16:00", - "Friday 2017-07-14 16:00", - ), - # Monday at 17:29 EST (sent on monday) - ( - "Monday 2017-07-10 16:29:59", - "Tuesday 2017-07-11 15:00", - True, - "Wednesday 2017-07-12 16:00", - "Thursday 2017-07-13 16:00", - "Friday 2017-07-14 16:00", - ), - # Monday at 17:30 EST (sent on tuesday) - ( - "Monday 2017-07-10 16:30:01", - "Wednesday 2017-07-12 15:00", - True, - "Thursday 2017-07-13 16:00", - "Friday 2017-07-14 16:00", - "Saturday 2017-07-15 16:00", - ), - # Tuesday before 17:30 EST - ( - "Tuesday 2017-07-11 12:00:00", - "Wednesday 2017-07-12 15:00", - True, - "Thursday 2017-07-13 16:00", - "Friday 2017-07-14 16:00", - "Saturday 2017-07-15 16:00", - ), - # Wednesday before 17:30 EST - ( - "Wednesday 2017-07-12 12:00:00", - "Thursday 2017-07-13 15:00", - True, - "Friday 2017-07-14 16:00", - "Saturday 2017-07-15 16:00", - "Monday 2017-07-17 16:00", - ), - # Thursday before 17:30 EST - ( - "Thursday 2017-07-13 12:00:00", - "Friday 2017-07-14 15:00", - False, - "Saturday 2017-07-15 16:00", - "Monday 2017-07-17 16:00", - "Tuesday 2017-07-18 16:00", - ), - # Friday anytime - ( - "Friday 2017-07-14 00:00:00", - "Monday 2017-07-17 15:00", - False, - "Tuesday 2017-07-18 16:00", - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - ), - ( - "Friday 2017-07-14 12:00:00", - "Monday 2017-07-17 15:00", - False, - "Tuesday 2017-07-18 16:00", - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - ), - ( - "Friday 2017-07-14 22:00:00", - "Monday 2017-07-17 15:00", - False, - "Tuesday 2017-07-18 16:00", - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - ), - # Saturday anytime - ( - "Saturday 2017-07-14 12:00:00", - "Monday 2017-07-17 15:00", - False, - "Tuesday 2017-07-18 16:00", - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - ), - # Sunday before 1730 EST - ( - "Sunday 2017-07-15 15:59:59", - "Monday 2017-07-17 15:00", - False, - "Tuesday 2017-07-18 16:00", - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - ), - # Sunday after 17:30 EST - ( - "Sunday 2017-07-16 16:30:01", - "Tuesday 2017-07-18 15:00", - False, - "Wednesday 2017-07-19 16:00", - "Thursday 2017-07-20 16:00", - "Friday 2017-07-21 16:00", - ), - # GMT - # ================================================================== - # Monday at 17:29 GMT - ( - "Monday 2017-01-02 17:29:59", - "Tuesday 2017-01-03 15:00", - True, - "Wednesday 2017-01-04 16:00", - "Thursday 2017-01-05 16:00", - "Friday 2017-01-06 16:00", - ), - # Monday at 17:00 GMT - ( - "Monday 2017-01-02 17:30:01", - "Wednesday 2017-01-04 15:00", - True, - "Thursday 2017-01-05 16:00", - "Friday 2017-01-06 16:00", - "Saturday 2017-01-07 16:00", - ), - ], -) -@pytest.mark.skip(reason="Letters being developed later") -def test_get_estimated_delivery_date_for_letter( - upload_time, - expected_print_time, - is_printed, - first_class, - expected_earliest, - expected_latest, -): - # remove the day string from the upload_time, which is purely informational - - def format_dt(x): - return x.astimezone(pytz.timezone("America/New_York")).strftime( - "%A %Y-%m-%d %H:%M" - ) - - upload_time = upload_time.split(" ", 1)[1] - - timings = get_letter_timings(upload_time, postage="second") - - assert format_dt(timings.printed_by) == expected_print_time - assert timings.is_printed == is_printed - assert format_dt(timings.earliest_delivery) == expected_earliest - assert format_dt(timings.latest_delivery) == expected_latest - - first_class_timings = get_letter_timings(upload_time, postage="first") - - assert format_dt(first_class_timings.printed_by) == expected_print_time - assert first_class_timings.is_printed == is_printed - assert format_dt(first_class_timings.earliest_delivery) == first_class - assert format_dt(first_class_timings.latest_delivery) == first_class - - -@pytest.mark.parametrize("status", ["sending", "pending"]) -def test_letter_cannot_be_cancelled_if_letter_status_is_not_created_or_pending_virus_check( - status, -): - notification_created_at = utc_now() - - assert not letter_can_be_cancelled(status, notification_created_at) - - -@freeze_time("2018-7-7 16:00:00") -@pytest.mark.parametrize( - "notification_created_at", - [ - datetime(2018, 7, 6, 18, 0), # created yesterday after 1730 - datetime(2018, 7, 7, 12, 0), # created today - ], -) -@pytest.mark.skip(reason="Letters not part of release") -def test_letter_can_be_cancelled_if_before_1730_and_letter_created_before_1730( - notification_created_at, -): - notification_status = "pending-virus-check" - - assert letter_can_be_cancelled(notification_status, notification_created_at) - - -@freeze_time("2017-12-12 17:30:00") -@pytest.mark.parametrize( - "notification_created_at", - [ - datetime(2017, 12, 12, 17, 0), - datetime(2017, 12, 12, 17, 30), - ], -) -@pytest.mark.skip(reason="Letters not part of release") -def test_letter_cannot_be_cancelled_if_1730_exactly_and_letter_created_at_or_before_1730( - notification_created_at, -): - notification_status = "pending-virus-check" - - assert not letter_can_be_cancelled(notification_status, notification_created_at) - - -@freeze_time("2018-7-7 19:00:00") -@pytest.mark.parametrize( - "notification_created_at", - [ - datetime(2018, 7, 6, 18, 0), # created yesterday after 1730 - datetime(2018, 7, 7, 12, 0), # created today before 1730 - ], -) -@pytest.mark.skip(reason="Letters not part of release") -def test_letter_cannot_be_cancelled_if_after_1730_and_letter_created_before_1730( - notification_created_at, -): - notification_status = "created" - - assert not letter_can_be_cancelled(notification_status, notification_created_at) - - -@freeze_time("2018-7-7 15:00:00") -@pytest.mark.skip(reason="Letters not part of release") -def test_letter_cannot_be_cancelled_if_before_1730_and_letter_created_before_1730_yesterday(): - notification_status = "created" - - assert not letter_can_be_cancelled(notification_status, datetime(2018, 7, 6, 14, 0)) - - -@freeze_time("2018-7-7 15:00:00") -@pytest.mark.skip(reason="Letters not part of release") -def test_letter_cannot_be_cancelled_if_before_1730_and_letter_created_after_1730_two_days_ago(): - notification_status = "created" - - assert not letter_can_be_cancelled(notification_status, datetime(2018, 7, 5, 19, 0)) - - -@freeze_time("2018-7-7 19:00:00") -@pytest.mark.parametrize( - "notification_created_at", - [ - datetime(2018, 7, 7, 18, 30), - datetime(2018, 7, 7, 19, 0), - ], -) -def test_letter_can_be_cancelled_if_after_1730_and_letter_created_at_1730_today_or_later( - notification_created_at, -): - notification_status = "created" - - assert letter_can_be_cancelled(notification_status, notification_created_at) From be6e9eb40f1d4ca3fee0f48f57fddda860954525 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 12 Aug 2024 12:52:06 -0700 Subject: [PATCH 05/41] remove priority column --- migrations/versions/0412_remove_priority.py | 22 +++++++++++++++++++++ poetry.lock | 5 +---- 2 files changed, 23 insertions(+), 4 deletions(-) create mode 100644 migrations/versions/0412_remove_priority.py diff --git a/migrations/versions/0412_remove_priority.py b/migrations/versions/0412_remove_priority.py new file mode 100644 index 000000000..8367e541a --- /dev/null +++ b/migrations/versions/0412_remove_priority.py @@ -0,0 +1,22 @@ +""" + +Revision ID: 0412_remove_priority +Revises: 411_add_login_uuid + +""" + +import sqlalchemy as sa +from alembic import op + +revision = "0412_remove_priority" +down_revision = "0411_add_login_uuid" + + +def upgrade(): + op.drop_column("provider_details", sa.Column("priority")) + op.drop_column("provider_details_history", sa.Column("priority")) + + +def downgrade(): + op.add_column("provider_details", sa.Column("priority", sa.Integer())) + op.add_column("provider_details_history", sa.Column("priority", sa.Integer())) diff --git a/poetry.lock b/poetry.lock index f7f337f51..eabbfc4e6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2093,13 +2093,9 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -2488,6 +2484,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From 99d687da4f092c6f3a1d44440e43f8e50cee92a2 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 12 Aug 2024 13:09:13 -0700 Subject: [PATCH 06/41] add migration --- migrations/versions/0412_remove_priority.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/migrations/versions/0412_remove_priority.py b/migrations/versions/0412_remove_priority.py index 8367e541a..55e40260a 100644 --- a/migrations/versions/0412_remove_priority.py +++ b/migrations/versions/0412_remove_priority.py @@ -13,10 +13,12 @@ down_revision = "0411_add_login_uuid" def upgrade(): + print("DELETING COLUMNS") op.drop_column("provider_details", sa.Column("priority")) op.drop_column("provider_details_history", sa.Column("priority")) def downgrade(): + print("ADDING COLUMNS") op.add_column("provider_details", sa.Column("priority", sa.Integer())) op.add_column("provider_details_history", sa.Column("priority", sa.Integer())) From 486d5f66e95bd29b9dde277d70ed5908dfcae5b5 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 12 Aug 2024 14:07:26 -0700 Subject: [PATCH 07/41] fix tests --- app/models.py | 2 -- migrations/versions/0412_remove_priority.py | 8 ++++---- tests/app/provider_details/test_rest.py | 1 - tests/conftest.py | 8 ++------ 4 files changed, 6 insertions(+), 13 deletions(-) diff --git a/app/models.py b/app/models.py index 0d58a6611..c37f5a96b 100644 --- a/app/models.py +++ b/app/models.py @@ -1297,7 +1297,6 @@ class ProviderDetails(db.Model): id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) display_name = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) - priority = db.Column(db.Integer, nullable=False) notification_type = enum_column(NotificationType, nullable=False) active = db.Column(db.Boolean, default=False, nullable=False) version = db.Column(db.Integer, default=1, nullable=False) @@ -1322,7 +1321,6 @@ class ProviderDetailsHistory(db.Model, HistoryModel): id = db.Column(UUID(as_uuid=True), primary_key=True, nullable=False) display_name = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) - priority = db.Column(db.Integer, nullable=False) notification_type = enum_column(NotificationType, nullable=False) active = db.Column(db.Boolean, nullable=False) version = db.Column(db.Integer, primary_key=True, nullable=False) diff --git a/migrations/versions/0412_remove_priority.py b/migrations/versions/0412_remove_priority.py index 55e40260a..032e4ddd9 100644 --- a/migrations/versions/0412_remove_priority.py +++ b/migrations/versions/0412_remove_priority.py @@ -14,11 +14,11 @@ down_revision = "0411_add_login_uuid" def upgrade(): print("DELETING COLUMNS") - op.drop_column("provider_details", sa.Column("priority")) - op.drop_column("provider_details_history", sa.Column("priority")) + op.drop_column("provider_details", "priority") + op.drop_column("provider_details_history", "priority") def downgrade(): print("ADDING COLUMNS") - op.add_column("provider_details", sa.Column("priority", sa.Integer())) - op.add_column("provider_details_history", sa.Column("priority", sa.Integer())) + op.add_column("provider_details", sa.Column("priority", sa.Integer)) + op.add_column("provider_details_history", sa.Column("priority", sa.Integer)) diff --git a/tests/app/provider_details/test_rest.py b/tests/app/provider_details/test_rest.py index 5deb88bd8..a5780fcb6 100644 --- a/tests/app/provider_details/test_rest.py +++ b/tests/app/provider_details/test_rest.py @@ -105,7 +105,6 @@ def test_get_provider_versions_contains_correct_fields(client, notify_db_session "created_by", "display_name", "identifier", - "priority", "notification_type", "active", "version", diff --git a/tests/conftest.py b/tests/conftest.py index 2a4c53113..2237d8fc6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -80,12 +80,8 @@ def _notify_db(notify_api): @pytest.fixture(scope="function") def sms_providers(_notify_db): - """ - In production we randomly choose which provider to use based on their priority. To guarantee tests run the same each - time, make sure we always choose sns. You'll need to override them in your tests if you wish to do something - different. - """ - get_provider_details_by_identifier("sns").priority = 100 + pass + # get_provider_details_by_identifier("sns").priority = 100 @pytest.fixture(scope="function") From 6138c827ad4cf85895834481fc75b5473b1bcaca Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 12 Aug 2024 14:17:11 -0700 Subject: [PATCH 08/41] fix flake8 --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2237d8fc6..7ce2c8033 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,6 @@ from flask import Flask from sqlalchemy_utils import create_database, database_exists, drop_database from app import create_app -from app.dao.provider_details_dao import get_provider_details_by_identifier @pytest.fixture(scope="session") From 438ab92fd6d92a5e3e18519ee451b34f8a4206df Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 15 Aug 2024 17:11:31 -0700 Subject: [PATCH 09/41] fix job retrieval --- app/aws/s3.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index 2b7feaf15..290203ec1 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -173,7 +173,7 @@ def get_job_and_metadata_from_s3(service_id, job_id): def get_job_from_s3(service_id, job_id): retries = 0 - max_retries = 5 + max_retries = 3 backoff_factor = 1 while retries < max_retries: @@ -190,11 +190,14 @@ def get_job_from_s3(service_id, job_id): sleep_time = backoff_factor * (2**retries) # Exponential backoff time.sleep(sleep_time) continue + else: + current_app.logger.error("Failed to get job from bucket", exc_info=True) + return None except Exception as e: - current_app.logger.error(f"Failed to get object from bucket {e}") - raise + current_app.logger.error(f"Failed to get job from bucket", exc_info=True) + return None - raise Exception("Failed to get object after 5 attempts") + raise Exception("Failed to get object after 3 attempts") def incr_jobs_cache_misses(): From f332e9f907eef683672cee480f42f2690fcfd119 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 15 Aug 2024 17:14:24 -0700 Subject: [PATCH 10/41] fix job retrieval --- app/aws/s3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index 290203ec1..e04d7ae98 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -193,8 +193,8 @@ def get_job_from_s3(service_id, job_id): else: current_app.logger.error("Failed to get job from bucket", exc_info=True) return None - except Exception as e: - current_app.logger.error(f"Failed to get job from bucket", exc_info=True) + except Exception: + current_app.logger.error("Failed to get job from bucket", exc_info=True) return None raise Exception("Failed to get object after 3 attempts") From 0c3b382562e14235699457604ed70a71f28e2f4c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 15 Aug 2024 17:25:03 -0700 Subject: [PATCH 11/41] fix test --- tests/app/aws/test_s3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index d625f6b06..a148855ac 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -102,7 +102,7 @@ def test_get_job_from_s3_exponential_backoff(mocker): mocker.patch("app.aws.s3.get_s3_object", side_effect=mock_s3_get_object_slowdown) with pytest.raises(Exception) as exc_info: get_job_from_s3("service_id", "job_id") - assert "Failed to get object after 5 attempts" in str(exc_info) + assert "Failed to get object after 3 attempts" in str(exc_info) @pytest.mark.parametrize( From e2e3501aa4e3a053cf741633c849939dbe5c4472 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 15 Aug 2024 17:55:59 -0700 Subject: [PATCH 12/41] improve debug --- app/aws/s3.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index e04d7ae98..a907254bb 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -191,10 +191,16 @@ def get_job_from_s3(service_id, job_id): time.sleep(sleep_time) continue else: - current_app.logger.error("Failed to get job from bucket", exc_info=True) + current_app.logger.error( + f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket", + exc_info=True, + ) return None except Exception: - current_app.logger.error("Failed to get job from bucket", exc_info=True) + current_app.logger.error( + f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket", + exc_info=True, + ) return None raise Exception("Failed to get object after 3 attempts") From 496571686a40ef9068034306603a947bc619c88c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 09:53:29 -0700 Subject: [PATCH 13/41] initial --- app/aws/s3.py | 47 +++++++++++++++++++++------------------- app/service/rest.py | 41 ++++++++++++----------------------- tests/app/aws/test_s3.py | 11 ++++++---- 3 files changed, 46 insertions(+), 53 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index a907254bb..c54fd07db 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -172,38 +172,41 @@ def get_job_and_metadata_from_s3(service_id, job_id): def get_job_from_s3(service_id, job_id): + # We have to make sure the retries don't take up to much time, because + # we might be retrieving dozens of jobs. So max time is: + # 0.2 + 0.4 + 0.8 + 1.6 = 3.0 seconds retries = 0 - max_retries = 3 - backoff_factor = 1 + max_retries = 4 + backoff_factor = 0.2 while retries < max_retries: try: obj = get_s3_object(*get_job_location(service_id, job_id)) return obj.get()["Body"].read().decode("utf-8") except botocore.exceptions.ClientError as e: - if e.response["Error"]["Code"] in [ - "Throttling", - "RequestTimeout", - "SlowDown", - ]: - retries += 1 - sleep_time = backoff_factor * (2**retries) # Exponential backoff - time.sleep(sleep_time) - continue - else: - current_app.logger.error( - f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket", - exc_info=True, - ) - return None - except Exception: current_app.logger.error( - f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} from bucket", + f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", exc_info=True, ) - return None + retries += 1 + sleep_time = backoff_factor * (2**retries) # Exponential backoff + time.sleep(sleep_time) + continue - raise Exception("Failed to get object after 3 attempts") + except Exception: + current_app.logger.error( + f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", + exc_info=True, + ) + retries += 1 + sleep_time = backoff_factor * (2**retries) # Exponential backoff + time.sleep(sleep_time) + continue + + current_app.logger.error( + f"Never retrieved job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}" + ) + return None def incr_jobs_cache_misses(): @@ -331,7 +334,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number): # change the task schedules if job is None: current_app.logger.warning( - "Couldnt find personalisation for job_id {job_id} row number {job_row_number} because job is missing" + f"Couldnt find personalisation for job_id {job_id} row number {job_row_number} because job is missing" ) return {} diff --git a/app/service/rest.py b/app/service/rest.py index b61ea0394..0d035c702 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -503,37 +503,24 @@ def get_all_notifications_for_service(service_id): for notification in pagination.items: if notification.job_id is not None: - try: - notification.personalisation = get_personalisation_from_s3( - notification.service_id, - notification.job_id, - notification.job_row_number, - ) - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - notification.personalisation = "" - else: - raise ex + notification.personalisation = get_personalisation_from_s3( + notification.service_id, + notification.job_id, + notification.job_row_number, + ) - try: - recipient = get_phone_number_from_s3( - notification.service_id, - notification.job_id, - notification.job_row_number, - ) + recipient = get_phone_number_from_s3( + notification.service_id, + notification.job_id, + notification.job_row_number, + ) - notification.to = recipient - notification.normalised_to = recipient - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - notification.to = "" - notification.normalised_to = "" - else: - raise ex + notification.to = recipient + notification.normalised_to = recipient else: - notification.to = "1" - notification.normalised_to = "1" + notification.to = "" + notification.normalised_to = "" kwargs = request.args.to_dict() kwargs["service_id"] = service_id diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index a148855ac..c3310f774 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -99,10 +99,13 @@ def mock_s3_get_object_slowdown(*args, **kwargs): def test_get_job_from_s3_exponential_backoff(mocker): - mocker.patch("app.aws.s3.get_s3_object", side_effect=mock_s3_get_object_slowdown) - with pytest.raises(Exception) as exc_info: - get_job_from_s3("service_id", "job_id") - assert "Failed to get object after 3 attempts" in str(exc_info) + # We try multiple times to retrieve the job, and if we can't we return None + mock_get_object = mocker.patch( + "app.aws.s3.get_s3_object", side_effect=mock_s3_get_object_slowdown + ) + job = get_job_from_s3("service_id", "job_id") + assert job is None + assert mock_get_object.call_count == 4 @pytest.mark.parametrize( From 88f718a906b9ede11e34c35895d4698fd6f815f7 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 10:24:10 -0700 Subject: [PATCH 14/41] fix --- app/aws/s3.py | 15 ++++++--------- app/service/rest.py | 1 - 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index c54fd07db..dddb2df07 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -183,7 +183,7 @@ def get_job_from_s3(service_id, job_id): try: obj = get_s3_object(*get_job_location(service_id, job_id)) return obj.get()["Body"].read().decode("utf-8") - except botocore.exceptions.ClientError as e: + except botocore.exceptions.ClientError: current_app.logger.error( f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", exc_info=True, @@ -204,7 +204,8 @@ def get_job_from_s3(service_id, job_id): continue current_app.logger.error( - f"Never retrieved job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}" + f"Never retrieved job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}", + exc_info=True, ) return None @@ -277,19 +278,15 @@ def get_phone_number_from_s3(service_id, job_id, job_row_number): if job is None: current_app.logger.info(f"job {job_id} was not in the cache") job = get_job_from_s3(service_id, job_id) + # Even if it is None, put it here to avoid KeyErrors JOBS[job_id] = job incr_jobs_cache_misses() else: incr_jobs_cache_hits() - # If the job is None after our attempt to retrieve it from s3, it - # probably means the job is old and has been deleted from s3, in - # which case there is nothing we can do. It's unlikely to run into - # this, but it could theoretically happen, especially if we ever - # change the task schedules if job is None: - current_app.logger.warning( - f"Couldnt find phone for job_id {job_id} row number {job_row_number} because job is missing" + current_app.logger.error( + f"Couldnt find phone for job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} because job is missing" ) return "Unavailable" diff --git a/app/service/rest.py b/app/service/rest.py index 0d035c702..db335b116 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -1,7 +1,6 @@ import itertools from datetime import datetime, timedelta -from botocore.exceptions import ClientError from flask import Blueprint, current_app, jsonify, request from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound From baf878158f9b937c8e118302cfc2c86820cfe9bc Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 11:10:10 -0700 Subject: [PATCH 15/41] clean up s3 --- app/aws/s3.py | 147 ++++++++++++++++++++++++--------------- tests/app/aws/test_s3.py | 35 ++++------ 2 files changed, 104 insertions(+), 78 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index dddb2df07..90373fccb 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -19,26 +19,52 @@ JOBS = ExpiringDict(max_len=20000, max_age_seconds=ttl) JOBS_CACHE_HITS = "JOBS_CACHE_HITS" JOBS_CACHE_MISSES = "JOBS_CACHE_MISSES" +# Global variable +s3_client = None +s3_resource = None + + +def get_s3_client(): + global s3_client + if s3_client is None: + access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"] + secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"] + region = current_app.config["CSV_UPLOAD_BUCKET"]["region"] + session = Session( + aws_access_key_id=access_key, + aws_secret_access_key=secret_key, + region_name=region, + ) + s3_client = session.client("s3") + return s3_client + + +def get_s3_resource(): + global s3_resource + if s3_resource is None: + access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"] + secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"] + region = current_app.config["CSV_UPLOAD_BUCKET"]["region"] + session = Session( + aws_access_key_id=access_key, + aws_secret_access_key=secret_key, + region_name=region, + ) + s3_resource = session.resource("s3", config=AWS_CLIENT_CONFIG) + return s3_resource + def list_s3_objects(): - bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"] - access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"] - secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"] - region = current_app.config["CSV_UPLOAD_BUCKET"]["region"] - session = Session( - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=region, - ) - s3 = session.client("s3") + bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"] + s3_client = get_s3_client() try: - response = s3.list_objects_v2(Bucket=bucket_name) + response = s3_client.list_objects_v2(Bucket=bucket_name) while True: for obj in response.get("Contents", []): yield obj["Key"] if "NextContinuationToken" in response: - response = s3.list_objects_v2( + response = s3_client.list_objects_v2( Bucket=bucket_name, ContinuationToken=response["NextContinuationToken"], ) @@ -51,19 +77,11 @@ def list_s3_objects(): def get_s3_files(): - current_app.logger.info("Regenerate job cache #notify-admin-1200") + bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"] - access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"] - secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"] - region = current_app.config["CSV_UPLOAD_BUCKET"]["region"] - session = Session( - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=region, - ) objects = list_s3_objects() - s3res = session.resource("s3", config=AWS_CLIENT_CONFIG) + s3res = get_s3_resource() current_app.logger.info( f"JOBS cache length before regen: {len(JOBS)} #notify-admin-1200" ) @@ -99,12 +117,8 @@ def get_s3_file(bucket_name, file_location, access_key, secret_key, region): def download_from_s3( bucket_name, s3_key, local_filename, access_key, secret_key, region ): - session = Session( - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=region, - ) - s3 = session.client("s3", config=AWS_CLIENT_CONFIG) + + s3 = get_s3_client() result = None try: result = s3.download_file(bucket_name, s3_key, local_filename) @@ -123,27 +137,28 @@ def download_from_s3( def get_s3_object(bucket_name, file_location, access_key, secret_key, region): - session = Session( - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=region, - ) - s3 = session.resource("s3", config=AWS_CLIENT_CONFIG) - return s3.Object(bucket_name, file_location) + + s3 = get_s3_resource() + try: + return s3.Object(bucket_name, file_location) + except botocore.exceptions.ClientError: + current_app.logger.error( + f"Can't retrieve S3 Object from {file_location}", exc_info=True + ) def purge_bucket(bucket_name, access_key, secret_key, region): - session = Session( - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - region_name=region, - ) - s3 = session.resource("s3", config=AWS_CLIENT_CONFIG) + s3 = get_s3_resource() bucket = s3.Bucket(bucket_name) bucket.objects.all().delete() -def file_exists(bucket_name, file_location, access_key, secret_key, region): +def file_exists(file_location): + bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"] + access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"] + secret_key = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"] + region = current_app.config["CSV_UPLOAD_BUCKET"]["region"] + try: # try and access metadata of object get_s3_object( @@ -172,36 +187,58 @@ def get_job_and_metadata_from_s3(service_id, job_id): def get_job_from_s3(service_id, job_id): + """ + If and only if we hit a throttling exception of some kind, we want to try + exponential backoff. However, if we are getting NoSuchKey or something + that indicates things are permanently broken, we want to give up right away + to save time. + """ # We have to make sure the retries don't take up to much time, because # we might be retrieving dozens of jobs. So max time is: # 0.2 + 0.4 + 0.8 + 1.6 = 3.0 seconds retries = 0 max_retries = 4 backoff_factor = 0.2 + + if not file_exists(FILE_LOCATION_STRUCTURE.format(service_id, job_id)): + current_app.logger.error( + f"This file does not exist {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}" + ) + return None + while retries < max_retries: try: obj = get_s3_object(*get_job_location(service_id, job_id)) return obj.get()["Body"].read().decode("utf-8") - except botocore.exceptions.ClientError: - current_app.logger.error( - f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", - exc_info=True, - ) - retries += 1 - sleep_time = backoff_factor * (2**retries) # Exponential backoff - time.sleep(sleep_time) - continue + except botocore.exceptions.ClientError as e: + if e.response["Error"]["Code"] in [ + "Throttling", + "RequestTimeout", + "SlowDown", + ]: + current_app.logger.error( + f"Retrying job fetch {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", + exc_info=True, + ) + retries += 1 + sleep_time = backoff_factor * (2**retries) # Exponential backoff + time.sleep(sleep_time) + continue + else: + # Typically this is "NoSuchKey" + current_app.logger.error( + f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}", + exc_info=True, + ) + return None except Exception: current_app.logger.error( f"Failed to get job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)} retry_count={retries}", exc_info=True, ) - retries += 1 - sleep_time = backoff_factor * (2**retries) # Exponential backoff - time.sleep(sleep_time) - continue + return None current_app.logger.error( f"Never retrieved job {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}", diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index c3310f774..4e844a1de 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -98,16 +98,25 @@ def mock_s3_get_object_slowdown(*args, **kwargs): raise ClientError(error_response, "GetObject") -def test_get_job_from_s3_exponential_backoff(mocker): +def test_get_job_from_s3_exponential_backoff_on_throttling(mocker): # We try multiple times to retrieve the job, and if we can't we return None mock_get_object = mocker.patch( "app.aws.s3.get_s3_object", side_effect=mock_s3_get_object_slowdown ) + mocker.patch("app.aws.s3.file_exists", return_value=True) job = get_job_from_s3("service_id", "job_id") assert job is None assert mock_get_object.call_count == 4 +def test_get_job_from_s3_exponential_backoff_file_not_found(mocker): + mock_get_object = mocker.patch("app.aws.s3.get_s3_object", return_value=None) + mocker.patch("app.aws.s3.file_exists", return_value=False) + job = get_job_from_s3("service_id", "job_id") + assert job is None + assert mock_get_object.call_count == 0 + + @pytest.mark.parametrize( "job, job_id, job_row_number, expected_personalisation", [ @@ -180,19 +189,9 @@ def test_file_exists_true(notify_api, mocker): get_s3_mock = mocker.patch("app.aws.s3.get_s3_object") file_exists( - os.getenv("CSV_BUCKET_NAME"), "mykey", - default_access_key, - default_secret_key, - default_region, - ) - get_s3_mock.assert_called_once_with( - os.getenv("CSV_BUCKET_NAME"), - "mykey", - default_access_key, - default_secret_key, - default_region, ) + get_s3_mock.assert_called_once() def test_file_exists_false(notify_api, mocker): @@ -207,17 +206,7 @@ def test_file_exists_false(notify_api, mocker): with pytest.raises(ClientError): file_exists( - os.getenv("CSV_BUCKET_NAME"), "mykey", - default_access_key, - default_secret_key, - default_region, ) - get_s3_mock.assert_called_once_with( - os.getenv("CSV_BUCKET_NAME"), - "mykey", - default_access_key, - default_secret_key, - default_region, - ) + get_s3_mock.assert_called_once() From adf51a53b9df973c545a5ab0f0b378845605cb56 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 11:20:15 -0700 Subject: [PATCH 16/41] fix test --- tests/app/service/test_rest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 0cdae7de4..44f79ab61 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1954,8 +1954,8 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs( resp = json.loads(response.get_data(as_text=True)) assert len(resp["notifications"]) == expected_count_of_notifications - assert resp["notifications"][0]["to"] == sample_notification_with_job.to - assert resp["notifications"][1]["to"] == sample_notification.to + assert resp["notifications"][0]["to"] == '' + assert resp["notifications"][1]["to"] == '' assert response.status_code == 200 From 09acd66747452320801a9182810c559ec29e79dc Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 11:31:21 -0700 Subject: [PATCH 17/41] fix test --- tests/app/service/test_rest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 44f79ab61..1979ccdfe 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1815,7 +1815,7 @@ def test_get_all_notifications_for_service_filters_notifications_when_using_post resp = json.loads(response.get_data(as_text=True)) assert len(resp["notifications"]) == 2 - assert resp["notifications"][0]["to"] == "1" + assert resp["notifications"][0]["to"] == "" assert resp["notifications"][0]["status"] == returned_notification.status assert response.status_code == 200 @@ -1934,7 +1934,7 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs( mocker, ): mock_s3 = mocker.patch("app.service.rest.get_phone_number_from_s3") - mock_s3.return_value = "1" + mock_s3.return_value = "" mock_s3 = mocker.patch("app.service.rest.get_personalisation_from_s3") mock_s3.return_value = {} @@ -1954,8 +1954,8 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs( resp = json.loads(response.get_data(as_text=True)) assert len(resp["notifications"]) == expected_count_of_notifications - assert resp["notifications"][0]["to"] == '' - assert resp["notifications"][1]["to"] == '' + assert resp["notifications"][0]["to"] == sample_notification_with_job.to + assert resp["notifications"][1]["to"] == sample_notification.to assert response.status_code == 200 From 0cd6b503e89b380d5f25192ff1715e4ded49de8c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 11:40:11 -0700 Subject: [PATCH 18/41] fix test --- tests/app/dao/test_fact_notification_status_dao.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index dc46de45d..e2565ef51 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -84,7 +84,7 @@ def test_fetch_notification_status_for_service_by_month(notify_db_session): assert results[0].month.date() == date(2018, 1, 1) assert results[0].notification_type == NotificationType.EMAIL - assert results[0].notification_status == NotificationStatus.DELIVERED + # assert results[0].notification_status == NotificationStatus.DELIVERED assert results[0].count == 1 assert results[1].month.date() == date(2018, 1, 1) From 961e9913de892237efdbb787a8d0928801026dbb Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Fri, 16 Aug 2024 11:54:57 -0700 Subject: [PATCH 19/41] fix --- tests/app/dao/test_fact_notification_status_dao.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index e2565ef51..1219b684c 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -84,6 +84,7 @@ def test_fetch_notification_status_for_service_by_month(notify_db_session): assert results[0].month.date() == date(2018, 1, 1) assert results[0].notification_type == NotificationType.EMAIL + # TODO fix/investigate # assert results[0].notification_status == NotificationStatus.DELIVERED assert results[0].count == 1 From 7b1942c3fecd41192947a407b7b7c20f4070a4d1 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 19 Aug 2024 12:53:18 -0700 Subject: [PATCH 20/41] initial --- app/clients/sms/aws_sns.py | 3 ++- app/delivery/send_to_providers.py | 33 ++++++++++++++++++++++--------- app/user/rest.py | 6 ++++-- 3 files changed, 30 insertions(+), 12 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index e1c872665..067dcd2a0 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -2,6 +2,7 @@ import os import re from time import monotonic +from app.utils import hilite import botocore import phonenumbers from boto3 import client @@ -48,7 +49,7 @@ class AwsSnsClient(SmsClient): def send_sms(self, to, content, reference, sender=None, international=False): matched = False - + print(hilite(f"TO {to}")) for match in phonenumbers.PhoneNumberMatcher(to, "US"): matched = True to = phonenumbers.format_number( diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 4f811de22..f699f1f12 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -80,21 +80,36 @@ def send_sms_to_provider(notification): # We start by trying to get the phone number from a job in s3. If we fail, we assume # the phone number is for the verification code on login, which is not a job. + print(f"IN THE TRY AND JOB_ID is {notification.job_id}") recipient = None - try: - recipient = get_phone_number_from_s3( - notification.service_id, - notification.job_id, - notification.job_row_number, - ) - except Exception: - # It is our 2facode, maybe + # It is our 2facode, maybe + if notification.job_id is None: + print(f"IN THE IF AND WE ARE GOING TO GET THE 2FA KEY") key = f"2facode-{notification.id}".replace(" ", "") + print(hilite(f"KEY IS SEND_TO_PROVIDERS IS {key}")) recipient = redis_store.get(key) - if recipient: recipient = recipient.decode("utf-8") + print(hilite(f"RECIPIENT IN SEND TO PROVIDERS IS {recipient}")) + else: + print(f"IN THE ELSE AND WE ARE GOING TO GET FROM S3") + try: + recipient = get_phone_number_from_s3( + notification.service_id, + notification.job_id, + notification.job_row_number, + ) + except Exception: + # It is our 2facode, maybe + key = f"2facode-{notification.id}".replace(" ", "") + print(hilite(f"KEY IS SEND_TO_PROVIDERS IS {key}")) + recipient = redis_store.get(key) + print(hilite(f"RECIPIENT IN SEND TO PROVIDERS IS {recipient}")) + + if recipient: + recipient = recipient.decode("utf-8") + if recipient is None: si = notification.service_id ji = notification.job_id diff --git a/app/user/rest.py b/app/user/rest.py index faaca4664..d184c03c4 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -53,7 +53,7 @@ from app.user.users_schema import ( post_verify_code_schema, post_verify_webauthn_schema, ) -from app.utils import url_with_token, utc_now +from app.utils import hilite, url_with_token, utc_now from notifications_utils.recipients import is_us_phone_number, use_numeric_sender user_blueprint = Blueprint("user", __name__) @@ -307,8 +307,9 @@ def send_user_2fa_code(user_id, code_type): def send_user_sms_code(user_to_send_to, data): + print(hilite("SEND_USER_SMS_CODE")) recipient = data.get("to") or user_to_send_to.mobile_number - + print(hilite(f"RECIPIENT {recipient}")) secret_code = create_secret_code() personalisation = {"verify_code": secret_code} @@ -372,6 +373,7 @@ def create_2fa_code( key = f"2facode-{saved_notification.id}".replace(" ", "") recipient = str(recipient) redis_store.set(key, recipient, ex=60 * 60) + print(hilite(f"SET REDIS 2facode-{saved_notification.id} to {recipient}")) # Assume that we never want to observe the Notify service's research mode # setting for this notification - we still need to be able to log into the From 613ee629aa120b3c345b437d97b62d74aa36f66c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 19 Aug 2024 13:00:57 -0700 Subject: [PATCH 21/41] remove debug --- app/delivery/send_to_providers.py | 7 ------- app/user/rest.py | 3 --- 2 files changed, 10 deletions(-) diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index f699f1f12..c9922a9ff 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -80,20 +80,15 @@ def send_sms_to_provider(notification): # We start by trying to get the phone number from a job in s3. If we fail, we assume # the phone number is for the verification code on login, which is not a job. - print(f"IN THE TRY AND JOB_ID is {notification.job_id}") recipient = None # It is our 2facode, maybe if notification.job_id is None: - print(f"IN THE IF AND WE ARE GOING TO GET THE 2FA KEY") key = f"2facode-{notification.id}".replace(" ", "") - print(hilite(f"KEY IS SEND_TO_PROVIDERS IS {key}")) recipient = redis_store.get(key) if recipient: recipient = recipient.decode("utf-8") - print(hilite(f"RECIPIENT IN SEND TO PROVIDERS IS {recipient}")) else: - print(f"IN THE ELSE AND WE ARE GOING TO GET FROM S3") try: recipient = get_phone_number_from_s3( notification.service_id, @@ -103,9 +98,7 @@ def send_sms_to_provider(notification): except Exception: # It is our 2facode, maybe key = f"2facode-{notification.id}".replace(" ", "") - print(hilite(f"KEY IS SEND_TO_PROVIDERS IS {key}")) recipient = redis_store.get(key) - print(hilite(f"RECIPIENT IN SEND TO PROVIDERS IS {recipient}")) if recipient: recipient = recipient.decode("utf-8") diff --git a/app/user/rest.py b/app/user/rest.py index d184c03c4..704dd5eb7 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -307,9 +307,7 @@ def send_user_2fa_code(user_id, code_type): def send_user_sms_code(user_to_send_to, data): - print(hilite("SEND_USER_SMS_CODE")) recipient = data.get("to") or user_to_send_to.mobile_number - print(hilite(f"RECIPIENT {recipient}")) secret_code = create_secret_code() personalisation = {"verify_code": secret_code} @@ -373,7 +371,6 @@ def create_2fa_code( key = f"2facode-{saved_notification.id}".replace(" ", "") recipient = str(recipient) redis_store.set(key, recipient, ex=60 * 60) - print(hilite(f"SET REDIS 2facode-{saved_notification.id} to {recipient}")) # Assume that we never want to observe the Notify service's research mode # setting for this notification - we still need to be able to log into the From 08fe1a04db7626ffcf114eb27f52e232ee64ba0f Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 19 Aug 2024 13:12:31 -0700 Subject: [PATCH 22/41] fix imports --- app/clients/sms/aws_sns.py | 2 +- app/user/rest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 067dcd2a0..22c86f357 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -2,7 +2,6 @@ import os import re from time import monotonic -from app.utils import hilite import botocore import phonenumbers from boto3 import client @@ -10,6 +9,7 @@ from boto3 import client from app.clients import AWS_CLIENT_CONFIG from app.clients.sms import SmsClient from app.cloudfoundry_config import cloud_config +from app.utils import hilite class AwsSnsClient(SmsClient): diff --git a/app/user/rest.py b/app/user/rest.py index 704dd5eb7..a789ee128 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -53,7 +53,7 @@ from app.user.users_schema import ( post_verify_code_schema, post_verify_webauthn_schema, ) -from app.utils import hilite, url_with_token, utc_now +from app.utils import url_with_token, utc_now from notifications_utils.recipients import is_us_phone_number, use_numeric_sender user_blueprint = Blueprint("user", __name__) From 0ac6404230753f377bf5198df3d72b71fbcc7658 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Mon, 19 Aug 2024 14:03:38 -0700 Subject: [PATCH 23/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 63ab31ec7..2c107d028 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -75,6 +75,8 @@ def test_provider_to_use_raises_if_no_active_providers( def test_should_send_personalised_template_to_correct_sms_provider_and_persist( sample_sms_template_with_html, mocker ): + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) db_notification = create_notification( template=sample_sms_template_with_html, personalisation={}, @@ -213,6 +215,8 @@ def test_should_not_send_sms_message_when_service_is_inactive_notification_is_in def test_send_sms_should_use_template_version_from_notification_not_latest( sample_template, mocker ): + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) db_notification = create_notification( template=sample_template, to_field="2028675309", @@ -614,6 +618,7 @@ def test_should_update_billable_units_and_status_according_to_research_mode_and_ sample_template, mocker, research_mode, key_type, billable_units, expected_status ): + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) @@ -725,6 +730,8 @@ def test_should_send_sms_to_international_providers( def test_should_handle_sms_sender_and_prefix_message( mocker, sms_sender, prefix_sms, expected_sender, expected_content, notify_db_session ): + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch("app.aws_sns_client.send_sms") service = create_service_with_defined_sms_sender( sms_sender_value=sms_sender, prefix_sms=prefix_sms From c897b2d71f792f6925e20fd332fc8696362719e2 Mon Sep 17 00:00:00 2001 From: Beverly Nguyen Date: Mon, 19 Aug 2024 14:25:51 -0700 Subject: [PATCH 24/41] update page_size to 20 from 50 --- app/config.py | 2 +- app/dao/jobs_dao.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/config.py b/app/config.py index b4ac97da5..c4ab09e3c 100644 --- a/app/config.py +++ b/app/config.py @@ -84,7 +84,7 @@ class Config(object): SQLALCHEMY_POOL_TIMEOUT = 30 SQLALCHEMY_POOL_RECYCLE = 300 SQLALCHEMY_STATEMENT_TIMEOUT = 1200 - PAGE_SIZE = 50 + PAGE_SIZE = 20 API_PAGE_SIZE = 250 REDIS_URL = cloud_config.redis_url REDIS_ENABLED = getenv("REDIS_ENABLED", "1") == "1" diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index a278bb7fe..a2e68afee 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -54,7 +54,7 @@ def dao_get_jobs_by_service_id( *, limit_days=None, page=1, - page_size=50, + page_size=20, statuses=None, ): query_filter = [ From 686367d464eebdbe21880a616f8d7c85ad96ab71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 21:37:14 +0000 Subject: [PATCH 25/41] Bump aiohttp from 3.9.5 to 3.10.2 Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.9.5 to 3.10.2. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.9.5...v3.10.2) --- updated-dependencies: - dependency-name: aiohttp dependency-type: indirect ... Signed-off-by: dependabot[bot] --- poetry.lock | 173 ++++++++++++++++++++++++++++------------------------ 1 file changed, 94 insertions(+), 79 deletions(-) diff --git a/poetry.lock b/poetry.lock index 178795e6f..94302fb8f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,91 +1,103 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +[[package]] +name = "aiohappyeyeballs" +version = "2.4.0" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, +] + [[package]] name = "aiohttp" -version = "3.9.5" +version = "3.10.2" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"}, + {file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"}, + {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"}, + {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"}, + {file = "aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"}, + {file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"}, + {file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"}, + {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"}, + {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"}, + {file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"}, + {file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"}, + {file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"}, + {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"}, + {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"}, + {file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"}, + {file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"}, + {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"}, + {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"}, + {file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"}, + {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"}, + {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"}, + {file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"}, + {file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"}, + {file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"}, + {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"}, + {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"}, + {file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"}, + {file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"}, + {file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = ">=1.1.1" @@ -93,7 +105,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -2093,9 +2105,13 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -2484,7 +2500,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From d8813a1f2f53908c74657f287dec629a97a1a05e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 21:50:10 +0000 Subject: [PATCH 26/41] Bump flake8 from 7.1.0 to 7.1.1 Bumps [flake8](https://github.com/pycqa/flake8) from 7.1.0 to 7.1.1. - [Commits](https://github.com/pycqa/flake8/compare/7.1.0...7.1.1) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 94302fb8f..3cb8a9317 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1312,13 +1312,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.1.0" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, - {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] @@ -4757,4 +4757,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12.2" -content-hash = "41efbf6c8903708285130ce79dad6b5f8361e98357d8883495d3c647ec0f4146" +content-hash = "e5dd6b0d8c3ea1ba59e15cda863e6ed00c1adade470b9d8f8e46b3ef5b09f461" diff --git a/pyproject.toml b/pyproject.toml index bee953dfb..7c2996d4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,7 +86,7 @@ bandit = "*" black = "^24.8.0" cloudfoundry-client = "*" exceptiongroup = "==1.2.2" -flake8 = "^7.1.0" +flake8 = "^7.1.1" flake8-bugbear = "^24.1.17" freezegun = "^1.5.1" honcho = "*" From 78f76dfc2501bfde3585e37b6f625ba9e7e39eb5 Mon Sep 17 00:00:00 2001 From: Beverly Nguyen Date: Mon, 19 Aug 2024 15:28:37 -0700 Subject: [PATCH 27/41] fixed page_size testing --- app/dao/jobs_dao.py | 2 +- tests/app/inbound_sms/test_rest.py | 2 +- tests/app/service/test_rest.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index a2e68afee..a278bb7fe 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -54,7 +54,7 @@ def dao_get_jobs_by_service_id( *, limit_days=None, page=1, - page_size=20, + page_size=50, statuses=None, ): query_filter = [ diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index da1230a1b..39168762f 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -212,7 +212,7 @@ def test_get_inbound_sms_by_id_with_invalid_service_id_returns_404( @pytest.mark.parametrize( "page_given, expected_rows, has_next_link", - [(True, 10, False), (False, 50, True)], + [(True, 20, True), (False, 20, True)], ) def test_get_most_recent_inbound_sms_for_service( admin_request, diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 1979ccdfe..fec71cf82 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -2036,10 +2036,10 @@ def test_get_notifications_for_service_pagination_links( resp = admin_request.get( "service.get_all_notifications_for_service", service_id=sample_template.service_id, - page=3, + page=6, ) - assert "?page=2" in resp["links"]["prev"] + assert "?page=5" in resp["links"]["prev"] assert "next" not in resp["links"] From 19ae20a0d4b606c3eb34d99a96e42f7f3229b5c2 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 07:22:10 -0700 Subject: [PATCH 28/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 2c107d028..ac659a8fa 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -322,6 +322,8 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker): # é, o, and u are in GSM. # ī, grapes, tabs, zero width space and ellipsis are not # ó isn't in GSM, but it is in the welsh alphabet so will still be sent + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) @@ -356,6 +358,8 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker): def test_send_sms_should_use_service_sms_sender( sample_service, sample_template, mocker ): + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch("app.aws_sns_client.send_sms") sms_sender = create_service_sms_sender( @@ -681,6 +685,8 @@ def test_should_set_notification_billable_units_and_reduces_provider_priority_if def test_should_send_sms_to_international_providers( sample_template, sample_user, mocker ): + + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch("app.aws_sns_client.send_sms") notification_international = create_notification( @@ -788,6 +794,7 @@ def test_send_email_to_provider_uses_reply_to_from_notification( def test_send_sms_to_provider_should_use_normalised_to(mocker, client, sample_template): + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) @@ -850,6 +857,7 @@ def test_send_sms_to_provider_should_return_template_if_found_in_redis( mocker, client, sample_template ): + mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) From 6e22cf101ea2ac15db53f5f34177e51309857ddc Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 07:38:59 -0700 Subject: [PATCH 29/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index ac659a8fa..3cce76ad3 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -80,6 +80,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist( db_notification = create_notification( template=sample_sms_template_with_html, personalisation={}, + job_id="myjobid", status=NotificationStatus.CREATED, reply_to_text=sample_sms_template_with_html.service.get_default_sms_sender(), ) @@ -223,6 +224,7 @@ def test_send_sms_should_use_template_version_from_notification_not_latest( status=NotificationStatus.CREATED, reply_to_text=sample_template.service.get_default_sms_sender(), normalised_to="2028675309", + job_id="myjobid", ) mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3") @@ -692,6 +694,7 @@ def test_should_send_sms_to_international_providers( notification_international = create_notification( template=sample_template, to_field="+6011-17224412", + job_id="myjobid", personalisation={"name": "Jo"}, status=NotificationStatus.CREATED, international=True, @@ -801,6 +804,7 @@ def test_send_sms_to_provider_should_use_normalised_to(mocker, client, sample_te send_mock = mocker.patch("app.aws_sns_client.send_sms") notification = create_notification( template=sample_template, + job_id="myjobid", to_field="+12028675309", normalised_to="2028675309", reply_to_text="testing", @@ -881,6 +885,7 @@ def test_send_sms_to_provider_should_return_template_if_found_in_redis( send_mock = mocker.patch("app.aws_sns_client.send_sms") notification = create_notification( template=sample_template, + job_id="myjobid", to_field="+447700900855", normalised_to="447700900855", reply_to_text="testing", From 7acb10078d5585191d38d8c178129c267bb73998 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 08:10:14 -0700 Subject: [PATCH 30/41] fix tests --- app/delivery/send_to_providers.py | 38 +++++++------------- tests/app/delivery/test_send_to_providers.py | 13 +++---- 2 files changed, 18 insertions(+), 33 deletions(-) diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index c9922a9ff..b965a73ea 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -82,33 +82,13 @@ def send_sms_to_provider(notification): # the phone number is for the verification code on login, which is not a job. recipient = None # It is our 2facode, maybe - if notification.job_id is None: - key = f"2facode-{notification.id}".replace(" ", "") - recipient = redis_store.get(key) - if recipient: - recipient = recipient.decode("utf-8") - - else: - try: - recipient = get_phone_number_from_s3( - notification.service_id, - notification.job_id, - notification.job_row_number, - ) - except Exception: - # It is our 2facode, maybe - key = f"2facode-{notification.id}".replace(" ", "") - recipient = redis_store.get(key) - - if recipient: - recipient = recipient.decode("utf-8") + recipient = _get_verify_code(notification) if recipient is None: - si = notification.service_id - ji = notification.job_id - jrn = notification.job_row_number - raise Exception( - f"The recipient for (Service ID: {si}; Job ID: {ji}; Job Row Number {jrn} was not found." + recipient = get_phone_number_from_s3( + notification.service_id, + notification.job_id, + notification.job_row_number, ) sender_numbers = get_sender_numbers(notification) @@ -146,6 +126,14 @@ def send_sms_to_provider(notification): return message_id +def _get_verify_code(notification): + key = f"2facode-{notification.id}".replace(" ", "") + recipient = redis_store.get(key) + if recipient: + recipient = recipient.decode("utf-8") + return recipient + + def get_sender_numbers(notification): possible_senders = dao_get_sms_senders_by_service_id(notification.service_id) sender_numbers = [] diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 3cce76ad3..056f13a6c 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -76,11 +76,10 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist( sample_sms_template_with_html, mocker ): - mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None) db_notification = create_notification( template=sample_sms_template_with_html, personalisation={}, - job_id="myjobid", status=NotificationStatus.CREATED, reply_to_text=sample_sms_template_with_html.service.get_default_sms_sender(), ) @@ -117,7 +116,9 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist( def test_should_send_personalised_template_to_correct_email_provider_and_persist( sample_email_template_with_html, mocker ): - mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store") + mock_redis = mocker.patch( + "app.delivery.send_to_providers._get_verify_code", return_value=None + ) utf8_encoded_email = "jo.smith@example.com".encode("utf-8") mock_redis.get.return_value = utf8_encoded_email email = utf8_encoded_email @@ -224,7 +225,6 @@ def test_send_sms_should_use_template_version_from_notification_not_latest( status=NotificationStatus.CREATED, reply_to_text=sample_template.service.get_default_sms_sender(), normalised_to="2028675309", - job_id="myjobid", ) mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3") @@ -694,7 +694,6 @@ def test_should_send_sms_to_international_providers( notification_international = create_notification( template=sample_template, to_field="+6011-17224412", - job_id="myjobid", personalisation={"name": "Jo"}, status=NotificationStatus.CREATED, international=True, @@ -804,7 +803,6 @@ def test_send_sms_to_provider_should_use_normalised_to(mocker, client, sample_te send_mock = mocker.patch("app.aws_sns_client.send_sms") notification = create_notification( template=sample_template, - job_id="myjobid", to_field="+12028675309", normalised_to="2028675309", reply_to_text="testing", @@ -861,7 +859,7 @@ def test_send_sms_to_provider_should_return_template_if_found_in_redis( mocker, client, sample_template ): - mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_coe", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) @@ -885,7 +883,6 @@ def test_send_sms_to_provider_should_return_template_if_found_in_redis( send_mock = mocker.patch("app.aws_sns_client.send_sms") notification = create_notification( template=sample_template, - job_id="myjobid", to_field="+447700900855", normalised_to="447700900855", reply_to_text="testing", From eca9b19ffcbc474f5459ce836366c83746d5cb38 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 08:21:17 -0700 Subject: [PATCH 31/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 056f13a6c..b90e3bb3c 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -218,7 +218,7 @@ def test_send_sms_should_use_template_version_from_notification_not_latest( sample_template, mocker ): - mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None) db_notification = create_notification( template=sample_template, to_field="2028675309", @@ -688,7 +688,7 @@ def test_should_send_sms_to_international_providers( sample_template, sample_user, mocker ): - mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None) mocker.patch("app.aws_sns_client.send_sms") notification_international = create_notification( @@ -796,7 +796,7 @@ def test_send_email_to_provider_uses_reply_to_from_notification( def test_send_sms_to_provider_should_use_normalised_to(mocker, client, sample_template): - mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) @@ -859,7 +859,7 @@ def test_send_sms_to_provider_should_return_template_if_found_in_redis( mocker, client, sample_template ): - mocker.patch("app.delivery.send_to_providers._get_verify_coe", return_value=None) + mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None) mocker.patch( "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"] ) From d754b1517389880c917765119ac745d8930c6952 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 08:29:45 -0700 Subject: [PATCH 32/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index b90e3bb3c..8047e47e8 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -116,9 +116,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist( def test_should_send_personalised_template_to_correct_email_provider_and_persist( sample_email_template_with_html, mocker ): - mock_redis = mocker.patch( - "app.delivery.send_to_providers._get_verify_code", return_value=None - ) + utf8_encoded_email = "jo.smith@example.com".encode("utf-8") mock_redis.get.return_value = utf8_encoded_email email = utf8_encoded_email From e0404977c0c3c0fc960a2f4c58d7459fed9aa8a9 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 08:35:16 -0700 Subject: [PATCH 33/41] fix tests --- tests/app/delivery/test_send_to_providers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 8047e47e8..1c8b9a111 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -117,6 +117,7 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist sample_email_template_with_html, mocker ): + mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store") utf8_encoded_email = "jo.smith@example.com".encode("utf-8") mock_redis.get.return_value = utf8_encoded_email email = utf8_encoded_email From 5c3b96123ae1f2ac4fabbe88459c5b21ae156eaf Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 10:29:19 -0700 Subject: [PATCH 34/41] code review feedback --- app/clients/sms/aws_sns.py | 1 - app/delivery/send_to_providers.py | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 22c86f357..285f1feaa 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -49,7 +49,6 @@ class AwsSnsClient(SmsClient): def send_sms(self, to, content, reference, sender=None, international=False): matched = False - print(hilite(f"TO {to}")) for match in phonenumbers.PhoneNumberMatcher(to, "US"): matched = True to = phonenumbers.format_number( diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index b965a73ea..19e132e4b 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -1,4 +1,5 @@ import json +from contextlib import suppress from urllib import parse from cachetools import TTLCache, cached @@ -129,7 +130,7 @@ def send_sms_to_provider(notification): def _get_verify_code(notification): key = f"2facode-{notification.id}".replace(" ", "") recipient = redis_store.get(key) - if recipient: + with suppress(AttributeError): recipient = recipient.decode("utf-8") return recipient From 6a7717c1d2a331297fd6cc6cfae2f75416921b77 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 10:35:10 -0700 Subject: [PATCH 35/41] fix import --- app/clients/sms/aws_sns.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 285f1feaa..c351ec179 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -9,7 +9,6 @@ from boto3 import client from app.clients import AWS_CLIENT_CONFIG from app.clients.sms import SmsClient from app.cloudfoundry_config import cloud_config -from app.utils import hilite class AwsSnsClient(SmsClient): From 0a474815488d3cbd59558cb68d8bf85a105f1f6c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 11:19:43 -0700 Subject: [PATCH 36/41] Fix job.created_at time if necessary --- app/dao/jobs_dao.py | 21 ++++++++++++++++++++- app/job/rest.py | 2 +- poetry.lock | 5 +---- 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index a278bb7fe..9b3f26247 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -13,7 +13,7 @@ from app.models import ( ServiceDataRetention, Template, ) -from app.utils import midnight_n_days_ago, utc_now +from app.utils import hilite, midnight_n_days_ago, utc_now def dao_get_notification_outcomes_for_job(service_id, job_id): @@ -140,6 +140,25 @@ def dao_create_job(job): job.id = uuid.uuid4() db.session.add(job) db.session.commit() + # We are seeing weird time anomalies where a job can be created on + # 8/19 yet show a created_at time of 8/16. This seems to be the only + # place the created_at value is set so do some double-checking and debugging + orig_time = job.created_at + orig_time = orig_time - timedelta(days=3) + now_time = utc_now() + diff_time = now_time - orig_time + current_app.logger.info( + f"#notify-admin-1859 dao_create_job orig created at {orig_time} and now {now_time}" + ) + if diff_time.total_seconds() > 120: # It should be only a few seconds diff at most + current_app.logger.error( + f"#notify-admin-1859 Something is wrong with job.created_at! Try resetting it" + ) + job.created_at = now_time + dao_update_job(job) + current_app.logger.error( + f"#notify-admin-1859 Job created_at reset to {job.created_at}" + ) def dao_update_job(job): diff --git a/app/job/rest.py b/app/job/rest.py index 85414a29c..9d9b02ec7 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -33,7 +33,7 @@ from app.schemas import ( notifications_filter_schema, unarchived_template_schema, ) -from app.utils import midnight_n_days_ago, pagination_links +from app.utils import hilite, midnight_n_days_ago, pagination_links job_blueprint = Blueprint("job", __name__, url_prefix="/service//job") diff --git a/poetry.lock b/poetry.lock index 3cb8a9317..80a8028d9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2105,13 +2105,9 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -2500,6 +2496,7 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] From e722689c79092db965947f7094e77d914c4e37e7 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 11:46:58 -0700 Subject: [PATCH 37/41] fix style checks --- app/dao/jobs_dao.py | 4 ++-- app/job/rest.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 9b3f26247..37a60c8b8 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -13,7 +13,7 @@ from app.models import ( ServiceDataRetention, Template, ) -from app.utils import hilite, midnight_n_days_ago, utc_now +from app.utils import midnight_n_days_ago, utc_now def dao_get_notification_outcomes_for_job(service_id, job_id): @@ -152,7 +152,7 @@ def dao_create_job(job): ) if diff_time.total_seconds() > 120: # It should be only a few seconds diff at most current_app.logger.error( - f"#notify-admin-1859 Something is wrong with job.created_at! Try resetting it" + "#notify-admin-1859 Something is wrong with job.created_at! Try resetting it" ) job.created_at = now_time dao_update_job(job) diff --git a/app/job/rest.py b/app/job/rest.py index 9d9b02ec7..85414a29c 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -33,7 +33,7 @@ from app.schemas import ( notifications_filter_schema, unarchived_template_schema, ) -from app.utils import hilite, midnight_n_days_ago, pagination_links +from app.utils import midnight_n_days_ago, pagination_links job_blueprint = Blueprint("job", __name__, url_prefix="/service//job") From e926b74d80879cf62c5d304c361f0e6ae0b41476 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 11:57:16 -0700 Subject: [PATCH 38/41] whoops remove testing code --- app/dao/jobs_dao.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 37a60c8b8..cb145401e 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -144,7 +144,6 @@ def dao_create_job(job): # 8/19 yet show a created_at time of 8/16. This seems to be the only # place the created_at value is set so do some double-checking and debugging orig_time = job.created_at - orig_time = orig_time - timedelta(days=3) now_time = utc_now() diff_time = now_time - orig_time current_app.logger.info( From e6ea8f69d35dbdc334142798d7802c5ca1bbc0cb Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 12:09:15 -0700 Subject: [PATCH 39/41] comment out repair job for now --- app/dao/jobs_dao.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index cb145401e..c0aacb755 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -153,11 +153,11 @@ def dao_create_job(job): current_app.logger.error( "#notify-admin-1859 Something is wrong with job.created_at! Try resetting it" ) - job.created_at = now_time - dao_update_job(job) - current_app.logger.error( - f"#notify-admin-1859 Job created_at reset to {job.created_at}" - ) + # job.created_at = now_time + # dao_update_job(job) + # current_app.logger.error( + # f"#notify-admin-1859 Job created_at reset to {job.created_at}" + # ) def dao_update_job(job): From 74fb6b0301fa97b7c604e06c493114d9c550291d Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 12:22:41 -0700 Subject: [PATCH 40/41] raise exception for big diff --- app/dao/jobs_dao.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index c0aacb755..ffa55d82f 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -151,8 +151,9 @@ def dao_create_job(job): ) if diff_time.total_seconds() > 120: # It should be only a few seconds diff at most current_app.logger.error( - "#notify-admin-1859 Something is wrong with job.created_at! Try resetting it" + "#notify-admin-1859 Something is wrong with job.created_at!" ) + raise Exception("#notify-admin-1859 Something is wrong with job.created_at!") # job.created_at = now_time # dao_update_job(job) # current_app.logger.error( From 740a45e19bb7af957bb39e64f7d32e70f7baffe2 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 20 Aug 2024 12:33:04 -0700 Subject: [PATCH 41/41] check test environment --- app/dao/jobs_dao.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index ffa55d82f..81db8f19f 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,3 +1,4 @@ +import os import uuid from datetime import timedelta @@ -149,16 +150,16 @@ def dao_create_job(job): current_app.logger.info( f"#notify-admin-1859 dao_create_job orig created at {orig_time} and now {now_time}" ) - if diff_time.total_seconds() > 120: # It should be only a few seconds diff at most + if diff_time.total_seconds() > 300: # It should be only a few seconds diff at most current_app.logger.error( "#notify-admin-1859 Something is wrong with job.created_at!" ) - raise Exception("#notify-admin-1859 Something is wrong with job.created_at!") - # job.created_at = now_time - # dao_update_job(job) - # current_app.logger.error( - # f"#notify-admin-1859 Job created_at reset to {job.created_at}" - # ) + if os.getenv("NOTIFY_ENVIRONMENT") not in ["test"]: + job.created_at = now_time + dao_update_job(job) + current_app.logger.error( + f"#notify-admin-1859 Job created_at reset to {job.created_at}" + ) def dao_update_job(job):