From 5b405d41aa68776e4b85ef82defdb09d5e06a3c7 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 12:07:42 -0700
Subject: [PATCH 01/39] break test so we can see coverage numbers
---
tests/app/test_commands.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py
index 46dd2b0c1..8375aa7a9 100644
--- a/tests/app/test_commands.py
+++ b/tests/app/test_commands.py
@@ -437,7 +437,8 @@ def test_download_csv_file_by_name(notify_api, mocker):
"NonExistentName",
],
)
- mock_download.assert_called_once()
+ mock_download.assert_not_called()
+ # mock_download.assert_called_once()
def test_promote_user_to_platform_admin_no_result_found(
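Note: patches 01 and 02 flip this assertion and then restore it purely to force a failing CI run that prints coverage numbers. Locally, the same numbers can usually be obtained without breaking a test by running pytest with the pytest-cov plugin (assuming it is installed), e.g. `pytest --cov=app --cov-report=term-missing tests/app/test_commands.py`.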
From a0c27975a57ff40a31dfdea37165c008d787f1d4 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 12:40:31 -0700
Subject: [PATCH 02/39] break test so we can see coverage numbers
---
tests/app/test_commands.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py
index 8375aa7a9..46dd2b0c1 100644
--- a/tests/app/test_commands.py
+++ b/tests/app/test_commands.py
@@ -437,8 +437,7 @@ def test_download_csv_file_by_name(notify_api, mocker):
"NonExistentName",
],
)
- mock_download.assert_not_called()
- # mock_download.assert_called_once()
+ mock_download.assert_called_once()
def test_promote_user_to_platform_admin_no_result_found(
From 5d265135d35fd1afcccca7ac64280f413d27c109 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 13:08:34 -0700
Subject: [PATCH 03/39] write a test
---
app/delivery/send_to_providers.py | 24 +++++++++++---------
tests/app/delivery/test_send_to_providers.py | 17 ++++++++++++++
2 files changed, 30 insertions(+), 11 deletions(-)
diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py
index 745b46cab..07763823f 100644
--- a/app/delivery/send_to_providers.py
+++ b/app/delivery/send_to_providers.py
@@ -98,17 +98,7 @@ def send_sms_to_provider(notification):
# TODO This is temporary to test the capability of validating phone numbers
# The future home of the validation is TBD
- if "+" not in recipient:
- recipient_lookup = f"+{recipient}"
- else:
- recipient_lookup = recipient
- if recipient_lookup in current_app.config[
- "SIMULATED_SMS_NUMBERS"
- ] and os.getenv("NOTIFY_ENVIRONMENT") in ["development", "test"]:
- current_app.logger.info(hilite("#validate-phone-number fired"))
- aws_pinpoint_client.validate_phone_number("01", recipient)
- else:
- current_app.logger.info(hilite("#validate-phone-number not fired"))
+ _experimentally_validate_phone_numbers(recipient)
sender_numbers = get_sender_numbers(notification)
if notification.reply_to_text not in sender_numbers:
@@ -145,6 +135,18 @@ def send_sms_to_provider(notification):
return message_id
+def _experimentally_validate_phone_numbers(recipient):
+ if "+" not in recipient:
+ recipient_lookup = f"+{recipient}"
+ else:
+ recipient_lookup = recipient
+ if recipient_lookup in current_app.config["SIMULATED_SMS_NUMBERS"] and os.getenv(
+ "NOTIFY_ENVIRONMENT"
+ ) in ["development", "test"]:
+ current_app.logger.info(hilite("#validate-phone-number fired"))
+ aws_pinpoint_client.validate_phone_number("01", recipient)
+
+
def _get_verify_code(notification):
key = f"2facode-{notification.id}".replace(" ", "")
recipient = redis_store.get(key)
diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py
index fbea9a2f7..4c0c39890 100644
--- a/tests/app/delivery/test_send_to_providers.py
+++ b/tests/app/delivery/test_send_to_providers.py
@@ -3,6 +3,7 @@ import os
from contextlib import suppress
from urllib import parse
+import pytest
from cachetools import TTLCache, cached
from flask import current_app
@@ -19,6 +20,7 @@ from app.dao.email_branding_dao import dao_get_email_branding_by_id
from app.dao.notifications_dao import dao_update_notification
from app.dao.provider_details_dao import get_provider_details_by_notification_type
from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id
+from app.delivery.send_to_providers import _experimentally_validate_phone_numbers
from app.enums import BrandType, KeyType, NotificationStatus, NotificationType
from app.exceptions import NotificationTechnicalFailureException
from app.serialised_models import SerialisedService, SerialisedTemplate
@@ -306,3 +308,18 @@ def technical_failure(notification):
f"Send {notification.notification_type} for notification id {notification.id} "
f"to provider is not allowed: service {notification.service_id} is inactive"
)
+
+
+@pytest.mark.parametrize(
+ ("recipient", "expected_invoke"),
+ [
+ ("15555555555", False),
+ ],
+)
+def test_experimentally_validate_phone_numbers(recipient, expected_invoke, mocker):
+ mock_pinpoint = mocker.patch("app.delivery.send_to_providers.aws_pinpoint_client")
+ _experimentally_validate_phone_numbers(recipient)
+ if expected_invoke:
+ mock_pinpoint.validate_phone_number.assert_called_once_with("01", recipient)
+ else:
+ mock_pinpoint.validate_phone_number.assert_not_called()
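Note: the parametrized table above only covers the case where validation is not fired. Below is a minimal sketch of a companion test for the fired branch, assuming notify_api is the Flask application fixture used elsewhere in these tests, that SIMULATED_SMS_NUMBERS is a non-empty config list whose entries carry a leading "+" (as the helper's lookup implies), and that pytest-mock's mocker and pytest's monkeypatch are available. Illustrative only, not part of the patch:

    from flask import current_app

    from app.delivery.send_to_providers import _experimentally_validate_phone_numbers


    def test_experimentally_validate_phone_numbers_fires_for_simulated_number(
        notify_api, mocker, monkeypatch
    ):
        mock_pinpoint = mocker.patch(
            "app.delivery.send_to_providers.aws_pinpoint_client"
        )
        # The helper only fires when NOTIFY_ENVIRONMENT is development or test.
        monkeypatch.setenv("NOTIFY_ENVIRONMENT", "test")
        with notify_api.app_context():
            # Assumes the configured simulated numbers already carry a leading "+".
            simulated = current_app.config["SIMULATED_SMS_NUMBERS"][0]
            _experimentally_validate_phone_numbers(simulated)
        # The helper calls validate_phone_number("01", recipient) on the client.
        mock_pinpoint.validate_phone_number.assert_called_once_with("01", simulated)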
From 6e73e81201c4b01f0d3147f5c454d052a0acfb6b Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 13:40:19 -0700
Subject: [PATCH 04/39] ugh fix tests
---
tests/app/delivery/test_send_to_providers.py | 1208 ++++++++++++++----
1 file changed, 946 insertions(+), 262 deletions(-)
diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py
index 4c0c39890..20b0f7186 100644
--- a/tests/app/delivery/test_send_to_providers.py
+++ b/tests/app/delivery/test_send_to_providers.py
@@ -1,313 +1,997 @@
import json
-import os
-from contextlib import suppress
-from urllib import parse
+from collections import namedtuple
+from unittest.mock import ANY
import pytest
-from cachetools import TTLCache, cached
from flask import current_app
+from requests import HTTPError
-from app import (
- aws_pinpoint_client,
- create_uuid,
- db,
- notification_provider_clients,
- redis_store,
+import app
+from app import aws_sns_client, notification_provider_clients
+from app.cloudfoundry_config import cloud_config
+from app.dao import notifications_dao
+from app.dao.provider_details_dao import get_provider_details_by_identifier
+from app.delivery import send_to_providers
+from app.delivery.send_to_providers import (
+ _experimentally_validate_phone_numbers,
+ get_html_email_options,
+ get_logo_url,
)
-from app.aws.s3 import get_personalisation_from_s3, get_phone_number_from_s3
-from app.celery.test_key_tasks import send_email_response, send_sms_response
-from app.dao.email_branding_dao import dao_get_email_branding_by_id
-from app.dao.notifications_dao import dao_update_notification
-from app.dao.provider_details_dao import get_provider_details_by_notification_type
-from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id
-from app.delivery.send_to_providers import _experimentally_validate_phone_numbers
from app.enums import BrandType, KeyType, NotificationStatus, NotificationType
from app.exceptions import NotificationTechnicalFailureException
-from app.serialised_models import SerialisedService, SerialisedTemplate
-from app.utils import hilite, utc_now
-from notifications_utils.template import (
- HTMLEmailTemplate,
- PlainTextEmailTemplate,
- SMSMessageTemplate,
+from app.models import EmailBranding, Notification
+from app.serialised_models import SerialisedService
+from app.utils import utc_now
+from tests.app.db import (
+ create_email_branding,
+ create_notification,
+ create_reply_to_email,
+ create_service,
+ create_service_sms_sender,
+ create_service_with_defined_sms_sender,
+ create_template,
)
-def send_sms_to_provider(notification):
- """Final step in the message send flow.
-
- Get data for recipient, template,
- notification and send it to sns.
- """
- # we no longer store the personalisation in the db,
- # need to retrieve from s3 before generating content
- # However, we are still sending the initial verify code through personalisation
- # so if there is some value there, don't overwrite it
- if not notification.personalisation:
- personalisation = get_personalisation_from_s3(
- notification.service_id,
- notification.job_id,
- notification.job_row_number,
- )
- notification.personalisation = personalisation
-
- service = SerialisedService.from_id(notification.service_id)
- message_id = None
- if not service.active:
- technical_failure(notification=notification)
- return
-
- if notification.status == NotificationStatus.CREATED:
- # We get the provider here (which is only aws sns)
- provider = provider_to_use(NotificationType.SMS, notification.international)
- if not provider:
- technical_failure(notification=notification)
- return
-
- template_model = SerialisedTemplate.from_id_and_service_id(
- template_id=notification.template_id,
- service_id=service.id,
- version=notification.template_version,
- )
-
- template = SMSMessageTemplate(
- template_model.__dict__,
- values=notification.personalisation,
- prefix=service.name,
- show_prefix=service.prefix_sms,
- )
- if notification.key_type == KeyType.TEST:
- update_notification_to_sending(notification, provider)
- send_sms_response(provider.name, str(notification.id))
-
- else:
- try:
- # End DB session here so that we don't have a connection stuck open waiting on the call
- # to one of the SMS providers
- # We don't want to tie our DB connections being open to the performance of our SMS
- # providers as a slow down of our providers can cause us to run out of DB connections
- # Therefore we pull all the data from our DB models into `send_sms_kwargs`now before
- # closing the session (as otherwise it would be reopened immediately)
-
- # We start by trying to get the phone number from a job in s3. If we fail, we assume
- # the phone number is for the verification code on login, which is not a job.
- recipient = None
- # It is our 2facode, maybe
- recipient = _get_verify_code(notification)
-
- if recipient is None:
- recipient = get_phone_number_from_s3(
- notification.service_id,
- notification.job_id,
- notification.job_row_number,
- )
-
- # TODO This is temporary to test the capability of validating phone numbers
- # The future home of the validation is TBD
- if "+" not in recipient:
- recipient_lookup = f"+{recipient}"
- else:
- recipient_lookup = recipient
- if recipient_lookup in current_app.config[
- "SIMULATED_SMS_NUMBERS"
- ] and os.getenv("NOTIFY_ENVIRONMENT") in ["development", "test"]:
- current_app.logger.info(hilite("#validate-phone-number fired"))
- aws_pinpoint_client.validate_phone_number("01", recipient)
- else:
- current_app.logger.info(hilite("#validate-phone-number not fired"))
-
- sender_numbers = get_sender_numbers(notification)
- if notification.reply_to_text not in sender_numbers:
- raise ValueError(
- f"{notification.reply_to_text} not in {sender_numbers} #notify-admin-1701"
- )
-
- send_sms_kwargs = {
- "to": recipient,
- "content": str(template),
- "reference": str(notification.id),
- "sender": notification.reply_to_text,
- "international": notification.international,
- }
- db.session.close() # no commit needed as no changes to objects have been made above
-
- message_id = provider.send_sms(**send_sms_kwargs)
- current_app.logger.info(f"got message_id {message_id}")
- except Exception as e:
- n = notification
- msg = f"FAILED send to sms, job_id: {n.job_id} row_number {n.job_row_number} message_id {message_id}"
- current_app.logger.exception(hilite(msg))
-
- notification.billable_units = template.fragment_count
- dao_update_notification(notification)
- raise e
- else:
- # Here we map the job_id and row number to the aws message_id
- n = notification
- msg = f"Send to aws for job_id {n.job_id} row_number {n.job_row_number} message_id {message_id}"
- current_app.logger.info(hilite(msg))
- notification.billable_units = template.fragment_count
- update_notification_to_sending(notification, provider)
- return message_id
+def setup_function(_function):
+ # pytest will run this function before each test. It makes sure the
+ # state of the cache is not shared between tests.
+ send_to_providers.provider_cache.clear()
-def _get_verify_code(notification):
- key = f"2facode-{notification.id}".replace(" ", "")
- recipient = redis_store.get(key)
- with suppress(AttributeError):
- recipient = recipient.decode("utf-8")
- return recipient
+@pytest.mark.parametrize(
+ "international_provider_priority",
+ (
+ # Since there’s only one international provider it should always
+ # be used, no matter what its priority is set to
+ 0,
+ 50,
+ 100,
+ ),
+)
+def test_provider_to_use_should_only_return_sns_for_international(
+ mocker,
+ notify_db_session,
+ international_provider_priority,
+):
+ sns = get_provider_details_by_identifier("sns")
+ sns.priority = international_provider_priority
+
+ ret = send_to_providers.provider_to_use(NotificationType.SMS, international=True)
+
+ assert ret.name == "sns"
-def get_sender_numbers(notification):
- possible_senders = dao_get_sms_senders_by_service_id(notification.service_id)
- sender_numbers = []
- for possible_sender in possible_senders:
- sender_numbers.append(possible_sender.sms_sender)
- return sender_numbers
+def test_provider_to_use_raises_if_no_active_providers(
+ mocker, restore_provider_details
+):
+ sns = get_provider_details_by_identifier("sns")
+ sns.active = False
+
+ # flake8 doesn't like raises with a generic exception
+ try:
+ send_to_providers.provider_to_use(NotificationType.SMS)
+ assert 1 == 0
+ except Exception:
+ assert 1 == 1
-def send_email_to_provider(notification):
- # Someone needs an email, possibly new registration
- recipient = redis_store.get(f"email-address-{notification.id}")
- recipient = recipient.decode("utf-8")
- personalisation = redis_store.get(f"email-personalisation-{notification.id}")
- if personalisation:
- personalisation = personalisation.decode("utf-8")
- notification.personalisation = json.loads(personalisation)
+def test_should_send_personalised_template_to_correct_sms_provider_and_persist(
+ sample_sms_template_with_html, mocker
+):
- service = SerialisedService.from_id(notification.service_id)
- if not service.active:
- technical_failure(notification=notification)
- return
- if notification.status == NotificationStatus.CREATED:
- provider = provider_to_use(NotificationType.EMAIL, False)
- template_dict = SerialisedTemplate.from_id_and_service_id(
- template_id=notification.template_id,
- service_id=service.id,
- version=notification.template_version,
- ).__dict__
+ mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None)
+ db_notification = create_notification(
+ template=sample_sms_template_with_html,
+ personalisation={},
+ status=NotificationStatus.CREATED,
+ reply_to_text=sample_sms_template_with_html.service.get_default_sms_sender(),
+ )
- html_email = HTMLEmailTemplate(
- template_dict,
- values=notification.personalisation,
- **get_html_email_options(service),
- )
+ mocker.patch("app.aws_sns_client.send_sms")
- plain_text_email = PlainTextEmailTemplate(
- template_dict, values=notification.personalisation
- )
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "2028675309"
- if notification.key_type == KeyType.TEST:
- notification.reference = str(create_uuid())
- update_notification_to_sending(notification, provider)
- send_email_response(notification.reference, recipient)
- else:
- from_address = (
- f'"{service.name}" <{service.email_from}@'
- f'{current_app.config["NOTIFY_EMAIL_DOMAIN"]}>'
- )
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"name": "Jo"}
- reference = provider.send_email(
- from_address,
- recipient,
- plain_text_email.subject,
- body=str(plain_text_email),
- html_body=str(html_email),
- reply_to_address=notification.reply_to_text,
- )
- notification.reference = reference
- update_notification_to_sending(notification, provider)
+ send_to_providers.send_sms_to_provider(db_notification)
+
+ aws_sns_client.send_sms.assert_called_once_with(
+ to="2028675309",
+ content="Sample service: Hello Jo\nHere is some HTML & entities",
+ reference=str(db_notification.id),
+ sender=current_app.config["FROM_NUMBER"],
+ international=False,
+ )
+
+ notification = Notification.query.filter_by(id=db_notification.id).one()
+
+ assert notification.status == NotificationStatus.SENDING
+ assert notification.sent_at <= utc_now()
+ assert notification.sent_by == "sns"
+ assert notification.billable_units == 1
+ assert notification.personalisation == {"name": "Jo"}
-def update_notification_to_sending(notification, provider):
- notification.sent_at = utc_now()
- notification.sent_by = provider.name
- if notification.status not in NotificationStatus.completed_types():
- notification.status = NotificationStatus.SENDING
+def test_should_send_personalised_template_to_correct_email_provider_and_persist(
+ sample_email_template_with_html, mocker
+):
- dao_update_notification(notification)
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ utf8_encoded_email = "jo.smith@example.com".encode("utf-8")
+ mock_redis.get.return_value = utf8_encoded_email
+ email = utf8_encoded_email
+ personalisation = {
+ "name": "Jo",
+ }
+ personalisation = json.dumps(personalisation)
+ personalisation = personalisation.encode("utf-8")
+ mock_redis.get.side_effect = [email, personalisation]
+ db_notification = create_notification(
+ template=sample_email_template_with_html,
+ )
+ db_notification.personalisation = {"name": "Jo"}
+ mocker.patch("app.aws_ses_client.send_email", return_value="reference")
+ send_to_providers.send_email_to_provider(db_notification)
+ app.aws_ses_client.send_email.assert_called_once_with(
+ f'"Sample service" ',
+ "jo.smith@example.com",
+ "Jo some HTML",
+ body="Hello Jo\nThis is an email from GOV.\u200bUK with some HTML\n",
+ html_body=ANY,
+ reply_to_address=None,
+ )
+
+ assert " version_on_notification
+
+ send_to_providers.send_sms_to_provider(db_notification)
+
+ aws_sns_client.send_sms.assert_called_once_with(
+ to="2028675309",
+ content="Sample service: This is a template:\nwith a newline",
+ reference=str(db_notification.id),
+ sender=current_app.config["FROM_NUMBER"],
+ international=False,
+ )
+
+ t = dao_get_template_by_id(expected_template_id)
+
+ persisted_notification = notifications_dao.get_notification_by_id(
+ db_notification.id
+ )
+ assert persisted_notification.to == db_notification.to
+ assert persisted_notification.template_id == expected_template_id
+ assert persisted_notification.template_version == version_on_notification
+ assert persisted_notification.template_version != t.version
+ assert persisted_notification.status == NotificationStatus.SENDING
+
+
+def test_should_have_sending_status_if_fake_callback_function_fails(
+ sample_notification, mocker
+):
+ mocker.patch(
+ "app.delivery.send_to_providers.send_sms_response",
+ side_effect=HTTPError,
+ )
+
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "2028675309"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ sample_notification.key_type = KeyType.TEST
+ with pytest.raises(HTTPError):
+ send_to_providers.send_sms_to_provider(sample_notification)
+ assert sample_notification.status == NotificationStatus.SENDING
+ assert sample_notification.sent_by == "sns"
+
+
+def test_should_not_send_to_provider_when_status_is_not_created(
+ sample_template, mocker
+):
+ notification = create_notification(
+ template=sample_template,
+ status=NotificationStatus.SENDING,
+ )
+ mocker.patch("app.aws_sns_client.send_sms")
+ response_mock = mocker.patch("app.delivery.send_to_providers.send_sms_response")
+
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "2028675309"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(notification)
+
+ app.aws_sns_client.send_sms.assert_not_called()
+ response_mock.assert_not_called()
+
+
+def test_should_send_sms_with_downgraded_content(notify_db_session, mocker):
+ # é, o, and u are in GSM.
+ # ī, grapes, tabs, zero width space and ellipsis are not
+ # ó isn't in GSM, but it is in the welsh alphabet so will still be sent
+
+ mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None)
+ mocker.patch(
+ "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"]
+ )
+ msg = "a é ī o u 🍇 foo\tbar\u200bbaz((misc))…"
+ placeholder = "∆∆∆abc"
+ gsm_message = "?ódz Housing Service: a é i o u ? foo barbaz???abc..."
+ service = create_service(service_name="Łódź Housing Service")
+ template = create_template(service, content=msg)
+ db_notification = create_notification(
+ template=template,
+ )
+ db_notification.personalisation = {"misc": placeholder}
+ db_notification.reply_to_text = "testing"
+
+ mocker.patch("app.aws_sns_client.send_sms")
+
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"misc": placeholder}
+
+ send_to_providers.send_sms_to_provider(db_notification)
+
+ aws_sns_client.send_sms.assert_called_once_with(
+ to=ANY, content=gsm_message, reference=ANY, sender=ANY, international=False
+ )
+
+
+def test_send_sms_should_use_service_sms_sender(
+ sample_service, sample_template, mocker
+):
+
+ mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None)
+ mocker.patch("app.aws_sns_client.send_sms")
+
+ sms_sender = create_service_sms_sender(
+ service=sample_service, sms_sender="123456", is_default=False
+ )
+ db_notification = create_notification(
+ template=sample_template, reply_to_text=sms_sender.sms_sender
+ )
+ expected_sender_name = sms_sender.sms_sender
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(
+ db_notification,
+ )
+
+ app.aws_sns_client.send_sms.assert_called_once_with(
+ to=ANY,
+ content=ANY,
+ reference=ANY,
+ sender=expected_sender_name,
+ international=False,
+ )
+
+
+def test_send_email_to_provider_should_not_send_to_provider_when_status_is_not_created(
+ sample_email_template, mocker
+):
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ mock_redis.get.return_value = "test@example.com".encode("utf-8")
+
+ notification = create_notification(
+ template=sample_email_template, status=NotificationStatus.SENDING
+ )
+ mocker.patch("app.aws_ses_client.send_email")
+ mocker.patch("app.delivery.send_to_providers.send_email_response")
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+ send_to_providers.send_sms_to_provider(notification)
+ app.aws_ses_client.send_email.assert_not_called()
+ app.delivery.send_to_providers.send_email_response.assert_not_called()
+
+
+def test_send_email_should_use_service_reply_to_email(
+ sample_service, sample_email_template, mocker
+):
+ mocker.patch("app.aws_ses_client.send_email", return_value="reference")
+
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ mock_redis.get.return_value = "test@example.com".encode("utf-8")
+
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ email = "foo@bar.com".encode("utf-8")
+ personalisation = {}
+
+ personalisation = json.dumps(personalisation)
+ personalisation = personalisation.encode("utf-8")
+ mock_redis.get.side_effect = [email, personalisation]
+
+ db_notification = create_notification(
+ template=sample_email_template, reply_to_text="foo@bar.com"
+ )
+ create_reply_to_email(service=sample_service, email_address="foo@bar.com")
+
+ send_to_providers.send_email_to_provider(db_notification)
+
+ app.aws_ses_client.send_email.assert_called_once_with(
+ ANY,
+ ANY,
+ ANY,
+ body=ANY,
+ html_body=ANY,
+ reply_to_address="foo@bar.com",
+ )
+
+
+def test_get_html_email_renderer_should_return_for_normal_service(sample_service):
+ options = send_to_providers.get_html_email_options(sample_service)
+ assert options["govuk_banner"] is True
+ assert "brand_colour" not in options.keys()
+ assert "brand_logo" not in options.keys()
+ assert "brand_text" not in options.keys()
+ assert "brand_name" not in options.keys()
+
+
+@pytest.mark.parametrize(
+ "branding_type, govuk_banner",
+ [(BrandType.ORG, False), (BrandType.BOTH, True), (BrandType.ORG_BANNER, False)],
+)
+def test_get_html_email_renderer_with_branding_details(
+ branding_type, govuk_banner, notify_db_session, sample_service
+):
+ email_branding = EmailBranding(
+ brand_type=branding_type,
+ colour="#000000",
+ logo="justice-league.png",
+ name="Justice League",
+ text="League of Justice",
+ )
+ sample_service.email_branding = email_branding
+ notify_db_session.add_all([sample_service, email_branding])
+ notify_db_session.commit()
+
+ options = send_to_providers.get_html_email_options(sample_service)
+
+ assert options["govuk_banner"] == govuk_banner
+ assert options["brand_colour"] == "#000000"
+ assert options["brand_text"] == "League of Justice"
+ assert options["brand_name"] == "Justice League"
+
+ if branding_type == BrandType.ORG_BANNER:
+ assert options["brand_banner"] is True
+ else:
+ assert options["brand_banner"] is False
+
+
+def test_get_html_email_renderer_with_branding_details_and_render_govuk_banner_only(
+ notify_db_session, sample_service
+):
+ sample_service.email_branding = None
+ notify_db_session.add_all([sample_service])
+ notify_db_session.commit()
+
+ options = send_to_providers.get_html_email_options(sample_service)
+
+ assert options == {"govuk_banner": True, "brand_banner": False}
+
+
+def test_get_html_email_renderer_prepends_logo_path(notify_api):
+ Service = namedtuple("Service", ["email_branding"])
+ EmailBranding = namedtuple(
+ "EmailBranding",
+ ["brand_type", "colour", "name", "logo", "text"],
+ )
+
+ email_branding = EmailBranding(
+ brand_type=BrandType.ORG,
+ colour="#000000",
+ logo="justice-league.png",
+ name="Justice League",
+ text="League of Justice",
+ )
+ service = Service(
+ email_branding=email_branding,
+ )
+
+ renderer = send_to_providers.get_html_email_options(service)
+
+ assert (
+ renderer["brand_logo"] == "http://static-logos.notify.tools/justice-league.png"
+ )
+
+
+def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api):
+ Service = namedtuple("Service", ["email_branding"])
+ EmailBranding = namedtuple(
+ "EmailBranding",
+ ["brand_type", "colour", "name", "logo", "text"],
+ )
+
+ email_branding = EmailBranding(
+ brand_type=BrandType.ORG_BANNER,
+ colour="#000000",
+ logo=None,
+ name="Justice League",
+ text="League of Justice",
+ )
+ service = Service(
+ email_branding=email_branding,
+ )
+
+ renderer = send_to_providers.get_html_email_options(service)
+
+ assert renderer["govuk_banner"] is False
+ assert renderer["brand_banner"] is True
+ assert renderer["brand_logo"] is None
+ assert renderer["brand_text"] == "League of Justice"
+ assert renderer["brand_colour"] == "#000000"
+ assert renderer["brand_name"] == "Justice League"
+
+
+@pytest.mark.parametrize(
+ "base_url, expected_url",
+ [
+ # don't change localhost to prevent errors when testing locally
+ ("http://localhost:6012", "http://static-logos.notify.tools/filename.png"),
+ (
+ "https://www.notifications.service.gov.uk",
+ "https://static-logos.notifications.service.gov.uk/filename.png",
+ ),
+ ("https://notify.works", "https://static-logos.notify.works/filename.png"),
+ (
+ "https://staging-notify.works",
+ "https://static-logos.staging-notify.works/filename.png",
+ ),
+ ("https://www.notify.works", "https://static-logos.notify.works/filename.png"),
+ (
+ "https://www.staging-notify.works",
+ "https://static-logos.staging-notify.works/filename.png",
+ ),
+ ],
+)
+def test_get_logo_url_works_for_different_environments(base_url, expected_url):
+ logo_file = "filename.png"
+
+ logo_url = send_to_providers.get_logo_url(base_url, logo_file)
+
+ assert logo_url == expected_url
+
+
+@pytest.mark.parametrize(
+ "starting_status, expected_status",
+ [
+ (NotificationStatus.DELIVERED, NotificationStatus.DELIVERED),
+ (NotificationStatus.CREATED, NotificationStatus.SENDING),
+ (NotificationStatus.TECHNICAL_FAILURE, NotificationStatus.TECHNICAL_FAILURE),
+ ],
+)
+def test_update_notification_to_sending_does_not_update_status_from_a_final_status(
+ sample_service, notify_db_session, starting_status, expected_status
+):
+ template = create_template(sample_service)
+ notification = create_notification(template=template, status=starting_status)
+ send_to_providers.update_notification_to_sending(
+ notification,
+ notification_provider_clients.get_client_by_name_and_type(
+ "sns", NotificationType.SMS
+ ),
+ )
+ assert notification.status == expected_status
+
+
+def __update_notification(notification_to_update, research_mode, expected_status):
+ if research_mode or notification_to_update.key_type == KeyType.TEST:
+ notification_to_update.status = expected_status
+
+
+@pytest.mark.parametrize(
+ "research_mode,key_type, billable_units, expected_status",
+ [
+ (True, KeyType.NORMAL, 0, NotificationStatus.DELIVERED),
+ (False, KeyType.NORMAL, 1, NotificationStatus.SENDING),
+ (False, KeyType.TEST, 0, NotificationStatus.SENDING),
+ (True, KeyType.TEST, 0, NotificationStatus.SENDING),
+ (True, KeyType.TEAM, 0, NotificationStatus.DELIVERED),
+ (False, KeyType.TEAM, 1, NotificationStatus.SENDING),
+ ],
+)
+def test_should_update_billable_units_and_status_according_to_research_mode_and_key_type(
+ sample_template, mocker, research_mode, key_type, billable_units, expected_status
+):
+
+ mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None)
+ mocker.patch(
+ "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"]
+ )
+ notification = create_notification(
+ template=sample_template,
+ billable_units=0,
+ status=NotificationStatus.CREATED,
+ key_type=key_type,
+ reply_to_text="testing",
+ )
+ mocker.patch("app.aws_sns_client.send_sms")
+ mocker.patch(
+ "app.delivery.send_to_providers.send_sms_response",
+ side_effect=__update_notification(notification, research_mode, expected_status),
+ )
+
+ if research_mode:
+ sample_template.service.research_mode = True
+
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ # So we don't treat it as a one off and have to mock other things
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(notification)
+ assert notification.billable_units == billable_units
+ assert notification.status == expected_status
+
+
+def test_should_set_notification_billable_units_and_reduces_provider_priority_if_sending_to_provider_fails(
+ sample_notification,
+ mocker,
+):
+ mocker.patch("app.aws_sns_client.send_sms", side_effect=Exception())
+
+ sample_notification.billable_units = 0
+ assert sample_notification.sent_by is None
+
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ # flake8 no longer likes raises with a generic exception
+ try:
+ send_to_providers.send_sms_to_provider(sample_notification)
+ assert 1 == 0
+ except Exception:
+ assert 1 == 1
+
+ assert sample_notification.billable_units == 1
+
+
+def test_should_send_sms_to_international_providers(
+ sample_template, sample_user, mocker
+):
+
+ mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None)
+ mocker.patch("app.aws_sns_client.send_sms")
+
+ notification_international = create_notification(
+ template=sample_template,
+ to_field="+6011-17224412",
+ personalisation={"name": "Jo"},
+ status=NotificationStatus.CREATED,
+ international=True,
+ reply_to_text=sample_template.service.get_default_sms_sender(),
+ normalised_to="601117224412",
+ )
+
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "601117224412"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(notification_international)
+
+ aws_sns_client.send_sms.assert_called_once_with(
+ to="601117224412",
+ content=ANY,
+ reference=str(notification_international.id),
+ sender=current_app.config["FROM_NUMBER"],
+ international=True,
+ )
+
+ assert notification_international.status == NotificationStatus.SENDING
+ assert notification_international.sent_by == "sns"
+
+
+@pytest.mark.parametrize(
+ "sms_sender, expected_sender, prefix_sms, expected_content",
+ [
+ ("foo", "foo", False, "bar"),
+ ("foo", "foo", True, "Sample service: bar"),
+ # if 40604 is actually in DB then treat that as if entered manually
+ ("40604", "40604", False, "bar"),
+ # 'testing' is the FROM_NUMBER during unit tests
+ ("testing", "testing", True, "Sample service: bar"),
+ ("testing", "testing", False, "bar"),
+ ],
+)
+def test_should_handle_sms_sender_and_prefix_message(
+ mocker, sms_sender, prefix_sms, expected_sender, expected_content, notify_db_session
+):
+
+ mocker.patch("app.delivery.send_to_providers.redis_store", return_value=None)
+ mocker.patch("app.aws_sns_client.send_sms")
+ service = create_service_with_defined_sms_sender(
+ sms_sender_value=sms_sender, prefix_sms=prefix_sms
+ )
+ template = create_template(service, content="bar")
+ notification = create_notification(template, reply_to_text=sms_sender)
+
+ mock_phone = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_phone.return_value = "15555555555"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(notification)
+
+ aws_sns_client.send_sms.assert_called_once_with(
+ content=expected_content,
+ sender=expected_sender,
+ to=ANY,
+ reference=ANY,
+ international=False,
+ )
+
+
+def test_send_email_to_provider_uses_reply_to_from_notification(
+ sample_email_template, mocker
+):
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ mock_redis.get.side_effect = [
+ "test@example.com".encode("utf-8"),
+ json.dumps({}).encode("utf-8"),
]
- if not active_providers:
- current_app.logger.error(f"{notification_type} failed as no active providers")
- raise Exception(f"No active {notification_type} providers")
+ mocker.patch("app.aws_ses_client.send_email", return_value="reference")
- # we only have sns
- chosen_provider = active_providers[0]
+ db_notification = create_notification(
+ template=sample_email_template,
+ reply_to_text="test@test.com",
+ )
- return notification_provider_clients.get_client_by_name_and_type(
- chosen_provider.identifier, notification_type
+ send_to_providers.send_email_to_provider(db_notification)
+
+ app.aws_ses_client.send_email.assert_called_once_with(
+ ANY,
+ ANY,
+ ANY,
+ body=ANY,
+ html_body=ANY,
+ reply_to_address="test@test.com",
)
-def get_logo_url(base_url, logo_file):
- base_url = parse.urlparse(base_url)
- netloc = base_url.netloc
+def test_send_sms_to_provider_should_use_normalised_to(mocker, client, sample_template):
- if base_url.netloc.startswith("localhost"):
- netloc = "notify.tools"
- elif base_url.netloc.startswith("www"):
- # strip "www."
- netloc = base_url.netloc[4:]
-
- logo_url = parse.ParseResult(
- scheme=base_url.scheme,
- netloc="static-logos." + netloc,
- path=logo_file,
- params=base_url.params,
- query=base_url.query,
- fragment=base_url.fragment,
+ mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None)
+ mocker.patch(
+ "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"]
)
- return parse.urlunparse(logo_url)
-
-
-def get_html_email_options(service):
- if service.email_branding is None:
- return {
- "govuk_banner": True,
- "brand_banner": False,
- }
- if isinstance(service, SerialisedService):
- branding = dao_get_email_branding_by_id(service.email_branding)
- else:
- branding = service.email_branding
-
- logo_url = (
- get_logo_url(current_app.config["ADMIN_BASE_URL"], branding.logo)
- if branding.logo
- else None
+ send_mock = mocker.patch("app.aws_sns_client.send_sms")
+ notification = create_notification(
+ template=sample_template,
+ to_field="+12028675309",
+ normalised_to="2028675309",
+ reply_to_text="testing",
)
- return {
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "12028675309"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+ send_to_providers.send_sms_to_provider(notification)
+ send_mock.assert_called_once_with(
+ to="12028675309",
+ content=ANY,
+ reference=str(notification.id),
+ sender=notification.reply_to_text,
+ international=False,
+ )
+
+
+def test_send_email_to_provider_should_user_normalised_to(
+ mocker, client, sample_email_template
+):
+ send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference")
+ notification = create_notification(
+ template=sample_email_template,
+ )
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ mock_redis.get.return_value = "test@example.com".encode("utf-8")
+
+ mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ mock_redis.get.return_value = "jo.smith@example.com".encode("utf-8")
+ email = "test@example.com".encode("utf-8")
+ personalisation = {}
+
+ personalisation = json.dumps(personalisation)
+ personalisation = personalisation.encode("utf-8")
+ mock_redis.get.side_effect = [email, personalisation]
+
+ send_to_providers.send_email_to_provider(notification)
+ send_mock.assert_called_once_with(
+ ANY,
+ "test@example.com",
+ ANY,
+ body=ANY,
+ html_body=ANY,
+ reply_to_address=notification.reply_to_text,
+ )
+
+
+def test_send_sms_to_provider_should_return_template_if_found_in_redis(
+ mocker, client, sample_template
+):
+
+ mocker.patch("app.delivery.send_to_providers._get_verify_code", return_value=None)
+ mocker.patch(
+ "app.delivery.send_to_providers.get_sender_numbers", return_value=["testing"]
+ )
+ from app.schemas import service_schema, template_schema
+
+ service_dict = service_schema.dump(sample_template.service)
+ template_dict = template_schema.dump(sample_template)
+
+ mocker.patch(
+ "app.redis_store.get",
+ side_effect=[
+ json.dumps({"data": service_dict}).encode("utf-8"),
+ json.dumps({"data": template_dict}).encode("utf-8"),
+ ],
+ )
+ mock_get_template = mocker.patch(
+ "app.dao.templates_dao.dao_get_template_by_id_and_service_id"
+ )
+ mock_get_service = mocker.patch("app.dao.services_dao.dao_fetch_service_by_id")
+
+ send_mock = mocker.patch("app.aws_sns_client.send_sms")
+ notification = create_notification(
+ template=sample_template,
+ to_field="+447700900855",
+ normalised_to="447700900855",
+ reply_to_text="testing",
+ )
+
+ mock_s3 = mocker.patch("app.delivery.send_to_providers.get_phone_number_from_s3")
+ mock_s3.return_value = "447700900855"
+
+ mock_personalisation = mocker.patch(
+ "app.delivery.send_to_providers.get_personalisation_from_s3"
+ )
+ mock_personalisation.return_value = {"ignore": "ignore"}
+
+ send_to_providers.send_sms_to_provider(notification)
+ assert mock_get_template.called is False
+ assert mock_get_service.called is False
+ send_mock.assert_called_once_with(
+ to="447700900855",
+ content=ANY,
+ reference=str(notification.id),
+ sender=notification.reply_to_text,
+ international=False,
+ )
+
+
+def test_send_email_to_provider_should_return_template_if_found_in_redis(
+ mocker, client, sample_email_template
+):
+ from app.schemas import service_schema, template_schema
+
+ # mock_redis = mocker.patch("app.delivery.send_to_providers.redis_store")
+ # mock_redis.get.return_value = "jo.smith@example.com".encode("utf-8")
+ email = "test@example.com".encode("utf-8")
+ personalisation = {
+ "name": "Jo",
+ }
+
+ personalisation = json.dumps(personalisation)
+ personalisation = personalisation.encode("utf-8")
+ # mock_redis.get.side_effect = [email, personalisation]
+
+ service_dict = service_schema.dump(sample_email_template.service)
+ template_dict = template_schema.dump(sample_email_template)
+
+ mocker.patch(
+ "app.redis_store.get",
+ side_effect=[
+ email,
+ personalisation,
+ json.dumps({"data": service_dict}).encode("utf-8"),
+ json.dumps({"data": template_dict}).encode("utf-8"),
+ ],
+ )
+ mock_get_template = mocker.patch(
+ "app.dao.templates_dao.dao_get_template_by_id_and_service_id"
+ )
+ mock_get_service = mocker.patch("app.dao.services_dao.dao_fetch_service_by_id")
+ send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference")
+ notification = create_notification(
+ template=sample_email_template,
+ )
+
+ send_to_providers.send_email_to_provider(notification)
+ assert mock_get_template.called is False
+ assert mock_get_service.called is False
+ send_mock.assert_called_once_with(
+ ANY,
+ "test@example.com",
+ ANY,
+ body=ANY,
+ html_body=ANY,
+ reply_to_address=notification.reply_to_text,
+ )
+
+
+def test_get_html_email_options_return_email_branding_from_serialised_service(
+ sample_service,
+):
+ branding = create_email_branding()
+ sample_service.email_branding = branding
+ service = SerialisedService.from_id(sample_service.id)
+ email_options = get_html_email_options(service)
+ assert email_options is not None
+ assert email_options == {
"govuk_banner": branding.brand_type == BrandType.BOTH,
"brand_banner": branding.brand_type == BrandType.ORG_BANNER,
"brand_colour": branding.colour,
- "brand_logo": logo_url,
+ "brand_logo": get_logo_url(current_app.config["ADMIN_BASE_URL"], branding.logo),
"brand_text": branding.text,
"brand_name": branding.name,
}
-def technical_failure(notification):
- notification.status = NotificationStatus.TECHNICAL_FAILURE
- dao_update_notification(notification)
- raise NotificationTechnicalFailureException(
- f"Send {notification.notification_type} for notification id {notification.id} "
- f"to provider is not allowed: service {notification.service_id} is inactive"
- )
+def test_get_html_email_options_add_email_branding_from_service(sample_service):
+ branding = create_email_branding()
+ sample_service.email_branding = branding
+ email_options = get_html_email_options(sample_service)
+ assert email_options is not None
+ assert email_options == {
+ "govuk_banner": branding.brand_type == BrandType.BOTH,
+ "brand_banner": branding.brand_type == BrandType.ORG_BANNER,
+ "brand_colour": branding.colour,
+ "brand_logo": get_logo_url(current_app.config["ADMIN_BASE_URL"], branding.logo),
+ "brand_text": branding.text,
+ "brand_name": branding.name,
+ }
@pytest.mark.parametrize(
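Note: most of the rewritten SMS tests above stub the same two S3 lookups by hand. A minimal sketch of a shared fixture that bundles them; the fixture name and stubbed values are illustrative and not part of the patch:

    import pytest


    @pytest.fixture
    def stub_s3_lookups(mocker):
        # Mirrors the pair of patches repeated throughout this test module.
        phone = mocker.patch(
            "app.delivery.send_to_providers.get_phone_number_from_s3",
            return_value="2028675309",
        )
        personalisation = mocker.patch(
            "app.delivery.send_to_providers.get_personalisation_from_s3",
            return_value={"ignore": "ignore"},
        )
        return phone, personalisation

A test that only exercises the send path could then accept stub_s3_lookups instead of redeclaring both mocks.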
From 05a6a2a4d9f69f52fa40cb5eea43684730bd180a Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 14:04:26 -0700
Subject: [PATCH 05/39] comment out strange command we may never use
---
app/commands.py | 171 ++++++++++++++++++++++++------------------------
1 file changed, 84 insertions(+), 87 deletions(-)
diff --git a/app/commands.py b/app/commands.py
index 45fce9211..1c761f84a 100644
--- a/app/commands.py
+++ b/app/commands.py
@@ -24,12 +24,6 @@ from app.dao.annual_billing_dao import (
dao_create_or_update_annual_billing_for_year,
set_default_free_allowance_for_service,
)
-from app.dao.fact_billing_dao import (
- delete_billing_data_for_service_for_day,
- fetch_billing_data_for_day,
- get_service_ids_that_need_billing_populated,
- update_fact_billing,
-)
from app.dao.jobs_dao import dao_get_job_by_id
from app.dao.organization_dao import (
dao_add_service_to_organization,
@@ -63,7 +57,7 @@ from app.models import (
TemplateHistory,
User,
)
-from app.utils import get_midnight_in_utc, utc_now
+from app.utils import utc_now
from notifications_utils.recipients import RecipientCSV
from notifications_utils.template import SMSMessageTemplate
from tests.app.db import (
@@ -167,76 +161,78 @@ def purge_functional_test_data(user_email_prefix):
delete_model_user(usr)
-@notify_command(name="insert-inbound-numbers")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
-)
-def insert_inbound_numbers_from_file(file_name):
- # TODO maintainability what is the purpose of this command? Who would use it and why?
+# TODO maintainability what is the purpose of this command? Who would use it and why?
+# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
+# @notify_command(name="insert-inbound-numbers")
+# @click.option(
+# "-f",
+# "--file_name",
+# required=True,
+# help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
+# )
+# def insert_inbound_numbers_from_file(file_name):
- current_app.logger.info(f"Inserting inbound numbers from {file_name}")
- with open(file_name) as file:
- sql = text(
- "insert into inbound_numbers values(:uuid, :line, 'sns', null, True, now(), null);"
- )
+# current_app.logger.info(f"Inserting inbound numbers from {file_name}")
+# with open(file_name) as file:
+# sql = text(
+# "insert into inbound_numbers values(:uuid, :line, 'sns', null, True, now(), null);"
+# )
- for line in file:
- line = line.strip()
- if line:
- current_app.logger.info(line)
- db.session.execute(sql, {"uuid": str(uuid.uuid4()), "line": line})
- db.session.commit()
+# for line in file:
+# line = line.strip()
+# if line:
+# current_app.logger.info(line)
+# db.session.execute(sql, {"uuid": str(uuid.uuid4()), "line": line})
+# db.session.commit()
def setup_commands(application):
application.cli.add_command(command_group)
-@notify_command(name="rebuild-ft-billing-for-day")
-@click.option("-s", "--service_id", required=False, type=click.UUID)
-@click.option(
- "-d",
- "--day",
- help="The date to recalculate, as YYYY-MM-DD",
- required=True,
- type=click_dt(format="%Y-%m-%d"),
-)
-def rebuild_ft_billing_for_day(service_id, day):
- # TODO maintainability what is the purpose of this command? Who would use it and why?
+# TODO maintainability what is the purpose of this command? Who would use it and why?
+# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
+# @notify_command(name="rebuild-ft-billing-for-day")
+# @click.option("-s", "--service_id", required=False, type=click.UUID)
+# @click.option(
+# "-d",
+# "--day",
+# help="The date to recalculate, as YYYY-MM-DD",
+# required=True,
+# type=click_dt(format="%Y-%m-%d"),
+# )
+# def rebuild_ft_billing_for_day(service_id, day):
- """
- Rebuild the data in ft_billing for the given service_id and date
- """
+# """
+# Rebuild the data in ft_billing for the given service_id and date
+# """
- def rebuild_ft_data(process_day, service):
- deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
- current_app.logger.info(
- f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
- )
- transit_data = fetch_billing_data_for_day(
- process_day=process_day, service_id=service
- )
- # transit_data = every row that should exist
- for data in transit_data:
- # upsert existing rows
- update_fact_billing(data, process_day)
- current_app.logger.info(
- f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
- )
+# def rebuild_ft_data(process_day, service):
+# deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
+# current_app.logger.info(
+# f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
+# )
+# transit_data = fetch_billing_data_for_day(
+# process_day=process_day, service_id=service
+# )
+# # transit_data = every row that should exist
+# for data in transit_data:
+# # upsert existing rows
+# update_fact_billing(data, process_day)
+# current_app.logger.info(
+# f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
+# )
- if service_id:
- # confirm the service exists
- dao_fetch_service_by_id(service_id)
- rebuild_ft_data(day, service_id)
- else:
- services = get_service_ids_that_need_billing_populated(
- get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
- )
- for row in services:
- rebuild_ft_data(day, row.service_id)
+# if service_id:
+# # confirm the service exists
+# dao_fetch_service_by_id(service_id)
+# rebuild_ft_data(day, service_id)
+# else:
+# services = get_service_ids_that_need_billing_populated(
+# get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
+# )
+# for row in services:
+# rebuild_ft_data(day, row.service_id)
@notify_command(name="bulk-invite-user-to-service")
@@ -472,29 +468,30 @@ def associate_services_to_organizations():
current_app.logger.info("finished associating services to organizations")
-@notify_command(name="populate-service-volume-intentions")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="Pipe delimited file containing service_id, SMS, email",
-)
-def populate_service_volume_intentions(file_name):
- # [0] service_id
- # [1] SMS:: volume intentions for service
- # [2] Email:: volume intentions for service
+# TODO maintainability what is the purpose of this command? Who would use it and why?
+# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
+# @notify_command(name="populate-service-volume-intentions")
+# @click.option(
+# "-f",
+# "--file_name",
+# required=True,
+# help="Pipe delimited file containing service_id, SMS, email",
+# )
+# def populate_service_volume_intentions(file_name):
+# # [0] service_id
+# # [1] SMS:: volume intentions for service
+# # [2] Email:: volume intentions for service
- # TODO maintainability what is the purpose of this command? Who would use it and why?
- with open(file_name, "r") as f:
- for line in itertools.islice(f, 1, None):
- columns = line.split(",")
- current_app.logger.info(columns)
- service = dao_fetch_service_by_id(columns[0])
- service.volume_sms = columns[1]
- service.volume_email = columns[2]
- dao_update_service(service)
- current_app.logger.info("populate-service-volume-intentions complete")
+# with open(file_name, "r") as f:
+# for line in itertools.islice(f, 1, None):
+# columns = line.split(",")
+# current_app.logger.info(columns)
+# service = dao_fetch_service_by_id(columns[0])
+# service.volume_sms = columns[1]
+# service.volume_email = columns[2]
+# dao_update_service(service)
+# current_app.logger.info("populate-service-volume-intentions complete")
@notify_command(name="populate-go-live")
From 06643c3bb50dc58963fa7dae7139843aa5fdb862 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 14:11:47 -0700
Subject: [PATCH 06/39] comment out strange command we may never use
---
app/commands.py | 39 +++++++++++++++++++--------------------
1 file changed, 19 insertions(+), 20 deletions(-)
diff --git a/app/commands.py b/app/commands.py
index 1c761f84a..a43ae06ca 100644
--- a/app/commands.py
+++ b/app/commands.py
@@ -162,28 +162,27 @@ def purge_functional_test_data(user_email_prefix):
# TODO maintainability what is the purpose of this command? Who would use it and why?
-# COMMENTING OUT UNTIL WE DETERMINE IF WE NEED IT OR NOT
-# @notify_command(name="insert-inbound-numbers")
-# @click.option(
-# "-f",
-# "--file_name",
-# required=True,
-# help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
-# )
-# def insert_inbound_numbers_from_file(file_name):
+@notify_command(name="insert-inbound-numbers")
+@click.option(
+ "-f",
+ "--file_name",
+ required=True,
+ help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
+)
+def insert_inbound_numbers_from_file(file_name):
-# current_app.logger.info(f"Inserting inbound numbers from {file_name}")
-# with open(file_name) as file:
-# sql = text(
-# "insert into inbound_numbers values(:uuid, :line, 'sns', null, True, now(), null);"
-# )
+ current_app.logger.info(f"Inserting inbound numbers from {file_name}")
+ with open(file_name) as file:
+ sql = text(
+ "insert into inbound_numbers values(:uuid, :line, 'sns', null, True, now(), null);"
+ )
-# for line in file:
-# line = line.strip()
-# if line:
-# current_app.logger.info(line)
-# db.session.execute(sql, {"uuid": str(uuid.uuid4()), "line": line})
-# db.session.commit()
+ for line in file:
+ line = line.strip()
+ if line:
+ current_app.logger.info(line)
+ db.session.execute(sql, {"uuid": str(uuid.uuid4()), "line": line})
+ db.session.commit()
def setup_commands(application):
From face881a90ecb0ab40f3ee535080e8bd71bb6b1c Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 14:45:22 -0700
Subject: [PATCH 07/39] clean up sanitise_text
---
notifications_utils/sanitise_text.py | 22 +++++++++-------------
1 file changed, 9 insertions(+), 13 deletions(-)
diff --git a/notifications_utils/sanitise_text.py b/notifications_utils/sanitise_text.py
index 3e9da0764..5a1d1c382 100644
--- a/notifications_utils/sanitise_text.py
+++ b/notifications_utils/sanitise_text.py
@@ -122,19 +122,15 @@ class SanitiseText:
def is_punjabi(cls, value):
# Gurmukhi script or Shahmukhi script
- if regex.search(r"[\u0A00-\u0A7F]+", value):
- return True
- elif regex.search(r"[\u0600-\u06FF]+", value):
- return True
- elif regex.search(r"[\u0750-\u077F]+", value):
- return True
- elif regex.search(r"[\u08A0-\u08FF]+", value):
- return True
- elif regex.search(r"[\uFB50-\uFDFF]+", value):
- return True
- elif regex.search(r"[\uFE70-\uFEFF]+", value):
- return True
- elif regex.search(r"[\u0900-\u097F]+", value):
+ if (
+ regex.search(r"[\u0A00-\u0A7F]+", value)
+ or regex.search(r"[\u0600-\u06FF]+", value)
+ or regex.search(r"[\u0750-\u077F]+", value)
+ or regex.search(r"[\u08A0-\u08FF]+", value)
+ or regex.search(r"[\uFB50-\uFDFF]+", value)
+ or regex.search(r"[\uFE70-\uFEFF]+", value)
+ or regex.search(r"[\u0900-\u097F]+", value)
+ ):
return True
return False
From dba29a8ea7fff2d5f651c3669462dcc8bc3e9115 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 14:55:15 -0700
Subject: [PATCH 08/39] clean up sanitise_text
---
notifications_utils/sanitise_text.py | 38 ++++++++++++----------------
1 file changed, 16 insertions(+), 22 deletions(-)
diff --git a/notifications_utils/sanitise_text.py b/notifications_utils/sanitise_text.py
index 5a1d1c382..750a2e49b 100644
--- a/notifications_utils/sanitise_text.py
+++ b/notifications_utils/sanitise_text.py
@@ -152,33 +152,27 @@ class SanitiseText:
@classmethod
def _is_extended_language_group_two(cls, value):
- if regex.search(r"\p{IsBuhid}", value):
- return True
- if regex.search(r"\p{IsCanadian_Aboriginal}", value):
- return True
- if regex.search(r"\p{IsCherokee}", value):
- return True
- if regex.search(r"\p{IsDevanagari}", value):
- return True
- if regex.search(r"\p{IsEthiopic}", value):
- return True
- if regex.search(r"\p{IsGeorgian}", value):
+ if (
+ regex.search(r"\p{IsBuhid}", value)
+ or regex.search(r"\p{IsCanadian_Aboriginal}", value)
+ or regex.search(r"\p{IsCherokee}", value)
+ or regex.search(r"\p{IsDevanagari}", value)
+ or regex.search(r"\p{IsEthiopic}", value)
+ or regex.search(r"\p{IsGeorgian}", value)
+ ):
return True
return False
@classmethod
def _is_extended_language_group_three(cls, value):
- if regex.search(r"\p{IsGreek}", value):
- return True
- if regex.search(r"\p{IsGujarati}", value):
- return True
- if regex.search(r"\p{IsHanunoo}", value):
- return True
- if regex.search(r"\p{IsHebrew}", value):
- return True
- if regex.search(r"\p{IsLimbu}", value):
- return True
- if regex.search(r"\p{IsKannada}", value):
+ if (
+ regex.search(r"\p{IsGreek}", value)
+ or regex.search(r"\p{IsGujarati}", value)
+ or regex.search(r"\p{IsHanunoo}", value)
+ or regex.search(r"\p{IsHebrew}", value)
+ or regex.search(r"\p{IsLimbu}", value)
+ or regex.search(r"\p{IsKannada}", value)
+ ):
return True
return False
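The same consolidation works for the grouped script checks: the list of \p{Is...} script properties can drive a single any() loop. A sketch (helper name and constant are illustrative, not the class's API), again assuming the regex package:

import regex

GROUP_TWO_SCRIPTS = (
    "Buhid",
    "Canadian_Aboriginal",
    "Cherokee",
    "Devanagari",
    "Ethiopic",
    "Georgian",
)


def contains_any_script(value, script_names):
    # Builds \p{IsBuhid}, \p{IsCherokee}, ... and stops at the first match.
    return any(regex.search(rf"\p{{Is{name}}}", value) for name in script_names)


# contains_any_script("\u13e3\u13b3\u13a9", GROUP_TWO_SCRIPTS) is True (Cherokee).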
From 445a462b1052353e85475b284fb93c69db71650f Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 2 Oct 2024 15:00:03 -0700
Subject: [PATCH 09/39] clean up s3
---
app/aws/s3.py | 18 +-----------------
1 file changed, 1 insertion(+), 17 deletions(-)
diff --git a/app/aws/s3.py b/app/aws/s3.py
index bd0301d78..dc293ea6f 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -466,23 +466,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):
set_job_cache(job_cache, f"{job_id}_personalisation", extract_personalisation(job))
- # If we can find the quick dictionary, use it
- if job_cache.get(f"{job_id}_personalisation") is not None:
- personalisation_to_return = job_cache.get(f"{job_id}_personalisation")[0].get(
- job_row_number
- )
- if personalisation_to_return:
- return personalisation_to_return
- else:
- current_app.logger.warning(
- f"Was unable to retrieve personalisation from lookup dictionary for job {job_id}"
- )
- return {}
- else:
- current_app.logger.error(
- f"Was unable to construct lookup dictionary for job {job_id}"
- )
- return {}
+ return job_cache.get(f"{job_id}_personalisation")[0].get(job_row_number)
def get_job_metadata_from_s3(service_id, job_id):
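The simplified return above relies on set_job_cache having just populated the "{job_id}_personalisation" entry; if that key were ever absent, indexing [0] on None would raise. A small defensive sketch of the same lookup (hypothetical helper, not the project's code) that falls back to an empty dict:

def personalisation_from_cache(job_cache, job_id, job_row_number):
    # Guarded version of the one-line return: a missing cache entry or a
    # missing row number both degrade to an empty personalisation dict.
    entry = job_cache.get(f"{job_id}_personalisation")
    if not entry:
        return {}
    return entry[0].get(job_row_number) or {}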
From 76373de13b9f05b4b71c37d5206125af480c066e Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 08:54:16 -0700
Subject: [PATCH 10/39] comment out search for notification by to field
---
app/service/rest.py | 106 ++++++++++++++++++++++----------------------
1 file changed, 53 insertions(+), 53 deletions(-)
diff --git a/app/service/rest.py b/app/service/rest.py
index 070f13457..9ae507adb 100644
--- a/app/service/rest.py
+++ b/app/service/rest.py
@@ -453,16 +453,16 @@ def get_all_notifications_for_service(service_id):
data = notifications_filter_schema.load(MultiDict(request.get_json()))
current_app.logger.debug(f"use POST, request {request.get_json()} data {data}")
- if data.get("to"):
- notification_type = (
- data.get("template_type")[0] if data.get("template_type") else None
- )
- return search_for_notification_by_to_field(
- service_id=service_id,
- search_term=data["to"],
- statuses=data.get("status"),
- notification_type=notification_type,
- )
+ # if data.get("to"):
+ # notification_type = (
+ # data.get("template_type")[0] if data.get("template_type") else None
+ # )
+ # return search_for_notification_by_to_field(
+ # service_id=service_id,
+ # search_term=data["to"],
+ # statuses=data.get("status"),
+ # notification_type=notification_type,
+ # )
page = data["page"] if "page" in data else 1
page_size = (
data["page_size"]
@@ -583,51 +583,51 @@ def get_notification_for_service(service_id, notification_id):
)
-def search_for_notification_by_to_field(
- service_id, search_term, statuses, notification_type
-):
- results = notifications_dao.dao_get_notifications_by_recipient_or_reference(
- service_id=service_id,
- search_term=search_term,
- statuses=statuses,
- notification_type=notification_type,
- page=1,
- page_size=current_app.config["PAGE_SIZE"],
- )
+# def search_for_notification_by_to_field(
+# service_id, search_term, statuses, notification_type
+# ):
+# results = notifications_dao.dao_get_notifications_by_recipient_or_reference(
+# service_id=service_id,
+# search_term=search_term,
+# statuses=statuses,
+# notification_type=notification_type,
+# page=1,
+# page_size=current_app.config["PAGE_SIZE"],
+# )
- # We try and get the next page of results to work out if we need provide a pagination link to the next page
- # in our response. Note, this was previously be done by having
- # notifications_dao.dao_get_notifications_by_recipient_or_reference use count=False when calling
- # Flask-Sqlalchemys `paginate'. But instead we now use this way because it is much more performant for
- # services with many results (unlike using Flask SqlAlchemy `paginate` with `count=True`, this approach
- # doesn't do an additional query to count all the results of which there could be millions but instead only
- # asks for a single extra page of results).
- next_page_of_pagination = notifications_dao.dao_get_notifications_by_recipient_or_reference(
- service_id=service_id,
- search_term=search_term,
- statuses=statuses,
- notification_type=notification_type,
- page=2,
- page_size=current_app.config["PAGE_SIZE"],
- error_out=False, # False so that if there are no results, it doesn't end in aborting with a 404
- )
+# # We try and get the next page of results to work out if we need provide a pagination link to the next page
+# # in our response. Note, this was previously be done by having
+# # notifications_dao.dao_get_notifications_by_recipient_or_reference use count=False when calling
+# # Flask-Sqlalchemys `paginate'. But instead we now use this way because it is much more performant for
+# # services with many results (unlike using Flask SqlAlchemy `paginate` with `count=True`, this approach
+# # doesn't do an additional query to count all the results of which there could be millions but instead only
+# # asks for a single extra page of results).
+# next_page_of_pagination = notifications_dao.dao_get_notifications_by_recipient_or_reference(
+# service_id=service_id,
+# search_term=search_term,
+# statuses=statuses,
+# notification_type=notification_type,
+# page=2,
+# page_size=current_app.config["PAGE_SIZE"],
+# error_out=False, # False so that if there are no results, it doesn't end in aborting with a 404
+# )
- return (
- jsonify(
- notifications=notification_with_template_schema.dump(
- results.items, many=True
- ),
- links=get_prev_next_pagination_links(
- 1,
- len(next_page_of_pagination.items),
- ".get_all_notifications_for_service",
- statuses=statuses,
- notification_type=notification_type,
- service_id=service_id,
- ),
- ),
- 200,
- )
+# return (
+# jsonify(
+# notifications=notification_with_template_schema.dump(
+# results.items, many=True
+# ),
+# links=get_prev_next_pagination_links(
+# 1,
+# len(next_page_of_pagination.items),
+# ".get_all_notifications_for_service",
+# statuses=statuses,
+# notification_type=notification_type,
+# service_id=service_id,
+# ),
+# ),
+# 200,
+# )
@service_blueprint.route("//notifications/monthly", methods=["GET"])
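The commented-out endpoint decided whether to emit a next-page link by fetching a second page with error_out=False rather than counting every match. A framework-agnostic sketch of the same idea in its common "over-fetch by one row" form (names are illustrative, not the project's API):

def page_with_next_flag(fetch_rows, page, page_size):
    # fetch_rows(offset, limit) is an assumed callable returning a list of rows.
    # Asking for one extra row reveals whether a next page exists without a
    # separate COUNT query over a potentially huge table.
    rows = fetch_rows((page - 1) * page_size, page_size + 1)
    has_next = len(rows) > page_size
    return rows[:page_size], has_next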
From 38583c28eaa99ba081a58baa196de5e01fc8ae15 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 09:23:16 -0700
Subject: [PATCH 11/39] add a test in service rest
---
tests/app/service/test_rest.py | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index fec71cf82..5e179b708 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1959,6 +1959,29 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs(
assert response.status_code == 200
+def test_get_monthly_notification_stats_by_user(
+ client,
+ sample_service,
+ sample_user,
+ mocker,
+):
+ mock_s3 = mocker.patch("app.service.rest.get_phone_number_from_s3")
+ mock_s3.return_value = ""
+
+ mock_s3 = mocker.patch("app.service.rest.get_personalisation_from_s3")
+ mock_s3.return_value = {}
+
+ auth_header = create_admin_authorization_header()
+
+ response = client.get(
+ path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly"),
+ headers=[auth_header],
+ )
+
+ # TODO This test could be a little more complete
+ assert response.status_code == 200
+
+
def test_get_only_api_created_notifications_for_service(
admin_request,
sample_job,
From 55966267c2d13bd861885c3bf1b33717a19bced1 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 09:35:05 -0700
Subject: [PATCH 12/39] add a test in service rest
---
tests/app/service/test_rest.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index 5e179b708..d6f87d0f6 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1975,9 +1975,12 @@ def test_get_monthly_notification_stats_by_user(
response = client.get(
path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly"),
+ year=2024,
headers=[auth_header],
)
+ resp = json.loads(response.get_data(as_text=True))
+ print(f"RESP is {resp}")
# TODO This test could be a little more complete
assert response.status_code == 200
From b0735ffcdce7808e2be10bf3ddda2dcc0798871a Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 09:48:32 -0700
Subject: [PATCH 13/39] add a test in service rest
---
tests/app/service/test_rest.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index d6f87d0f6..1c5170596 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1975,8 +1975,8 @@ def test_get_monthly_notification_stats_by_user(
response = client.get(
path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly"),
- year=2024,
headers=[auth_header],
+ year="2024",
)
resp = json.loads(response.get_data(as_text=True))
From 5277f7066035b6f7e0c444f94d3083f8b2b820b1 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 09:59:41 -0700
Subject: [PATCH 14/39] add a test in service rest
---
tests/app/service/test_rest.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index 1c5170596..d17f778de 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1974,9 +1974,8 @@ def test_get_monthly_notification_stats_by_user(
auth_header = create_admin_authorization_header()
response = client.get(
- path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly"),
+ path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/monthly?year=2024"),
headers=[auth_header],
- year="2024",
)
resp = json.loads(response.get_data(as_text=True))
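The intermediate attempts passed year as a keyword argument to client.get, which the Werkzeug test client does not accept; embedding it in the path works, and the client also takes a query_string argument. A self-contained sketch with a toy route (not the service blueprint) showing that both spellings reach the view the same way:

from flask import Flask, request

app = Flask(__name__)


@app.route("/stats/monthly")
def monthly_stats():
    return {"year": request.args.get("year")}


with app.test_client() as client:
    in_path = client.get("/stats/monthly?year=2024")
    as_kwarg = client.get("/stats/monthly", query_string={"year": 2024})
    print(in_path.get_json(), as_kwarg.get_json())  # both {'year': '2024'}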
From 6d05c1a18ba39308bbe7d4290959608c42565f39 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 10:13:37 -0700
Subject: [PATCH 15/39] add a test in service rest
---
tests/app/service/test_rest.py | 49 ++++++++++++++++++++++++++++++++++
1 file changed, 49 insertions(+)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index d17f778de..70104930f 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1984,6 +1984,55 @@ def test_get_monthly_notification_stats_by_user(
assert response.status_code == 200
+def test_get_single_month_notification_stats_by_user(
+ client,
+ sample_service,
+ sample_user,
+ mocker,
+):
+ mock_s3 = mocker.patch("app.service.rest.get_phone_number_from_s3")
+ mock_s3.return_value = ""
+
+ mock_s3 = mocker.patch("app.service.rest.get_personalisation_from_s3")
+ mock_s3.return_value = {}
+
+ auth_header = create_admin_authorization_header()
+
+ response = client.get(
+ path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/month/?year=2024&month=07"),
+ headers=[auth_header],
+ )
+
+ resp = json.loads(response.get_data(as_text=True))
+ print(f"RESP is {resp}")
+ # TODO This test could be a little more complete
+ assert response.status_code == 200
+
+
+def test_get_single_month_notification_stats_for_service(
+ client,
+ sample_service,
+ mocker,
+):
+ mock_s3 = mocker.patch("app.service.rest.get_phone_number_from_s3")
+ mock_s3.return_value = ""
+
+ mock_s3 = mocker.patch("app.service.rest.get_personalisation_from_s3")
+ mock_s3.return_value = {}
+
+ auth_header = create_admin_authorization_header()
+
+ response = client.get(
+ path=(f"/service/{sample_service.id}/notifications/month/?year=2024&month=07"),
+ headers=[auth_header],
+ )
+
+ resp = json.loads(response.get_data(as_text=True))
+ print(f"RESP is {resp}")
+ # TODO This test could be a little more complete
+ assert response.status_code == 200
+
+
def test_get_only_api_created_notifications_for_service(
admin_request,
sample_job,
From 1c95cd63e76afbb0f2b80c42014a9b778ac84b41 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 10:22:32 -0700
Subject: [PATCH 16/39] add a test in service rest
---
tests/app/service/test_rest.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index 70104930f..5ea6e1168 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1999,7 +1999,7 @@ def test_get_single_month_notification_stats_by_user(
auth_header = create_admin_authorization_header()
response = client.get(
- path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/month/?year=2024&month=07"),
+ path=(f"/service/{sample_service.id}/notifications/{sample_user.id}/month?year=2024&month=07"),
headers=[auth_header],
)
@@ -2023,7 +2023,7 @@ def test_get_single_month_notification_stats_for_service(
auth_header = create_admin_authorization_header()
response = client.get(
- path=(f"/service/{sample_service.id}/notifications/month/?year=2024&month=07"),
+ path=(f"/service/{sample_service.id}/notifications/month?year=2024&month=07"),
headers=[auth_header],
)
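The change above only drops the trailing slash before the query string: the stats routes are defined without one, and with Werkzeug's default strict-slash handling the slashed form does not come back as a plain 200. A toy sketch of the behaviour (route is illustrative; the exact status for the slashed form, 404 or a redirect, depends on the Werkzeug version):

from flask import Flask

app = Flask(__name__)


@app.route("/notifications/month")
def month_stats():
    return {"ok": True}


with app.test_client() as client:
    plain = client.get("/notifications/month?year=2024")
    slashed = client.get("/notifications/month/?year=2024")
    # plain is 200; slashed is not (404 or a redirect, depending on version)
    print(plain.status_code, slashed.status_code)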
From ff5d405a1528f3027e0de6854fff52528b974a80 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Thu, 3 Oct 2024 10:30:30 -0700
Subject: [PATCH 17/39] raise coverage to 93
---
.github/workflows/checks.yml | 2 +-
Makefile | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 22c7f9c89..bcf0861e4 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -63,7 +63,7 @@ jobs:
NOTIFY_E2E_TEST_PASSWORD: ${{ secrets.NOTIFY_E2E_TEST_PASSWORD }}
- name: Check coverage threshold
# TODO get this back up to 95
- run: poetry run coverage report -m --fail-under=91
+ run: poetry run coverage report -m --fail-under=93
validate-new-relic-config:
runs-on: ubuntu-latest
diff --git a/Makefile b/Makefile
index 88cf6f814..76c38d94e 100644
--- a/Makefile
+++ b/Makefile
@@ -84,7 +84,7 @@ test: ## Run tests and create coverage report
poetry run coverage run --omit=*/migrations/*,*/tests/* -m pytest --maxfail=10
## TODO set this back to 95 asap
- poetry run coverage report -m --fail-under=91
+ poetry run coverage report -m --fail-under=93
poetry run coverage html -d .coverage_cache
.PHONY: py-lock
From 3f6c362f15e2d6276b82eb215ed74a4742e4b9a1 Mon Sep 17 00:00:00 2001
From: Carlo Costino
Date: Thu, 10 Oct 2024 18:34:52 -0400
Subject: [PATCH 18/39] Finalize updates for cg-cli-tools
This changeset finalizes our updates for the cg-cli-tools across all environments and restores the check for updates to the egress proxy before deploying.
Signed-off-by: Carlo Costino
---
.github/workflows/deploy-demo.yml | 8 ++++++--
.github/workflows/deploy-prod.yml | 8 ++++++--
.github/workflows/deploy.yml | 2 +-
3 files changed, 13 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/deploy-demo.yml b/.github/workflows/deploy-demo.yml
index 0bb7e1ec8..a43b661f3 100644
--- a/.github/workflows/deploy-demo.yml
+++ b/.github/workflows/deploy-demo.yml
@@ -97,6 +97,10 @@ jobs:
- name: Deploy egress proxy
if: steps.changed-egress-config.outputs.any_changed == 'true'
uses: ./.github/actions/deploy-proxy
+ env:
+ CF_USERNAME: ${{ secrets.CLOUDGOV_USERNAME }}
+ CF_PASSWORD: ${{ secrets.CLOUDGOV_PASSWORD }}
with:
- cf_space: notify-demo
- app: notify-api-demo
+ cf_org: gsa-tts-benefits-studio
+ cf_space: notify-demo
+ app: notify-api-demo
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
index c84cf7324..23ef3dc6c 100644
--- a/.github/workflows/deploy-prod.yml
+++ b/.github/workflows/deploy-prod.yml
@@ -101,6 +101,10 @@ jobs:
- name: Deploy egress proxy
if: steps.changed-egress-config.outputs.any_changed == 'true'
uses: ./.github/actions/deploy-proxy
+ env:
+ CF_USERNAME: ${{ secrets.CLOUDGOV_USERNAME }}
+ CF_PASSWORD: ${{ secrets.CLOUDGOV_PASSWORD }}
with:
- cf_space: notify-production
- app: notify-api-production
+ cf_org: gsa-tts-benefits-studio
+ cf_space: notify-production
+ app: notify-api-production
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 43296f9c7..d8d0da952 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -101,7 +101,7 @@ jobs:
.github/actions/deploy-proxy/action.yml
.github/workflows/deploy.yml
- name: Deploy egress proxy
- #if: steps.changed-egress-config.outputs.any_changed == 'true'
+ if: steps.changed-egress-config.outputs.any_changed == 'true'
uses: ./.github/actions/deploy-proxy
env:
CF_USERNAME: ${{ secrets.CLOUDGOV_USERNAME }}
From b5f7977a481cce794533d2ce834666638b84813b Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 09:16:19 -0700
Subject: [PATCH 19/39] fix core daos
---
app/dao/notifications_dao.py | 106 +++++++++++++++++++++--------------
app/service_invite/rest.py | 2 +-
2 files changed, 65 insertions(+), 43 deletions(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index f7150d08f..360c6af35 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -1,7 +1,7 @@
from datetime import timedelta
from flask import current_app
-from sqlalchemy import asc, desc, or_, select, text, union
+from sqlalchemy import asc, delete, desc, func, or_, select, text, union, update
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import functions
@@ -109,11 +109,12 @@ def _update_notification_status(
def update_notification_status_by_id(
notification_id, status, sent_by=None, provider_response=None, carrier=None
):
- notification = (
- Notification.query.with_for_update()
+ stmt = (
+ select(Notification)
+ .with_for_update()
.filter(Notification.id == notification_id)
- .first()
)
+ notification = db.session.execute(stmt).scalars().first()
if not notification:
current_app.logger.info(
@@ -156,9 +157,8 @@ def update_notification_status_by_id(
@autocommit
def update_notification_status_by_reference(reference, status):
# this is used to update emails
- notification = Notification.query.filter(
- Notification.reference == reference
- ).first()
+ stmt = select(Notification).filter(Notification.reference == reference)
+ notification = db.session.execute(stmt).scalars().first()
if not notification:
current_app.logger.error(
@@ -200,19 +200,20 @@ def get_notifications_for_job(
def dao_get_notification_count_for_job_id(*, job_id):
- return Notification.query.filter_by(job_id=job_id).count()
+ stmt = select(func.count(Notification.id)).filter_by(job_id=job_id)
+ return db.session.execute(stmt).scalar()
def dao_get_notification_count_for_service(*, service_id):
- notification_count = Notification.query.filter_by(service_id=service_id).count()
- return notification_count
+ stmt = select(func.count(Notification.id)).filter_by(service_id=service_id)
+ return db.session.execute(stmt).scalar()
def dao_get_failed_notification_count():
- failed_count = Notification.query.filter_by(
+ stmt = select(func.count(Notification.id)).filter_by(
status=NotificationStatus.FAILED
- ).count()
- return failed_count
+ )
+ return db.session.execute(stmt).scalar()
def get_notification_with_personalisation(service_id, notification_id, key_type):
@@ -220,11 +221,12 @@ def get_notification_with_personalisation(service_id, notification_id, key_type)
if key_type:
filter_dict["key_type"] = key_type
- return (
- Notification.query.filter_by(**filter_dict)
+ stmt = (
+ select(Notification)
+ .filter_by(**filter_dict)
.options(joinedload(Notification.template))
- .one()
)
+ return db.session.execute(stmt).scalars().one()
def get_notification_by_id(notification_id, service_id=None, _raise=False):
@@ -233,9 +235,13 @@ def get_notification_by_id(notification_id, service_id=None, _raise=False):
if service_id:
filters.append(Notification.service_id == service_id)
- query = Notification.query.filter(*filters)
+ stmt = select(Notification).filter(*filters)
- return query.one() if _raise else query.first()
+ return (
+ db.session.execute(stmt).scalars().one()
+ if _raise
+ else db.session.execute(stmt).scalars().first()
+ )
def get_notifications_for_service(
@@ -415,12 +421,13 @@ def move_notifications_to_notification_history(
deleted += delete_count_per_call
# Deleting test Notifications, test notifications are not persisted to NotificationHistory
- Notification.query.filter(
+ stmt = delete(Notification).filter(
Notification.notification_type == notification_type,
Notification.service_id == service_id,
Notification.created_at < timestamp_to_delete_backwards_from,
Notification.key_type == KeyType.TEST,
- ).delete(synchronize_session=False)
+ )
+ db.session.execute(stmt)
db.session.commit()
return deleted
@@ -442,8 +449,9 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
current_statuses = [NotificationStatus.SENDING, NotificationStatus.PENDING]
new_status = NotificationStatus.TEMPORARY_FAILURE
- notifications = (
- Notification.query.filter(
+ stmt = (
+ select(Notification)
+ .filter(
Notification.created_at < cutoff_time,
Notification.status.in_(current_statuses),
Notification.notification_type.in_(
@@ -451,14 +459,17 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
),
)
.limit(limit)
- .all()
)
+ notifications = db.session.execute(stmt).scalars().all()
- Notification.query.filter(
- Notification.id.in_([n.id for n in notifications]),
- ).update(
- {"status": new_status, "updated_at": updated_at}, synchronize_session=False
+ stmt = (
+ update(Notification)
+ .filter(Notification.id.in_([n.id for n in notifications]))
+ .update(
+ {"status": new_status, "updated_at": updated_at}, synchronize_session=False
+ )
)
+ db.session.execute(stmt)
db.session.commit()
return notifications
@@ -466,15 +477,21 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
@autocommit
def dao_update_notifications_by_reference(references, update_dict):
- updated_count = Notification.query.filter(
- Notification.reference.in_(references)
- ).update(update_dict, synchronize_session=False)
+ stmt = (
+ update(Notification)
+ .filter(Notification.reference.in_(references))
+ .update(update_dict)
+ )
+ updated_count = db.stmt.execute(stmt)
updated_history_count = 0
if updated_count != len(references):
- updated_history_count = NotificationHistory.query.filter(
- NotificationHistory.reference.in_(references)
- ).update(update_dict, synchronize_session=False)
+ stmt = (
+ select(NotificationHistory)
+ .filter(NotificationHistory.reference.in_(references))
+ .update(update_dict, synchronize_session=False)
+ )
+ updated_history_count = db.stmt.execute(stmt)
return updated_count, updated_history_count
@@ -541,18 +558,21 @@ def dao_get_notifications_by_recipient_or_reference(
def dao_get_notification_by_reference(reference):
- return Notification.query.filter(Notification.reference == reference).one()
+ stmt = select(Notification).filter(Notification.reference == reference)
+ return db.session.execute(stmt).scalars().one()
def dao_get_notification_history_by_reference(reference):
try:
# This try/except is necessary because notifications sent with test keys or in research mode do not create notification history.
# Otherwise we could just search for the NotificationHistory object
- return Notification.query.filter(Notification.reference == reference).one()
+ stmt = select(Notification).filter(Notification.reference == reference)
+ return db.session.execute(stmt).scalars().one()
except NoResultFound:
- return NotificationHistory.query.filter(
+ stmt = select(NotificationHistory).filter(
NotificationHistory.reference == reference
- ).one()
+ )
+ return db.session.execute(stmt).scalars().one()
def dao_get_notifications_processing_time_stats(start_date, end_date):
@@ -590,11 +610,12 @@ def dao_get_notifications_processing_time_stats(start_date, end_date):
def dao_get_last_notification_added_for_job_id(job_id):
- last_notification_added = (
- Notification.query.filter(Notification.job_id == job_id)
+ stmt = (
+ select(Notification)
+ .filter(Notification.job_id == job_id)
.order_by(Notification.job_row_number.desc())
- .first()
)
+ last_notification_added = db.session.execute(stmt).scalars().first()
return last_notification_added
@@ -602,11 +623,12 @@ def dao_get_last_notification_added_for_job_id(job_id):
def notifications_not_yet_sent(should_be_sending_after_seconds, notification_type):
older_than_date = utc_now() - timedelta(seconds=should_be_sending_after_seconds)
- notifications = Notification.query.filter(
+ stmt = select(Notification).filter(
Notification.created_at <= older_than_date,
Notification.notification_type == notification_type,
Notification.status == NotificationStatus.CREATED,
- ).all()
+ )
+ notifications = db.session.execute(stmt).all()
return notifications
diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py
index f6d9627da..5728b3ed5 100644
--- a/app/service_invite/rest.py
+++ b/app/service_invite/rest.py
@@ -86,7 +86,7 @@ def _create_service_invite(invited_user, invite_link_host):
redis_store.set(
f"email-personalisation-{saved_notification.id}",
json.dumps(personalisation),
- ex=2*24*60*60,
+ ex=2 * 24 * 60 * 60,
)
send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY)
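The DAO changes above all follow the same translation: legacy Model.query chains become explicit select() statements executed through the session, with .scalars() to get mapped objects back. A self-contained sketch of the pattern with a toy model and in-memory SQLite (not the project's models), assuming SQLAlchemy 2.x:

from sqlalchemy import create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Widget(Base):
    __tablename__ = "widget"
    id: Mapped[int] = mapped_column(primary_key=True)
    status: Mapped[str]


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Widget(status="created"), Widget(status="sent")])
    session.commit()

    # legacy: Widget.query.filter_by(status="created").first()
    first_created = (
        session.execute(select(Widget).filter_by(status="created")).scalars().first()
    )

    # legacy: Widget.query.filter_by(status="created").count()
    created_count = session.execute(
        select(func.count(Widget.id)).where(Widget.status == "created")
    ).scalar()

    print(first_created.id, created_count)  # 1 1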
From 88c9af90505cf471e98b496dbd8ec2606261875e Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 09:31:47 -0700
Subject: [PATCH 20/39] fix core daos
---
app/dao/notifications_dao.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 360c6af35..4d1fee9d1 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -465,7 +465,7 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
stmt = (
update(Notification)
.filter(Notification.id.in_([n.id for n in notifications]))
- .update(
+ .values(
{"status": new_status, "updated_at": updated_at}, synchronize_session=False
)
)
@@ -480,7 +480,7 @@ def dao_update_notifications_by_reference(references, update_dict):
stmt = (
update(Notification)
.filter(Notification.reference.in_(references))
- .update(update_dict)
+ .values(update_dict)
)
updated_count = db.stmt.execute(stmt)
@@ -489,7 +489,7 @@ def dao_update_notifications_by_reference(references, update_dict):
stmt = (
select(NotificationHistory)
.filter(NotificationHistory.reference.in_(references))
- .update(update_dict, synchronize_session=False)
+ .values(update_dict, synchronize_session=False)
)
updated_history_count = db.stmt.execute(stmt)
From 90b407241f971885d5d4c0c3aa9c4c9c001ef967 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 09:38:12 -0700
Subject: [PATCH 21/39] fix core daos
---
app/dao/notifications_dao.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 4d1fee9d1..ca1646467 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -466,7 +466,7 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
update(Notification)
.filter(Notification.id.in_([n.id for n in notifications]))
.values(
- {"status": new_status, "updated_at": updated_at}, synchronize_session=False
+ {"status": new_status, "updated_at": updated_at}
)
)
db.session.execute(stmt)
@@ -482,16 +482,16 @@ def dao_update_notifications_by_reference(references, update_dict):
.filter(Notification.reference.in_(references))
.values(update_dict)
)
- updated_count = db.stmt.execute(stmt)
+ updated_count = db.session.execute(stmt)
updated_history_count = 0
if updated_count != len(references):
stmt = (
select(NotificationHistory)
.filter(NotificationHistory.reference.in_(references))
- .values(update_dict, synchronize_session=False)
+ .values(update_dict)
)
- updated_history_count = db.stmt.execute(stmt)
+ updated_history_count = db.session.execute(stmt)
return updated_count, updated_history_count
From d8bb71bf39eaae872d8d1cc195de7b9f376115e9 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 09:47:00 -0700
Subject: [PATCH 22/39] fix core daos
---
app/dao/notifications_dao.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index ca1646467..d1c0d7202 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -487,7 +487,7 @@ def dao_update_notifications_by_reference(references, update_dict):
updated_history_count = 0
if updated_count != len(references):
stmt = (
- select(NotificationHistory)
+ update(NotificationHistory)
.filter(NotificationHistory.reference.in_(references))
.values(update_dict)
)
From 958861df59503eac58b6f75bc32e30b9f91bae8c Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 09:59:16 -0700
Subject: [PATCH 23/39] fix core daos
---
app/dao/notifications_dao.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index d1c0d7202..1fbb637c9 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -482,7 +482,7 @@ def dao_update_notifications_by_reference(references, update_dict):
.filter(Notification.reference.in_(references))
.values(update_dict)
)
- updated_count = db.session.execute(stmt)
+ updated_count = db.session.execute(stmt).scalar() or 0
updated_history_count = 0
if updated_count != len(references):
@@ -491,7 +491,7 @@ def dao_update_notifications_by_reference(references, update_dict):
.filter(NotificationHistory.reference.in_(references))
.values(update_dict)
)
- updated_history_count = db.session.execute(stmt)
+ updated_history_count = db.session.execute(stmt).scalar() or 0
return updated_count, updated_history_count
From c44300a73707cf9d6e62c127a23bf8498b4b8e46 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 10:13:11 -0700
Subject: [PATCH 24/39] fix core daos
---
app/dao/notifications_dao.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 1fbb637c9..802c2a287 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -465,9 +465,7 @@ def dao_timeout_notifications(cutoff_time, limit=100000):
stmt = (
update(Notification)
.filter(Notification.id.in_([n.id for n in notifications]))
- .values(
- {"status": new_status, "updated_at": updated_at}
- )
+ .values({"status": new_status, "updated_at": updated_at})
)
db.session.execute(stmt)
@@ -482,7 +480,8 @@ def dao_update_notifications_by_reference(references, update_dict):
.filter(Notification.reference.in_(references))
.values(update_dict)
)
- updated_count = db.session.execute(stmt).scalar() or 0
+ result = db.session.execute(stmt)
+ updated_count = result.rowcount
updated_history_count = 0
if updated_count != len(references):
@@ -491,7 +490,8 @@ def dao_update_notifications_by_reference(references, update_dict):
.filter(NotificationHistory.reference.in_(references))
.values(update_dict)
)
- updated_history_count = db.session.execute(stmt).scalar() or 0
+ result = db.session.execute(stmt)
+ updated_history_count = result.rowcount
return updated_count, updated_history_count
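After the back-and-forth above, the bulk-update path settles on an update() statement with .values(), executed through the session, and result.rowcount for the number of matched rows. A self-contained sketch of that final shape against a toy table (in-memory SQLite, Core .where() spelling):

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, update
from sqlalchemy.orm import Session

metadata = MetaData()
notes = Table(
    "notes",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("reference", String),
    Column("status", String),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

with Session(engine) as session:
    session.execute(
        notes.insert(),
        [
            {"reference": "ref-1", "status": "sending"},
            {"reference": "ref-2", "status": "sending"},
        ],
    )
    stmt = (
        update(notes)
        .where(notes.c.reference.in_(["ref-1", "ref-2"]))
        .values(status="delivered")
    )
    result = session.execute(stmt)
    print(result.rowcount)  # 2 rows matched
    session.commit()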
From 84e46f1ef2e25fb9265e5ed5f199f9927271d46f Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 10:29:13 -0700
Subject: [PATCH 25/39] fix core daos
---
app/dao/notifications_dao.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 802c2a287..e93f59e28 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -629,6 +629,7 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ
Notification.status == NotificationStatus.CREATED,
)
notifications = db.session.execute(stmt).all()
+ print(f"WE RETURN THIS FOR NOTIFICATIONS {notifications}")
return notifications
From ef6e4048c27368c7dd5914203cbcfe47e84919d5 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 10:38:28 -0700
Subject: [PATCH 26/39] fix core daos
---
app/dao/notifications_dao.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index e93f59e28..8659fca9b 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -628,7 +628,7 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ
Notification.notification_type == notification_type,
Notification.status == NotificationStatus.CREATED,
)
- notifications = db.session.execute(stmt).all()
+ notifications = db.session.execute(stmt).scalars().all()
print(f"WE RETURN THIS FOR NOTIFICATIONS {notifications}")
return notifications
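The one-token change above matters: session.execute(select(Model)) yields Row tuples, while .scalars() unwraps them into the mapped objects callers expect. A tiny self-contained sketch of the difference (toy model, in-memory SQLite):

from sqlalchemy import create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Item(Base):
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Item())
    session.commit()

    rows = session.execute(select(Item)).all()  # [(Item,)] -- Row tuples
    items = session.execute(select(Item)).scalars().all()  # [Item] -- mapped objects
    print(type(rows[0]).__name__, type(items[0]).__name__)  # Row Item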
From 4f20bfe2dbc1ba16dc75bf5f52769aad8a9f17e7 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 11:30:35 -0700
Subject: [PATCH 27/39] fix core daos
---
app/dao/users_dao.py | 55 ++++++++++++++++++++++++++------------------
1 file changed, 33 insertions(+), 22 deletions(-)
diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py
index 897bb1b9e..07995ac6a 100644
--- a/app/dao/users_dao.py
+++ b/app/dao/users_dao.py
@@ -4,7 +4,7 @@ from secrets import randbelow
import sqlalchemy
from flask import current_app
-from sqlalchemy import func, text
+from sqlalchemy import delete, func, select, text
from sqlalchemy.orm import joinedload
from app import db
@@ -37,8 +37,8 @@ def get_login_gov_user(login_uuid, email_address):
login.gov uuids are. Eventually the code that checks by email address
should be removed.
"""
-
- user = User.query.filter_by(login_uuid=login_uuid).first()
+ stmt = select(User).filter_by(login_uuid=login_uuid)
+ user = db.session.execute(stmt).scalars().first()
if user:
if user.email_address != email_address:
try:
@@ -54,7 +54,8 @@ def get_login_gov_user(login_uuid, email_address):
return user
# Remove this 1 July 2025, all users should have login.gov uuids by now
- user = User.query.filter(User.email_address.ilike(email_address)).first()
+ stmt = select(User).filter(User.email_address.ilike(email_address))
+ user = db.session.execute(stmt).scalars().first()
if user:
save_user_attribute(user, {"login_uuid": login_uuid})
@@ -102,24 +103,27 @@ def create_user_code(user, code, code_type):
def get_user_code(user, code, code_type):
# Get the most recent codes to try and reduce the
# time searching for the correct code.
- codes = VerifyCode.query.filter_by(user=user, code_type=code_type).order_by(
- VerifyCode.created_at.desc()
+ stmt = (
+ select(VerifyCode)
+ .filter_by(user=user, code_type=code_type)
+ .order_by(VerifyCode.created_at.desc())
)
+ codes = db.session.execute(stmt).scalars().all()
return next((x for x in codes if x.check_code(code)), None)
def delete_codes_older_created_more_than_a_day_ago():
- deleted = (
- db.session.query(VerifyCode)
- .filter(VerifyCode.created_at < utc_now() - timedelta(hours=24))
- .delete()
+ stmt = delete(VerifyCode).filter(
+ VerifyCode.created_at < utc_now() - timedelta(hours=24)
)
+
+ deleted = db.session.execute(stmt)
db.session.commit()
return deleted
def use_user_code(id):
- verify_code = VerifyCode.query.get(id)
+ verify_code = db.session.get(VerifyCode, id)
verify_code.code_used = True
db.session.add(verify_code)
db.session.commit()
@@ -131,36 +135,42 @@ def delete_model_user(user):
def delete_user_verify_codes(user):
- VerifyCode.query.filter_by(user=user).delete()
+ stmt = delete(VerifyCode).filter_by(user=user)
+ db.session.execute(stmt)
db.session.commit()
def count_user_verify_codes(user):
- query = VerifyCode.query.filter(
+ stmt = select(func.count(VerifyCode.id)).filter(
VerifyCode.user == user,
VerifyCode.expiry_datetime > utc_now(),
VerifyCode.code_used.is_(False),
)
- return query.count()
+ result = db.session.execute(stmt)
+ return result.rowcount
def get_user_by_id(user_id=None):
if user_id:
- return User.query.filter_by(id=user_id).one()
- return User.query.filter_by().all()
+ stmt = select(User).filter_by(id=user_id)
+ return db.session.execute(stmt).scalars().one()
+ return get_users()
def get_users():
- return User.query.all()
+ stmt = select(User)
+ return db.session.execute(stmt).scalars().all()
def get_user_by_email(email):
- return User.query.filter(func.lower(User.email_address) == func.lower(email)).one()
+ stmt = select(User).filter(func.lower(User.email_address) == func.lower(email))
+ return db.session.execute(stmt).scalars().one()
def get_users_by_partial_email(email):
email = escape_special_characters(email)
- return User.query.filter(User.email_address.ilike("%{}%".format(email))).all()
+ stmt = select(User).filter(User.email_address.ilike("%{}%".format(email)))
+ return db.session.execute(stmt).scalars().all()
def increment_failed_login_count(user):
@@ -188,16 +198,17 @@ def get_user_and_accounts(user_id):
# TODO: With sqlalchemy 2.0 change as below because of the breaking change
# at User.organizations.services, we need to verify that the below subqueryload
# that we have put is functionally doing the same thing as before
- return (
- User.query.filter(User.id == user_id)
+ stmt = (
+ select(User)
+ .filter(User.id == user_id)
.options(
# eagerly load the user's services and organizations, and also the service's org and vice versa
# (so we can see if the user knows about it)
joinedload(User.services).joinedload(Service.organization),
joinedload(User.organizations).subqueryload(Organization.services),
)
- .one()
)
+ return db.session.execute(stmt).scalars().one()
@autocommit
From a5eceae07b3d1f0a2e50300e21f4256143366d33 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 11:39:48 -0700
Subject: [PATCH 28/39] fix core daos
---
app/dao/users_dao.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py
index 07995ac6a..b0b5f2679 100644
--- a/app/dao/users_dao.py
+++ b/app/dao/users_dao.py
@@ -146,8 +146,8 @@ def count_user_verify_codes(user):
VerifyCode.expiry_datetime > utc_now(),
VerifyCode.code_used.is_(False),
)
- result = db.session.execute(stmt)
- return result.rowcount
+ result = db.session.execute(stmt).scalar()
+ return result or 0
def get_user_by_id(user_id=None):
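The fix above is about how to count: rowcount on a result is only meaningful for UPDATE/DELETE statements (for a SELECT it is driver-dependent and may be -1), so counting rows means selecting func.count() and reading the scalar. A small self-contained sketch (toy table, in-memory SQLite):

from sqlalchemy import Column, Integer, MetaData, Table, create_engine, func, select
from sqlalchemy.orm import Session

metadata = MetaData()
codes = Table("codes", metadata, Column("id", Integer, primary_key=True))

engine = create_engine("sqlite://")
metadata.create_all(engine)

with Session(engine) as session:
    session.execute(codes.insert(), [{"id": 1}, {"id": 2}, {"id": 3}])
    count = session.execute(select(func.count()).select_from(codes)).scalar()
    print(count)  # 3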
From 1f6decebe2876d081bb0c066d5c2693d07ba48b1 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 12:00:04 -0700
Subject: [PATCH 29/39] fix core daos
---
app/dao/users_dao.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py
index b0b5f2679..690ecc7f9 100644
--- a/app/dao/users_dao.py
+++ b/app/dao/users_dao.py
@@ -208,7 +208,7 @@ def get_user_and_accounts(user_id):
joinedload(User.organizations).subqueryload(Organization.services),
)
)
- return db.session.execute(stmt).scalars().one()
+ return db.session.execute(stmt).scalars().unique().one()
@autocommit
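The .unique() added here is what 2.0-style results require when a query joined-eager-loads a collection: the JOIN duplicates the parent row, and SQLAlchemy makes the caller deduplicate explicitly before .one() will succeed. A self-contained sketch with toy parent/child models (in-memory SQLite):

from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    joinedload,
    mapped_column,
    relationship,
)


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[int] = mapped_column(primary_key=True)
    children: Mapped[list["Child"]] = relationship(back_populates="parent")


class Child(Base):
    __tablename__ = "child"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
    parent: Mapped[Parent] = relationship(back_populates="children")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Parent(children=[Child(), Child()]))
    session.commit()

    stmt = select(Parent).options(joinedload(Parent.children))
    # Without .unique() the next line raises InvalidRequestError, because the
    # JOIN returns one row per child for the single Parent.
    parent = session.execute(stmt).scalars().unique().one()
    print(len(parent.children))  # 2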
From 54ab96e0737fb1bd5b223458e4d9b0d4cffd9833 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 12:26:55 -0700
Subject: [PATCH 30/39] fix core daos
---
.ds.baseline | 6 +-
.../notification_dao/test_notification_dao.py | 234 +++++++++++-------
tests/app/dao/test_users_dao.py | 35 ++-
3 files changed, 176 insertions(+), 99 deletions(-)
diff --git a/.ds.baseline b/.ds.baseline
index 1c279e018..37199f01f 100644
--- a/.ds.baseline
+++ b/.ds.baseline
@@ -249,7 +249,7 @@
"filename": "tests/app/dao/test_users_dao.py",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
- "line_number": 52,
+ "line_number": 69,
"is_secret": false
},
{
@@ -257,7 +257,7 @@
"filename": "tests/app/dao/test_users_dao.py",
"hashed_secret": "f2c57870308dc87f432e5912d4de6f8e322721ba",
"is_verified": false,
- "line_number": 176,
+ "line_number": 194,
"is_secret": false
}
],
@@ -384,5 +384,5 @@
}
]
},
- "generated_at": "2024-09-27T16:42:53Z"
+ "generated_at": "2024-10-11T19:26:50Z"
}
diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py
index 4bc1ce5ba..8e81db3a2 100644
--- a/tests/app/dao/notification_dao/test_notification_dao.py
+++ b/tests/app/dao/notification_dao/test_notification_dao.py
@@ -4,9 +4,11 @@ from functools import partial
import pytest
from freezegun import freeze_time
+from sqlalchemy import func, select
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.orm.exc import NoResultFound
+from app import db
from app.dao.notifications_dao import (
dao_create_notification,
dao_delete_notifications_by_id,
@@ -55,7 +57,10 @@ def test_should_by_able_to_update_status_by_reference(
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.get(notification.id).status == NotificationStatus.SENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.SENDING
+ )
notification.reference = "reference"
dao_update_notification(notification)
@@ -64,7 +69,8 @@ def test_should_by_able_to_update_status_by_reference(
)
assert updated.status == NotificationStatus.DELIVERED
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
@@ -81,7 +87,10 @@ def test_should_by_able_to_update_status_by_id(
dao_create_notification(notification)
assert notification.status == NotificationStatus.SENDING
- assert Notification.query.get(notification.id).status == NotificationStatus.SENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.SENDING
+ )
with freeze_time("2000-01-02 12:00:00"):
updated = update_notification_status_by_id(
@@ -92,7 +101,8 @@ def test_should_by_able_to_update_status_by_id(
assert updated.status == NotificationStatus.DELIVERED
assert updated.updated_at == datetime(2000, 1, 2, 12, 0, 0)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
assert notification.updated_at == datetime(2000, 1, 2, 12, 0, 0)
assert notification.status == NotificationStatus.DELIVERED
@@ -107,15 +117,17 @@ def test_should_not_update_status_by_id_if_not_sending_and_does_not_update_job(
job=sample_job,
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
assert not update_notification_status_by_id(
notification.id, NotificationStatus.FAILED
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
- assert sample_job == Job.query.get(notification.job_id)
+ assert sample_job == db.session.get(Job, notification.job_id)
def test_should_not_update_status_by_reference_if_not_sending_and_does_not_update_job(
@@ -128,20 +140,22 @@ def test_should_not_update_status_by_reference_if_not_sending_and_does_not_updat
job=sample_job,
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
assert not update_notification_status_by_reference(
"reference", NotificationStatus.FAILED
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
- assert sample_job == Job.query.get(notification.job_id)
+ assert sample_job == db.session.get(Job, notification.job_id)
def test_should_update_status_by_id_if_created(sample_template, sample_notification):
assert (
- Notification.query.get(sample_notification.id).status
+ db.session.get(Notification, sample_notification.id).status
== NotificationStatus.CREATED
)
updated = update_notification_status_by_id(
@@ -149,7 +163,7 @@ def test_should_update_status_by_id_if_created(sample_template, sample_notificat
NotificationStatus.FAILED,
)
assert (
- Notification.query.get(sample_notification.id).status
+ db.session.get(Notification, sample_notification.id).status
== NotificationStatus.FAILED
)
assert updated.status == NotificationStatus.FAILED
@@ -244,11 +258,17 @@ def test_should_not_update_status_by_reference_if_not_sending(sample_template):
status=NotificationStatus.CREATED,
reference="reference",
)
- assert Notification.query.get(notification.id).status == NotificationStatus.CREATED
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.CREATED
+ )
updated = update_notification_status_by_reference(
"reference", NotificationStatus.FAILED
)
- assert Notification.query.get(notification.id).status == NotificationStatus.CREATED
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.CREATED
+ )
assert not updated
@@ -264,14 +284,18 @@ def test_should_by_able_to_update_status_by_id_from_pending_to_delivered(
assert update_notification_status_by_id(
notification_id=notification.id, status=NotificationStatus.PENDING
)
- assert Notification.query.get(notification.id).status == NotificationStatus.PENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.PENDING
+ )
assert update_notification_status_by_id(
notification.id,
NotificationStatus.DELIVERED,
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
@@ -289,7 +313,10 @@ def test_should_by_able_to_update_status_by_id_from_pending_to_temporary_failure
notification_id=notification.id,
status=NotificationStatus.PENDING,
)
- assert Notification.query.get(notification.id).status == NotificationStatus.PENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.PENDING
+ )
assert update_notification_status_by_id(
notification.id,
@@ -297,7 +324,7 @@ def test_should_by_able_to_update_status_by_id_from_pending_to_temporary_failure
)
assert (
- Notification.query.get(notification.id).status
+ db.session.get(Notification, notification.id).status
== NotificationStatus.TEMPORARY_FAILURE
)
@@ -312,14 +339,17 @@ def test_should_by_able_to_update_status_by_id_from_sending_to_permanent_failure
)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.get(notification.id).status == NotificationStatus.SENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.SENDING
+ )
assert update_notification_status_by_id(
notification.id,
status=NotificationStatus.PERMANENT_FAILURE,
)
assert (
- Notification.query.get(notification.id).status
+ db.session.get(Notification, notification.id).status
== NotificationStatus.PERMANENT_FAILURE
)
@@ -331,7 +361,10 @@ def test_should_not_update_status_once_notification_status_is_delivered(
template=sample_email_template,
status=NotificationStatus.SENDING,
)
- assert Notification.query.get(notification.id).status == NotificationStatus.SENDING
+ assert (
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.SENDING
+ )
notification.reference = "reference"
dao_update_notification(notification)
@@ -340,7 +373,8 @@ def test_should_not_update_status_once_notification_status_is_delivered(
NotificationStatus.DELIVERED,
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
update_notification_status_by_reference(
@@ -348,7 +382,8 @@ def test_should_not_update_status_once_notification_status_is_delivered(
NotificationStatus.FAILED,
)
assert (
- Notification.query.get(notification.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification.id).status
+ == NotificationStatus.DELIVERED
)
@@ -370,7 +405,7 @@ def test_create_notification_creates_notification_with_personalisation(
sample_template_with_placeholders,
sample_job,
):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = create_notification(
template=sample_template_with_placeholders,
@@ -379,8 +414,8 @@ def test_create_notification_creates_notification_with_personalisation(
status=NotificationStatus.CREATED,
)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert data.to == notification_from_db.to
assert data.job_id == notification_from_db.job_id
@@ -393,15 +428,15 @@ def test_create_notification_creates_notification_with_personalisation(
def test_save_notification_creates_sms(sample_template, sample_job):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template, job_id=sample_job.id)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["job_id"] == notification_from_db.job_id
@@ -412,16 +447,36 @@ def test_save_notification_creates_sms(sample_template, sample_job):
assert notification_from_db.status == NotificationStatus.CREATED
+def _get_notification_query_all():
+ stmt = select(Notification)
+ return db.session.execute(stmt).scalars().all()
+
+
+def _get_notification_query_one():
+ stmt = select(Notification)
+ return db.session.execute(stmt).scalars().one()
+
+
+def _get_notification_query_count():
+ stmt = select(func.count(Notification.id))
+ return db.session.execute(stmt).scalar() or 0
+
+
+def _get_notification_history_query_count():
+ stmt = select(func.count(NotificationHistory.id))
+ return db.session.execute(stmt).scalar() or 0
+
+
def test_save_notification_and_create_email(sample_email_template, sample_job):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_email_template, job_id=sample_job.id)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["job_id"] == notification_from_db.job_id
@@ -433,29 +488,29 @@ def test_save_notification_and_create_email(sample_email_template, sample_job):
def test_save_notification(sample_email_template, sample_job):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_email_template, job_id=sample_job.id)
notification_1 = Notification(**data)
notification_2 = Notification(**data)
dao_create_notification(notification_1)
- assert Notification.query.count() == 1
+ assert _get_notification_query_count() == 1
dao_create_notification(notification_2)
- assert Notification.query.count() == 2
+ assert _get_notification_query_count() == 2
def test_save_notification_does_not_creates_history(sample_email_template, sample_job):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_email_template, job_id=sample_job.id)
notification_1 = Notification(**data)
dao_create_notification(notification_1)
- assert Notification.query.count() == 1
- assert NotificationHistory.query.count() == 0
+ assert _get_notification_query_count() == 1
+ assert _get_notification_history_query_count() == 0
def test_update_notification_with_research_mode_service_does_not_create_or_update_history(
@@ -464,14 +519,14 @@ def test_update_notification_with_research_mode_service_does_not_create_or_updat
sample_template.service.research_mode = True
notification = create_notification(template=sample_template)
- assert Notification.query.count() == 1
- assert NotificationHistory.query.count() == 0
+ assert _get_notification_query_count() == 1
+ assert _get_notification_history_query_count() == 0
notification.status = NotificationStatus.DELIVERED
dao_update_notification(notification)
- assert Notification.query.one().status == NotificationStatus.DELIVERED
- assert NotificationHistory.query.count() == 0
+ assert _get_notification_query_one().status == NotificationStatus.DELIVERED
+ assert _get_notification_history_query_count() == 0
def test_not_save_notification_and_not_create_stats_on_commit_error(
@@ -479,26 +534,26 @@ def test_not_save_notification_and_not_create_stats_on_commit_error(
):
random_id = str(uuid.uuid4())
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template, job_id=random_id)
notification = Notification(**data)
with pytest.raises(SQLAlchemyError):
dao_create_notification(notification)
- assert Notification.query.count() == 0
- assert Job.query.get(sample_job.id).notifications_sent == 0
+ assert _get_notification_query_count() == 0
+ assert db.session.get(Job, sample_job.id).notifications_sent == 0
def test_save_notification_and_increment_job(sample_template, sample_job, sns_provider):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template, job_id=sample_job.id)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["job_id"] == notification_from_db.job_id
@@ -510,21 +565,21 @@ def test_save_notification_and_increment_job(sample_template, sample_job, sns_pr
notification_2 = Notification(**data)
dao_create_notification(notification_2)
- assert Notification.query.count() == 2
+ assert _get_notification_query_count() == 2
def test_save_notification_and_increment_correct_job(sample_template, sns_provider):
job_1 = create_job(sample_template)
job_2 = create_job(sample_template)
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template, job_id=job_1.id)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["job_id"] == notification_from_db.job_id
@@ -537,14 +592,14 @@ def test_save_notification_and_increment_correct_job(sample_template, sns_provid
def test_save_notification_with_no_job(sample_template, sns_provider):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["service"] == notification_from_db.service
@@ -592,7 +647,7 @@ def test_get_notification_by_id_when_notification_exists_for_different_service(
def test_get_notifications_by_reference(sample_template):
client_reference = "some-client-ref"
- assert len(Notification.query.all()) == 0
+ assert len(_get_notification_query_all()) == 0
create_notification(sample_template, client_reference=client_reference)
create_notification(sample_template, client_reference=client_reference)
create_notification(sample_template, client_reference="other-ref")
@@ -603,14 +658,14 @@ def test_get_notifications_by_reference(sample_template):
def test_save_notification_no_job_id(sample_template):
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
data = _notification_json(sample_template)
notification = Notification(**data)
dao_create_notification(notification)
- assert Notification.query.count() == 1
- notification_from_db = Notification.query.all()[0]
+ assert _get_notification_query_count() == 1
+ notification_from_db = _get_notification_query_all()[0]
assert notification_from_db.id
assert "1" == notification_from_db.to
assert data["service"] == notification_from_db.service
@@ -687,13 +742,13 @@ def test_update_notification_sets_status(sample_notification):
assert sample_notification.status == NotificationStatus.CREATED
sample_notification.status = NotificationStatus.FAILED
dao_update_notification(sample_notification)
- notification_from_db = Notification.query.get(sample_notification.id)
+ notification_from_db = db.session.get(Notification, sample_notification.id)
assert notification_from_db.status == NotificationStatus.FAILED
@freeze_time("2016-01-10")
def test_should_limit_notifications_return_by_day_limit_plus_one(sample_template):
- assert len(Notification.query.all()) == 0
+ assert len(_get_notification_query_all()) == 0
# create one notification a day between 1st and 9th,
# with assumption that the local timezone is EST
@@ -706,7 +761,7 @@ def test_should_limit_notifications_return_by_day_limit_plus_one(sample_template
status=NotificationStatus.FAILED,
)
- all_notifications = Notification.query.all()
+ all_notifications = _get_notification_query_all()
assert len(all_notifications) == 10
all_notifications = get_notifications_for_service(
@@ -722,19 +777,19 @@ def test_should_limit_notifications_return_by_day_limit_plus_one(sample_template
def test_creating_notification_does_not_add_notification_history(sample_template):
create_notification(template=sample_template)
- assert Notification.query.count() == 1
- assert NotificationHistory.query.count() == 0
+ assert _get_notification_query_count() == 1
+ assert _get_notification_history_query_count() == 0
def test_should_delete_notification_for_id(sample_template):
notification = create_notification(template=sample_template)
- assert Notification.query.count() == 1
- assert NotificationHistory.query.count() == 0
+ assert _get_notification_query_count() == 1
+ assert _get_notification_history_query_count() == 0
dao_delete_notifications_by_id(notification.id)
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
def test_should_delete_notification_and_ignore_history_for_research_mode(
@@ -744,31 +799,32 @@ def test_should_delete_notification_and_ignore_history_for_research_mode(
notification = create_notification(template=sample_template)
- assert Notification.query.count() == 1
+ assert _get_notification_query_count() == 1
dao_delete_notifications_by_id(notification.id)
- assert Notification.query.count() == 0
+ assert _get_notification_query_count() == 0
def test_should_delete_only_notification_with_id(sample_template):
notification_1 = create_notification(template=sample_template)
notification_2 = create_notification(template=sample_template)
- assert Notification.query.count() == 2
+ assert _get_notification_query_count() == 2
dao_delete_notifications_by_id(notification_1.id)
- assert Notification.query.count() == 1
- assert Notification.query.first().id == notification_2.id
+ assert _get_notification_query_count() == 1
+ stmt = select(Notification)
+ assert db.session.execute(stmt).scalars().first().id == notification_2.id
def test_should_delete_no_notifications_if_no_matching_ids(sample_template):
create_notification(template=sample_template)
- assert Notification.query.count() == 1
+ assert _get_notification_query_count() == 1
dao_delete_notifications_by_id(uuid.uuid4())
- assert Notification.query.count() == 1
+ assert _get_notification_query_count() == 1
def _notification_json(sample_template, job_id=None, id=None, status=None):
@@ -814,16 +870,19 @@ def test_dao_timeout_notifications(sample_template):
temporary_failure_notifications = dao_timeout_notifications(utc_now())
assert len(temporary_failure_notifications) == 2
- assert Notification.query.get(created.id).status == NotificationStatus.CREATED
+ assert db.session.get(Notification, created.id).status == NotificationStatus.CREATED
assert (
- Notification.query.get(sending.id).status
+ db.session.get(Notification, sending.id).status
== NotificationStatus.TEMPORARY_FAILURE
)
assert (
- Notification.query.get(pending.id).status
+ db.session.get(Notification, pending.id).status
== NotificationStatus.TEMPORARY_FAILURE
)
- assert Notification.query.get(delivered.id).status == NotificationStatus.DELIVERED
+ assert (
+ db.session.get(Notification, delivered.id).status
+ == NotificationStatus.DELIVERED
+ )
def test_dao_timeout_notifications_only_updates_for_older_notifications(
@@ -842,8 +901,8 @@ def test_dao_timeout_notifications_only_updates_for_older_notifications(
temporary_failure_notifications = dao_timeout_notifications(utc_now())
assert len(temporary_failure_notifications) == 0
- assert Notification.query.get(sending.id).status == NotificationStatus.SENDING
- assert Notification.query.get(pending.id).status == NotificationStatus.PENDING
+ assert db.session.get(Notification, sending.id).status == NotificationStatus.SENDING
+ assert db.session.get(Notification, pending.id).status == NotificationStatus.PENDING
def test_should_return_notifications_excluding_jobs_by_default(
@@ -935,7 +994,7 @@ def test_get_notifications_created_by_api_or_csv_are_returned_correctly_excludin
key_type=sample_test_api_key.key_type,
)
- all_notifications = Notification.query.all()
+ all_notifications = _get_notification_query_all()
assert len(all_notifications) == 4
# returns all real API derived notifications
@@ -982,7 +1041,7 @@ def test_get_notifications_with_a_live_api_key_type(
key_type=sample_test_api_key.key_type,
)
- all_notifications = Notification.query.all()
+ all_notifications = _get_notification_query_all()
assert len(all_notifications) == 4
# only those created with normal API key, no jobs
@@ -1114,7 +1173,7 @@ def test_should_exclude_test_key_notifications_by_default(
key_type=sample_test_api_key.key_type,
)
- all_notifications = Notification.query.all()
+ all_notifications = _get_notification_query_all()
assert len(all_notifications) == 4
all_notifications = get_notifications_for_service(
@@ -1757,10 +1816,10 @@ def test_dao_update_notifications_by_reference_updated_notifications(sample_temp
update_dict={"status": NotificationStatus.DELIVERED, "billable_units": 2},
)
assert updated_count == 2
- updated_1 = Notification.query.get(notification_1.id)
+ updated_1 = db.session.get(Notification, notification_1.id)
assert updated_1.billable_units == 2
assert updated_1.status == NotificationStatus.DELIVERED
- updated_2 = Notification.query.get(notification_2.id)
+ updated_2 = db.session.get(Notification, notification_2.id)
assert updated_2.billable_units == 2
assert updated_2.status == NotificationStatus.DELIVERED
@@ -1823,10 +1882,11 @@ def test_dao_update_notifications_by_reference_updates_history_when_one_of_two_n
assert updated_count == 1
assert updated_history_count == 1
assert (
- Notification.query.get(notification2.id).status == NotificationStatus.DELIVERED
+ db.session.get(Notification, notification2.id).status
+ == NotificationStatus.DELIVERED
)
assert (
- NotificationHistory.query.get(notification1.id).status
+ db.session.get(NotificationHistory, notification1.id).status
== NotificationStatus.DELIVERED
)
diff --git a/tests/app/dao/test_users_dao.py b/tests/app/dao/test_users_dao.py
index 9c8770913..85149b246 100644
--- a/tests/app/dao/test_users_dao.py
+++ b/tests/app/dao/test_users_dao.py
@@ -3,6 +3,7 @@ from datetime import timedelta
import pytest
from freezegun import freeze_time
+from sqlalchemy import func, select
from sqlalchemy.exc import DataError
from sqlalchemy.orm.exc import NoResultFound
@@ -37,6 +38,21 @@ from tests.app.db import (
)
+def _get_user_query_count():
+ stmt = select(func.count(User.id))
+ return db.session.execute(stmt).scalar() or 0
+
+
+def _get_user_query_first():
+ stmt = select(User)
+ return db.session.execute(stmt).scalars().first()
+
+
+def _get_verify_code_query_count():
+ stmt = select(func.count(VerifyCode.id))
+ return db.session.execute(stmt).scalar() or 0
+
+
@freeze_time("2020-01-28T12:00:00")
@pytest.mark.parametrize(
"phone_number, expected_phone_number",
@@ -55,8 +71,8 @@ def test_create_user(notify_db_session, phone_number, expected_phone_number):
}
user = User(**data)
save_model_user(user, password="password", validated_email_access=True)
- assert User.query.count() == 1
- user_query = User.query.first()
+ assert _get_user_query_count() == 1
+ user_query = _get_user_query_first()
assert user_query.email_address == email
assert user_query.id == user.id
assert user_query.mobile_number == expected_phone_number
@@ -68,7 +84,7 @@ def test_get_all_users(notify_db_session):
create_user(email="1@test.com")
create_user(email="2@test.com")
- assert User.query.count() == 2
+ assert _get_user_query_count() == 2
assert len(get_user_by_id()) == 2
@@ -89,9 +105,9 @@ def test_get_user_invalid_id(notify_db_session):
def test_delete_users(sample_user):
- assert User.query.count() == 1
+ assert _get_user_query_count() == 1
delete_model_user(sample_user)
- assert User.query.count() == 0
+ assert _get_user_query_count() == 0
def test_increment_failed_login_should_increment_failed_logins(sample_user):
@@ -127,9 +143,9 @@ def test_get_user_by_email_is_case_insensitive(sample_user):
def test_should_delete_all_verification_codes_more_than_one_day_old(sample_user):
make_verify_code(sample_user, age=timedelta(hours=24), code="54321")
make_verify_code(sample_user, age=timedelta(hours=24), code="54321")
- assert VerifyCode.query.count() == 2
+ assert _get_verify_code_query_count() == 2
delete_codes_older_created_more_than_a_day_ago()
- assert VerifyCode.query.count() == 0
+ assert _get_verify_code_query_count() == 0
def test_should_not_delete_verification_codes_less_than_one_day_old(sample_user):
@@ -138,9 +154,10 @@ def test_should_not_delete_verification_codes_less_than_one_day_old(sample_user)
)
make_verify_code(sample_user, age=timedelta(hours=24), code="54321")
- assert VerifyCode.query.count() == 2
+ assert _get_verify_code_query_count() == 2
delete_codes_older_created_more_than_a_day_ago()
- assert VerifyCode.query.one()._code == "12345"
+ stmt = select(VerifyCode)
+ assert db.session.execute(stmt).scalars().one()._code == "12345"
def make_verify_code(user, age=None, expiry_age=None, code="12335", code_used=False):
From b84ed9c7befc872a5063c3c0b905db3912bf5ced Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Fri, 11 Oct 2024 12:33:52 -0700
Subject: [PATCH 31/39] fix core daos
---
tests/app/dao/notification_dao/test_notification_dao.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py
index 8e81db3a2..e2ac10032 100644
--- a/tests/app/dao/notification_dao/test_notification_dao.py
+++ b/tests/app/dao/notification_dao/test_notification_dao.py
@@ -449,12 +449,12 @@ def test_save_notification_creates_sms(sample_template, sample_job):
def _get_notification_query_all():
stmt = select(Notification)
- return db.execute(stmt).scalars().all()
+ return db.session.execute(stmt).scalars().all()
def _get_notification_query_one():
stmt = select(Notification)
- return db.execute(stmt).scalars().one()
+ return db.session.execute(stmt).scalars().one()
def _get_notification_query_count():
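
The fix above is the recurring pattern in this series: statements are built with select() and run through db.session.execute(), not db.execute() and not the legacy Model.query API. A minimal sketch of the helper shape these tests converge on (the names below are illustrative, not taken from the patch):

    from sqlalchemy import func, select

    def _count_rows(session, model):
        # SELECT count(model.id); .scalar() unwraps the single value
        return session.execute(select(func.count(model.id))).scalar() or 0

    def _first_row(session, model):
        # .scalars() yields ORM instances rather than Row tuples
        return session.execute(select(model)).scalars().first()
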
From c255eb4493ed5401790f191f69bc87471bd6e9fe Mon Sep 17 00:00:00 2001
From: Carlo Costino
Date: Wed, 16 Oct 2024 12:35:56 -0400
Subject: [PATCH 32/39] Fix egress proxy space references
This changeset fixes the references to the egress proxy spaces in the demo and prod environments.
Signed-off-by: Carlo Costino
---
.github/workflows/deploy-demo.yml | 4 ++--
.github/workflows/deploy-prod.yml | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/deploy-demo.yml b/.github/workflows/deploy-demo.yml
index a43b661f3..d3855d2ef 100644
--- a/.github/workflows/deploy-demo.yml
+++ b/.github/workflows/deploy-demo.yml
@@ -102,5 +102,5 @@ jobs:
CF_PASSWORD: ${{ secrets.CLOUDGOV_PASSWORD }}
with:
cf_org: gsa-tts-benefits-studio
- cf_space: notify-staging
- app: notify-api-staging
+ cf_space: notify-demo
+ app: notify-api-demo
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
index 23ef3dc6c..d01e53e0a 100644
--- a/.github/workflows/deploy-prod.yml
+++ b/.github/workflows/deploy-prod.yml
@@ -106,5 +106,5 @@ jobs:
CF_PASSWORD: ${{ secrets.CLOUDGOV_PASSWORD }}
with:
cf_org: gsa-tts-benefits-studio
- cf_space: notify-staging
- app: notify-api-staging
+ cf_space: notify-prod
+ app: notify-api-prod
From 430318ed582c25f144978eeaff61af7cbc890fcf Mon Sep 17 00:00:00 2001
From: Carlo Costino
Date: Wed, 16 Oct 2024 14:19:57 -0400
Subject: [PATCH 33/39] Fix reference to prod space
This changeset fixes the name of the prod space (notify-production).
Signed-off-by: Carlo Costino
---
.github/workflows/deploy-prod.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
index d01e53e0a..bc4eaae74 100644
--- a/.github/workflows/deploy-prod.yml
+++ b/.github/workflows/deploy-prod.yml
@@ -106,5 +106,5 @@ jobs:
CF_PASSWORD: ${{ secrets.CLOUDGOV_PASSWORD }}
with:
cf_org: gsa-tts-benefits-studio
- cf_space: notify-prod
- app: notify-api-prod
+ cf_space: notify-production
+ app: notify-api-production
From 6dfeac93cd316617a2a58de8e17ff34336eb6d67 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Wed, 16 Oct 2024 13:22:36 -0700
Subject: [PATCH 34/39] fix partitioning
---
app/aws/s3.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/app/aws/s3.py b/app/aws/s3.py
index a3cd35811..d4d704632 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -295,7 +295,11 @@ def get_old_job_location(service_id, job_id):
def get_job_and_metadata_from_s3(service_id, job_id):
- obj = get_s3_object(*get_job_location(service_id, job_id))
+ try:
+ obj = get_s3_object(*get_job_location(service_id, job_id))
+ except botocore.exceptions.ClientError:
+ obj = get_s3_object(*get_old_job_location(service_id, job_id))
+
return obj.get()["Body"].read().decode("utf-8"), obj.get()["Metadata"]
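
The hunk above retries the pre-partitioning S3 key when the partitioned one is missing. A hypothetical pytest sketch of how that fallback could be exercised with the repo's mocker fixture; the patched names come from the diff, but the stubbed return values and bucket/key tuples are made up:

    import botocore.exceptions
    from unittest.mock import MagicMock

    from app.aws import s3

    def test_get_job_and_metadata_falls_back_to_old_location(mocker):
        # Hypothetical sketch: the first (partitioned) lookup fails,
        # so the old flat location should be used instead.
        mocker.patch("app.aws.s3.get_job_location", return_value=("bucket", "new-key"))
        mocker.patch("app.aws.s3.get_old_job_location", return_value=("bucket", "old-key"))

        missing = botocore.exceptions.ClientError(
            {"Error": {"Code": "NoSuchKey", "Message": "missing"}}, "GetObject"
        )
        old_obj = MagicMock()
        old_obj.get.return_value = {
            "Body": MagicMock(read=lambda: b"phone number\n+15555555555"),
            "Metadata": {"filename": "job.csv"},
        }
        mocker.patch("app.aws.s3.get_s3_object", side_effect=[missing, old_obj])

        contents, metadata = s3.get_job_and_metadata_from_s3("service-id", "job-id")

        assert contents == "phone number\n+15555555555"
        assert metadata == {"filename": "job.csv"}
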
From 261ea6fe8f6fc17eec227b557c90264cbffeba72 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Mon, 21 Oct 2024 10:32:28 -0700
Subject: [PATCH 35/39] fix commented out code
---
migrations/versions/0044_jobs_to_notification_hist.py | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/migrations/versions/0044_jobs_to_notification_hist.py b/migrations/versions/0044_jobs_to_notification_hist.py
index e813833b4..3312d9a49 100644
--- a/migrations/versions/0044_jobs_to_notification_hist.py
+++ b/migrations/versions/0044_jobs_to_notification_hist.py
@@ -31,10 +31,10 @@ def upgrade():
#
# go_live = datetime.datetime.strptime('2016-05-18', '%Y-%m-%d')
# notifications_history_start_date = datetime.datetime.strptime('2016-06-26 23:21:55', '%Y-%m-%d %H:%M:%S')
- # jobs = session.query(Job).join(Template).filter(Job.service_id == '95316ff0-e555-462d-a6e7-95d26fbfd091',
+ # stmt = select(Job).join(Template).filter(Job.service_id == '95316ff0-e555-462d-a6e7-95d26fbfd091',
# Job.created_at >= go_live,
# Job.created_at < notifications_history_start_date).all()
- #
+ # jobs = db.session.execute(stmt).scalars().all()
# for job in jobs:
# for i in range(0, job.notifications_delivered):
# notification = NotificationHistory(id=uuid.uuid4(),
@@ -76,12 +76,11 @@ def downgrade():
#
# go_live = datetime.datetime.strptime('2016-05-18', '%Y-%m-%d')
# notifications_history_start_date = datetime.datetime.strptime('2016-06-26 23:21:55', '%Y-%m-%d %H:%M:%S')
- #
- # session.query(NotificationHistory).filter(
+ # stmt = delete(NotificationHistory).where(
# NotificationHistory.created_at >= go_live,
# NotificationHistory.service_id == '95316ff0-e555-462d-a6e7-95d26fbfd091',
- # NotificationHistory.created_at < notifications_history_start_date).delete()
- #
+ # NotificationHistory.created_at < notifications_history_start_date)
+ # session.execute(stmt)
# session.commit()
# ### end Alembic commands ###
pass
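
For reference, the uncommented 2.0-style equivalents of those historical queries would look roughly like the sketch below. The dates, service id, and model names are taken from the migration comments; the session setup and the app.models import path are assumptions about how such an Alembic data migration would be wired up:

    import datetime

    from alembic import op
    from sqlalchemy import delete, select
    from sqlalchemy.orm import Session

    from app.models import Job, NotificationHistory, Template  # assumed model module

    session = Session(bind=op.get_bind())
    go_live = datetime.datetime.strptime("2016-05-18", "%Y-%m-%d")
    notifications_history_start_date = datetime.datetime.strptime(
        "2016-06-26 23:21:55", "%Y-%m-%d %H:%M:%S"
    )

    # upgrade(): fetch the affected jobs (2.0 style)
    stmt = (
        select(Job)
        .join(Template)
        .where(
            Job.service_id == "95316ff0-e555-462d-a6e7-95d26fbfd091",
            Job.created_at >= go_live,
            Job.created_at < notifications_history_start_date,
        )
    )
    jobs = session.execute(stmt).scalars().all()

    # downgrade(): delete the generated history rows (2.0 style)
    stmt = delete(NotificationHistory).where(
        NotificationHistory.created_at >= go_live,
        NotificationHistory.service_id == "95316ff0-e555-462d-a6e7-95d26fbfd091",
        NotificationHistory.created_at < notifications_history_start_date,
    )
    session.execute(stmt)
    session.commit()
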
From 4c891de47c52bd46933880891c63a7922260b2f8 Mon Sep 17 00:00:00 2001
From: Cliff Hill
Date: Fri, 11 Oct 2024 13:46:53 -0400
Subject: [PATCH 36/39] Nonce stuff added.
Signed-off-by: Cliff Hill
---
app/service_invite/rest.py | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py
index 5728b3ed5..e7d0d4b20 100644
--- a/app/service_invite/rest.py
+++ b/app/service_invite/rest.py
@@ -32,7 +32,7 @@ service_invite = Blueprint("service_invite", __name__)
register_errors(service_invite)
-def _create_service_invite(invited_user, invite_link_host):
+def _create_service_invite(invited_user, nonce):
template_id = current_app.config["INVITATION_EMAIL_TEMPLATE_ID"]
@@ -40,12 +40,6 @@ def _create_service_invite(invited_user, invite_link_host):
service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"])
- token = generate_token(
- str(invited_user.email_address),
- current_app.config["SECRET_KEY"],
- current_app.config["DANGEROUS_SALT"],
- )
-
# The raw permissions are in the form "a,b,c,d"
# but need to be in the form ["a", "b", "c", "d"]
data = {}
@@ -59,7 +53,8 @@ def _create_service_invite(invited_user, invite_link_host):
data["invited_user_email"] = invited_user.email_address
url = os.environ["LOGIN_DOT_GOV_REGISTRATION_URL"]
- url = url.replace("NONCE", token)
+
+ url = url.replace("NONCE", nonce)  # nonce is supplied in the data sent from the admin app.
user_data_url_safe = get_user_data_url_safe(data)
@@ -94,10 +89,16 @@ def _create_service_invite(invited_user, invite_link_host):
@service_invite.route("/service/<service_id>/invite", methods=["POST"])
def create_invited_user(service_id):
request_json = request.get_json()
+ try:
+ nonce = request_json.pop("nonce")
+ except KeyError:
+ current_app.logger.exception("nonce not found in submitted data.")
+ raise
+
invited_user = invited_user_schema.load(request_json)
save_invited_user(invited_user)
- _create_service_invite(invited_user, request_json.get("invite_link_host"))
+ _create_service_invite(invited_user, nonce)
return jsonify(data=invited_user_schema.dump(invited_user)), 201
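
With this change the API no longer mints a token itself; the caller supplies a nonce in the POST body, which is popped before schema validation and substituted for the NONCE placeholder in LOGIN_DOT_GOV_REGISTRATION_URL. A hypothetical example of the JSON an admin client would now send (field values are made up; the exact field set is whatever invited_user_schema accepts, plus the required "nonce"):

    # Hypothetical payload POSTed to /service/<service_id>/invite
    payload = {
        "email_address": "invited.user@example.gov",
        "from_user": "4d8a4560-1111-2222-3333-444455556666",
        "permissions": "send_messages,manage_service",
        "folder_permissions": [],
        "nonce": "FakeNonce",  # popped by the view before invited_user_schema.load()
    }
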
From 0b648c98ddf833df722ba32cfd5e3749694ca7c8 Mon Sep 17 00:00:00 2001
From: Cliff Hill
Date: Mon, 21 Oct 2024 16:37:31 -0400
Subject: [PATCH 37/39] Fixed tests
Signed-off-by: Cliff Hill
---
tests/app/service_invite/test_service_invite_rest.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py
index 07d0b4c23..5cea786f5 100644
--- a/tests/app/service_invite/test_service_invite_rest.py
+++ b/tests/app/service_invite/test_service_invite_rest.py
@@ -45,6 +45,7 @@ def test_create_invited_user(
permissions="send_messages,manage_service,manage_api_keys",
auth_type=AuthType.EMAIL,
folder_permissions=["folder_1", "folder_2", "folder_3"],
+ nonce="FakeNonce",
**extra_args,
)
@@ -108,6 +109,7 @@ def test_create_invited_user_without_auth_type(
"from_user": str(invite_from.id),
"permissions": "send_messages,manage_service,manage_api_keys",
"folder_permissions": [],
+ "nonce": "FakeNonce",
}
json_resp = admin_request.post(
@@ -131,6 +133,7 @@ def test_create_invited_user_invalid_email(client, sample_service, mocker, fake_
"from_user": str(invite_from.id),
"permissions": "send_messages,manage_service,manage_api_keys",
"folder_permissions": [fake_uuid, fake_uuid],
+ "nonce": "FakeNonce",
}
data = json.dumps(data)
From d5cc8b239f7d6b5e46f4067a2edfc2d6b3956ec6 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 26 Oct 2024 00:08:57 +0000
Subject: [PATCH 38/39] Bump werkzeug from 3.0.3 to 3.0.6
Bumps [werkzeug](https://github.com/pallets/werkzeug) from 3.0.3 to 3.0.6.
- [Release notes](https://github.com/pallets/werkzeug/releases)
- [Changelog](https://github.com/pallets/werkzeug/blob/main/CHANGES.rst)
- [Commits](https://github.com/pallets/werkzeug/compare/3.0.3...3.0.6)
---
updated-dependencies:
- dependency-name: werkzeug
dependency-type: direct:production
...
Signed-off-by: dependabot[bot]
---
poetry.lock | 8 ++++----
pyproject.toml | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 60ce4d0ae..dcdb5290b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -4519,13 +4519,13 @@ test = ["websockets"]
[[package]]
name = "werkzeug"
-version = "3.0.3"
+version = "3.0.6"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.8"
files = [
- {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
- {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
+ {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"},
+ {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"},
]
[package.dependencies]
@@ -4803,4 +4803,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.12.2"
-content-hash = "42172a923e16c5b0965ab06f717d41e8491ee35f7be674091b38014c48b7a89e"
+content-hash = "cf18ae74630e47eec18cc6c5fea9e554476809d20589d82c54a8d761bb2c3de0"
diff --git a/pyproject.toml b/pyproject.toml
index 3e3a78aed..99858c09e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,7 +47,7 @@ psycopg2-binary = "==2.9.9"
pyjwt = "==2.8.0"
python-dotenv = "==1.0.1"
sqlalchemy = "==2.0.31"
-werkzeug = "^3.0.3"
+werkzeug = "^3.0.6"
faker = "^26.0.0"
async-timeout = "^4.0.3"
bleach = "^6.1.0"
From a9385107b033541a727c837567dbff0cea53de93 Mon Sep 17 00:00:00 2001
From: Kenneth Kehl <@kkehl@flexion.us>
Date: Mon, 28 Oct 2024 12:50:11 -0700
Subject: [PATCH 39/39] code review feedback
---
app/dao/notifications_dao.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 8659fca9b..1d07473c1 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -629,7 +629,6 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ
Notification.status == NotificationStatus.CREATED,
)
notifications = db.session.execute(stmt).scalars().all()
- print(f"WE RETURN THIS FOR NOTIFICATIONS {notifications}")
return notifications