From 2a0669636d461514b9680784253a4a0a6896ac9c Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 19 May 2017 16:42:33 +0100 Subject: [PATCH 01/92] Add and test new DAO method that counts the billable units multiplied by rate multiplier for a given service for a given time period. Currently this is SMS only. Used by the dashboard for a headline figure. --- app/dao/notification_usage_dao.py | 18 ++ tests/app/dao/test_notification_usage_dao.py | 197 ++++++++++++++++++- 2 files changed, 212 insertions(+), 3 deletions(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index 4f9e2ee8e..f4c233468 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -153,3 +153,21 @@ def rate_multiplier(): (NotificationHistory.rate_multiplier == None, literal_column("'1'")), # noqa (NotificationHistory.rate_multiplier != None, NotificationHistory.rate_multiplier), # noqa ]), Integer()) + + +@statsd(namespace="dao") +def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date, end_date, service_id): + result = db.session.query( + func.sum( + NotificationHistory.billable_units * func.coalesce(NotificationHistory.rate_multiplier, 1) + ).label('billable_units') + ).filter( + NotificationHistory.service_id == service_id, + NotificationHistory.notification_type == 'sms', + NotificationHistory.created_at >= start_date, + NotificationHistory.created_at <= end_date, + NotificationHistory.status.in_(NOTIFICATION_STATUS_TYPES_BILLABLE) + ) + if result.scalar(): + return int(result.scalar()) + return 0 diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index 5252b0f90..81cbd0202 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -1,11 +1,17 @@ import uuid -from datetime import datetime +from datetime import datetime, timedelta + +import pytest from app.dao.date_util import get_financial_year from app.dao.notification_usage_dao import (get_rates_for_year, get_yearly_billing_data, - get_monthly_billing_data) -from app.models import Rate + get_monthly_billing_data, + get_total_billable_units_for_sent_sms_notifications_in_date_range) +from app.models import Rate, NOTIFICATION_STATUS_SUCCESS, NOTIFICATION_DELIVERED, NOTIFICATION_STATUS_TYPES_BILLABLE, \ + NOTIFICATION_CREATED, NOTIFICATION_STATUS_TYPES_NON_BILLABLE +from tests.app.conftest import sample_notification, sample_email_template, sample_letter_template, sample_service from tests.app.db import create_notification +from freezegun import freeze_time def test_get_rates_for_year(notify_db, notify_db_session): @@ -248,3 +254,188 @@ def test_get_monthly_billing_data_with_no_notifications_for_year(notify_db, noti def set_up_rate(notify_db, start_date, value): rate = Rate(id=uuid.uuid4(), valid_from=start_date, rate=value, notification_type='sms') notify_db.session.add(rate) + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db_session, sample_service): + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=2, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=3, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, 
service=sample_service, billable_units=4, status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 10 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications( + notify_db, notify_db_session, sample_service +): + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=2.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=5.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=10.0, status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 18 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and_emails( + notify_db, notify_db_session, sample_service +): + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) + letter_template = sample_letter_template(sample_service) + + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + template=email_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + template=letter_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 2 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_returns_total_billable_units_for_sms_notifications_for_only_requested_service( + notify_db, notify_db_session +): + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_2, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_3, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id) == 2 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_returns_total_billable_units_for_sms_notifications_handling_null_values( + notify_db, notify_db_session, sample_service +): + sample_notification( + notify_db, + notify_db_session, + 
service=sample_service, + billable_units=2, + rate_multiplier=None, + status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 2 + + +@pytest.mark.parametrize('billable_units, states', ([ + (len(NOTIFICATION_STATUS_TYPES_BILLABLE), NOTIFICATION_STATUS_TYPES_BILLABLE), + (0, NOTIFICATION_STATUS_TYPES_NON_BILLABLE) +])) +@freeze_time("2016-01-10 12:00:00.000000") +def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notifications( + notify_db, notify_db_session, sample_service, billable_units, states +): + for state in states: + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=None, + status=state) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + ) == billable_units + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifications( + notify_db, notify_db_session, sample_service +): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=100), + status=NOTIFICATION_DELIVERED) + + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=5), + status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 1 + + +def test_returns_zero_if_no_matching_rows_when_returning_billable_units_for_sms_notifications( + notify_db, notify_db_session, sample_service +): + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 0 + From 7268bc28fe4de692fb434009b132dbd93628cfce Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 19 May 2017 16:42:47 +0100 Subject: [PATCH 02/92] New array of non-billable states --- app/models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app/models.py b/app/models.py index 5fff42027..50f3495d8 100644 --- a/app/models.py +++ b/app/models.py @@ -647,6 +647,7 @@ NOTIFICATION_STATUS_TYPES_BILLABLE = [ NOTIFICATION_PERMANENT_FAILURE, ] + NOTIFICATION_STATUS_TYPES = [ NOTIFICATION_CREATED, NOTIFICATION_SENDING, @@ -659,6 +660,8 @@ NOTIFICATION_STATUS_TYPES = [ NOTIFICATION_PERMANENT_FAILURE, ] +NOTIFICATION_STATUS_TYPES_NON_BILLABLE = list(set(NOTIFICATION_STATUS_TYPES) - set(NOTIFICATION_STATUS_TYPES_BILLABLE)) + NOTIFICATION_STATUS_TYPES_ENUM = db.Enum(*NOTIFICATION_STATUS_TYPES, name='notify_status_type') From f0395e74963be0d0de0411ccf9352c28ee0f9aa3 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 19 May 2017 16:43:05 +0100 Subject: [PATCH 03/92] New endpoint to get the count of billable SMS units. 
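
For illustration, a rough sketch of how the new endpoint is expected to be exercised, mirroring the tests added later in this series (the service id, year and returned count are placeholder values, not real data):

    # illustrative only - values are made up
    response = client.get(
        '/service/{}/yearly-usage-count?year=2016'.format(sample_service.id),
        headers=[create_authorization_header()]
    )
    assert response.status_code == 200
    assert json.loads(response.get_data(as_text=True)) == {'billable_sms_units': 100}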
--- app/service/rest.py | 24 ++++++ tests/app/service/test_rest.py | 153 ++++++++++++++++++++++++--------- 2 files changed, 134 insertions(+), 43 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index 180740063..423d5dc9b 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -9,6 +9,7 @@ from flask import ( ) from sqlalchemy.orm.exc import NoResultFound +from app import redis_store from app.dao import notification_usage_dao from app.dao.dao_utils import dao_rollback from app.dao.api_key_dao import ( @@ -16,6 +17,8 @@ from app.dao.api_key_dao import ( get_model_api_keys, get_unsigned_secret, expire_api_key) +from app.dao.date_util import get_financial_year +from app.dao.notification_usage_dao import get_total_billable_units_for_sent_sms_notifications_in_date_range from app.dao.services_dao import ( dao_fetch_service_by_id, dao_fetch_all_services, @@ -58,6 +61,7 @@ from app.schemas import ( ) from app.utils import pagination_links from flask import Blueprint +from notifications_utils.clients.redis import sms_billable_units_cache_key service_blueprint = Blueprint('service', __name__) @@ -440,6 +444,26 @@ def get_monthly_template_stats(service_id): raise InvalidRequest('Year must be a number', status_code=400) +@service_blueprint.route('//yearly-usage-count') +def get_yearly_usage_count(service_id): + try: + cache_key = sms_billable_units_cache_key(service_id) + cached_value = redis_store.get(cache_key) + if cached_value: + return jsonify({'billable_sms_units': cached_value}) + else: + start_date, end_date = get_financial_year(int(request.args.get('year'))) + billable_units = get_total_billable_units_for_sent_sms_notifications_in_date_range( + start_date, + end_date, + service_id) + redis_store.set(cache_key, billable_units, ex=60) + return jsonify({'billable_sms_units': billable_units}) + + except (ValueError, TypeError): + return jsonify(result='error', message='No valid year provided'), 400 + + @service_blueprint.route('//yearly-usage') def get_yearly_billing_usage(service_id): try: diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index c12ca4eec..c7ad9495c 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -62,9 +62,9 @@ def test_get_service_list_with_only_active_flag(client, service_factory): def test_get_service_list_with_user_id_and_only_active_flag( - client, - sample_user, - service_factory + client, + sample_user, + service_factory ): other_user = create_user(email='foo@bar.gov.uk') @@ -663,7 +663,6 @@ def test_add_existing_user_to_another_service_with_all_permissions(notify_api, sample_user): with notify_api.test_request_context(): with notify_api.test_client() as client: - # check which users part of service user_already_in_service = sample_service.users[0] auth_header = create_authorization_header() @@ -738,7 +737,6 @@ def test_add_existing_user_to_another_service_with_send_permissions(notify_api, sample_user): with notify_api.test_request_context(): with notify_api.test_client() as client: - # they must exist in db first user_to_add = User( name='Invited User', @@ -782,7 +780,6 @@ def test_add_existing_user_to_another_service_with_manage_permissions(notify_api sample_user): with notify_api.test_request_context(): with notify_api.test_client() as client: - # they must exist in db first user_to_add = User( name='Invited User', @@ -826,7 +823,6 @@ def test_add_existing_user_to_another_service_with_manage_api_keys(notify_api, sample_user): with notify_api.test_request_context(): with 
notify_api.test_client() as client: - # they must exist in db first user_to_add = User( name='Invited User', @@ -867,7 +863,6 @@ def test_add_existing_user_to_non_existing_service_returns404(notify_api, sample_user): with notify_api.test_request_context(): with notify_api.test_client() as client: - user_to_add = User( name='Invited User', email_address='invited@digital.cabinet-office.gov.uk', @@ -898,7 +893,6 @@ def test_add_existing_user_to_non_existing_service_returns404(notify_api, def test_add_existing_user_of_service_to_service_returns400(notify_api, notify_db, notify_db_session, sample_service): with notify_api.test_request_context(): with notify_api.test_client() as client: - existing_user_id = sample_service.users[0].id data = {'permissions': ['send_messages', 'manage_service', 'manage_api_keys']} @@ -921,7 +915,6 @@ def test_add_existing_user_of_service_to_service_returns400(notify_api, notify_d def test_add_unknown_user_to_service_returns404(notify_api, notify_db, notify_db_session, sample_service): with notify_api.test_request_context(): with notify_api.test_client() as client: - incorrect_id = 9876 data = {'permissions': ['send_messages', 'manage_service', 'manage_api_keys']} @@ -942,7 +935,7 @@ def test_add_unknown_user_to_service_returns404(notify_api, notify_db, notify_db def test_remove_user_from_service( - notify_db, notify_db_session, client, sample_user_service_permission + notify_db, notify_db_session, client, sample_user_service_permission ): second_user = create_user(email="new@digital.cabinet-office.gov.uk") # Simulates successfully adding a user to the service @@ -962,7 +955,7 @@ def test_remove_user_from_service( def test_remove_non_existant_user_from_service( - client, sample_user_service_permission + client, sample_user_service_permission ): second_user = create_user(email="new@digital.cabinet-office.gov.uk") endpoint = url_for( @@ -998,13 +991,11 @@ def test_cannot_remove_only_user_from_service(notify_api, # This test is just here verify get_service_and_api_key_history that is a temp solution # until proper ui is sorted out on admin app def test_get_service_and_api_key_history(notify_api, notify_db, notify_db_session, sample_service): - from tests.app.conftest import sample_api_key as create_sample_api_key api_key = create_sample_api_key(notify_db, notify_db_session, service=sample_service) with notify_api.test_request_context(): with notify_api.test_client() as client: - auth_header = create_authorization_header() response = client.get( path='/service/{}/history'.format(sample_service.id), @@ -1078,12 +1069,12 @@ def test_get_all_notifications_for_service_in_order(notify_api, notify_db, notif ] ) def test_get_all_notifications_for_service_including_ones_made_by_jobs( - client, - notify_db, - notify_db_session, - sample_service, - include_from_test_key, - expected_count_of_notifications + client, + notify_db, + notify_db_session, + sample_service, + include_from_test_key, + expected_count_of_notifications ): with_job = sample_notification_with_job(notify_db, notify_db_session, service=sample_service) without_job = create_sample_notification(notify_db, notify_db_session, service=sample_service) @@ -1108,10 +1099,10 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs( def test_get_only_api_created_notifications_for_service( - client, - notify_db, - notify_db_session, - sample_service + client, + notify_db, + notify_db_session, + sample_service ): with_job = sample_notification_with_job(notify_db, notify_db_session, service=sample_service) 
without_job = create_sample_notification(notify_db, notify_db_session, service=sample_service) @@ -1211,19 +1202,19 @@ def test_get_detailed_service(notify_db, notify_db_session, notify_api, sample_s @pytest.mark.parametrize( 'url, expected_status, expected_json', [ ( - '/service/{}/notifications/monthly?year=2001', - 200, - {'data': {'foo': 'bar'}}, + '/service/{}/notifications/monthly?year=2001', + 200, + {'data': {'foo': 'bar'}}, ), ( - '/service/{}/notifications/monthly?year=baz', - 400, - {'message': 'Year must be a number', 'result': 'error'}, + '/service/{}/notifications/monthly?year=baz', + 400, + {'message': 'Year must be a number', 'result': 'error'}, ), ( - '/service/{}/notifications/monthly', - 400, - {'message': 'Year must be a number', 'result': 'error'}, + '/service/{}/notifications/monthly', + 400, + {'message': 'Year must be a number', 'result': 'error'}, ), ] ) @@ -1452,11 +1443,11 @@ def test_get_notification_billable_unit_count_missing_year(client, sample_servic ('?year=abcd', 400, {'message': 'Year must be a number', 'result': 'error'}), ]) def test_get_service_provider_aggregate_statistics( - client, - sample_service, - query_string, - expected_status, - expected_json, + client, + sample_service, + query_string, + expected_status, + expected_json, ): response = client.get( '/service/{}/fragment/aggregate_statistics{}'.format(sample_service.id, query_string), @@ -1497,11 +1488,11 @@ def test_get_template_stats_by_month_returns_correct_data(notify_db, notify_db_s ('?year=abcd', 400, {'message': 'Year must be a number', 'result': 'error'}), ]) def test_get_template_stats_by_month_returns_error_for_incorrect_year( - client, - sample_service, - query_string, - expected_status, - expected_json + client, + sample_service, + query_string, + expected_status, + expected_json ): response = client.get( '/service/{}/notifications/templates/monthly{}'.format(sample_service.id, query_string), @@ -1729,3 +1720,79 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan assert resp.status_code == 200 assert not send_notification_mock.called + + +def test_get_yearly_billing_usage_count_returns_400_if_missing_year(client, sample_service): + response = client.get( + '/service/{}/yearly-usage-count'.format(sample_service.id), + headers=[create_authorization_header()] + ) + assert response.status_code == 400 + assert json.loads(response.get_data(as_text=True)) == { + 'message': 'No valid year provided', 'result': 'error' + } + + +def test_get_yearly_billing_usage_count_returns_400_if_invalid_year(client, sample_service, mocker): + redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=None) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + + response = client.get( + '/service/{}/yearly-usage-count?year=HAHAHAHAH'.format(sample_service.id), + headers=[create_authorization_header()] + ) + assert response.status_code == 400 + assert json.loads(response.get_data(as_text=True)) == { + 'message': 'No valid year provided', 'result': 'error' + } + redis_get_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id))) + redis_set_mock.assert_not_called() + + +def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sample_service, mocker): + redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=None) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + + start = datetime.utcnow() + end = datetime.utcnow() + timedelta(minutes=10) + mock_query = 
mocker.patch( + 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=100 + ) + mock_year = mocker.patch('app.service.rest.get_financial_year', return_value=(start, end)) + response = client.get( + '/service/{}/yearly-usage-count?year=2016'.format(sample_service.id), + headers=[create_authorization_header()] + ) + assert response.status_code == 200 + assert json.loads(response.get_data(as_text=True)) == { + 'billable_sms_units': 100 + } + mock_query.assert_called_once_with(start, end, sample_service.id) + mock_year.assert_called_once_with(2016) + redis_get_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id))) + redis_set_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id)), 100, ex=60) + + +def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sample_service, mocker): + redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=50) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + mock_query = mocker.patch( + 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=50 + ) + + start = datetime.utcnow() + end = datetime.utcnow() + timedelta(minutes=10) + mock_year = mocker.patch('app.service.rest.get_financial_year', return_value=(start, end)) + + response = client.get( + '/service/{}/yearly-usage-count?year=2016'.format(sample_service.id), + headers=[create_authorization_header()] + ) + assert response.status_code == 200 + assert json.loads(response.get_data(as_text=True)) == { + 'billable_sms_units': 50 + } + redis_get_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id))) + mock_year.assert_not_called() + mock_query.assert_not_called() + redis_set_mock.assert_not_called() From 119f0532ab135ee59506f6d6889fd6ed8c5bfc88 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Mon, 22 May 2017 10:06:34 +0100 Subject: [PATCH 04/92] Renamed the API method/url --- app/service/rest.py | 8 +++--- tests/app/dao/test_notification_usage_dao.py | 1 - tests/app/service/test_rest.py | 26 ++++++++++---------- 3 files changed, 17 insertions(+), 18 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index 423d5dc9b..f52b18174 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -444,13 +444,13 @@ def get_monthly_template_stats(service_id): raise InvalidRequest('Year must be a number', status_code=400) -@service_blueprint.route('//yearly-usage-count') -def get_yearly_usage_count(service_id): +@service_blueprint.route('//yearly-sms-billable-units') +def get_yearly_sms_billable_units(service_id): try: cache_key = sms_billable_units_cache_key(service_id) cached_value = redis_store.get(cache_key) if cached_value: - return jsonify({'billable_sms_units': cached_value}) + return jsonify({'billable_sms_units': int(cached_value)}) else: start_date, end_date = get_financial_year(int(request.args.get('year'))) billable_units = get_total_billable_units_for_sent_sms_notifications_in_date_range( @@ -460,7 +460,7 @@ def get_yearly_usage_count(service_id): redis_store.set(cache_key, billable_units, ex=60) return jsonify({'billable_sms_units': billable_units}) - except (ValueError, TypeError): + except (ValueError, TypeError) as e: return jsonify(result='error', message='No valid year provided'), 400 diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index 81cbd0202..7dfd4bdfb 100644 --- 
a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -438,4 +438,3 @@ def test_returns_zero_if_no_matching_rows_when_returning_billable_units_for_sms_ start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 0 - diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index c7ad9495c..57d152b9b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1202,19 +1202,19 @@ def test_get_detailed_service(notify_db, notify_db_session, notify_api, sample_s @pytest.mark.parametrize( 'url, expected_status, expected_json', [ ( - '/service/{}/notifications/monthly?year=2001', - 200, - {'data': {'foo': 'bar'}}, + '/service/{}/notifications/monthly?year=2001', + 200, + {'data': {'foo': 'bar'}}, ), ( - '/service/{}/notifications/monthly?year=baz', - 400, - {'message': 'Year must be a number', 'result': 'error'}, + '/service/{}/notifications/monthly?year=baz', + 400, + {'message': 'Year must be a number', 'result': 'error'}, ), ( - '/service/{}/notifications/monthly', - 400, - {'message': 'Year must be a number', 'result': 'error'}, + '/service/{}/notifications/monthly', + 400, + {'message': 'Year must be a number', 'result': 'error'}, ), ] ) @@ -1724,7 +1724,7 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan def test_get_yearly_billing_usage_count_returns_400_if_missing_year(client, sample_service): response = client.get( - '/service/{}/yearly-usage-count'.format(sample_service.id), + '/service/{}/yearly-sms-billable-units'.format(sample_service.id), headers=[create_authorization_header()] ) assert response.status_code == 400 @@ -1738,7 +1738,7 @@ def test_get_yearly_billing_usage_count_returns_400_if_invalid_year(client, samp redis_set_mock = mocker.patch('app.service.rest.redis_store.set') response = client.get( - '/service/{}/yearly-usage-count?year=HAHAHAHAH'.format(sample_service.id), + '/service/{}/yearly-sms-billable-units?year=HAHAHAHAH'.format(sample_service.id), headers=[create_authorization_header()] ) assert response.status_code == 400 @@ -1760,7 +1760,7 @@ def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sam ) mock_year = mocker.patch('app.service.rest.get_financial_year', return_value=(start, end)) response = client.get( - '/service/{}/yearly-usage-count?year=2016'.format(sample_service.id), + '/service/{}/yearly-sms-billable-units?year=2016'.format(sample_service.id), headers=[create_authorization_header()] ) assert response.status_code == 200 @@ -1785,7 +1785,7 @@ def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sa mock_year = mocker.patch('app.service.rest.get_financial_year', return_value=(start, end)) response = client.get( - '/service/{}/yearly-usage-count?year=2016'.format(sample_service.id), + '/service/{}/yearly-sms-billable-units?year=2016'.format(sample_service.id), headers=[create_authorization_header()] ) assert response.status_code == 200 From 35af759f8736deb15dd06fe4ae356b01ca070cbf Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 23 May 2017 13:54:51 +0100 Subject: [PATCH 05/92] Adding rates to the billable units query --- app/dao/notification_usage_dao.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index f4c233468..35d3646e4 100644 --- 
a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -157,6 +157,9 @@ def rate_multiplier(): @statsd(namespace="dao") def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date, end_date, service_id): + rates = get_rates_for_year(start_date, end_date, SMS_TYPE) + print(rates) + result = db.session.query( func.sum( NotificationHistory.billable_units * func.coalesce(NotificationHistory.rate_multiplier, 1) From daa6d2d6f2ab63b5886c6262ef21c30eb7a5b5bf Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 08:55:59 +0100 Subject: [PATCH 06/92] No redis in dev mode --- app/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/app/config.py b/app/config.py index 4bdec85b6..4e09c3aaa 100644 --- a/app/config.py +++ b/app/config.py @@ -204,6 +204,7 @@ class Config(object): ###################### class Development(Config): + REDIS_ENABLED = False SQLALCHEMY_ECHO = False NOTIFY_EMAIL_DOMAIN = 'notify.tools' CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload' From 0bb289a1f2115ee4682a78246be0e10d7ea9d9f3 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 08:56:34 +0100 Subject: [PATCH 07/92] Redis enable via config --- app/config.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/config.py b/app/config.py index 4e09c3aaa..4bdec85b6 100644 --- a/app/config.py +++ b/app/config.py @@ -204,7 +204,6 @@ class Config(object): ###################### class Development(Config): - REDIS_ENABLED = False SQLALCHEMY_ECHO = False NOTIFY_EMAIL_DOMAIN = 'notify.tools' CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload' From 9dd604194404d461ba06d8994c206b2a9e46dd82 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 08:56:59 +0100 Subject: [PATCH 08/92] Usage DAO can now return rates and billable amount, alongside units. 
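
As a sketch of the new return value: the DAO now yields a (billable_units, total_cost) pair, with the cost accumulated per rate boundary. The figures below are made up purely to show the arithmetic, not taken from real rates:

    # illustrative only: 8 units billed while the rate was 0.02, then 4 units at 0.04
    billable_units = 8 + 4                  # 12
    total_cost = (8 * 0.02) + (4 * 0.04)    # 0.32
    # get_total_billable_units_for_sent_sms_notifications_in_date_range(...)
    # returns the pair (billable_units, total_cost)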
--- app/dao/notification_usage_dao.py | 78 ++++++-- tests/app/dao/test_notification_usage_dao.py | 193 +++++++++++++++++-- 2 files changed, 242 insertions(+), 29 deletions(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index 35d3646e4..f40c6fe8b 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -157,20 +157,66 @@ def rate_multiplier(): @statsd(namespace="dao") def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date, end_date, service_id): - rates = get_rates_for_year(start_date, end_date, SMS_TYPE) - print(rates) - result = db.session.query( - func.sum( - NotificationHistory.billable_units * func.coalesce(NotificationHistory.rate_multiplier, 1) - ).label('billable_units') - ).filter( - NotificationHistory.service_id == service_id, - NotificationHistory.notification_type == 'sms', - NotificationHistory.created_at >= start_date, - NotificationHistory.created_at <= end_date, - NotificationHistory.status.in_(NOTIFICATION_STATUS_TYPES_BILLABLE) - ) - if result.scalar(): - return int(result.scalar()) - return 0 + billable_units = 0 + total_cost = 0.0 + + rate_boundaries = discover_rate_bounds_for_billing_query(start_date, end_date) + for rate_boundary in rate_boundaries: + result = db.session.query( + func.sum( + NotificationHistory.billable_units * func.coalesce(NotificationHistory.rate_multiplier, 1) + ).label('billable_units') + ).filter( + NotificationHistory.service_id == service_id, + NotificationHistory.notification_type == 'sms', + NotificationHistory.created_at >= rate_boundary['start_date'], + NotificationHistory.created_at < rate_boundary['end_date'], + NotificationHistory.status.in_(NOTIFICATION_STATUS_TYPES_BILLABLE) + ) + billable_units_by_rate_boundry = result.scalar() + if billable_units_by_rate_boundry: + billable_units += int(billable_units_by_rate_boundry)vi end_date + total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] + + return billable_units, total_cost + + +def discover_rate_bounds_for_billing_query(start_date, end_date): + bounds = [] + rates = get_rates_for_year(start_date, end_date, SMS_TYPE) + + def current_valid_from(index): + return rates[index].valid_from + + def next_valid_from(index): + return rates[index + 1].valid_from + + def current_rate(index): + return rates[index].rate + + def append_rate(rate_start_date, rate_end_date, rate): + bounds.append({ + 'start_date': rate_start_date, + 'end_date': rate_end_date, + 'rate': rate + }) + + if len(rates) == 1: + append_rate(start_date, end_date, current_rate(0)) + return bounds + + for i in range(len(rates)): + # first boundary + if i == 0: + append_rate(start_date, next_valid_from(i), current_rate(i)) + + # last boundary + elif i == (len(rates) - 1): + append_rate(current_valid_from(i), end_date, current_rate(i)) + + # other boundaries + else: + append_rate(current_valid_from(i), next_valid_from(i), current_rate(i)) + + return bounds diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index 7dfd4bdfb..c1c668d4c 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -4,11 +4,19 @@ from datetime import datetime, timedelta import pytest from app.dao.date_util import get_financial_year -from app.dao.notification_usage_dao import (get_rates_for_year, get_yearly_billing_data, - get_monthly_billing_data, - get_total_billable_units_for_sent_sms_notifications_in_date_range) -from app.models 
import Rate, NOTIFICATION_STATUS_SUCCESS, NOTIFICATION_DELIVERED, NOTIFICATION_STATUS_TYPES_BILLABLE, \ - NOTIFICATION_CREATED, NOTIFICATION_STATUS_TYPES_NON_BILLABLE +from app.dao.notification_usage_dao import ( + get_rates_for_year, + get_yearly_billing_data, + get_monthly_billing_data, + get_total_billable_units_for_sent_sms_notifications_in_date_range, + discover_rate_bounds_for_billing_query +) +from app.models import ( + Rate, + NOTIFICATION_DELIVERED, + NOTIFICATION_STATUS_TYPES_BILLABLE, + NOTIFICATION_STATUS_TYPES_NON_BILLABLE, + Notification) from tests.app.conftest import sample_notification, sample_email_template, sample_letter_template, sample_service from tests.app.db import create_notification from freezegun import freeze_time @@ -258,6 +266,8 @@ def set_up_rate(notify_db, start_date, value): @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db_session, sample_service): + set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) + sample_notification( notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) sample_notification( @@ -270,13 +280,16 @@ def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 10 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 10 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 0.16 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications( notify_db, notify_db_session, sample_service ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + sample_notification( notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) sample_notification( @@ -289,13 +302,94 @@ def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notificati start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 18 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 18 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 45 + + +def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications_for_several_rates( + notify_db, notify_db_session, sample_service +): + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1 = datetime(2016, 2, 1) + eligble_rate_2 = datetime(2016, 11, 1) + eligble_rate_3 = datetime(2017, 2, 1) + + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_1) + + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=2.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_2) + + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=5.0, + status=NOTIFICATION_DELIVERED, + 
created_at=eligble_rate_3) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 8 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 40 + + +def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( + notify_db, notify_db_session, sample_service +): + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) + + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 24.0 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and_emails( notify_db, notify_db_session, sample_service ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) letter_template = sample_letter_template(sample_service) @@ -324,13 +418,16 @@ def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_for_only_requested_service( notify_db, notify_db_session ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) @@ -358,13 +455,16 @@ def test_returns_total_billable_units_for_sms_notifications_for_only_requested_s start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id) == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, 
service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_handling_null_values( notify_db, notify_db_session, sample_service ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + sample_notification( notify_db, notify_db_session, @@ -376,7 +476,8 @@ def test_returns_total_billable_units_for_sms_notifications_handling_null_values start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @pytest.mark.parametrize('billable_units, states', ([ @@ -387,6 +488,8 @@ def test_returns_total_billable_units_for_sms_notifications_handling_null_values def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service, billable_units, states ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + for state in states: sample_notification( notify_db, @@ -401,13 +504,18 @@ def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notif assert get_total_billable_units_for_sent_sms_notifications_in_date_range( start, end, sample_service.id - ) == billable_units + )[0] == billable_units + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == billable_units * 2.5 @freeze_time("2016-01-10 12:00:00.000000") def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + sample_notification( notify_db, notify_db_session, @@ -429,12 +537,71 @@ def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifica start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 1 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 1 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 2.5 def test_returns_zero_if_no_matching_rows_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service ): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + start = datetime.utcnow() - timedelta(minutes=10) end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id) == 0 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 0 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 0.0 + + +def test_should_calculate_rate_boundaries_for_billing_query_for_single_relevant_rate(notify_db, notify_db_session): + start_date, end_date = get_financial_year(2016) + set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) + rate_boundaries = discover_rate_bounds_for_billing_query(start_date, end_date) + 
print(rate_boundaries) + assert len(rate_boundaries) == 1 + assert rate_boundaries[0]['start_date'] == start_date + assert rate_boundaries[0]['end_date'] == end_date + assert rate_boundaries[0]['rate'] == 0.016 + + +def test_should_calculate_rate_boundaries_for_billing_query_for_two_relevant_rates(notify_db, notify_db_session): + start_date, end_date = get_financial_year(2016) + + rate_1_valid_from = datetime(2016, 1, 1) + rate_2_valid_from = datetime(2017, 1, 1) + + set_up_rate(notify_db, rate_1_valid_from, 0.02) + set_up_rate(notify_db, rate_2_valid_from, 0.04) + rate_boundaries = discover_rate_bounds_for_billing_query(start_date, end_date) + assert len(rate_boundaries) == 2 + assert rate_boundaries[0]['start_date'] == start_date + assert rate_boundaries[0]['end_date'] == rate_2_valid_from + assert rate_boundaries[0]['rate'] == 0.02 + + assert rate_boundaries[1]['start_date'] == rate_2_valid_from + assert rate_boundaries[1]['end_date'] == end_date + assert rate_boundaries[1]['rate'] == 0.04 + + +def test_should_calculate_rate_boundaries_for_billing_query_for_three_relevant_rates(notify_db, notify_db_session): + start_date, end_date = get_financial_year(2016) + rate_1_valid_from = datetime(2016, 1, 1) + rate_2_valid_from = datetime(2017, 1, 1) + rate_3_valid_from = datetime(2017, 2, 1) + + set_up_rate(notify_db, rate_1_valid_from, 0.02) + set_up_rate(notify_db, rate_2_valid_from, 0.04) + set_up_rate(notify_db, rate_3_valid_from, 0.06) + rate_boundaries = discover_rate_bounds_for_billing_query(start_date, end_date) + assert len(rate_boundaries) == 3 + + assert rate_boundaries[0]['start_date'] == start_date + assert rate_boundaries[0]['end_date'] == rate_2_valid_from + assert rate_boundaries[0]['rate'] == 0.02 + + assert rate_boundaries[1]['start_date'] == rate_2_valid_from + assert rate_boundaries[1]['end_date'] == rate_3_valid_from + assert rate_boundaries[1]['rate'] == 0.04 + + assert rate_boundaries[2]['start_date'] == rate_3_valid_from + assert rate_boundaries[2]['end_date'] == end_date + assert rate_boundaries[2]['rate'] == 0.06 From 511e143ace9b5f41d55273ca705fe5368d50369f Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 08:57:11 +0100 Subject: [PATCH 09/92] toString on the rates object --- app/models.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/models.py b/app/models.py index 66fa6a75c..79a873e95 100644 --- a/app/models.py +++ b/app/models.py @@ -1069,6 +1069,12 @@ class Rate(db.Model): rate = db.Column(db.Float(asdecimal=False), nullable=False) notification_type = db.Column(notification_types, index=True, nullable=False) + def __str__(self): + the_string = "{}".format(self.rate) + the_string += " {}".format(self.notification_type) + the_string += " {}".format(self.valid_from) + return the_string + class JobStatistics(db.Model): __tablename__ = 'job_statistics' From 78a55bafe0f5d3e4b405a25622d235ec122f7d1f Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 08:57:41 +0100 Subject: [PATCH 10/92] Added new cost field to yearly billable sms endpoint. 
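
Illustrative response shape after this change, mirroring the updated test assertions below (the figures are placeholders):

    # illustrative only - the endpoint response now carries both values
    assert json.loads(response.get_data(as_text=True)) == {
        'billable_sms_units': 100,
        'total_cost': 200.0
    }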
--- app/service/rest.py | 17 ++++++++++++----- tests/app/service/test_rest.py | 13 ++++++++----- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index f52b18174..ad0c39d12 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -448,19 +448,26 @@ def get_monthly_template_stats(service_id): def get_yearly_sms_billable_units(service_id): try: cache_key = sms_billable_units_cache_key(service_id) - cached_value = redis_store.get(cache_key) - if cached_value: - return jsonify({'billable_sms_units': int(cached_value)}) + cached_billable_sms_units = redis_store.get(cache_key) + if cached_billable_sms_units: + return jsonify({ + 'billable_sms_units': int(cached_billable_sms_units[0]), + 'total_cost': float(cached_billable_sms_units[1]) + }) else: start_date, end_date = get_financial_year(int(request.args.get('year'))) - billable_units = get_total_billable_units_for_sent_sms_notifications_in_date_range( + billable_units, total_cost = get_total_billable_units_for_sent_sms_notifications_in_date_range( start_date, end_date, service_id) redis_store.set(cache_key, billable_units, ex=60) - return jsonify({'billable_sms_units': billable_units}) + return jsonify({ + 'billable_sms_units': billable_units, + 'total_cost': total_cost + }) except (ValueError, TypeError) as e: + print(e) return jsonify(result='error', message='No valid year provided'), 400 diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 4d9882e82..4231de249 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1758,7 +1758,7 @@ def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sam start = datetime.utcnow() end = datetime.utcnow() + timedelta(minutes=10) mock_query = mocker.patch( - 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=100 + 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=(100, 200.0) ) mock_year = mocker.patch('app.service.rest.get_financial_year', return_value=(start, end)) response = client.get( @@ -1767,7 +1767,8 @@ def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sam ) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == { - 'billable_sms_units': 100 + 'billable_sms_units': 100, + 'total_cost': 200.0 } mock_query.assert_called_once_with(start, end, sample_service.id) mock_year.assert_called_once_with(2016) @@ -1776,10 +1777,10 @@ def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sam def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sample_service, mocker): - redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=50) + redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=(50, 100.0)) redis_set_mock = mocker.patch('app.service.rest.redis_store.set') mock_query = mocker.patch( - 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=50 + 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=(50, 100.0) ) start = datetime.utcnow() @@ -1790,9 +1791,11 @@ def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sa '/service/{}/yearly-sms-billable-units?year=2016'.format(sample_service.id), headers=[create_authorization_header()] ) + print(response.get_data(as_text=True)) assert response.status_code == 200 
assert json.loads(response.get_data(as_text=True)) == { - 'billable_sms_units': 50 + 'billable_sms_units': 50, + 'total_cost': 100.0 } redis_get_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id))) mock_year.assert_not_called() From 0db8297693c8e24bb359866c3d8c12f981e04869 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 09:59:07 +0100 Subject: [PATCH 11/92] Removed print statement --- tests/app/dao/test_notification_usage_dao.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index c1c668d4c..d4aec8531 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -556,7 +556,6 @@ def test_should_calculate_rate_boundaries_for_billing_query_for_single_relevant_ start_date, end_date = get_financial_year(2016) set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) rate_boundaries = discover_rate_bounds_for_billing_query(start_date, end_date) - print(rate_boundaries) assert len(rate_boundaries) == 1 assert rate_boundaries[0]['start_date'] == start_date assert rate_boundaries[0]['end_date'] == end_date From 03346f467f214235948aadff04f76e58164d2c92 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 09:59:37 +0100 Subject: [PATCH 12/92] updated cache to store map not single value, to accommodate the billable units and the total cost. --- app/service/rest.py | 48 +++++++++++++++++++--------------- tests/app/service/test_rest.py | 21 ++++++++++----- 2 files changed, 41 insertions(+), 28 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index ad0c39d12..e8c585d1f 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -446,29 +446,35 @@ def get_monthly_template_stats(service_id): @service_blueprint.route('//yearly-sms-billable-units') def get_yearly_sms_billable_units(service_id): - try: - cache_key = sms_billable_units_cache_key(service_id) - cached_billable_sms_units = redis_store.get(cache_key) - if cached_billable_sms_units: - return jsonify({ - 'billable_sms_units': int(cached_billable_sms_units[0]), - 'total_cost': float(cached_billable_sms_units[1]) - }) - else: + cache_key = sms_billable_units_cache_key(service_id) + cached_billable_sms_units = redis_store.get_all_from_hash(cache_key) + if cached_billable_sms_units: + return jsonify({ + 'billable_sms_units': int(cached_billable_sms_units[b'billable_units']), + 'total_cost': float(cached_billable_sms_units[b'total_cost']) + }) + else: + try: start_date, end_date = get_financial_year(int(request.args.get('year'))) - billable_units, total_cost = get_total_billable_units_for_sent_sms_notifications_in_date_range( - start_date, - end_date, - service_id) - redis_store.set(cache_key, billable_units, ex=60) - return jsonify({ - 'billable_sms_units': billable_units, - 'total_cost': total_cost - }) + except (ValueError, TypeError) as e: + current_app.logger.exception(e) + return jsonify(result='error', message='No valid year provided'), 400 - except (ValueError, TypeError) as e: - print(e) - return jsonify(result='error', message='No valid year provided'), 400 + billable_units, total_cost = get_total_billable_units_for_sent_sms_notifications_in_date_range( + start_date, + end_date, + service_id) + + cached_values = { + 'billable_units': billable_units, + 'total_cost': total_cost + } + + redis_store.set_hash_and_expire(cache_key, cached_values, expire_in_seconds=60) + return jsonify({ + 'billable_sms_units': billable_units, + 
'total_cost': total_cost + }) @service_blueprint.route('//yearly-usage') diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 4231de249..691f0895c 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1736,8 +1736,8 @@ def test_get_yearly_billing_usage_count_returns_400_if_missing_year(client, samp def test_get_yearly_billing_usage_count_returns_400_if_invalid_year(client, sample_service, mocker): - redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=None) - redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + redis_get_mock = mocker.patch('app.service.rest.redis_store.get_all_from_hash', return_value=None) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set_hash_and_expire') response = client.get( '/service/{}/yearly-sms-billable-units?year=HAHAHAHAH'.format(sample_service.id), @@ -1752,8 +1752,8 @@ def test_get_yearly_billing_usage_count_returns_400_if_invalid_year(client, samp def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sample_service, mocker): - redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=None) - redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + redis_get_mock = mocker.patch('app.service.rest.redis_store.get_all_from_hash', return_value=None) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set_hash_and_expire') start = datetime.utcnow() end = datetime.utcnow() + timedelta(minutes=10) @@ -1773,12 +1773,19 @@ def test_get_yearly_billing_usage_count_returns_200_if_year_provided(client, sam mock_query.assert_called_once_with(start, end, sample_service.id) mock_year.assert_called_once_with(2016) redis_get_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id))) - redis_set_mock.assert_called_once_with("{}-sms_billable_units".format(str(sample_service.id)), 100, ex=60) + redis_set_mock.assert_called_once_with( + "{}-sms_billable_units".format(str(sample_service.id)), + {'billable_units': 100, 'total_cost': 200.0}, + expire_in_seconds=60 + ) def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sample_service, mocker): - redis_get_mock = mocker.patch('app.service.rest.redis_store.get', return_value=(50, 100.0)) - redis_set_mock = mocker.patch('app.service.rest.redis_store.set') + redis_get_mock = mocker.patch( + 'app.service.rest.redis_store.get_all_from_hash', + return_value={b'total_cost': 100.0, b'billable_units': 50} + ) + redis_set_mock = mocker.patch('app.service.rest.redis_store.set_hash_and_expire') mock_query = mocker.patch( 'app.service.rest.get_total_billable_units_for_sent_sms_notifications_in_date_range', return_value=(50, 100.0) ) From 517dc6be8b04509f0dcba609c75a218c1ba75080 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 09:59:51 +0100 Subject: [PATCH 13/92] Typo removed. 
Wrong window focus --- app/dao/notification_usage_dao.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index f40c6fe8b..b3b582ed3 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -176,7 +176,7 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date ) billable_units_by_rate_boundry = result.scalar() if billable_units_by_rate_boundry: - billable_units += int(billable_units_by_rate_boundry)vi end_date + billable_units += int(billable_units_by_rate_boundry) total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] return billable_units, total_cost From 390935e82c2051078bf61399ed23f6473714ff59 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 10:49:34 +0100 Subject: [PATCH 14/92] Bumped utils version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fe25533c1..4cc9907bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,6 +28,6 @@ notifications-python-client>=3.1,<3.2 awscli>=1.11,<1.12 awscli-cwlogs>=1.4,<1.5 -git+https://github.com/alphagov/notifications-utils.git@17.1.0#egg=notifications-utils==17.1.0 +git+https://github.com/alphagov/notifications-utils.git@17.1.3#egg=notifications-utils==17.1.3 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 From 07b527bb1bf95ed98370026319580cf5feef9151 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 24 May 2017 15:47:20 +0100 Subject: [PATCH 15/92] All queues now managed in the same way: - TEST and DEV builds set up their own queue lists for reading - PROD/STAGE/PREVIEW use the separate worker process with the -Q flag This enables us to rename queues in due course --- app/config.py | 16 +++++++++------- manifest-delivery-base.yml | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/app/config.py b/app/config.py index 53e92912e..1d845b508 100644 --- a/app/config.py +++ b/app/config.py @@ -160,11 +160,7 @@ class Config(object): 'options': {'queue': 'periodic'} } } - CELERY_QUEUES = [ - Queue('process-job', Exchange('default'), routing_key='process-job'), - Queue('retry', Exchange('default'), routing_key='retry'), - Queue('notify', Exchange('default'), routing_key='notify') - ] + CELERY_QUEUES = [] NOTIFICATIONS_ALERT = 5 # five mins FROM_NUMBER = 'development' @@ -224,7 +220,10 @@ class Development(Config): Queue('send-sms', Exchange('default'), routing_key='send-sms'), Queue('send-email', Exchange('default'), routing_key='send-email'), Queue('research-mode', Exchange('default'), routing_key='research-mode'), - Queue('statistics', Exchange('default'), routing_key='statistics') + Queue('statistics', Exchange('default'), routing_key='statistics'), + Queue('process-job', Exchange('default'), routing_key='process-job'), + Queue('retry', Exchange('default'), routing_key='retry'), + Queue('notify', Exchange('default'), routing_key='notify') ] API_HOST_NAME = "http://localhost:6011" API_RATE_LIMIT_ENABLED = True @@ -248,7 +247,10 @@ class Test(Config): Queue('send-sms', Exchange('default'), routing_key='send-sms'), Queue('send-email', Exchange('default'), routing_key='send-email'), Queue('research-mode', Exchange('default'), routing_key='research-mode'), - Queue('statistics', Exchange('default'), routing_key='statistics') + Queue('statistics', Exchange('default'), routing_key='statistics'), + Queue('process-job', Exchange('default'), 
routing_key='process-job'), + Queue('retry', Exchange('default'), routing_key='retry'), + Queue('notify', Exchange('default'), routing_key='notify') ] API_RATE_LIMIT_ENABLED = True diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index 73457b3c9..f3c196272 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -50,6 +50,6 @@ applications: NOTIFY_APP_NAME: delivery-worker-priority - name: notify-delivery-worker - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q process-job,notify,retry env: NOTIFY_APP_NAME: delivery-worker From 21586c917c4ecdcb11bbb61f4168fcdc8a2ea12c Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 10:50:55 +0100 Subject: [PATCH 16/92] Created an object to hold queue names. - This is to be used throughout the app. - Allows us to manage queue names centrally - All queues renamed to allow us to change the retry processing/visibility timeout --- app/config.py | 91 +++++++++++++++++++++++++++++---------------------- 1 file changed, 51 insertions(+), 40 deletions(-) diff --git a/app/config.py b/app/config.py index 1d845b508..3bd7218a0 100644 --- a/app/config.py +++ b/app/config.py @@ -12,6 +12,34 @@ if os.environ.get('VCAP_SERVICES'): extract_cloudfoundry_config() +class QueueNames(object): + PERIODIC = 'periodic-tasks' + PRIORITY = 'priority-tasks' + DATABASE = 'database-tasks' + SEND = 'send-tasks' + RESEARCH_MODE = 'research-mode-tasks' + STATISTICS = 'statistics-tasks' + JOBS = 'job-tasks' + RETRY = 'retry-tasks' + NOTIFY = 'notify-internal-tasks' + PROCESS_FTP = 'process-ftp-tasks' + + @staticmethod + def all_queues(): + return [ + QueueNames.PRIORITY, + QueueNames.PERIODIC, + QueueNames.DATABASE, + QueueNames.SEND, + QueueNames.RESEARCH_MODE, + QueueNames.STATISTICS, + QueueNames.JOBS, + QueueNames.RETRY, + QueueNames.NOTIFY, + QueueNames.PROCESS_FTP + ] + + class Config(object): # URL of admin app ADMIN_BASE_URL = os.environ['ADMIN_BASE_URL'] @@ -95,7 +123,7 @@ class Config(object): BROKER_TRANSPORT_OPTIONS = { 'region': AWS_REGION, 'polling_interval': 1, # 1 second - 'visibility_timeout': 14410, # 4 hours 10 seconds. 
10 seconds longer than max retry + 'visibility_timeout': 300, 'queue_name_prefix': NOTIFICATION_QUEUE_PREFIX } CELERY_ENABLE_UTC = True, @@ -107,57 +135,57 @@ class Config(object): 'run-scheduled-jobs': { 'task': 'run-scheduled-jobs', 'schedule': crontab(minute=1), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'delete-verify-codes': { 'task': 'delete-verify-codes', 'schedule': timedelta(minutes=63), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'delete-invitations': { 'task': 'delete-invitations', 'schedule': timedelta(minutes=66), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'delete-sms-notifications': { 'task': 'delete-sms-notifications', 'schedule': crontab(minute=0, hour=0), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'delete-email-notifications': { 'task': 'delete-email-notifications', 'schedule': crontab(minute=20, hour=0), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'delete-letter-notifications': { 'task': 'delete-letter-notifications', 'schedule': crontab(minute=40, hour=0), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'send-daily-performance-platform-stats': { 'task': 'send-daily-performance-platform-stats', 'schedule': crontab(minute=0, hour=2), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'switch-current-sms-provider-on-slow-delivery': { 'task': 'switch-current-sms-provider-on-slow-delivery', 'schedule': crontab(), # Every minute - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'timeout-sending-notifications': { 'task': 'timeout-sending-notifications', 'schedule': crontab(minute=0, hour=3), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'remove_csv_files': { 'task': 'remove_csv_files', 'schedule': crontab(minute=0, hour=4), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} }, 'timeout-job-statistics': { 'task': 'timeout-job-statistics', 'schedule': crontab(minute=0, hour=5), - 'options': {'queue': 'periodic'} + 'options': {'queue': QueueNames.PERIODIC} } } CELERY_QUEUES = [] @@ -211,20 +239,12 @@ class Development(Config): NOTIFY_ENVIRONMENT = 'development' NOTIFICATION_QUEUE_PREFIX = 'development' DEBUG = True - CELERY_QUEUES = Config.CELERY_QUEUES + [ - Queue('db-sms', Exchange('default'), routing_key='db-sms'), - Queue('priority', Exchange('default'), routing_key='priority'), - Queue('periodic', Exchange('default'), routing_key='periodic'), - Queue('db-email', Exchange('default'), routing_key='db-email'), - Queue('db-letter', Exchange('default'), routing_key='db-letter'), - Queue('send-sms', Exchange('default'), routing_key='send-sms'), - Queue('send-email', Exchange('default'), routing_key='send-email'), - Queue('research-mode', Exchange('default'), routing_key='research-mode'), - Queue('statistics', Exchange('default'), routing_key='statistics'), - Queue('process-job', Exchange('default'), routing_key='process-job'), - Queue('retry', Exchange('default'), routing_key='retry'), - Queue('notify', Exchange('default'), routing_key='notify') - ] + + for queue in QueueNames.all_queues(): + Config.CELERY_QUEUES.append( + Queue(queue, Exchange('default'), routing_key=queue) + ) + API_HOST_NAME = "http://localhost:6011" API_RATE_LIMIT_ENABLED = True @@ -238,20 +258,11 @@ class Test(Config): STATSD_ENABLED = True STATSD_HOST = 
"localhost" STATSD_PORT = 1000 - CELERY_QUEUES = Config.CELERY_QUEUES + [ - Queue('periodic', Exchange('default'), routing_key='periodic'), - Queue('priority', Exchange('default'), routing_key='priority'), - Queue('db-sms', Exchange('default'), routing_key='db-sms'), - Queue('db-email', Exchange('default'), routing_key='db-email'), - Queue('db-letter', Exchange('default'), routing_key='db-letter'), - Queue('send-sms', Exchange('default'), routing_key='send-sms'), - Queue('send-email', Exchange('default'), routing_key='send-email'), - Queue('research-mode', Exchange('default'), routing_key='research-mode'), - Queue('statistics', Exchange('default'), routing_key='statistics'), - Queue('process-job', Exchange('default'), routing_key='process-job'), - Queue('retry', Exchange('default'), routing_key='retry'), - Queue('notify', Exchange('default'), routing_key='notify') - ] + + for queue in QueueNames.all_queues(): + Config.CELERY_QUEUES.append( + Queue(queue, Exchange('default'), routing_key=queue) + ) API_RATE_LIMIT_ENABLED = True API_HOST_NAME = "http://localhost:6011" From 2591d3a1dfc2643676a5fca0d9f4db3632c7b2c4 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 10:51:49 +0100 Subject: [PATCH 17/92] This massive set of changes uses the new queue names object throughout the app and tests. Lots of changes, all changing the line of code that puts things into queues, and the code that tests that. --- app/celery/provider_tasks.py | 5 +- app/celery/scheduled_tasks.py | 3 +- app/celery/statistics_tasks.py | 12 ++--- app/celery/tasks.py | 20 +++----- app/delivery/rest.py | 11 ++--- app/invite/rest.py | 3 +- app/job/rest.py | 4 +- app/letters/send_letter_jobs.py | 3 +- .../notifications_letter_callback.py | 4 +- app/notifications/process_notifications.py | 9 ++-- app/notifications/rest.py | 3 +- app/service/sender.py | 3 +- app/user/rest.py | 11 +++-- app/v2/notifications/post_notifications.py | 3 +- tests/app/celery/test_provider_tasks.py | 8 +-- tests/app/celery/test_scheduled_tasks.py | 8 +-- tests/app/celery/test_statistics_tasks.py | 18 +++---- tests/app/celery/test_tasks.py | 49 +++++++++---------- tests/app/delivery/test_rest.py | 4 +- tests/app/invite/test_invite_rest.py | 2 +- tests/app/job/test_rest.py | 2 +- tests/app/letters/test_send_letter_jobs.py | 2 +- .../app/notifications/rest/test_callbacks.py | 2 +- .../rest/test_send_notification.py | 26 +++++----- .../test_process_notification.py | 22 ++++----- tests/app/user/test_rest.py | 6 +-- tests/app/user/test_rest_verify.py | 6 +-- .../notifications/test_post_notifications.py | 2 +- 28 files changed, 128 insertions(+), 123 deletions(-) diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index e00591f09..4d8718489 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -3,6 +3,7 @@ from notifications_utils.recipients import InvalidEmailError from sqlalchemy.orm.exc import NoResultFound from app import notify_celery +from app.config import QueueNames from app.dao import notifications_dao from app.dao.notifications_dao import update_notification_status_by_id from app.statsd_decorators import statsd @@ -46,7 +47,7 @@ def deliver_sms(self, notification_id): current_app.logger.exception( "SMS notification delivery for id: {} failed".format(notification_id) ) - self.retry(queue="retry", countdown=retry_iteration_to_delay(self.request.retries)) + self.retry(queue=QueueNames.RETRY, countdown=retry_iteration_to_delay(self.request.retries)) except self.MaxRetriesExceededError: 
current_app.logger.exception( "RETRY FAILED: task send_sms_to_provider failed for notification {}".format(notification_id), @@ -70,7 +71,7 @@ def deliver_email(self, notification_id): current_app.logger.exception( "RETRY: Email notification {} failed".format(notification_id) ) - self.retry(queue="retry", countdown=retry_iteration_to_delay(self.request.retries)) + self.retry(queue=QueueNames.RETRY, countdown=retry_iteration_to_delay(self.request.retries)) except self.MaxRetriesExceededError: current_app.logger.error( "RETRY FAILED: task send_email_to_provider failed for notification {}".format(notification_id) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 2b87150e0..460280424 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -23,6 +23,7 @@ from app.dao.provider_details_dao import ( from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago from app.statsd_decorators import statsd from app.celery.tasks import process_job +from app.config import QueueNames @notify_celery.task(name="remove_csv_files") @@ -39,7 +40,7 @@ def remove_csv_files(): def run_scheduled_jobs(): try: for job in dao_set_scheduled_jobs_to_pending(): - process_job.apply_async([str(job.id)], queue="process-job") + process_job.apply_async([str(job.id)], queue=QueueNames.JOBS) current_app.logger.info("Job ID {} added to process job queue".format(job.id)) except SQLAlchemyError as e: current_app.logger.exception("Failed to run scheduled jobs") diff --git a/app/celery/statistics_tasks.py b/app/celery/statistics_tasks.py index a82a3791f..150fa6aac 100644 --- a/app/celery/statistics_tasks.py +++ b/app/celery/statistics_tasks.py @@ -3,7 +3,6 @@ from sqlalchemy.exc import SQLAlchemyError from app import notify_celery from flask import current_app -from app.models import JobStatistics from app.statsd_decorators import statsd from app.dao.statistics_dao import ( create_or_update_job_sending_statistics, @@ -11,16 +10,17 @@ from app.dao.statistics_dao import ( ) from app.dao.notifications_dao import get_notification_by_id from app.models import NOTIFICATION_STATUS_TYPES_COMPLETED +from app.config import QueueNames def create_initial_notification_statistic_tasks(notification): if notification.job_id and notification.status: - record_initial_job_statistics.apply_async((str(notification.id),), queue="statistics") + record_initial_job_statistics.apply_async((str(notification.id),), queue=QueueNames.STATISTICS) def create_outcome_notification_statistic_tasks(notification): if notification.job_id and notification.status in NOTIFICATION_STATUS_TYPES_COMPLETED: - record_outcome_job_statistics.apply_async((str(notification.id),), queue="statistics") + record_outcome_job_statistics.apply_async((str(notification.id),), queue=QueueNames.STATISTICS) @notify_celery.task(bind=True, name='record_initial_job_statistics', max_retries=20, default_retry_delay=10) @@ -35,7 +35,7 @@ def record_initial_job_statistics(self, notification_id): raise SQLAlchemyError("Failed to find notification with id {}".format(notification_id)) except SQLAlchemyError as e: current_app.logger.exception(e) - self.retry(queue="retry") + self.retry(queue=QueueNames.RETRY) except self.MaxRetriesExceededError: current_app.logger.error( "RETRY FAILED: task record_initial_job_statistics failed for notification {}".format( @@ -53,12 +53,12 @@ def record_outcome_job_statistics(self, notification_id): if notification: updated_count = update_job_stats_outcome_count(notification) if updated_count == 0: - 
self.retry(queue="retry") + self.retry(queue=QueueNames.RETRY) else: raise SQLAlchemyError("Failed to find notification with id {}".format(notification_id)) except SQLAlchemyError as e: current_app.logger.exception(e) - self.retry(queue="retry") + self.retry(queue=QueueNames.RETRY) except self.MaxRetriesExceededError: current_app.logger.error( "RETRY FAILED: task update_job_stats_outcome_count failed for notification {}".format( diff --git a/app/celery/tasks.py b/app/celery/tasks.py index cce6b05f0..8fc6954f8 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -16,6 +16,7 @@ from app import ( ) from app.aws import s3 from app.celery import provider_tasks +from app.config import QueueNames from app.dao.jobs_dao import ( dao_update_job, dao_get_job_by_id, @@ -80,7 +81,7 @@ def process_job(job_id): process_row(row_number, recipient, personalisation, template, job, service) if template.template_type == LETTER_TYPE: - build_dvla_file.apply_async([str(job.id)], queue='process-job') + build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) # temporary logging current_app.logger.info("send job {} to build-dvla-file in the process-job queue".format(job_id)) else: @@ -112,12 +113,6 @@ def process_row(row_number, recipient, personalisation, template, job, service): LETTER_TYPE: persist_letter } - queues = { - SMS_TYPE: 'db-sms', - EMAIL_TYPE: 'db-email', - LETTER_TYPE: 'db-letter', - } - send_fn = send_fns[template_type] send_fn.apply_async( @@ -127,7 +122,7 @@ def process_row(row_number, recipient, personalisation, template, job, service): encrypted, datetime.utcnow().strftime(DATETIME_FORMAT) ), - queue=queues[template_type] if not service.research_mode else 'research-mode' + queue=QueueNames.DATABASE if not service.research_mode else QueueNames.RESEARCH_MODE ) @@ -181,7 +176,7 @@ def send_sms(self, provider_tasks.deliver_sms.apply_async( [str(saved_notification.id)], - queue='send-sms' if not service.research_mode else 'research-mode' + queue=QueueNames.SEND if not service.research_mode else QueueNames.RESEARCH_MODE ) current_app.logger.info( @@ -226,7 +221,7 @@ def send_email(self, provider_tasks.deliver_email.apply_async( [str(saved_notification.id)], - queue='send-email' if not service.research_mode else 'research-mode' + queue=QueueNames.SEND if not service.research_mode else QueueNames.RESEARCH_MODE ) current_app.logger.info("Email {} created at {}".format(saved_notification.id, created_at)) @@ -284,10 +279,9 @@ def build_dvla_file(self, job_id): file_location="{}-dvla-job.text".format(job_id) ) dao_update_job_status(job_id, JOB_STATUS_READY_TO_SEND) - notify_celery.send_task("aggregrate-dvla-files", ([str(job_id)], ), queue='aggregate-dvla-files') else: current_app.logger.info("All notifications for job {} are not persisted".format(job_id)) - self.retry(queue="retry", exc="All notifications for job {} are not persisted".format(job_id)) + self.retry(queue=QueueNames.RETRY, exc="All notifications for job {} are not persisted".format(job_id)) except Exception as e: current_app.logger.exception("build_dvla_file threw exception") raise e @@ -341,7 +335,7 @@ def handle_exception(task, notification, notification_id, exc): # send to the retry queue. 
current_app.logger.exception('Retry' + retry_msg) try: - task.retry(queue="retry", exc=exc) + task.retry(queue=QueueNames.RETRY, exc=exc) except task.MaxRetriesExceededError: current_app.logger.exception('Retry' + retry_msg) diff --git a/app/delivery/rest.py b/app/delivery/rest.py index 0bacb43bb..489a5fcda 100644 --- a/app/delivery/rest.py +++ b/app/delivery/rest.py @@ -1,5 +1,6 @@ from flask import Blueprint, jsonify +from app.config import QueueNames from app.delivery import send_to_providers from app.models import EMAIL_TYPE from app.celery import provider_tasks @@ -23,18 +24,16 @@ def send_notification_to_provider(notification_id): send_response( send_to_providers.send_email_to_provider, provider_tasks.deliver_email, - notification, - 'send-email') + notification) else: send_response( send_to_providers.send_sms_to_provider, provider_tasks.deliver_sms, - notification, - 'send-sms') + notification) return jsonify({}), 204 -def send_response(send_call, task_call, notification, queue): +def send_response(send_call, task_call, notification): try: send_call(notification) except Exception as e: @@ -43,4 +42,4 @@ def send_response(send_call, task_call, notification, queue): notification.id, notification.notification_type), e) - task_call.apply_async((str(notification.id)), queue=queue) + task_call.apply_async((str(notification.id)), queue=QueueNames.SEND) diff --git a/app/invite/rest.py b/app/invite/rest.py index 2361629d1..8105b171f 100644 --- a/app/invite/rest.py +++ b/app/invite/rest.py @@ -4,6 +4,7 @@ from flask import ( jsonify, current_app) +from app.config import QueueNames from app.dao.invited_user_dao import ( save_invited_user, get_invited_user, @@ -44,7 +45,7 @@ def create_invited_user(service_id): key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(saved_notification, False, queue="notify") + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify(data=invited_user_schema.dump(invited_user).data), 201 diff --git a/app/job/rest.py b/app/job/rest.py index 8195ca87a..b4882945f 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -34,6 +34,8 @@ from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANC from app.utils import pagination_links +from app.config import QueueNames + job_blueprint = Blueprint('job', __name__, url_prefix='/service//job') from app.errors import ( @@ -143,7 +145,7 @@ def create_job(service_id): dao_create_job(job) if job.job_status == JOB_STATUS_PENDING: - process_job.apply_async([str(job.id)], queue="process-job") + process_job.apply_async([str(job.id)], queue=QueueNames.JOBS) job_json = job_schema.dump(job).data job_json['statistics'] = [] diff --git a/app/letters/send_letter_jobs.py b/app/letters/send_letter_jobs.py index 7030b9bc5..91c39615a 100644 --- a/app/letters/send_letter_jobs.py +++ b/app/letters/send_letter_jobs.py @@ -2,6 +2,7 @@ from flask import Blueprint, jsonify from flask import request from app import notify_celery +from app.config import QueueNames from app.dao.jobs_dao import dao_get_all_letter_jobs from app.schemas import job_schema from app.v2.errors import register_errors @@ -15,7 +16,7 @@ register_errors(letter_job) @letter_job.route('/send-letter-jobs', methods=['POST']) def send_letter_jobs(): job_ids = validate(request.get_json(), letter_job_ids) - notify_celery.send_task(name="send-files-to-dvla", args=(job_ids['job_ids'],), queue="process-ftp") + notify_celery.send_task(name="send-files-to-dvla", args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP) return 
jsonify(data={"response": "Task created to send files to DVLA"}), 201 diff --git a/app/notifications/notifications_letter_callback.py b/app/notifications/notifications_letter_callback.py index cfcaf28df..ac2de9e5c 100644 --- a/app/notifications/notifications_letter_callback.py +++ b/app/notifications/notifications_letter_callback.py @@ -13,7 +13,7 @@ from app.celery.tasks import update_letter_notifications_statuses from app.v2.errors import register_errors from app.notifications.utils import autoconfirm_subscription from app.schema_validation import validate - +from app.config import QueueNames letter_callback_blueprint = Blueprint('notifications_letter_callback', __name__) register_errors(letter_callback_blueprint) @@ -54,7 +54,7 @@ def process_letter_response(): filename = message['Records'][0]['s3']['object']['key'] current_app.logger.info('Received file from DVLA: {}'.format(filename)) current_app.logger.info('DVLA callback: Calling task to update letter notifications') - update_letter_notifications_statuses.apply_async([filename], queue='notify') + update_letter_notifications_statuses.apply_async([filename], queue=QueueNames.NOTIFY) return jsonify( result="success", message="DVLA callback succeeded" diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 1d03efe8b..d59477143 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -10,6 +10,8 @@ from notifications_utils.recipients import ( from app import redis_store from app.celery import provider_tasks from notifications_utils.clients import redis + +from app.config import QueueNames from app.dao.notifications_dao import dao_create_notification, dao_delete_notifications_and_history_by_id from app.models import SMS_TYPE, Notification, KEY_TYPE_TEST, EMAIL_TYPE from app.v2.errors import BadRequestError, SendNotificationToQueueError @@ -90,12 +92,9 @@ def persist_notification( def send_notification_to_queue(notification, research_mode, queue=None): if research_mode or notification.key_type == KEY_TYPE_TEST: - queue = 'research-mode' + queue = QueueNames.RESEARCH_MODE elif not queue: - if notification.notification_type == SMS_TYPE: - queue = 'send-sms' - if notification.notification_type == EMAIL_TYPE: - queue = 'send-email' + queue = QueueNames.SEND if notification.notification_type == SMS_TYPE: deliver_task = provider_tasks.deliver_sms diff --git a/app/notifications/rest.py b/app/notifications/rest.py index faa086e32..bb136f8c8 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -6,6 +6,7 @@ from flask import ( ) from app import api_user, authenticated_service +from app.config import QueueNames from app.dao import ( templates_dao, notifications_dao @@ -134,7 +135,7 @@ def send_notification(notification_type): key_type=api_user.key_type, simulated=simulated) if not simulated: - queue_name = 'priority' if template.process_type == PRIORITY else None + queue_name = QueueNames.PRIORITY if template.process_type == PRIORITY else None send_notification_to_queue(notification=notification_model, research_mode=authenticated_service.research_mode, queue=queue_name) diff --git a/app/service/sender.py b/app/service/sender.py index 3c6a6a03e..4919a93bf 100644 --- a/app/service/sender.py +++ b/app/service/sender.py @@ -1,5 +1,6 @@ from flask import current_app +from app.config import QueueNames from app.dao.services_dao import dao_fetch_service_by_id, dao_fetch_active_users_for_service from app.dao.templates_dao import 
dao_get_template_by_id from app.models import EMAIL_TYPE, KEY_TYPE_NORMAL @@ -24,7 +25,7 @@ def send_notification_to_service_users(service_id, template_id, personalisation= api_key_id=None, key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(notification, False, queue='notify') + send_notification_to_queue(notification, False, queue=QueueNames.NOTIFY) def _add_user_fields(user, personalisation, fields): diff --git a/app/user/rest.py b/app/user/rest.py index b40c3a31c..cc0c6e41b 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -4,6 +4,7 @@ from datetime import datetime from flask import (jsonify, request, Blueprint, current_app) +from app.config import QueueNames from app.dao.users_dao import ( get_user_by_id, save_model_user, @@ -182,7 +183,7 @@ def send_user_sms_code(user_id): # Assume that we never want to observe the Notify service's research mode # setting for this notification - we still need to be able to log into the # admin even if we're doing user research using this service: - send_notification_to_queue(saved_notification, False, queue='notify') + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify({}), 204 @@ -212,7 +213,7 @@ def send_user_confirm_new_email(user_id): key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(saved_notification, False, queue='notify') + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify({}), 204 @@ -239,7 +240,7 @@ def send_user_email_verification(user_id): key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(saved_notification, False, queue="notify") + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify({}), 204 @@ -265,7 +266,7 @@ def send_already_registered_email(user_id): key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(saved_notification, False, queue="notify") + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify({}), 204 @@ -327,7 +328,7 @@ def send_user_reset_password(): key_type=KEY_TYPE_NORMAL ) - send_notification_to_queue(saved_notification, False, queue="notify") + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) return jsonify({}), 204 diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index a5a3a1ec4..cf9269318 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -2,6 +2,7 @@ from flask import request, jsonify, current_app from sqlalchemy.orm.exc import NoResultFound from app import api_user, authenticated_service +from app.config import QueueNames from app.dao import services_dao, templates_dao from app.models import SMS_TYPE, EMAIL_TYPE, PRIORITY from app.notifications.process_notifications import ( @@ -57,7 +58,7 @@ def post_notification(notification_type): simulated=simulated) if not simulated: - queue_name = 'priority' if template.process_type == PRIORITY else None + queue_name = QueueNames.PRIORITY if template.process_type == PRIORITY else None send_notification_to_queue( notification=notification, research_mode=authenticated_service.research_mode, diff --git a/tests/app/celery/test_provider_tasks.py b/tests/app/celery/test_provider_tasks.py index d2f815d9d..9c9a44e5f 100644 --- a/tests/app/celery/test_provider_tasks.py +++ b/tests/app/celery/test_provider_tasks.py @@ -61,7 +61,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_sms_task deliver_sms(notification_id) 
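# the notification id was never persisted, so the task must not call the provider
# and is expected to re-queue itself on QueueNames.RETRY ('retry-tasks' after this change)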
app.delivery.send_to_providers.send_sms_to_provider.assert_not_called() - app.celery.provider_tasks.deliver_sms.retry.assert_called_with(queue="retry", countdown=10) + app.celery.provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks", countdown=10) def test_should_call_send_email_to_provider_from_deliver_email_task( @@ -83,7 +83,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_email_ta deliver_email(notification_id) app.delivery.send_to_providers.send_email_to_provider.assert_not_called() - app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry", countdown=10) + app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=10) # DO THESE FOR THE 4 TYPES OF TASK @@ -94,7 +94,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_sms_task(s deliver_sms(sample_notification.id) - provider_tasks.deliver_sms.retry.assert_called_with(queue='retry', countdown=10) + provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks", countdown=10) assert sample_notification.status == 'technical-failure' @@ -105,7 +105,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_email_task deliver_email(sample_notification.id) - provider_tasks.deliver_email.retry.assert_called_with(queue='retry', countdown=10) + provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=10) assert sample_notification.status == 'technical-failure' diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 3cdc3796b..5566ebd70 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -165,7 +165,7 @@ def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_sess updated_job = dao_get_job_by_id(job.id) assert updated_job.job_status == 'pending' - mocked.assert_called_with([str(job.id)], queue='process-job') + mocked.assert_called_with([str(job.id)], queue="job-tasks") def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker): @@ -200,9 +200,9 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ assert dao_get_job_by_id(job_2.id).job_status == 'pending' mocked.assert_has_calls([ - call([str(job_3.id)], queue='process-job'), - call([str(job_2.id)], queue='process-job'), - call([str(job_1.id)], queue='process-job') + call([str(job_3.id)], queue="job-tasks"), + call([str(job_2.id)], queue="job-tasks"), + call([str(job_1.id)], queue="job-tasks") ]) diff --git a/tests/app/celery/test_statistics_tasks.py b/tests/app/celery/test_statistics_tasks.py index 24aaba97d..40d20117d 100644 --- a/tests/app/celery/test_statistics_tasks.py +++ b/tests/app/celery/test_statistics_tasks.py @@ -17,7 +17,7 @@ def test_should_create_initial_job_task_if_notification_is_related_to_a_job( mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") notification = sample_notification(notify_db, notify_db_session, job=sample_job) create_initial_notification_statistic_tasks(notification) - mock.assert_called_once_with((str(notification.id), ), queue="statistics") + mock.assert_called_once_with((str(notification.id), ), queue="statistics-tasks") @pytest.mark.parametrize('status', [ @@ -29,7 +29,7 @@ def test_should_create_intial_job_task_if_notification_is_not_in_completed_state mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") 
notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=status) create_initial_notification_statistic_tasks(notification) - mock.assert_called_once_with((str(notification.id), ), queue="statistics") + mock.assert_called_once_with((str(notification.id), ), queue="statistics-tasks") def test_should_not_create_initial_job_task_if_notification_is_not_related_to_a_job( @@ -47,7 +47,7 @@ def test_should_create_outcome_job_task_if_notification_is_related_to_a_job( mock = mocker.patch("app.celery.statistics_tasks.record_outcome_job_statistics.apply_async") notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=NOTIFICATION_DELIVERED) create_outcome_notification_statistic_tasks(notification) - mock.assert_called_once_with((str(notification.id), ), queue="statistics") + mock.assert_called_once_with((str(notification.id), ), queue="statistics-tasks") @pytest.mark.parametrize('status', NOTIFICATION_STATUS_TYPES_COMPLETED) @@ -57,7 +57,7 @@ def test_should_create_outcome_job_task_if_notification_is_in_completed_state( mock = mocker.patch("app.celery.statistics_tasks.record_outcome_job_statistics.apply_async") notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=status) create_outcome_notification_statistic_tasks(notification) - mock.assert_called_once_with((str(notification.id), ), queue='statistics') + mock.assert_called_once_with((str(notification.id), ), queue="statistics-tasks") @pytest.mark.parametrize('status', [ @@ -100,7 +100,7 @@ def test_should_retry_if_persisting_the_job_stats_has_a_sql_alchemy_exception( record_initial_job_statistics(str(sample_notification.id)) dao_mock.assert_called_once_with(sample_notification) - retry_mock.assert_called_with(queue="retry") + retry_mock.assert_called_with(queue="retry-tasks") def test_should_call_update_job_stats_dao_outcome_methods(notify_db, notify_db_session, sample_notification, mocker): @@ -123,7 +123,7 @@ def test_should_retry_if_persisting_the_job_outcome_stats_has_a_sql_alchemy_exce record_outcome_job_statistics(str(sample_notification.id)) dao_mock.assert_called_once_with(sample_notification) - retry_mock.assert_called_with(queue="retry") + retry_mock.assert_called_with(queue="retry-tasks") def test_should_retry_if_persisting_the_job_outcome_stats_updates_zero_rows( @@ -136,7 +136,7 @@ def test_should_retry_if_persisting_the_job_outcome_stats_updates_zero_rows( record_outcome_job_statistics(str(sample_notification.id)) dao_mock.assert_called_once_with(sample_notification) - retry_mock.assert_called_with(queue="retry") + retry_mock.assert_called_with(queue="retry-tasks") def test_should_retry_if_persisting_the_job_stats_creation_cant_find_notification_by_id( @@ -148,7 +148,7 @@ def test_should_retry_if_persisting_the_job_stats_creation_cant_find_notificatio record_initial_job_statistics(str(create_uuid())) dao_mock.assert_not_called() - retry_mock.assert_called_with(queue="retry") + retry_mock.assert_called_with(queue="retry-tasks") def test_should_retry_if_persisting_the_job_stats_outcome_cant_find_notification_by_id( @@ -161,4 +161,4 @@ def test_should_retry_if_persisting_the_job_stats_outcome_cant_find_notification record_outcome_job_statistics(str(create_uuid())) dao_mock.assert_not_called() - retry_mock.assert_called_with(queue="retry") + retry_mock.assert_called_with(queue="retry-tasks") diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 27c9833a4..8cb5a13d4 100644 --- a/tests/app/celery/test_tasks.py +++ 
b/tests/app/celery/test_tasks.py @@ -108,7 +108,7 @@ def test_should_process_sms_job(sample_job, mocker): "uuid", "something_encrypted", "2016-01-01T11:09:00.061258Z"), - queue="db-sms" + queue="database-tasks" ) job = jobs_dao.dao_get_job_by_id(sample_job.id) assert job.job_status == 'finished' @@ -237,7 +237,7 @@ def test_should_process_email_job_if_exactly_on_send_limits(notify_db, "something_encrypted", "2016-01-01T11:09:00.061258Z" ), - queue="db-email" + queue="database-tasks" ) @@ -283,7 +283,7 @@ def test_should_process_email_job(email_job_with_placeholders, mocker): "something_encrypted", "2016-01-01T11:09:00.061258Z" ), - queue="db-email" + queue="database-tasks" ) job = jobs_dao.dao_get_job_by_id(email_job_with_placeholders.id) assert job.job_status == 'finished' @@ -324,7 +324,7 @@ def test_should_process_letter_job(sample_letter_job, mocker): assert process_row_mock.call_count == 1 assert sample_letter_job.job_status == 'in progress' - tasks.build_dvla_file.apply_async.assert_called_once_with([str(sample_letter_job.id)], queue="process-job") + tasks.build_dvla_file.apply_async.assert_called_once_with([str(sample_letter_job.id)], queue="job-tasks") def test_should_process_all_sms_job(sample_job_with_placeholdered_template, @@ -355,12 +355,12 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, @freeze_time('2001-01-01T12:00:00') @pytest.mark.parametrize('template_type, research_mode, expected_function, expected_queue', [ - (SMS_TYPE, False, 'send_sms', 'db-sms'), - (SMS_TYPE, True, 'send_sms', 'research-mode'), - (EMAIL_TYPE, False, 'send_email', 'db-email'), - (EMAIL_TYPE, True, 'send_email', 'research-mode'), - (LETTER_TYPE, False, 'persist_letter', 'db-letter'), - (LETTER_TYPE, True, 'persist_letter', 'research-mode'), + (SMS_TYPE, False, 'send_sms', 'database-tasks'), + (SMS_TYPE, True, 'send_sms', 'research-mode-tasks'), + (EMAIL_TYPE, False, 'send_email', 'database-tasks'), + (EMAIL_TYPE, True, 'send_email', 'research-mode-tasks'), + (LETTER_TYPE, False, 'persist_letter', 'database-tasks'), + (LETTER_TYPE, True, 'persist_letter', 'research-mode-tasks'), ]) def test_process_row_sends_letter_task(template_type, research_mode, expected_function, expected_queue, mocker): mocker.patch('app.celery.tasks.create_uuid', return_value='noti_uuid') @@ -420,7 +420,7 @@ def test_should_send_template_to_correct_sms_task_and_persist(sample_template_wi assert persisted_notification.notification_type == 'sms' mocked_deliver_sms.assert_called_once_with( [str(persisted_notification.id)], - queue="send-sms" + queue="send-tasks" ) @@ -446,7 +446,7 @@ def test_should_put_send_sms_task_in_research_mode_queue_if_research_mode_servic persisted_notification = Notification.query.one() provider_tasks.deliver_sms.apply_async.assert_called_once_with( [str(persisted_notification.id)], - queue="research-mode" + queue="research-mode-tasks" ) assert mocked_deliver_sms.called @@ -481,7 +481,7 @@ def test_should_send_sms_if_restricted_service_and_valid_number(notify_db, notif assert persisted_notification.notification_type == 'sms' provider_tasks.deliver_sms.apply_async.assert_called_once_with( [str(persisted_notification.id)], - queue="send-sms" + queue="send-tasks" ) @@ -507,7 +507,7 @@ def test_should_send_sms_if_restricted_service_and_non_team_number_with_test_key persisted_notification = Notification.query.one() mocked_deliver_sms.assert_called_once_with( [str(persisted_notification.id)], - queue="send-sms" + queue="send-tasks" ) @@ -535,7 +535,7 @@ def 
test_should_send_email_if_restricted_service_and_non_team_email_address_with persisted_notification = Notification.query.one() mocked_deliver_email.assert_called_once_with( [str(persisted_notification.id)], - queue="send-email" + queue="send-tasks" ) @@ -602,7 +602,7 @@ def test_should_put_send_email_task_in_research_mode_queue_if_research_mode_serv persisted_notification = Notification.query.one() provider_tasks.deliver_email.apply_async.assert_called_once_with( [str(persisted_notification.id)], - queue="research-mode" + queue="research-mode-tasks" ) @@ -639,7 +639,7 @@ def test_should_send_sms_template_to_and_persist_with_job_id(sample_job, sample_ provider_tasks.deliver_sms.apply_async.assert_called_once_with( [str(persisted_notification.id)], - queue="send-sms" + queue="send-tasks" ) @@ -736,7 +736,7 @@ def test_should_use_email_template_and_persist(sample_email_template_with_placeh assert persisted_notification.notification_type == 'email' provider_tasks.deliver_email.apply_async.assert_called_once_with( - [str(persisted_notification.id)], queue='send-email') + [str(persisted_notification.id)], queue='send-tasks') def test_send_email_should_use_template_version_from_job_not_latest(sample_email_template, mocker): @@ -767,7 +767,7 @@ def test_send_email_should_use_template_version_from_job_not_latest(sample_email assert not persisted_notification.sent_by assert persisted_notification.notification_type == 'email' provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], - queue='send-email') + queue='send-tasks') def test_should_use_email_template_subject_placeholders(sample_email_template_with_placeholders, mocker): @@ -793,7 +793,7 @@ def test_should_use_email_template_subject_placeholders(sample_email_template_wi assert not persisted_notification.reference assert persisted_notification.notification_type == 'email' provider_tasks.deliver_email.apply_async.assert_called_once_with( - [str(persisted_notification.id)], queue='send-email' + [str(persisted_notification.id)], queue='send-tasks' ) @@ -821,7 +821,7 @@ def test_should_use_email_template_and_persist_without_personalisation(sample_em assert not persisted_notification.reference assert persisted_notification.notification_type == 'email' provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], - queue='send-email') + queue='send-tasks') def test_send_sms_should_go_to_retry_queue_if_database_errors(sample_template, mocker): @@ -844,7 +844,7 @@ def test_send_sms_should_go_to_retry_queue_if_database_errors(sample_template, m now.strftime(DATETIME_FORMAT) ) assert provider_tasks.deliver_sms.apply_async.called is False - tasks.send_sms.retry.assert_called_with(exc=expected_exception, queue='retry') + tasks.send_sms.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") assert Notification.query.count() == 0 @@ -869,7 +869,7 @@ def test_send_email_should_go_to_retry_queue_if_database_errors(sample_email_tem now.strftime(DATETIME_FORMAT) ) assert not provider_tasks.deliver_email.apply_async.called - tasks.send_email.retry.assert_called_with(exc=expected_exception, queue='retry') + tasks.send_email.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") assert Notification.query.count() == 0 @@ -1002,7 +1002,6 @@ def test_build_dvla_file(sample_letter_template, mocker): file_location="{}-dvla-job.text".format(job.id) ) assert Job.query.get(job.id).job_status == 'ready to send' - 
mocked_send_task.assert_called_once_with("aggregrate-dvla-files", ([str(job.id)], ), queue='aggregate-dvla-files') def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_letter_template, mocker): @@ -1016,7 +1015,7 @@ def test_build_dvla_file_retries_if_all_notifications_are_not_created(sample_let build_dvla_file(job.id) mocked.assert_not_called() - tasks.build_dvla_file.retry.assert_called_with(queue='retry', + tasks.build_dvla_file.retry.assert_called_with(queue="retry-tasks", exc="All notifications for job {} are not persisted".format(job.id)) assert Job.query.get(job.id).job_status == 'in progress' mocked_send_task.assert_not_called() diff --git a/tests/app/delivery/test_rest.py b/tests/app/delivery/test_rest.py index fc51fb508..984bf90e1 100644 --- a/tests/app/delivery/test_rest.py +++ b/tests/app/delivery/test_rest.py @@ -78,7 +78,7 @@ def test_should_call_deliver_sms_task_if_send_sms_to_provider_fails(notify_api, ) app.delivery.send_to_providers.send_sms_to_provider.assert_called_with(sample_notification) app.celery.provider_tasks.deliver_sms.apply_async.assert_called_with( - (str(sample_notification.id)), queue='send-sms' + (str(sample_notification.id)), queue='send-tasks' ) assert response.status_code == 204 @@ -100,6 +100,6 @@ def test_should_call_deliver_email_task_if_send_email_to_provider_fails( ) app.delivery.send_to_providers.send_email_to_provider.assert_called_with(sample_email_notification) app.celery.provider_tasks.deliver_email.apply_async.assert_called_with( - (str(sample_email_notification.id)), queue='send-email' + (str(sample_email_notification.id)), queue='send-tasks' ) assert response.status_code == 204 diff --git a/tests/app/invite/test_invite_rest.py b/tests/app/invite/test_invite_rest.py index 182f4b05b..be1f46d82 100644 --- a/tests/app/invite/test_invite_rest.py +++ b/tests/app/invite/test_invite_rest.py @@ -33,7 +33,7 @@ def test_create_invited_user(client, sample_service, mocker, invitation_email_te assert json_resp['data']['id'] notification = Notification.query.first() - mocked.assert_called_once_with([(str(notification.id))], queue="notify") + mocked.assert_called_once_with([(str(notification.id))], queue="notify-internal-tasks") def test_create_invited_user_invalid_email(client, sample_service, mocker): diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index d583d6b31..6ebcb2e89 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -119,7 +119,7 @@ def test_create_unscheduled_job(notify_api, sample_template, mocker, fake_uuid): app.celery.tasks.process_job.apply_async.assert_called_once_with( ([str(fake_uuid)]), - queue="process-job" + queue="job-tasks" ) resp_json = json.loads(response.get_data(as_text=True)) diff --git a/tests/app/letters/test_send_letter_jobs.py b/tests/app/letters/test_send_letter_jobs.py index 7f1d88c85..1d32baf12 100644 --- a/tests/app/letters/test_send_letter_jobs.py +++ b/tests/app/letters/test_send_letter_jobs.py @@ -21,7 +21,7 @@ def test_send_letter_jobs(client, mocker): mock_celery.assert_called_once_with(name="send-files-to-dvla", args=(job_ids['job_ids'],), - queue="process-ftp") + queue="process-ftp-tasks") def test_send_letter_jobs_throws_validation_error(client, mocker): diff --git a/tests/app/notifications/rest/test_callbacks.py b/tests/app/notifications/rest/test_callbacks.py index 2c2ffd9cf..58bbc8b1f 100644 --- a/tests/app/notifications/rest/test_callbacks.py +++ b/tests/app/notifications/rest/test_callbacks.py @@ -68,7 +68,7 @@ def 
test_dvla_callback_calls_update_letter_notifications_task(client, mocker): assert response.status_code == 200 assert update_task.called - update_task.assert_called_with(['bar.txt'], queue='notify') + update_task.assert_called_with(['bar.txt'], queue='notify-internal-tasks') def test_dvla_callback_does_not_raise_error_parsing_json_for_plaintext_header(client, mocker): diff --git a/tests/app/notifications/rest/test_send_notification.py b/tests/app/notifications/rest/test_send_notification.py index ab527e06e..b491e3d7f 100644 --- a/tests/app/notifications/rest/test_send_notification.py +++ b/tests/app/notifications/rest/test_send_notification.py @@ -127,7 +127,7 @@ def test_send_notification_with_placeholders_replaced(notify_api, sample_email_t mocked.assert_called_once_with( [notification_id], - queue="send-email" + queue="send-tasks" ) assert response.status_code == 201 assert response_data['body'] == u'Hello Jo\nThis is an email from GOV.\u200BUK' @@ -338,7 +338,7 @@ def test_should_allow_valid_sms_notification(notify_api, sample_template, mocker response_data = json.loads(response.data)['data'] notification_id = response_data['notification']['id'] - mocked.assert_called_once_with([notification_id], queue='send-sms') + mocked.assert_called_once_with([notification_id], queue='send-tasks') assert response.status_code == 201 assert notification_id assert 'subject' not in response_data @@ -392,7 +392,7 @@ def test_should_allow_valid_email_notification(notify_api, sample_email_template notification_id = response_data['notification']['id'] app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with( [notification_id], - queue="send-email" + queue="send-tasks" ) assert response.status_code == 201 @@ -593,7 +593,7 @@ def test_should_send_email_if_team_api_key_and_a_service_user(notify_api, sample data=json.dumps(data), headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))]) - app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with([fake_uuid], queue='send-email') + app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with([fake_uuid], queue='send-tasks') assert response.status_code == 201 @@ -626,7 +626,9 @@ def test_should_send_sms_to_anyone_with_test_key( data=json.dumps(data), headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))] ) - app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with([fake_uuid], queue='research-mode') + app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with( + [fake_uuid], queue='research-mode-tasks' + ) assert response.status_code == 201 @@ -660,7 +662,9 @@ def test_should_send_email_to_anyone_with_test_key( headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))] ) - app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with([fake_uuid], queue='research-mode') + app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with( + [fake_uuid], queue='research-mode-tasks' + ) assert response.status_code == 201 @@ -685,7 +689,7 @@ def test_should_send_sms_if_team_api_key_and_a_service_user(notify_api, sample_t data=json.dumps(data), headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))]) - app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with([fake_uuid], queue='send-sms') + app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with([fake_uuid], 
queue='send-tasks') assert response.status_code == 201 @@ -718,7 +722,7 @@ def test_should_persist_notification(notify_api, sample_template, data=json.dumps(data), headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))]) - mocked.assert_called_once_with([fake_uuid], queue='send-{}'.format(template_type)) + mocked.assert_called_once_with([fake_uuid], queue='send-tasks') assert response.status_code == 201 notification = notifications_dao.get_notification_by_id(fake_uuid) @@ -761,7 +765,7 @@ def test_should_delete_notification_and_return_error_if_sqs_fails( data=json.dumps(data), headers=[('Content-Type', 'application/json'), ('Authorization', 'Bearer {}'.format(auth_header))]) - mocked.assert_called_once_with([fake_uuid], queue='send-{}'.format(template_type)) + mocked.assert_called_once_with([fake_uuid], queue='send-tasks') assert response.status_code == 500 assert not notifications_dao.get_notification_by_id(fake_uuid) assert not NotificationHistory.query.get(fake_uuid) @@ -1046,7 +1050,7 @@ def test_send_notification_uses_priority_queue_when_template_is_marked_as_priori notification_id = response_data['notification']['id'] assert response.status_code == 201 - mocked.assert_called_once_with([notification_id], queue='priority') + mocked.assert_called_once_with([notification_id], queue='priority-tasks') @pytest.mark.parametrize( @@ -1114,7 +1118,7 @@ def test_should_allow_store_original_number_on_sms_notification(client, sample_t response_data = json.loads(response.data)['data'] notification_id = response_data['notification']['id'] - mocked.assert_called_once_with([notification_id], queue='send-sms') + mocked.assert_called_once_with([notification_id], queue='send-tasks') assert response.status_code == 201 assert notification_id notifications = Notification.query.all() diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index be2356c69..8d707c7dc 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -207,17 +207,17 @@ def test_persist_notification_increments_cache_if_key_exists(sample_template, sa @pytest.mark.parametrize('research_mode, requested_queue, expected_queue, notification_type, key_type', - [(True, None, 'research-mode', 'sms', 'normal'), - (True, None, 'research-mode', 'email', 'normal'), - (True, None, 'research-mode', 'email', 'team'), - (False, None, 'send-sms', 'sms', 'normal'), - (False, None, 'send-email', 'email', 'normal'), - (False, None, 'send-sms', 'sms', 'team'), - (False, None, 'research-mode', 'sms', 'test'), - (True, 'notify', 'research-mode', 'email', 'normal'), - (False, 'notify', 'notify', 'sms', 'normal'), - (False, 'notify', 'notify', 'email', 'normal'), - (False, 'notify', 'research-mode', 'sms', 'test')]) + [(True, None, 'research-mode-tasks', 'sms', 'normal'), + (True, None, 'research-mode-tasks', 'email', 'normal'), + (True, None, 'research-mode-tasks', 'email', 'team'), + (False, None, 'send-tasks', 'sms', 'normal'), + (False, None, 'send-tasks', 'email', 'normal'), + (False, None, 'send-tasks', 'sms', 'team'), + (False, None, 'research-mode-tasks', 'sms', 'test'), + (True, 'notify-internal-tasks', 'research-mode-tasks', 'email', 'normal'), + (False, 'notify-internal-tasks', 'notify-internal-tasks', 'sms', 'normal'), + (False, 'notify-internal-tasks', 'notify-internal-tasks', 'email', 'normal'), + (False, 'notify-internal-tasks', 'research-mode-tasks', 'sms', 'test')]) def 
test_send_notification_to_queue(notify_db, notify_db_session, research_mode, requested_queue, expected_queue, notification_type, key_type, mocker): diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index ddbb3eaae..73f60f05b 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -374,7 +374,7 @@ def test_send_user_reset_password_should_send_reset_password_link(client, assert resp.status_code == 204 notification = Notification.query.first() - mocked.assert_called_once_with([str(notification.id)], queue="notify") + mocked.assert_called_once_with([str(notification.id)], queue="notify-internal-tasks") def test_send_user_reset_password_should_return_400_when_email_is_missing(client, mocker): @@ -436,7 +436,7 @@ def test_send_already_registered_email(client, sample_user, already_registered_t assert resp.status_code == 204 notification = Notification.query.first() - mocked.assert_called_once_with(([str(notification.id)]), queue="notify") + mocked.assert_called_once_with(([str(notification.id)]), queue="notify-internal-tasks") def test_send_already_registered_email_returns_400_when_data_is_missing(client, sample_user): @@ -464,7 +464,7 @@ def test_send_user_confirm_new_email_returns_204(client, sample_user, change_ema notification = Notification.query.first() mocked.assert_called_once_with( ([str(notification.id)]), - queue="notify") + queue="notify-internal-tasks") def test_send_user_confirm_new_email_returns_400_when_email_missing(client, sample_user, mocker): diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index 08f510bbd..84a88f6a0 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -218,7 +218,7 @@ def test_send_user_sms_code(client, app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with( ([str(notification.id)]), - queue="notify" + queue="notify-internal-tasks" ) @@ -246,7 +246,7 @@ def test_send_user_code_for_sms_with_optional_to_field(client, assert notification.to == to_number app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with( ([str(notification.id)]), - queue="notify" + queue="notify-internal-tasks" ) @@ -294,7 +294,7 @@ def test_send_user_email_verification(client, headers=[('Content-Type', 'application/json'), auth_header]) assert resp.status_code == 204 notification = Notification.query.first() - mocked.assert_called_once_with(([str(notification.id)]), queue="notify") + mocked.assert_called_once_with(([str(notification.id)]), queue="notify-internal-tasks") def test_send_email_verification_returns_404_for_bad_input_data(client, notify_db_session, mocker): diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index 4e721f15f..0bd1156ba 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -228,7 +228,7 @@ def test_send_notification_uses_priority_queue_when_template_is_marked_as_priori notification_id = json.loads(response.data)['id'] assert response.status_code == 201 - mocked.assert_called_once_with([notification_id], queue='priority') + mocked.assert_called_once_with([notification_id], queue='priority-tasks') @pytest.mark.parametrize( From 4768f0b9fd869e33feb81257bc7ebadda27f568c Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 11:12:40 +0100 Subject: [PATCH 18/92] Change retries policy. Before we had a long back off, now we have more, but shorter backoffs. 
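To put rough numbers on that (figures taken from the old retry_iteration_to_delay
table and the new task settings in the diff below, shown here only as a sanity check):

old_delays = [10, 60, 300, 3600, 14400]   # seconds: 10s, 1m, 5m, 1h, 4h over 5 retries
new_retries, new_delay = 48, 300          # 48 retries of 5 minutes each

assert max(old_delays) == 4 * 60 * 60     # a single old retry could park a message for 4 hours
assert new_retries * new_delay == 14400   # 48 * 300s is the same 4 hours, spread over 48 tries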
- PREVIOUS When we had an error talking to a provider we retried quickly and if we still got errors we backed off more and more. Maximum attempts was 5, max delay 4hours. This was to allow us time to ship a build if that was required. - NOW Backing off 48 times of 5 minutes each. This gives us the same total backoff, but many more tries in that period. - WHY Having the long back off meant messages could be delayed 4 hours. This was happening more and more, as PaaS deploys can place things into the "inflight" state in SQS. The inflight state MUST have an expiry time LONGER than the maximum retry back off. This meant that messages would be delayed 4 hours, even when there was no app error. By doing this we can reduce this delay to 5 minutes. Whilst still giving us time to fix issues. --- app/celery/provider_tasks.py | 32 +++------------------- tests/app/celery/test_provider_tasks.py | 36 +++---------------------- 2 files changed, 8 insertions(+), 60 deletions(-) diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index 4d8718489..50d5a31b7 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -10,31 +10,7 @@ from app.statsd_decorators import statsd from app.delivery import send_to_providers -def retry_iteration_to_delay(retry=0): - """ - :param retry times we have performed a retry - Given current retry calculate some delay before retrying - 0: 10 seconds - 1: 60 seconds (1 minutes) - 2: 300 seconds (5 minutes) - 3: 3600 seconds (60 minutes) - 4: 14400 seconds (4 hours) - :param retry (zero indexed): - :return length to retry in seconds, default 10 seconds - """ - - delays = { - 0: 10, - 1: 60, - 2: 300, - 3: 3600, - 4: 14400 - } - - return delays.get(retry, 10) - - -@notify_celery.task(bind=True, name="deliver_sms", max_retries=5, default_retry_delay=5) +@notify_celery.task(bind=True, name="deliver_sms", max_retries=48, default_retry_delay=300) @statsd(namespace="tasks") def deliver_sms(self, notification_id): try: @@ -47,7 +23,7 @@ def deliver_sms(self, notification_id): current_app.logger.exception( "SMS notification delivery for id: {} failed".format(notification_id) ) - self.retry(queue=QueueNames.RETRY, countdown=retry_iteration_to_delay(self.request.retries)) + self.retry(queue=QueueNames.RETRY) except self.MaxRetriesExceededError: current_app.logger.exception( "RETRY FAILED: task send_sms_to_provider failed for notification {}".format(notification_id), @@ -55,7 +31,7 @@ def deliver_sms(self, notification_id): update_notification_status_by_id(notification_id, 'technical-failure') -@notify_celery.task(bind=True, name="deliver_email", max_retries=5, default_retry_delay=5) +@notify_celery.task(bind=True, name="deliver_email", max_retries=48, default_retry_delay=300) @statsd(namespace="tasks") def deliver_email(self, notification_id): try: @@ -71,7 +47,7 @@ def deliver_email(self, notification_id): current_app.logger.exception( "RETRY: Email notification {} failed".format(notification_id) ) - self.retry(queue=QueueNames.RETRY, countdown=retry_iteration_to_delay(self.request.retries)) + self.retry(queue=QueueNames.RETRY) except self.MaxRetriesExceededError: current_app.logger.error( "RETRY FAILED: task send_email_to_provider failed for notification {}".format(notification_id) diff --git a/tests/app/celery/test_provider_tasks.py b/tests/app/celery/test_provider_tasks.py index 9c9a44e5f..b5dbc999a 100644 --- a/tests/app/celery/test_provider_tasks.py +++ b/tests/app/celery/test_provider_tasks.py @@ -11,34 +11,6 @@ def 
test_should_have_decorated_tasks_functions(): assert deliver_email.__wrapped__.__name__ == 'deliver_email' -def test_should_by_10_second_delay_as_default(): - assert provider_tasks.retry_iteration_to_delay() == 10 - - -def test_should_by_10_second_delay_on_unmapped_retry_iteration(): - assert provider_tasks.retry_iteration_to_delay(99) == 10 - - -def test_should_by_10_second_delay_on_retry_one(): - assert provider_tasks.retry_iteration_to_delay(0) == 10 - - -def test_should_by_1_minute_delay_on_retry_two(): - assert provider_tasks.retry_iteration_to_delay(1) == 60 - - -def test_should_by_5_minute_delay_on_retry_two(): - assert provider_tasks.retry_iteration_to_delay(2) == 300 - - -def test_should_by_60_minute_delay_on_retry_two(): - assert provider_tasks.retry_iteration_to_delay(3) == 3600 - - -def test_should_by_240_minute_delay_on_retry_two(): - assert provider_tasks.retry_iteration_to_delay(4) == 14400 - - def test_should_call_send_sms_to_provider_from_deliver_sms_task( notify_db, notify_db_session, @@ -61,7 +33,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_sms_task deliver_sms(notification_id) app.delivery.send_to_providers.send_sms_to_provider.assert_not_called() - app.celery.provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks", countdown=10) + app.celery.provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks") def test_should_call_send_email_to_provider_from_deliver_email_task( @@ -83,7 +55,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_email_ta deliver_email(notification_id) app.delivery.send_to_providers.send_email_to_provider.assert_not_called() - app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=10) + app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks") # DO THESE FOR THE 4 TYPES OF TASK @@ -94,7 +66,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_sms_task(s deliver_sms(sample_notification.id) - provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks", countdown=10) + provider_tasks.deliver_sms.retry.assert_called_with(queue="retry-tasks") assert sample_notification.status == 'technical-failure' @@ -105,7 +77,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_email_task deliver_email(sample_notification.id) - provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=10) + provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks") assert sample_notification.status == 'technical-failure' From 753d58f1381c23496b395a29f775c960e8790173 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 11:15:28 +0100 Subject: [PATCH 19/92] Adds new queues to the PaaS workers --- manifest-delivery-base.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index f3c196272..c68d36965 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -23,33 +23,33 @@ applications: NOTIFY_APP_NAME: delivery-celery-beat - name: notify-delivery-worker-database - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks env: NOTIFY_APP_NAME: delivery-worker-database - name: 
notify-delivery-worker-research - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q research-mode + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q research-mode,research-mode-tasks env: NOTIFY_APP_NAME: delivery-worker-research - name: notify-delivery-worker-sender - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks env: NOTIFY_APP_NAME: delivery-worker-sender - name: notify-delivery-worker-periodic - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics,periodic-tasks,statistics-tasks instances: 1 memory: 2G env: NOTIFY_APP_NAME: delivery-worker-periodic - name: notify-delivery-worker-priority - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q priority + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q priority,priority-tasks env: NOTIFY_APP_NAME: delivery-worker-priority - name: notify-delivery-worker - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q process-job,notify,retry + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q process-job,notify,retry,job-tasks,retry-tasks,notify-internal-tasks env: NOTIFY_APP_NAME: delivery-worker From 58af4e5ee9c3c0a9adae56328e835fc8e4dcb102 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 11:20:23 +0100 Subject: [PATCH 20/92] timeout slightly longer than retry --- app/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/config.py b/app/config.py index 3bd7218a0..b3ec9fe5f 100644 --- a/app/config.py +++ b/app/config.py @@ -123,7 +123,7 @@ class Config(object): BROKER_TRANSPORT_OPTIONS = { 'region': AWS_REGION, 'polling_interval': 1, # 1 second - 'visibility_timeout': 300, + 'visibility_timeout': 310, 'queue_name_prefix': NOTIFICATION_QUEUE_PREFIX } CELERY_ENABLE_UTC = True, From 29b6a918e829ec78bebaca972f60d7e03a763213 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 25 May 2017 13:43:31 +0100 Subject: [PATCH 21/92] Ensure dev and test builds will read from old and new queues --- app/config.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/app/config.py b/app/config.py index b3ec9fe5f..76917b79c 100644 --- a/app/config.py +++ b/app/config.py @@ -24,6 +24,23 @@ class QueueNames(object): NOTIFY = 'notify-internal-tasks' PROCESS_FTP = 'process-ftp-tasks' + @staticmethod + def old_queues(): + return [ + 'db-sms', + 'db-email', + 'db-letter', + 'priority', + 'periodic', + 'send-sms', + 'send-email', + 'research-mode', + 'statistics', + 'notify', + 'retry', + 'process-job' + ] + @staticmethod def all_queues(): return [ @@ -240,6 +257,8 @@ class Development(Config): NOTIFICATION_QUEUE_PREFIX = 'development' DEBUG = True + queues = QueueNames.all_queues() + QueueNames.old_queues() + for queue in QueueNames.all_queues(): 
Config.CELERY_QUEUES.append( Queue(queue, Exchange('default'), routing_key=queue) @@ -259,7 +278,9 @@ class Test(Config): STATSD_HOST = "localhost" STATSD_PORT = 1000 - for queue in QueueNames.all_queues(): + queues = QueueNames.all_queues() + QueueNames.old_queues() + + for queue in queues: Config.CELERY_QUEUES.append( Queue(queue, Exchange('default'), routing_key=queue) ) From 3dc70b8c392fdde5277d4887986cfee98684627c Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 26 May 2017 15:41:14 +0100 Subject: [PATCH 22/92] Check service.permissions for the existence of schedule_notifications if the notications is being created with a scheduled_for param. --- app/models.py | 6 ++++-- app/notifications/validators.py | 9 +++++---- app/v2/notifications/post_notifications.py | 5 ++--- .../notifications/test_post_notifications.py | 19 +++++++++++-------- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/app/models.py b/app/models.py index 8c1e22e79..0bfc32e24 100644 --- a/app/models.py +++ b/app/models.py @@ -146,8 +146,10 @@ class DVLAOrganisation(db.Model): INTERNATIONAL_SMS_TYPE = 'international_sms' INBOUND_SMS_TYPE = 'inbound_sms' +SCHEDULE_NOTIFICATIONS = 'schedule_notifications' -SERVICE_PERMISSION_TYPES = [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INBOUND_SMS_TYPE] +SERVICE_PERMISSION_TYPES = [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INBOUND_SMS_TYPE, + SCHEDULE_NOTIFICATIONS] class ServicePermissionTypes(db.Model): @@ -977,7 +979,7 @@ INVITED_USER_STATUS_TYPES = ['pending', 'accepted', 'cancelled'] class ScheduledNotification(db.Model): __tablename__ = 'scheduled_notifications' - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4()) + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) notification_id = db.Column(UUID(as_uuid=True), db.ForeignKey('notifications.id'), index=True, nullable=False) notification = db.relationship('Notification', uselist=False) scheduled_for = db.Column(db.DateTime, index=False, nullable=False) diff --git a/app/notifications/validators.py b/app/notifications/validators.py index 1193d4015..c7e97c26e 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -6,7 +6,7 @@ from notifications_utils.recipients import ( ) from app.dao import services_dao -from app.models import KEY_TYPE_TEST, KEY_TYPE_TEAM, SMS_TYPE +from app.models import KEY_TYPE_TEST, KEY_TYPE_TEAM, SMS_TYPE, SCHEDULE_NOTIFICATIONS from app.service.utils import service_allowed_to_send_to from app.v2.errors import TooManyRequestsError, BadRequestError, RateLimitError from app import redis_store @@ -92,6 +92,7 @@ def check_sms_content_char_count(content_count): raise BadRequestError(message=message) -def service_can_schedule_notification(service): - # TODO: implement once the service permission works. 
- raise BadRequestError(message="Your service must be invited to schedule notifications via the API.") +def service_can_schedule_notification(service, scheduled_for): + if scheduled_for: + if SCHEDULE_NOTIFICATIONS not in [p.permission for p in service.permissions]: + raise BadRequestError(message="Your service must be invited to schedule notifications via the API.") diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 6cbcbf0f0..e5f795a34 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -34,9 +34,8 @@ def post_notification(notification_type): form = validate(request.get_json(), post_sms_request) scheduled_for = form.get("scheduled_for", None) - if scheduled_for: - if not service_can_schedule_notification(authenticated_service): - return + service_can_schedule_notification(authenticated_service, scheduled_for) + check_rate_limiting(authenticated_service, api_user) form_send_to = form['phone_number'] if notification_type == SMS_TYPE else form['email_address'] diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index afce12947..9f938c13d 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -3,13 +3,14 @@ import uuid import pytest from freezegun import freeze_time -from app.models import Notification, ScheduledNotification +from app.models import Notification, ScheduledNotification, SCHEDULE_NOTIFICATIONS, EMAIL_TYPE, SMS_TYPE from flask import json, current_app from app.models import Notification from app.v2.errors import RateLimitError from tests import create_authorization_header from tests.app.conftest import sample_template as create_sample_template, sample_service +from tests.app.db import create_service, create_template @pytest.mark.parametrize("reference", [None, "reference_from_client"]) @@ -350,26 +351,28 @@ def test_post_sms_should_persist_supplied_sms_number(client, sample_template_wit assert mocked.called -@pytest.mark.skip("Once the service can be invited to schedule notifications we can add this test.") @pytest.mark.parametrize("notification_type, key_send_to, send_to", [("sms", "phone_number", "07700 900 855"), ("email", "email_address", "sample@email.com")]) @freeze_time("2017-05-14 14:00:00") -def test_post_notification_with_scheduled_for(client, sample_template, sample_email_template, +def test_post_notification_with_scheduled_for(client, notify_db, notify_db_session, notification_type, key_send_to, send_to): + service = create_service(service_name=str(uuid.uuid4()), + service_permissions=[EMAIL_TYPE, SMS_TYPE, SCHEDULE_NOTIFICATIONS]) + template = create_template(service=service, template_type=notification_type) data = { key_send_to: send_to, - 'template_id': str(sample_email_template.id) if notification_type == 'email' else str(sample_template.id), + 'template_id': str(template.id) if notification_type == 'email' else str(template.id), 'scheduled_for': '2017-05-14 14:15' } - auth_header = create_authorization_header(service_id=sample_template.service_id) + auth_header = create_authorization_header(service_id=service.id) response = client.post('/v2/notifications/{}'.format(notification_type), data=json.dumps(data), headers=[('Content-Type', 'application/json'), auth_header]) assert response.status_code == 201 resp_json = json.loads(response.get_data(as_text=True)) - scheduled_notification = 
ScheduledNotification.query.all() + scheduled_notification = ScheduledNotification.query.filter_by(notification_id=resp_json["id"]).all() assert len(scheduled_notification) == 1 assert resp_json["id"] == str(scheduled_notification[0].notification_id) assert resp_json["scheduled_for"] == '2017-05-14 14:15' @@ -379,8 +382,8 @@ def test_post_notification_with_scheduled_for(client, sample_template, sample_em [("sms", "phone_number", "07700 900 855"), ("email", "email_address", "sample@email.com")]) @freeze_time("2017-05-14 14:00:00") -def test_post_notification_with_scheduled_for_raises_bad_request(client, sample_template, sample_email_template, - notification_type, key_send_to, send_to): +def test_post_notification_raises_bad_request_if_service_not_invited_to_schedule( + client, sample_template, sample_email_template, notification_type, key_send_to, send_to): data = { key_send_to: send_to, 'template_id': str(sample_email_template.id) if notification_type == 'email' else str(sample_template.id), From b4c9901eb53f760e3998a6670962159183e90854 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 26 May 2017 15:53:25 +0100 Subject: [PATCH 23/92] Allow for scheduled_for to be null in schema --- app/v2/notifications/notification_schemas.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app/v2/notifications/notification_schemas.py b/app/v2/notifications/notification_schemas.py index 6aa4dcb77..69372c4e1 100644 --- a/app/v2/notifications/notification_schemas.py +++ b/app/v2/notifications/notification_schemas.py @@ -113,7 +113,7 @@ post_sms_request = { "phone_number": {"type": "string", "format": "phone_number"}, "template_id": uuid, "personalisation": personalisation, - "scheduled_for": {"type": "string", "format": "datetime"} + "scheduled_for": {"type": ["string", "null"], "format": "datetime"} }, "required": ["phone_number", "template_id"] } @@ -141,7 +141,7 @@ post_sms_response = { "content": sms_content, "uri": {"type": "string", "format": "uri"}, "template": template, - "scheduled_for": {"type": "string"} + "scheduled_for": {"type": ["string", "null"]} }, "required": ["id", "content", "uri", "template"] } @@ -157,7 +157,7 @@ post_email_request = { "email_address": {"type": "string", "format": "email_address"}, "template_id": uuid, "personalisation": personalisation, - "scheduled_for": {"type": "string", "format": "datetime"} + "scheduled_for": {"type": ["string", "null"], "format": "datetime"} }, "required": ["email_address", "template_id"] } @@ -186,7 +186,7 @@ post_email_response = { "content": email_content, "uri": {"type": "string", "format": "uri"}, "template": template, - "scheduled_for": {"type": "string"} + "scheduled_for": {"type": ["string", "null"]} }, "required": ["id", "content", "uri", "template"] } From 1f6157eaf4d39d7a0320ce8291e1f808d7871a68 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 26 May 2017 16:10:52 +0100 Subject: [PATCH 24/92] Migration script to add new permission type --- .../versions/0088_add_schedule_serv_perm.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 migrations/versions/0088_add_schedule_serv_perm.py diff --git a/migrations/versions/0088_add_schedule_serv_perm.py b/migrations/versions/0088_add_schedule_serv_perm.py new file mode 100644 index 000000000..85b94118b --- /dev/null +++ b/migrations/versions/0088_add_schedule_serv_perm.py @@ -0,0 +1,23 @@ +"""empty message + +Revision ID: 0088_add_schedule_serv_perm +Revises: 0087_scheduled_notifications +Create Date: 2017-05-26 14:53:18.581320 + 
+""" + +# revision identifiers, used by Alembic. +revision = '0088_add_schedule_serv_perm' +down_revision = '0087_scheduled_notifications' + +from alembic import op + + +def upgrade(): + op.get_bind() + op.execute("insert into service_permission_types values('schedule_notifications')") + + +def downgrade(): + op.get_bind() + op.execute("delete from service_permission_types where name = 'schedule_notifications'") From 56e9faab2e62e01ead46eea078b2cd335d8fc449 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 26 May 2017 15:27:49 +0100 Subject: [PATCH 25/92] Refactor schema --- app/schemas.py | 40 ++++++++-------------------------------- 1 file changed, 8 insertions(+), 32 deletions(-) diff --git a/app/schemas.py b/app/schemas.py index ff4f83a95..09301868e 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -184,31 +184,13 @@ class ServiceSchema(BaseSchema): override_flag = False def service_permissions(self, service): - permissions = [] - str_permissions = [] - - perms = dao_fetch_service_permissions(service.id) - for p in perms: - permission = { - "service_id": service.id, - "permission": p.permission - } - permissions.append(permission) - str_permissions.append(p.permission) + permissions = [p.permission for p in service.permissions] def deprecate_convert_flags_to_permissions(): - def convert_flags(flag, notify_type): - if flag and notify_type not in str_permissions: - permission = { - "service_id": service.id, - "permission": notify_type - } + def convert_flags(flag, permission): + if flag and permission not in permissions: permissions.append(permission) - elif flag is False and notify_type in str_permissions: - permission = { - "service_id": service.id, - "permission": notify_type - } + elif flag is False and permission in permissions: permissions.remove(permission) convert_flags(service.can_send_international_sms, INTERNATIONAL_SMS_TYPE) @@ -254,14 +236,13 @@ class ServiceSchema(BaseSchema): if isinstance(in_data, dict) and 'permissions' in in_data: str_permissions = in_data['permissions'] permissions = [] - for p in in_data['permissions']: + for p in str_permissions: permission = ServicePermission(service_id=in_data["id"], permission=p) permissions.append(permission) - in_data['permissions'] = permissions def deprecate_override_flags(): - in_data['can_send_letters'] = LETTER_TYPE in [p.permission for p in permissions] - in_data['can_send_international_sms'] = INTERNATIONAL_SMS_TYPE in [p.permission for p in permissions] + in_data['can_send_letters'] = LETTER_TYPE in str_permissions + in_data['can_send_international_sms'] = INTERNATIONAL_SMS_TYPE in str_permissions def deprecate_convert_flags_to_permissions(): def convert_flags(flag, notify_type): @@ -280,12 +261,7 @@ class ServiceSchema(BaseSchema): deprecate_override_flags() else: deprecate_convert_flags_to_permissions() - - @post_dump - def format_as_string_array(self, in_data): - if isinstance(in_data, dict) and 'permissions' in in_data: - in_data['permissions'] = [p.get("permission") for p in in_data['permissions']] - return in_data + in_data['permissions'] = permissions def set_override_flag(self, flag): self.override_flag = flag From a58e724d215dc42919a7723677ebf7a0d1ac5d28 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Fri, 26 May 2017 16:44:23 +0100 Subject: [PATCH 26/92] Add a script and make command to detect if there are any migration changes --- Makefile | 5 +++++ scripts/check_if_new_migration.py | 34 +++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 scripts/check_if_new_migration.py 
diff --git a/Makefile b/Makefile index 37d876d92..14cae9b0c 100644 --- a/Makefile +++ b/Makefile @@ -303,3 +303,8 @@ cf-rollback: ## Rollbacks the app to the previous release cf-push: $(if ${CF_APP},,$(error Must specify CF_APP)) cf push ${CF_APP} -f ${CF_MANIFEST_FILE} + +.PHONY: check-if-migrations-to-run +check-if-migrations-to-run: + @echo $(shell python scripts/check_if_new_migration.py) + diff --git a/scripts/check_if_new_migration.py b/scripts/check_if_new_migration.py new file mode 100644 index 000000000..ba0519ea1 --- /dev/null +++ b/scripts/check_if_new_migration.py @@ -0,0 +1,34 @@ +import os +from os.path import dirname, abspath +import requests +import sys + + +def get_latest_db_migration_to_apply(): + project_dir = dirname(dirname(abspath(__file__))) # Get the main project directory + migrations_dir = '{}/migrations/versions/'.format(project_dir) + migration_files = [migration_file for migration_file in os.listdir(migrations_dir) if migration_file.endswith('py')] + latest_file = sorted(migration_files, reverse=True)[0].replace('.py', '') + return latest_file + + +def get_current_db_version(): + api_status_url = '{}/_status'.format(os.getenv('API_HOST_NAME')) + response = requests.get(api_status_url) + + if response.status_code != 200: + sys.exit('Could not make a request to the API: {}'.format()) + + current_db_version = response.json()['db_version'] + return current_db_version + + +def run(): + if get_current_db_version() == get_latest_db_migration_to_apply(): + print('no') + else: + print('yes') + + +if __name__ == "__main__": + run() From 18b8382d6e0cc6037766d96239043f5b12bee4f5 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 26 May 2017 17:17:15 +0100 Subject: [PATCH 27/92] Refactor schema and improve tests --- app/schemas.py | 12 ------------ tests/app/service/test_rest.py | 12 +++++------- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/app/schemas.py b/app/schemas.py index 09301868e..a89bbcd9f 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -186,18 +186,6 @@ class ServiceSchema(BaseSchema): def service_permissions(self, service): permissions = [p.permission for p in service.permissions] - def deprecate_convert_flags_to_permissions(): - def convert_flags(flag, permission): - if flag and permission not in permissions: - permissions.append(permission) - elif flag is False and permission in permissions: - permissions.remove(permission) - - convert_flags(service.can_send_international_sms, INTERNATIONAL_SMS_TYPE) - convert_flags(service.can_send_letters, LETTER_TYPE) - - deprecate_convert_flags_to_permissions() - return permissions class Meta: diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 7b5fda823..46adbba1b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -519,7 +519,9 @@ def test_update_service_flags_will_remove_service_permissions(client, notify_db, assert resp.status_code == 200 assert result['data']['can_send_international_sms'] is False - assert set(result['data']['permissions']) == set([SMS_TYPE, EMAIL_TYPE]) + + permissions = ServicePermission.query.filter_by(service_id=service.id).all() + assert set([p.permission for p in permissions]) == set([SMS_TYPE, EMAIL_TYPE]) def test_update_permissions_will_override_permission_flags(client, service_with_no_permissions): @@ -583,14 +585,10 @@ def test_add_service_permission_will_add_permission(client, service_with_no_perm headers=[('Content-Type', 'application/json'), auth_header] ) - resp = client.get( - 
'/service/{}'.format(service_with_no_permissions.id), - headers=[auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) + permissions = ServicePermission.query.filter_by(service_id=service_with_no_permissions.id).all() assert resp.status_code == 200 - assert result['data']['permissions'] == [permission_to_add] + assert [p.permission for p in permissions] == [permission_to_add] def test_update_permissions_with_an_invalid_permission_will_raise_error(client, sample_service): From 112c6735930801375d45a927a66eaf82d44d9a32 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 26 May 2017 17:23:01 +0100 Subject: [PATCH 28/92] Removed a few lines from schema --- app/schemas.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/app/schemas.py b/app/schemas.py index a89bbcd9f..b8d3ee7af 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -184,9 +184,7 @@ class ServiceSchema(BaseSchema): override_flag = False def service_permissions(self, service): - permissions = [p.permission for p in service.permissions] - - return permissions + return [p.permission for p in service.permissions] class Meta: model = models.Service From 4a85818c3450240ff36db8ad73b8a2652fe5cdbd Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 22 May 2017 11:26:47 +0100 Subject: [PATCH 29/92] add inbound sms table --- app/dao/inbound_sms_dao.py | 7 +++ app/dao/services_dao.py | 6 ++ app/models.py | 23 +++++++ app/notifications/receive_notifications.py | 61 ++++++++++++++++--- migrations/versions/0088_inbound_sms.py | 37 +++++++++++ ...rmissionDAO.py => test_permissions_dao.py} | 0 tests/app/dao/test_services_dao.py | 13 +++- tests/app/db.py | 12 +++- .../test_receive_notification.py | 47 +++++++++++++- 9 files changed, 192 insertions(+), 14 deletions(-) create mode 100644 app/dao/inbound_sms_dao.py create mode 100644 migrations/versions/0088_inbound_sms.py rename tests/app/dao/{test_permissionDAO.py => test_permissions_dao.py} (100%) diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py new file mode 100644 index 000000000..92f1c79e0 --- /dev/null +++ b/app/dao/inbound_sms_dao.py @@ -0,0 +1,7 @@ +from app import db +from app.dao.dao_utils import transactional + + +@transactional +def dao_create_inbound_sms(inbound_sms): + db.session.add(inbound_sms) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 572e36e63..4e4b18bc1 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -66,6 +66,12 @@ def dao_fetch_service_by_id(service_id, only_active=False): return query.one() +def dao_fetch_services_by_sms_sender(sms_sender): + return Service.query.filter( + Service.sms_sender == sms_sender + ).all() + + def dao_fetch_service_by_id_with_api_keys(service_id, only_active=False): query = Service.query.filter_by( id=service_id diff --git a/app/models.py b/app/models.py index 8c1e22e79..0794ad6a5 100644 --- a/app/models.py +++ b/app/models.py @@ -1141,3 +1141,26 @@ class JobStatistics(db.Model): ) the_string += "created at {}".format(self.created_at) return the_string + + +class InboundSms(db.Model): + __tablename__ = 'inbound_sms' + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) + service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), index=True, nullable=False) + service = db.relationship('Service', backref='inbound_sms') + + notify_number = db.Column(db.String, nullable=False) # the service's number, that the msg was sent to + 
user_number = db.Column(db.String, nullable=False) # the end user's number, that the msg was sent from + provider_date = db.Column(db.DateTime) + provider_reference = db.Column(db.String) + _content = db.Column('content', db.String, nullable=False) + + @property + def content(self): + return encryption.decrypt(self._content) + + @content.setter + def content(self, content): + self._content = encryption.encrypt(content) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 08122fb41..19542488b 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -1,8 +1,15 @@ -from flask import Blueprint -from flask import current_app -from flask import request +from urllib.parse import unquote -from app.errors import register_errors +from flask import Blueprint, current_app, request +from notifications_utils.recipients import normalise_phone_number + +from app.dao.services_dao import dao_fetch_services_by_sms_sender +from app.dao.inbound_sms_dao import dao_create_inbound_sms +from app.models import InboundSms +from app.errors import ( + register_errors, + InvalidRequest +) receive_notifications_blueprint = Blueprint('receive_notifications', __name__) register_errors(receive_notifications_blueprint) @@ -10,8 +17,48 @@ register_errors(receive_notifications_blueprint) @receive_notifications_blueprint.route('/notifications/sms/receive/mmg', methods=['POST']) def receive_mmg_sms(): + """ + { + 'MSISDN': '447123456789' + 'Number': '40604', + 'Message': 'some+uri+encoded+message%3A', + 'ID': 'SOME-MMG-SPECIFIC-ID', + 'DateRecieved': '2017-05-21+11%3A56%3A11' + } + """ post_data = request.get_json() - post_data.pop('MSISDN', None) - current_app.logger.info("Recieve notification form data: {}".format(post_data)) + potential_services = dao_fetch_services_by_sms_sender(post_data['Number']) - return "RECEIVED" + if len(potential_services) != 1: + current_app.logger.error('') + raise InvalidRequest( + 'Inbound number "{}" not associated with exactly one service'.format(post_data['Number']), + status_code=400 + ) + + service = potential_services[0] + + inbound = create_inbound_sms_object(service, post_data) + + current_app.logger.info('{} received inbound SMS with reference {}'.format(service.id, inbound.provider_reference)) + + return 'RECEIVED', 200 + + +def format_message(message): + return unquote(message.replace('+', ' ')) + + +def create_inbound_sms_object(service, json): + message = format_message(json['Message']) + user_number = normalise_phone_number(json['MSISDN']) + inbound = InboundSms( + service=service, + notify_number=service.sms_sender, + user_number=user_number, + provider_date=json['DateReceived'], + provider_reference=json['ID'], + content=message, + ) + dao_create_inbound_sms(inbound) + return inbound diff --git a/migrations/versions/0088_inbound_sms.py b/migrations/versions/0088_inbound_sms.py new file mode 100644 index 000000000..4ef3f6613 --- /dev/null +++ b/migrations/versions/0088_inbound_sms.py @@ -0,0 +1,37 @@ +"""empty message + +Revision ID: 0088_inbound_sms +Revises: 0087_scheduled_notifications +Create Date: 2017-05-22 11:28:53.471004 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0088_inbound_sms' +down_revision = '0087_scheduled_notifications' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +def upgrade(): + op.create_table( + 'inbound_sms', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('content', sa.String, nullable=False), + sa.Column('notify_number', sa.String, nullable=False), + sa.Column('user_number', sa.String, nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False), + sa.Column('provider_date', sa.DateTime, nullable=True), + sa.Column('provider_reference', sa.String, nullable=True), + + sa.ForeignKeyConstraint(['service_id'], ['services.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_inbound_sms_service_id'), 'inbound_sms', ['service_id'], unique=False) + op.create_index(op.f('ix_inbound_sms_user_number'), 'inbound_sms', ['user_number'], unique=False) + + +def downgrade(): + op.drop_table('inbound_sms') diff --git a/tests/app/dao/test_permissionDAO.py b/tests/app/dao/test_permissions_dao.py similarity index 100% rename from tests/app/dao/test_permissionDAO.py rename to tests/app/dao/test_permissions_dao.py diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index b7b3176dd..800852ca2 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -25,7 +25,8 @@ from app.dao.services_dao import ( fetch_stats_by_date_range_for_all_services, dao_suspend_service, dao_resume_service, - dao_fetch_active_users_for_service + dao_fetch_active_users_for_service, + dao_fetch_services_by_sms_sender ) from app.dao.service_permissions_dao import dao_add_service_permission, dao_remove_service_permission from app.dao.users_dao import save_model_user @@ -948,3 +949,13 @@ def test_dao_fetch_active_users_for_service_returns_active_only(notify_db, notif users = dao_fetch_active_users_for_service(service.id) assert len(users) == 1 + + +def test_dao_fetch_services_by_sms_sender(notify_db_session): + foo1 = create_service(service_name='a', sms_sender='foo') + foo2 = create_service(service_name='b', sms_sender='foo') + bar = create_service(service_name='c', sms_sender='bar') + + services = dao_fetch_services_by_sms_sender('foo') + + assert {foo1.id, foo2.id} == {x.id for x in services} diff --git a/tests/app/db.py b/tests/app/db.py index 2f637cf1b..76b458dd9 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -39,14 +39,20 @@ def create_user(mobile_number="+447700900986", email="notify@digital.cabinet-off def create_service( - user=None, service_name="Sample service", service_id=None, restricted=False, - service_permissions=[EMAIL_TYPE, SMS_TYPE]): + user=None, + service_name="Sample service", + service_id=None, + restricted=False, + service_permissions=[EMAIL_TYPE, SMS_TYPE], + sms_sender='testing' +): service = Service( name=service_name, message_limit=1000, restricted=restricted, email_from=service_name.lower().replace(' ', '.'), - created_by=user or create_user() + created_by=user or create_user(), + sms_sender=sms_sender ) dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) return service diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index f325fe6f9..3a38449a4 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ 
-1,14 +1,23 @@ +from datetime import datetime + +import pytest from flask import json +import freezegun + +from app.notifications.receive_notifications import ( + format_message, + create_inbound_sms_object +) -def test_receive_notification_returns_received_to_mmg(client): +def test_receive_notification_returns_received_to_mmg(client, sample_service): data = {"ID": "1234", "MSISDN": "447700900855", "Message": "Some message to notify", "Trigger": "Trigger?", - "Number": "40604", + "Number": "testing", "Channel": "SMS", - "DateReceived": "2012-06-27-12:33:00" + "DateReceived": "2012-06-27 12:33:00" } response = client.post(path='/notifications/sms/receive/mmg', data=json.dumps(data), @@ -16,3 +25,35 @@ def test_receive_notification_returns_received_to_mmg(client): assert response.status_code == 200 assert response.get_data(as_text=True) == 'RECEIVED' + + +@pytest.mark.parametrize('message, expected_output', [ + ('abc', 'abc'), + ('', ''), + ('lots+of+words', 'lots of words'), + ('%F0%9F%93%A9+%F0%9F%93%A9+%F0%9F%93%A9', '📩 📩 📩'), + ('x+%2B+y', 'x + y') +]) +def test_format_message(message, expected_output): + assert format_message(message) == expected_output + + +def test_create_inbound_sms_object(sample_service): + sample_service.sms_sender = 'foo' + data = { + 'Message': 'hello+there+%F0%9F%93%A9', + 'Number': 'foo', + 'MSISDN': '07700 900 001', + 'DateReceived': '2017-01-02 03:04:05', + 'ID': 'bar', + } + + inbound_sms = create_inbound_sms_object(sample_service, data) + + assert inbound_sms.service_id == sample_service.id + assert inbound_sms.notify_number == 'foo' + assert inbound_sms.user_number == '7700900001' + assert inbound_sms.provider_date == datetime(2017, 1, 2, 3, 4, 5) + assert inbound_sms.provider_reference == 'bar' + assert inbound_sms._content != 'hello there 📩' + assert inbound_sms.content == 'hello there 📩' From 58503c855a2c934bde16ecc42966d0234504c2ac Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 22 May 2017 14:43:46 +0100 Subject: [PATCH 30/92] set sms_sender to be 'GOVUK' if not otherwise specified this is a precursor to making the column non-nullable --- tests/app/service/test_rest.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 46adbba1b..2912060af 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -240,7 +240,11 @@ def test_create_service(client, sample_user): assert json_resp['data']['email_from'] == 'created.service' assert not json_resp['data']['research_mode'] assert json_resp['data']['dvla_organisation'] == '001' +<<<<<<< HEAD assert json_resp['data']['sms_sender'] == current_app.config['FROM_NUMBER'] +======= + assert json_resp['data']['sms_sender'] == 'GOVUK' +>>>>>>> set sms_sender to be 'GOVUK' if not otherwise specified service_db = Service.query.get(json_resp['data']['id']) assert service_db.name == 'created service' From eb6edf06a3404f1f69c46575f02c07e952aab064 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 22 May 2017 14:18:12 +0100 Subject: [PATCH 31/92] add upgrade script to remove non-null values from the sender column --- app/models.py | 2 +- migrations/versions/0085_govuk_sms_sender.py | 25 ++++++ tests/app/service/test_rest.py | 92 ++++++++++---------- 3 files changed, 70 insertions(+), 49 deletions(-) create mode 100644 migrations/versions/0085_govuk_sms_sender.py diff --git a/app/models.py b/app/models.py index 8c1e22e79..85bbcfbd1 100644 --- a/app/models.py +++ b/app/models.py @@ -188,7 +188,7 @@ class Service(db.Model, 
Versioned): created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), index=True, nullable=False) reply_to_email_address = db.Column(db.Text, index=False, unique=False, nullable=True) letter_contact_block = db.Column(db.Text, index=False, unique=False, nullable=True) - sms_sender = db.Column(db.String(11), nullable=True, default=lambda: current_app.config['FROM_NUMBER']) + sms_sender = db.Column(db.String(11), nullable=False, default=lambda: current_app.config['FROM_NUMBER']) organisation_id = db.Column(UUID(as_uuid=True), db.ForeignKey('organisation.id'), index=True, nullable=True) organisation = db.relationship('Organisation') dvla_organisation_id = db.Column( diff --git a/migrations/versions/0085_govuk_sms_sender.py b/migrations/versions/0085_govuk_sms_sender.py new file mode 100644 index 000000000..34c0fa835 --- /dev/null +++ b/migrations/versions/0085_govuk_sms_sender.py @@ -0,0 +1,25 @@ +"""empty message + +Revision ID: 0085_govuk_sms_sender +Revises: 0084_add_job_stats +Create Date: 2017-05-22 13:46:09.584801 + +""" + +# revision identifiers, used by Alembic. +revision = '0085_govuk_sms_sender' +down_revision = '0084_add_job_stats' + +from alembic import op + + +def upgrade(): + op.execute("UPDATE services SET sms_sender = 'GOVUK' where sms_sender is null") + op.execute("UPDATE services_history SET sms_sender = 'GOVUK' where sms_sender is null") + op.alter_column('services', 'sms_sender', nullable=False) + op.alter_column('services_history', 'sms_sender', nullable=False) + + +def downgrade(): + op.alter_column('services_history', 'sms_sender', nullable=True) + op.alter_column('services', 'sms_sender', nullable=True) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 2912060af..f61fa3e6b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1319,61 +1319,57 @@ def test_get_only_api_created_notifications_for_service( assert response.status_code == 200 -def test_set_sms_sender_for_service(notify_api, sample_service): - with notify_api.test_request_context(): - with notify_api.test_client() as client: - auth_header = create_authorization_header() - resp = client.get( - '/service/{}'.format(sample_service.id), - headers=[auth_header] - ) - json_resp = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert json_resp['data']['name'] == sample_service.name +def test_set_sms_sender_for_service(client, sample_service): + data = { + 'sms_sender': 'elevenchars', + } - data = { - 'sms_sender': 'elevenchars', - } + auth_header = create_authorization_header() - auth_header = create_authorization_header() - - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert result['data']['sms_sender'] == 'elevenchars' + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 200 + assert result['data']['sms_sender'] == 'elevenchars' -def test_set_sms_sender_for_service_rejects_invalid_characters(notify_api, sample_service): - with notify_api.test_request_context(): - with notify_api.test_client() as client: - auth_header = create_authorization_header() - resp = client.get( - '/service/{}'.format(sample_service.id), - headers=[auth_header] - ) 
- json_resp = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert json_resp['data']['name'] == sample_service.name +def test_set_sms_sender_for_service_rejects_invalid_characters(client, sample_service): + data = { + 'sms_sender': 'invalid####', + } - data = { - 'sms_sender': 'invalid####', - } + auth_header = create_authorization_header() - auth_header = create_authorization_header() + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 400 + assert result['result'] == 'error' + assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 400 - assert result['result'] == 'error' - assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} + +def test_set_sms_sender_for_service_rejects_null(client, sample_service): + data = { + 'sms_sender': None, + } + + auth_header = create_authorization_header() + + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 400 + assert result['result'] == 'error' + assert result['message'] == {'sms_sender': 'Field may not be null.'} @pytest.mark.parametrize('today_only,stats', [ From 25011f09ef4af7c48dbf7b86ff60ec3f245671e9 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 22 May 2017 17:24:31 +0100 Subject: [PATCH 32/92] test no longer applicable as null sms_sender is an error now --- ...{0085_govuk_sms_sender.py => 0086_govuk_sms_sender.py} | 8 ++++---- tests/app/delivery/test_send_to_providers.py | 1 - tests/app/service/test_rest.py | 6 +----- 3 files changed, 5 insertions(+), 10 deletions(-) rename migrations/versions/{0085_govuk_sms_sender.py => 0086_govuk_sms_sender.py} (78%) diff --git a/migrations/versions/0085_govuk_sms_sender.py b/migrations/versions/0086_govuk_sms_sender.py similarity index 78% rename from migrations/versions/0085_govuk_sms_sender.py rename to migrations/versions/0086_govuk_sms_sender.py index 34c0fa835..6e0fbf7d6 100644 --- a/migrations/versions/0085_govuk_sms_sender.py +++ b/migrations/versions/0086_govuk_sms_sender.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0085_govuk_sms_sender -Revises: 0084_add_job_stats +Revision ID: 0086_govuk_sms_sender +Revises: 0085_update_incoming_to_inbound Create Date: 2017-05-22 13:46:09.584801 """ # revision identifiers, used by Alembic. 
-revision = '0085_govuk_sms_sender' -down_revision = '0084_add_job_stats' +revision = '0086_govuk_sms_sender' +down_revision = '0085_update_incoming_to_inbound' from alembic import op diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 5cb467ac3..e1a4ec4a2 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -627,7 +627,6 @@ def test_should_set_international_phone_number_to_sent_status( # if 40604 is actually in DB then treat that as if entered manually ('40604', '40604', 'bar'), # 'testing' is the FROM_NUMBER during unit tests - (None, 'testing', 'Sample service: bar'), ('testing', 'testing', 'Sample service: bar'), ]) def test_should_handle_sms_sender_and_prefix_message( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index f61fa3e6b..e9e1913a0 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -240,11 +240,7 @@ def test_create_service(client, sample_user): assert json_resp['data']['email_from'] == 'created.service' assert not json_resp['data']['research_mode'] assert json_resp['data']['dvla_organisation'] == '001' -<<<<<<< HEAD assert json_resp['data']['sms_sender'] == current_app.config['FROM_NUMBER'] -======= - assert json_resp['data']['sms_sender'] == 'GOVUK' ->>>>>>> set sms_sender to be 'GOVUK' if not otherwise specified service_db = Service.query.get(json_resp['data']['id']) assert service_db.name == 'created service' @@ -1369,7 +1365,7 @@ def test_set_sms_sender_for_service_rejects_null(client, sample_service): result = json.loads(resp.get_data(as_text=True)) assert resp.status_code == 400 assert result['result'] == 'error' - assert result['message'] == {'sms_sender': 'Field may not be null.'} + assert result['message'] == {'sms_sender': ['Field may not be null.']} @pytest.mark.parametrize('today_only,stats', [ From 012b2bf36c7c56b31184fabc6744b15f2cf192d2 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 25 May 2017 10:35:10 +0100 Subject: [PATCH 33/92] version number bump --- ...{0086_govuk_sms_sender.py => 0087_govuk_sms_sender.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename migrations/versions/{0086_govuk_sms_sender.py => 0087_govuk_sms_sender.py} (78%) diff --git a/migrations/versions/0086_govuk_sms_sender.py b/migrations/versions/0087_govuk_sms_sender.py similarity index 78% rename from migrations/versions/0086_govuk_sms_sender.py rename to migrations/versions/0087_govuk_sms_sender.py index 6e0fbf7d6..ecd969f8d 100644 --- a/migrations/versions/0086_govuk_sms_sender.py +++ b/migrations/versions/0087_govuk_sms_sender.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0086_govuk_sms_sender -Revises: 0085_update_incoming_to_inbound +Revision ID: 0087_govuk_sms_sender +Revises: 0086_add_norm_to_notification Create Date: 2017-05-22 13:46:09.584801 """ # revision identifiers, used by Alembic. 
-revision = '0086_govuk_sms_sender' -down_revision = '0085_update_incoming_to_inbound' +revision = '0087_govuk_sms_sender' +down_revision = '0086_add_norm_to_notification' from alembic import op From db4b3e371a8f20911fdd1c041a873774844b9f68 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 25 May 2017 12:10:11 +0100 Subject: [PATCH 34/92] remove null sms sender test it's no longer possible for an sms_sender to be null --- tests/app/service/test_rest.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index e9e1913a0..18c384722 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1925,22 +1925,6 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan assert not send_notification_mock.called -def test_update_service_works_when_sms_sender_is_null(sample_service, client, mocker): - sample_service.sms_sender = None - data = {'name': 'new name'} - - resp = client.post( - 'service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[create_authorization_header()], - content_type='application/json' - ) - - assert resp.status_code == 200 - # make sure it wasn't changed to not-null under the hood - assert sample_service.sms_sender is None - - def test_search_for_notification_by_to_field_filters_by_status(client, notify_db, notify_db_session): create_notification = partial( create_sample_notification, From ef799d0515e29aefe9b3e06a90654b67e9e2422d Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 30 May 2017 11:08:59 +0100 Subject: [PATCH 35/92] add sad path tests for inbound sms --- .../test_receive_notification.py | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 3a38449a4..d4f14e5bc 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -2,13 +2,14 @@ from datetime import datetime import pytest from flask import json -import freezegun from app.notifications.receive_notifications import ( format_message, create_inbound_sms_object ) +from tests.app.db import create_service + def test_receive_notification_returns_received_to_mmg(client, sample_service): data = {"ID": "1234", @@ -57,3 +58,26 @@ def test_create_inbound_sms_object(sample_service): assert inbound_sms.provider_reference == 'bar' assert inbound_sms._content != 'hello there 📩' assert inbound_sms.content == 'hello there 📩' + + +@pytest.mark.parametrize('notify_number', ['foo', 'baz'], ids=['two_matching_services', 'no_matching_services']) +def test_receive_notification_error_if_not_single_matching_service(client, notify_db_session, notify_number): + create_service(service_name='a', sms_sender='foo') + create_service(service_name='b', sms_sender='foo') + + data = { + 'Message': 'hello', + 'Number': notify_number, + 'MSISDN': '7700900001', + 'DateReceived': '2017-01-02 03:04:05', + 'ID': 'bar', + } + response = client.post(path='/notifications/sms/receive/mmg', + data=json.dumps(data), + headers=[('Content-Type', 'application/json')]) + + assert response.status_code == 400 + assert json.loads(response.get_data(as_text=True)) == { + 'result': 'error', + 'message': 'Inbound number "{}" not associated with exactly one service'.format(notify_number) + } From d74675b6d87c4400318253edcde4ae41d941a7f7 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 30 May 2017 
11:24:26 +0100 Subject: [PATCH 36/92] Removed references to old queus --- app/config.py | 23 +---------------------- manifest-delivery-base.yml | 12 ++++++------ 2 files changed, 7 insertions(+), 28 deletions(-) diff --git a/app/config.py b/app/config.py index 60d182d91..f2cd39cdb 100644 --- a/app/config.py +++ b/app/config.py @@ -24,23 +24,6 @@ class QueueNames(object): NOTIFY = 'notify-internal-tasks' PROCESS_FTP = 'process-ftp-tasks' - @staticmethod - def old_queues(): - return [ - 'db-sms', - 'db-email', - 'db-letter', - 'priority', - 'periodic', - 'send-sms', - 'send-email', - 'research-mode', - 'statistics', - 'notify', - 'retry', - 'process-job' - ] - @staticmethod def all_queues(): return [ @@ -262,8 +245,6 @@ class Development(Config): NOTIFICATION_QUEUE_PREFIX = 'development' DEBUG = True - queues = QueueNames.all_queues() + QueueNames.old_queues() - for queue in QueueNames.all_queues(): Config.CELERY_QUEUES.append( Queue(queue, Exchange('default'), routing_key=queue) @@ -283,9 +264,7 @@ class Test(Config): STATSD_HOST = "localhost" STATSD_PORT = 1000 - queues = QueueNames.all_queues() + QueueNames.old_queues() - - for queue in queues: + for queue in QueueNames.all_queues(): Config.CELERY_QUEUES.append( Queue(queue, Exchange('default'), routing_key=queue) ) diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index c68d36965..7d087746d 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -23,33 +23,33 @@ applications: NOTIFY_APP_NAME: delivery-celery-beat - name: notify-delivery-worker-database - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q database-tasks env: NOTIFY_APP_NAME: delivery-worker-database - name: notify-delivery-worker-research - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q research-mode,research-mode-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q research-mode-tasks env: NOTIFY_APP_NAME: delivery-worker-research - name: notify-delivery-worker-sender - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-tasks env: NOTIFY_APP_NAME: delivery-worker-sender - name: notify-delivery-worker-periodic - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics,periodic-tasks,statistics-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic-tasks,statistics-tasks instances: 1 memory: 2G env: NOTIFY_APP_NAME: delivery-worker-periodic - name: notify-delivery-worker-priority - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q priority,priority-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=5 -Q priority-tasks env: NOTIFY_APP_NAME: delivery-worker-priority - name: notify-delivery-worker - command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO 
--concurrency=11 -Q process-job,notify,retry,job-tasks,retry-tasks,notify-internal-tasks + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q job-tasks,retry-tasks,notify-internal-tasks env: NOTIFY_APP_NAME: delivery-worker From de3b5a13a95250130ba125dcba60ef901b719caa Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 30 May 2017 12:49:30 +0100 Subject: [PATCH 37/92] version number bump --- ...{0087_govuk_sms_sender.py => 0088_govuk_sms_sender.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename migrations/versions/{0087_govuk_sms_sender.py => 0088_govuk_sms_sender.py} (79%) diff --git a/migrations/versions/0087_govuk_sms_sender.py b/migrations/versions/0088_govuk_sms_sender.py similarity index 79% rename from migrations/versions/0087_govuk_sms_sender.py rename to migrations/versions/0088_govuk_sms_sender.py index ecd969f8d..3d580d6ad 100644 --- a/migrations/versions/0087_govuk_sms_sender.py +++ b/migrations/versions/0088_govuk_sms_sender.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0087_govuk_sms_sender -Revises: 0086_add_norm_to_notification +Revision ID: 0088_govuk_sms_sender +Revises: 0087_scheduled_notifications Create Date: 2017-05-22 13:46:09.584801 """ # revision identifiers, used by Alembic. -revision = '0087_govuk_sms_sender' -down_revision = '0086_add_norm_to_notification' +revision = '0088_govuk_sms_sender' +down_revision = '0087_scheduled_notifications' from alembic import op From 9ada8b27538aa30c516e4f1ccd8b3331f0f72ddc Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Tue, 30 May 2017 14:40:27 +0100 Subject: [PATCH 38/92] =?UTF-8?q?Don=E2=80=99t=20500=20when=20searching=20?= =?UTF-8?q?with=20bad=20email=20address?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In the future we might want to validate email addresses before attempting to search by them. But for a first pass we can just return no results when a user types in something that isn’t an email address or phone number. It definitely better than returning a 500. 
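The hunk that follows nests the email check inside the phone-number check. As a standalone illustration of that fallback order, here is a minimal sketch using the same notifications_utils validators and exceptions the patch imports; the helper name normalise_search_term is illustrative rather than part of the patch.

from notifications_utils.recipients import (
    validate_and_format_phone_number,
    validate_and_format_email_address,
    InvalidPhoneError,
    InvalidEmailError,
)


def normalise_search_term(search_term):
    # Try the strictest interpretation first (phone number), then email address,
    # and finally fall back to the raw term so the search returns no results
    # instead of raising and turning into a 500.
    try:
        return validate_and_format_phone_number(search_term)
    except InvalidPhoneError:
        try:
            return validate_and_format_email_address(search_term)
        except InvalidEmailError:
            return search_term
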
--- app/dao/notifications_dao.py | 8 ++++++-- tests/app/dao/test_notification_dao.py | 14 ++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 7ebf9170c..f4ff2a24f 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -9,7 +9,8 @@ from flask import current_app from notifications_utils.recipients import ( validate_and_format_phone_number, validate_and_format_email_address, - InvalidPhoneError + InvalidPhoneError, + InvalidEmailError, ) from werkzeug.datastructures import MultiDict from sqlalchemy import (desc, func, or_, and_, asc) @@ -477,7 +478,10 @@ def dao_get_notifications_by_to_field(service_id, search_term, statuses=None): try: normalised = validate_and_format_phone_number(search_term) except InvalidPhoneError: - normalised = validate_and_format_email_address(search_term) + try: + normalised = validate_and_format_email_address(search_term) + except InvalidEmailError: + normalised = search_term filters = [ Notification.service_id == service_id, diff --git a/tests/app/dao/test_notification_dao.py b/tests/app/dao/test_notification_dao.py index a6b69cd24..dc0f3fc34 100644 --- a/tests/app/dao/test_notification_dao.py +++ b/tests/app/dao/test_notification_dao.py @@ -1772,6 +1772,20 @@ def test_dao_get_notifications_by_to_field_search_is_not_case_sensitive(sample_t assert notification.id in notification_ids +@pytest.mark.parametrize('to', [ + 'not@email', '123' +]) +def test_dao_get_notifications_by_to_field_accepts_invalid_phone_numbers_and_email_addresses( + sample_template, + to, +): + notification = create_notification( + template=sample_template, to_field='test@example.com', normalised_to='test@example.com' + ) + results = dao_get_notifications_by_to_field(notification.service_id, to) + assert len(results) == 0 + + def test_dao_get_notifications_by_to_field_search_ignores_spaces(sample_template): notification1 = create_notification( template=sample_template, to_field='+447700900855', normalised_to='447700900855' From 8f7afcdb16720faf59995dac15edbdc3902b4687 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 30 May 2017 17:07:43 +0100 Subject: [PATCH 39/92] Did some work around the delete queues script --- .gitignore | 2 + scripts/delete_sqs_queues.py | 72 +++++++++++++++++++++++++++--------- 2 files changed, 56 insertions(+), 18 deletions(-) mode change 100644 => 100755 scripts/delete_sqs_queues.py diff --git a/.gitignore b/.gitignore index d19df2e94..12f77b15c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +queues.csv + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/scripts/delete_sqs_queues.py b/scripts/delete_sqs_queues.py old mode 100644 new mode 100755 index bdbf6ff65..1e274b4ed --- a/scripts/delete_sqs_queues.py +++ b/scripts/delete_sqs_queues.py @@ -1,10 +1,33 @@ +""" + +Script to manage SQS queues. Can list or delete queues. + +Uses boto, so relies on correctly set up AWS access keys and tokens. + +In principle use this script to dump details of all queues in a gievn environment, and then +manipulate the resultant CSV file so that it contains the queues you want to delete. + +Very hands on. Starter for a more automagic process. + +Usage: + scripts/delete_sqs_queues.py + + options are: + - list: dumps queue details to local file queues.csv in current directory. + - delete: delete queues from local file queues.csv in current directory. 
+ +Example: + scripts/delete_sqs_queues.py list delete +""" + +from docopt import docopt import boto3 import csv from datetime import datetime -from pprint import pprint -import os -client = boto3.client('sqs', region_name=os.getenv('AWS_REGION')) +FILE_NAME = "/tmp/queues.csv" + +client = boto3.client('sqs', region_name='eu-west-1') def _formatted_date_from_timestamp(timestamp): @@ -27,15 +50,19 @@ def get_queue_attributes(queue_name): ] ) queue_attributes = response['Attributes'] + queue_attributes.update({ + 'QueueUrl': queue_name + }) return queue_attributes -def delete_queue(queue_name): +def delete_queue(queue_url): + print("DELETEING {}".format(queue_url)) response = client.delete_queue( - QueueUrl=queue_name + QueueUrl=queue_url ) if response['ResponseMetadata']['HTTPStatusCode'] == 200: - print('Deleted queue successfully') + print('Deleted queue successfully {}'.format(response['ResponseMetadata'])) else: print('Error occured when attempting to delete queue') pprint(response) @@ -43,10 +70,10 @@ def delete_queue(queue_name): def output_to_csv(queue_attributes): - csv_name = 'queues.csv' - with open(csv_name, 'w') as csvfile: + with open(FILE_NAME, 'w') as csvfile: fieldnames = [ 'Queue Name', + 'Queue URL', 'Number of Messages', 'Number of Messages Delayed', 'Number of Messages Not Visible', @@ -55,23 +82,19 @@ def output_to_csv(queue_attributes): writer = csv.DictWriter(csvfile, fieldnames=fieldnames) writer.writeheader() for queue_attr in queue_attributes: - queue_url = client.get_queue_url( - QueueName=queue_attr['QueueArn'] - )['QueueUrl'] writer.writerow({ 'Queue Name': queue_attr['QueueArn'], - 'Queue URL': queue_url, + 'Queue URL': queue_attr['QueueUrl'], 'Number of Messages': queue_attr['ApproximateNumberOfMessages'], 'Number of Messages Delayed': queue_attr['ApproximateNumberOfMessagesDelayed'], 'Number of Messages Not Visible': queue_attr['ApproximateNumberOfMessagesNotVisible'], 'Created': _formatted_date_from_timestamp(queue_attr['CreatedTimestamp']) }) - return csv_name -def read_from_csv(csv_name): +def read_from_csv(): queue_urls = [] - with open(csv_name, 'r') as csvfile: + with open(FILE_NAME, 'r') as csvfile: next(csvfile) rows = csv.reader(csvfile, delimiter=',') for row in rows: @@ -79,6 +102,19 @@ def read_from_csv(csv_name): return queue_urls -queues = get_queues() -for queue in queues: - delete_queue(queue) +if __name__ == "__main__": + arguments = docopt(__doc__) + + if arguments[''] == 'list': + queues = get_queues() + queue_attributes = [] + for queue in queues: + queue_attributes.append(get_queue_attributes(queue)) + output_to_csv(queue_attributes) + elif arguments[''] == 'delete': + queues_to_delete = read_from_csv() + for queue in queues_to_delete: + delete_queue(queue) + else: + print("UNKNOWN COMMAND") + exit(1) From 68e15b57f591bda31654ae93c130591c1e967ac3 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 30 May 2017 17:29:14 +0100 Subject: [PATCH 40/92] Fixed pep8 --- scripts/delete_sqs_queues.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/delete_sqs_queues.py b/scripts/delete_sqs_queues.py index 1e274b4ed..1ab641f76 100755 --- a/scripts/delete_sqs_queues.py +++ b/scripts/delete_sqs_queues.py @@ -4,20 +4,20 @@ Script to manage SQS queues. Can list or delete queues. Uses boto, so relies on correctly set up AWS access keys and tokens. 
-In principle use this script to dump details of all queues in a gievn environment, and then +In principle use this script to dump details of all queues in a gievn environment, and then manipulate the resultant CSV file so that it contains the queues you want to delete. Very hands on. Starter for a more automagic process. Usage: scripts/delete_sqs_queues.py - + options are: - list: dumps queue details to local file queues.csv in current directory. - delete: delete queues from local file queues.csv in current directory. Example: - scripts/delete_sqs_queues.py list delete + scripts/delete_sqs_queues.py list delete """ from docopt import docopt @@ -50,9 +50,7 @@ def get_queue_attributes(queue_name): ] ) queue_attributes = response['Attributes'] - queue_attributes.update({ - 'QueueUrl': queue_name - }) + queue_attributes.update({'QueueUrl': queue_name}) return queue_attributes From 566e56f888cedc381d6f8550510dde4a12e9bfe2 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 31 May 2017 10:35:27 +0100 Subject: [PATCH 41/92] Fix downgrade script --- migrations/versions/0088_add_schedule_serv_perm.py | 1 + 1 file changed, 1 insertion(+) diff --git a/migrations/versions/0088_add_schedule_serv_perm.py b/migrations/versions/0088_add_schedule_serv_perm.py index 85b94118b..0882c7c94 100644 --- a/migrations/versions/0088_add_schedule_serv_perm.py +++ b/migrations/versions/0088_add_schedule_serv_perm.py @@ -20,4 +20,5 @@ def upgrade(): def downgrade(): op.get_bind() + op.execute("delete from service_permissions where permission = 'schedule_notifications'") op.execute("delete from service_permission_types where name = 'schedule_notifications'") From 25c8f71f2cb3802efd326da4e0bea5f5cc34dfd2 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 31 May 2017 11:47:52 +0100 Subject: [PATCH 42/92] Reduced memory footprint of the API apps. 
Staging and prod now default to 768M of RAM, down from a 1G saves 512M per instance type Preview down to 256M per app --- manifest-api-preview.yml | 3 +++ manifest-delivery-base.yml | 3 ++- manifest-delivery-preview.yml | 1 + manifest-delivery-production.yml | 2 +- manifest-delivery-staging.yml | 2 +- 5 files changed, 8 insertions(+), 3 deletions(-) diff --git a/manifest-api-preview.yml b/manifest-api-preview.yml index 04b396388..a40990194 100644 --- a/manifest-api-preview.yml +++ b/manifest-api-preview.yml @@ -6,3 +6,6 @@ routes: - route: notify-api-preview.cloudapps.digital - route: api-paas.notify.works - route: api.notify.works + +instances: 1 +memory: 256M diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index c68d36965..2eaf380a6 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -24,6 +24,7 @@ applications: - name: notify-delivery-worker-database command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks + memory: 1G env: NOTIFY_APP_NAME: delivery-worker-database @@ -34,13 +35,13 @@ applications: - name: notify-delivery-worker-sender command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks + memory: 1G env: NOTIFY_APP_NAME: delivery-worker-sender - name: notify-delivery-worker-periodic command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics,periodic-tasks,statistics-tasks instances: 1 - memory: 2G env: NOTIFY_APP_NAME: delivery-worker-periodic diff --git a/manifest-delivery-preview.yml b/manifest-delivery-preview.yml index d628e5fc9..2bbb3c0dc 100644 --- a/manifest-delivery-preview.yml +++ b/manifest-delivery-preview.yml @@ -1,3 +1,4 @@ --- inherit: manifest-delivery-base.yml +memory: 256M diff --git a/manifest-delivery-production.yml b/manifest-delivery-production.yml index 53c8d2f12..d2c2ba647 100644 --- a/manifest-delivery-production.yml +++ b/manifest-delivery-production.yml @@ -3,4 +3,4 @@ inherit: manifest-delivery-base.yml instances: 2 -memory: 1G +memory: 768M diff --git a/manifest-delivery-staging.yml b/manifest-delivery-staging.yml index 53c8d2f12..d2c2ba647 100644 --- a/manifest-delivery-staging.yml +++ b/manifest-delivery-staging.yml @@ -3,4 +3,4 @@ inherit: manifest-delivery-base.yml instances: 2 -memory: 1G +memory: 768M From 0b642623fb6b3d9347c5755a8b7f640a02628172 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 31 May 2017 13:34:54 +0100 Subject: [PATCH 43/92] Added table and model for letter rates. The rates for the letters are per page, therefore it seemed better to build a different table. 
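Since the diff below only defines the tables, a hypothetical lookup sketch may help show how the per-page structure is meant to be used: pick the LetterRate version in force at a given date, then read the rate for the letter's page count from LetterRateDetail. This helper is not part of the patch and assumes a Flask-SQLAlchemy app context with the models added below.

from app.models import LetterRate, LetterRateDetail


def get_letter_rate(page_total, as_of):
    # Most recent rate version that had come into force by `as_of`.
    rate_version = LetterRate.query.filter(
        LetterRate.valid_from <= as_of
    ).order_by(
        LetterRate.valid_from.desc()
    ).first()

    # One row per page count for that version, e.g. 1 page -> 29.3, 2 pages -> 32.
    detail = LetterRateDetail.query.filter_by(
        letter_rate_id=rate_version.id,
        page_total=page_total
    ).one()

    return detail.rate
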
--- app/models.py | 17 ++++++++ migrations/versions/0088_letter_billing.py | 50 ++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 migrations/versions/0088_letter_billing.py diff --git a/app/models.py b/app/models.py index 8c1e22e79..153ef57e8 100644 --- a/app/models.py +++ b/app/models.py @@ -1141,3 +1141,20 @@ class JobStatistics(db.Model): ) the_string += "created at {}".format(self.created_at) return the_string + + +class LetterRate(db.Model): + __tablename__ = 'letter_rates' + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + valid_from = valid_from = db.Column(db.DateTime, nullable=False) + + +class LetterRateDetail(db.Model): + __tablename__ = 'letter_rate_details' + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + letter_rate_id = db.Column(UUID(as_uuid=True), db.ForeignKey('letter_rates.id'), index=True, nullable=False) + letter_rate = db.relationship('LetterRate', backref='letter_rates') + page_total = db.Column(db.Integer, nullable=False) + rate = db.Column(db.Numeric(), nullable=False) diff --git a/migrations/versions/0088_letter_billing.py b/migrations/versions/0088_letter_billing.py new file mode 100644 index 000000000..9c184d10a --- /dev/null +++ b/migrations/versions/0088_letter_billing.py @@ -0,0 +1,50 @@ +"""empty message + +Revision ID: 0088_letter_billing +Revises: 0087_scheduled_notifications +Create Date: 2017-05-31 11:43:55.744631 + +""" +import uuid +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0088_letter_billing' +down_revision = '0087_scheduled_notifications' + + +def upgrade(): + op.create_table('letter_rates', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('valid_from', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('letter_rate_details', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('letter_rate_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('page_total', sa.Integer(), nullable=False), + sa.Column('rate', sa.Numeric(), nullable=False), + sa.ForeignKeyConstraint(['letter_rate_id'], ['letter_rates.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_letter_rate_details_letter_rate_id'), 'letter_rate_details', ['letter_rate_id'], + unique=False) + + op.get_bind() + letter_id = uuid.uuid4() + op.execute("insert into letter_rates(id, valid_from) values('{}', '2017-03-31 23:00:00')".format(letter_id)) + insert_details = "insert into letter_rate_details(id, letter_rate_id, page_total, rate) values('{}', '{}', {}, {})" + op.execute( + insert_details.format(uuid.uuid4(), letter_id, 1, 29.3)) + op.execute( + insert_details.format(uuid.uuid4(), letter_id, 2, 32)) + op.execute( + insert_details.format(uuid.uuid4(), letter_id, 3, 35)) + + +def downgrade(): + op.get_bind() + op.drop_index('ix_letter_rate_details_letter_rate_id') + op.drop_table('letter_rate_details') + op.drop_table('letter_rates') From ea0ba8d87ab4aa92eb794c40a32e4da6429236c8 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 31 May 2017 14:52:48 +0100 Subject: [PATCH 44/92] Revert "Remove nulls from sms_sender" --- app/models.py | 2 +- migrations/versions/0088_govuk_sms_sender.py | 25 ----- tests/app/delivery/test_send_to_providers.py | 1 + tests/app/service/test_rest.py | 108 +++++++++++-------- 4 files changed, 66 insertions(+), 70 deletions(-) delete mode 100644 migrations/versions/0088_govuk_sms_sender.py diff --git 
a/app/models.py b/app/models.py index 85bbcfbd1..8c1e22e79 100644 --- a/app/models.py +++ b/app/models.py @@ -188,7 +188,7 @@ class Service(db.Model, Versioned): created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), index=True, nullable=False) reply_to_email_address = db.Column(db.Text, index=False, unique=False, nullable=True) letter_contact_block = db.Column(db.Text, index=False, unique=False, nullable=True) - sms_sender = db.Column(db.String(11), nullable=False, default=lambda: current_app.config['FROM_NUMBER']) + sms_sender = db.Column(db.String(11), nullable=True, default=lambda: current_app.config['FROM_NUMBER']) organisation_id = db.Column(UUID(as_uuid=True), db.ForeignKey('organisation.id'), index=True, nullable=True) organisation = db.relationship('Organisation') dvla_organisation_id = db.Column( diff --git a/migrations/versions/0088_govuk_sms_sender.py b/migrations/versions/0088_govuk_sms_sender.py deleted file mode 100644 index 3d580d6ad..000000000 --- a/migrations/versions/0088_govuk_sms_sender.py +++ /dev/null @@ -1,25 +0,0 @@ -"""empty message - -Revision ID: 0088_govuk_sms_sender -Revises: 0087_scheduled_notifications -Create Date: 2017-05-22 13:46:09.584801 - -""" - -# revision identifiers, used by Alembic. -revision = '0088_govuk_sms_sender' -down_revision = '0087_scheduled_notifications' - -from alembic import op - - -def upgrade(): - op.execute("UPDATE services SET sms_sender = 'GOVUK' where sms_sender is null") - op.execute("UPDATE services_history SET sms_sender = 'GOVUK' where sms_sender is null") - op.alter_column('services', 'sms_sender', nullable=False) - op.alter_column('services_history', 'sms_sender', nullable=False) - - -def downgrade(): - op.alter_column('services_history', 'sms_sender', nullable=True) - op.alter_column('services', 'sms_sender', nullable=True) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index e1a4ec4a2..5cb467ac3 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -627,6 +627,7 @@ def test_should_set_international_phone_number_to_sent_status( # if 40604 is actually in DB then treat that as if entered manually ('40604', '40604', 'bar'), # 'testing' is the FROM_NUMBER during unit tests + (None, 'testing', 'Sample service: bar'), ('testing', 'testing', 'Sample service: bar'), ]) def test_should_handle_sms_sender_and_prefix_message( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 18c384722..46adbba1b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1315,57 +1315,61 @@ def test_get_only_api_created_notifications_for_service( assert response.status_code == 200 -def test_set_sms_sender_for_service(client, sample_service): - data = { - 'sms_sender': 'elevenchars', - } +def test_set_sms_sender_for_service(notify_api, sample_service): + with notify_api.test_request_context(): + with notify_api.test_client() as client: + auth_header = create_authorization_header() + resp = client.get( + '/service/{}'.format(sample_service.id), + headers=[auth_header] + ) + json_resp = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 200 + assert json_resp['data']['name'] == sample_service.name - auth_header = create_authorization_header() + data = { + 'sms_sender': 'elevenchars', + } - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - 
result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert result['data']['sms_sender'] == 'elevenchars' + auth_header = create_authorization_header() + + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 200 + assert result['data']['sms_sender'] == 'elevenchars' -def test_set_sms_sender_for_service_rejects_invalid_characters(client, sample_service): - data = { - 'sms_sender': 'invalid####', - } +def test_set_sms_sender_for_service_rejects_invalid_characters(notify_api, sample_service): + with notify_api.test_request_context(): + with notify_api.test_client() as client: + auth_header = create_authorization_header() + resp = client.get( + '/service/{}'.format(sample_service.id), + headers=[auth_header] + ) + json_resp = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 200 + assert json_resp['data']['name'] == sample_service.name - auth_header = create_authorization_header() + data = { + 'sms_sender': 'invalid####', + } - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 400 - assert result['result'] == 'error' - assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} + auth_header = create_authorization_header() - -def test_set_sms_sender_for_service_rejects_null(client, sample_service): - data = { - 'sms_sender': None, - } - - auth_header = create_authorization_header() - - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 400 - assert result['result'] == 'error' - assert result['message'] == {'sms_sender': ['Field may not be null.']} + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 400 + assert result['result'] == 'error' + assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} @pytest.mark.parametrize('today_only,stats', [ @@ -1925,6 +1929,22 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan assert not send_notification_mock.called +def test_update_service_works_when_sms_sender_is_null(sample_service, client, mocker): + sample_service.sms_sender = None + data = {'name': 'new name'} + + resp = client.post( + 'service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[create_authorization_header()], + content_type='application/json' + ) + + assert resp.status_code == 200 + # make sure it wasn't changed to not-null under the hood + assert sample_service.sms_sender is None + + def test_search_for_notification_by_to_field_filters_by_status(client, notify_db, notify_db_session): create_notification = partial( create_sample_notification, From b98b97c4a21c87f92dbc63784483417c2ec66daf Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 31 May 2017 15:06:21 +0100 Subject: [PATCH 45/92] Added a comment about delete queues --- scripts/delete_sqs_queues.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/scripts/delete_sqs_queues.py b/scripts/delete_sqs_queues.py index 1ab641f76..b167ce392 100755 --- a/scripts/delete_sqs_queues.py +++ b/scripts/delete_sqs_queues.py @@ -55,6 +55,7 @@ def get_queue_attributes(queue_name): def delete_queue(queue_url): + # Note that deleting a queue returns 200 OK if it doesn't exist print("DELETEING {}".format(queue_url)) response = client.delete_queue( QueueUrl=queue_url From 726371269ab9ca059a30d51585a6001ad2b80a01 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Wed, 31 May 2017 15:38:57 +0100 Subject: [PATCH 46/92] Use python3 on jenkins otherwise SSL erorr occurs --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 14cae9b0c..4142722ff 100644 --- a/Makefile +++ b/Makefile @@ -306,5 +306,5 @@ cf-push: .PHONY: check-if-migrations-to-run check-if-migrations-to-run: - @echo $(shell python scripts/check_if_new_migration.py) + @echo $(shell python3 scripts/check_if_new_migration.py) From 502024b3bc997e109fab3ef7a78e54a17da059c7 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 31 May 2017 15:56:56 +0100 Subject: [PATCH 47/92] Boost API to 1G from 256M --- manifest-api-preview.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest-api-preview.yml b/manifest-api-preview.yml index a40990194..eccae21b1 100644 --- a/manifest-api-preview.yml +++ b/manifest-api-preview.yml @@ -8,4 +8,4 @@ routes: - route: api.notify.works instances: 1 -memory: 256M +memory: 1G From 3c416d36315eb351a771b407d851880ccd91c5c7 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 31 May 2017 16:15:25 +0100 Subject: [PATCH 48/92] Simple end point for fire text inbound SMS callbacks. --- app/notifications/receive_notifications.py | 12 +++++++++++- .../app/notifications/test_receive_notification.py | 13 +++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 08122fb41..9c6b63c48 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -1,5 +1,5 @@ from flask import Blueprint -from flask import current_app +from flask import current_app, jsonify from flask import request from app.errors import register_errors @@ -15,3 +15,13 @@ def receive_mmg_sms(): current_app.logger.info("Recieve notification form data: {}".format(post_data)) return "RECEIVED" + + +@receive_notifications_blueprint.route('/notifications/sms/receive/firetext', methods=['POST']) +def receive_firetext_sms(): + post_data = request.get_json() + current_app.logger.info("Received Firetext notification form data: {}".format(post_data)) + + return jsonify({ + "status": "ok" + }), 200 diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index f325fe6f9..91eac620a 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -16,3 +16,16 @@ def test_receive_notification_returns_received_to_mmg(client): assert response.status_code == 200 assert response.get_data(as_text=True) == 'RECEIVED' + + +def test_receive_notification_returns_received_to_firetext(client): + data = {"some": "thing"} + response = client.post( + path='/notifications/sms/receive/firetext', + data=json.dumps(data), + headers=[('Content-Type', 'application/json')]) + + assert response.status_code == 200 + result = json.loads(response.get_data(as_text=True)) + + assert result['status'] == 'ok' 
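At this point in the series the Firetext callback just logs the body and returns a JSON status, so it can be smoke-tested with a plain HTTP POST. The sketch below uses the requests library against a locally running API; the base URL and port are assumptions, and the payload mirrors the test above.

import requests

# Assumed local base URL; adjust to wherever the API is running.
response = requests.post(
    'http://localhost:6011/notifications/sms/receive/firetext',
    json={'some': 'thing'},
)

assert response.status_code == 200
assert response.json()['status'] == 'ok'
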
From e3baf71016c47831e45d8f19cd18dfeea24fc9c3 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 31 May 2017 16:22:56 +0100 Subject: [PATCH 49/92] Update error message --- app/notifications/validators.py | 2 +- tests/app/v2/notifications/test_post_notifications.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/notifications/validators.py b/app/notifications/validators.py index c7e97c26e..5663a6046 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -95,4 +95,4 @@ def check_sms_content_char_count(content_count): def service_can_schedule_notification(service, scheduled_for): if scheduled_for: if SCHEDULE_NOTIFICATIONS not in [p.permission for p in service.permissions]: - raise BadRequestError(message="Your service must be invited to schedule notifications via the API.") + raise BadRequestError(message="Cannot schedule notifications (this feature is invite-only)") diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index 9f938c13d..6cf109f6e 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -397,4 +397,4 @@ def test_post_notification_raises_bad_request_if_service_not_invited_to_schedule assert response.status_code == 400 error_json = json.loads(response.get_data(as_text=True)) assert error_json['errors'] == [ - {"error": "BadRequestError", "message": 'Your service must be invited to schedule notifications via the API.'}] + {"error": "BadRequestError", "message": 'Cannot schedule notifications (this feature is invite-only)'}] From 8e3b20e51d59260ccf75c6614d09acf4347fcd54 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 31 May 2017 16:46:56 +0100 Subject: [PATCH 50/92] Gone back to 1G as fails if less --- manifest-delivery-base.yml | 2 -- manifest-delivery-preview.yml | 2 +- manifest-delivery-production.yml | 2 +- manifest-delivery-staging.yml | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index 2eaf380a6..4ba8a7550 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -24,7 +24,6 @@ applications: - name: notify-delivery-worker-database command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks - memory: 1G env: NOTIFY_APP_NAME: delivery-worker-database @@ -35,7 +34,6 @@ applications: - name: notify-delivery-worker-sender command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks - memory: 1G env: NOTIFY_APP_NAME: delivery-worker-sender diff --git a/manifest-delivery-preview.yml b/manifest-delivery-preview.yml index 2bbb3c0dc..492bc6c55 100644 --- a/manifest-delivery-preview.yml +++ b/manifest-delivery-preview.yml @@ -1,4 +1,4 @@ --- inherit: manifest-delivery-base.yml -memory: 256M +memory: 1G diff --git a/manifest-delivery-production.yml b/manifest-delivery-production.yml index d2c2ba647..53c8d2f12 100644 --- a/manifest-delivery-production.yml +++ b/manifest-delivery-production.yml @@ -3,4 +3,4 @@ inherit: manifest-delivery-base.yml instances: 2 -memory: 768M +memory: 1G diff --git a/manifest-delivery-staging.yml b/manifest-delivery-staging.yml index d2c2ba647..53c8d2f12 100644 --- a/manifest-delivery-staging.yml +++ b/manifest-delivery-staging.yml @@ -3,4 +3,4 @@ inherit: 
manifest-delivery-base.yml instances: 2 -memory: 768M +memory: 1G From 1530908228afd4cea364d195c4a6e6c3db3a2a9f Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 31 May 2017 17:31:06 +0100 Subject: [PATCH 51/92] manually set sms_sender when creating service sqlalchemy default doesn't appear to work correctly when there is a difference between the DB schema and the code (ie: during a migration) in this case, lets just set sms_sender ourselves. we can't write unit tests for this because this only happens when the db is in an inconsistent state :weary: --- app/dao/services_dao.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 572e36e63..ff06c356c 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -3,6 +3,7 @@ from datetime import date, datetime, timedelta from sqlalchemy import asc, func from sqlalchemy.orm import joinedload +from flask import current_app from app import db from app.dao.dao_utils import ( @@ -131,6 +132,12 @@ def dao_fetch_service_by_id_and_user(service_id, user_id): @transactional @version_class(Service) def dao_create_service(service, user, service_id=None, service_permissions=[SMS_TYPE, EMAIL_TYPE]): + # the default property does not appear to work when there is a difference between the sqlalchemy schema and the + # db schema (ie: during a migration), so we have to set sms_sender manually here. After the GOVUK sms_sender + # migration is completed, this code should be able to be removed. + if not service.sms_sender: + service.sms_sender = current_app.config['FROM_NUMBER'] + from app.dao.permissions_dao import permission_dao service.users.append(user) permission_dao.add_default_service_permissions_for_user(user, service) From a7fd624db5a14530dd867bbdab41527244b9bf04 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 1 Jun 2017 08:21:18 +0100 Subject: [PATCH 52/92] Added simple logging endpoint for fire text inbound SMS calls - logs post data - OK to log all as not currently in use so no real user data expected. 
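The change below swaps request.get_json() for request.form because Firetext posts an application/x-www-form-urlencoded body rather than JSON. Purely as an illustration of what request.form will contain, the standard-library parse_qs produces the same mapping for the body used in the updated test:

from urllib.parse import parse_qs

body = "source=07999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00"

# Flask builds request.form from a body like this; parse_qs shows the resulting keys.
parsed = {key: values[0] for key, values in parse_qs(body).items()}

assert parsed == {
    'source': '07999999999',
    'destination': '07111111111',
    'message': 'this is a message',
    'time': '2017-01-01 12:00:00',
}
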
--- app/notifications/receive_notifications.py | 2 +- tests/app/notifications/test_receive_notification.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 9c6b63c48..923701cda 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -19,7 +19,7 @@ def receive_mmg_sms(): @receive_notifications_blueprint.route('/notifications/sms/receive/firetext', methods=['POST']) def receive_firetext_sms(): - post_data = request.get_json() + post_data = request.form current_app.logger.info("Received Firetext notification form data: {}".format(post_data)) return jsonify({ diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 91eac620a..e82d3e638 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -19,11 +19,12 @@ def test_receive_notification_returns_received_to_mmg(client): def test_receive_notification_returns_received_to_firetext(client): - data = {"some": "thing"} + data = "source=07999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00" + response = client.post( path='/notifications/sms/receive/firetext', - data=json.dumps(data), - headers=[('Content-Type', 'application/json')]) + data=data, + headers=[('Content-Type', 'application/x-www-form-urlencoded')]) assert response.status_code == 200 result = json.loads(response.get_data(as_text=True)) From d33698216cb7849a665de069cafaa8bad01a4bdb Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 1 Jun 2017 11:00:26 +0100 Subject: [PATCH 53/92] Revert "Revert "Remove nulls from sms_sender"" --- app/models.py | 2 +- migrations/versions/0088_govuk_sms_sender.py | 25 +++++ tests/app/delivery/test_send_to_providers.py | 1 - tests/app/service/test_rest.py | 108 ++++++++----------- 4 files changed, 70 insertions(+), 66 deletions(-) create mode 100644 migrations/versions/0088_govuk_sms_sender.py diff --git a/app/models.py b/app/models.py index 8c1e22e79..85bbcfbd1 100644 --- a/app/models.py +++ b/app/models.py @@ -188,7 +188,7 @@ class Service(db.Model, Versioned): created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), index=True, nullable=False) reply_to_email_address = db.Column(db.Text, index=False, unique=False, nullable=True) letter_contact_block = db.Column(db.Text, index=False, unique=False, nullable=True) - sms_sender = db.Column(db.String(11), nullable=True, default=lambda: current_app.config['FROM_NUMBER']) + sms_sender = db.Column(db.String(11), nullable=False, default=lambda: current_app.config['FROM_NUMBER']) organisation_id = db.Column(UUID(as_uuid=True), db.ForeignKey('organisation.id'), index=True, nullable=True) organisation = db.relationship('Organisation') dvla_organisation_id = db.Column( diff --git a/migrations/versions/0088_govuk_sms_sender.py b/migrations/versions/0088_govuk_sms_sender.py new file mode 100644 index 000000000..3d580d6ad --- /dev/null +++ b/migrations/versions/0088_govuk_sms_sender.py @@ -0,0 +1,25 @@ +"""empty message + +Revision ID: 0088_govuk_sms_sender +Revises: 0087_scheduled_notifications +Create Date: 2017-05-22 13:46:09.584801 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0088_govuk_sms_sender' +down_revision = '0087_scheduled_notifications' + +from alembic import op + + +def upgrade(): + op.execute("UPDATE services SET sms_sender = 'GOVUK' where sms_sender is null") + op.execute("UPDATE services_history SET sms_sender = 'GOVUK' where sms_sender is null") + op.alter_column('services', 'sms_sender', nullable=False) + op.alter_column('services_history', 'sms_sender', nullable=False) + + +def downgrade(): + op.alter_column('services_history', 'sms_sender', nullable=True) + op.alter_column('services', 'sms_sender', nullable=True) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 5cb467ac3..e1a4ec4a2 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -627,7 +627,6 @@ def test_should_set_international_phone_number_to_sent_status( # if 40604 is actually in DB then treat that as if entered manually ('40604', '40604', 'bar'), # 'testing' is the FROM_NUMBER during unit tests - (None, 'testing', 'Sample service: bar'), ('testing', 'testing', 'Sample service: bar'), ]) def test_should_handle_sms_sender_and_prefix_message( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 46adbba1b..18c384722 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1315,61 +1315,57 @@ def test_get_only_api_created_notifications_for_service( assert response.status_code == 200 -def test_set_sms_sender_for_service(notify_api, sample_service): - with notify_api.test_request_context(): - with notify_api.test_client() as client: - auth_header = create_authorization_header() - resp = client.get( - '/service/{}'.format(sample_service.id), - headers=[auth_header] - ) - json_resp = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert json_resp['data']['name'] == sample_service.name +def test_set_sms_sender_for_service(client, sample_service): + data = { + 'sms_sender': 'elevenchars', + } - data = { - 'sms_sender': 'elevenchars', - } + auth_header = create_authorization_header() - auth_header = create_authorization_header() - - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert result['data']['sms_sender'] == 'elevenchars' + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 200 + assert result['data']['sms_sender'] == 'elevenchars' -def test_set_sms_sender_for_service_rejects_invalid_characters(notify_api, sample_service): - with notify_api.test_request_context(): - with notify_api.test_client() as client: - auth_header = create_authorization_header() - resp = client.get( - '/service/{}'.format(sample_service.id), - headers=[auth_header] - ) - json_resp = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 200 - assert json_resp['data']['name'] == sample_service.name +def test_set_sms_sender_for_service_rejects_invalid_characters(client, sample_service): + data = { + 'sms_sender': 'invalid####', + } - data = { - 'sms_sender': 'invalid####', - } + auth_header = create_authorization_header() - auth_header = create_authorization_header() + resp = client.post( + 
'/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 400 + assert result['result'] == 'error' + assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == 400 - assert result['result'] == 'error' - assert result['message'] == {'sms_sender': ['Only alphanumeric characters allowed']} + +def test_set_sms_sender_for_service_rejects_null(client, sample_service): + data = { + 'sms_sender': None, + } + + auth_header = create_authorization_header() + + resp = client.post( + '/service/{}'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header] + ) + result = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == 400 + assert result['result'] == 'error' + assert result['message'] == {'sms_sender': ['Field may not be null.']} @pytest.mark.parametrize('today_only,stats', [ @@ -1929,22 +1925,6 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan assert not send_notification_mock.called -def test_update_service_works_when_sms_sender_is_null(sample_service, client, mocker): - sample_service.sms_sender = None - data = {'name': 'new name'} - - resp = client.post( - 'service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[create_authorization_header()], - content_type='application/json' - ) - - assert resp.status_code == 200 - # make sure it wasn't changed to not-null under the hood - assert sample_service.sms_sender is None - - def test_search_for_notification_by_to_field_filters_by_status(client, notify_db, notify_db_session): create_notification = partial( create_sample_notification, From 78d071f22fc292bf35ce8601ef3061c5562bbd08 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 1 Jun 2017 09:35:13 +0100 Subject: [PATCH 54/92] Ignore one-off messages in job list on dashboard MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Same as how we ignore ‘send yourself a test’ messages (see: d8467bfc3cdca0f7dcd019f93ca20d981ef28760). 
The dashboard gets clogged up with one off messages otherwise, which affects: - performance - users ability to find their jobs --- app/config.py | 1 + app/dao/jobs_dao.py | 3 ++- tests/app/dao/test_jobs_dao.py | 16 ++++++++++++++-- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/app/config.py b/app/config.py index f2cd39cdb..4bb5d030a 100644 --- a/app/config.py +++ b/app/config.py @@ -107,6 +107,7 @@ class Config(object): SMS_CHAR_COUNT_LIMIT = 495 BRANDING_PATH = '/images/email-template/crests/' TEST_MESSAGE_FILENAME = 'Test message' + ONE_OFF_MESSAGE_FILENAME = 'One-off message' MAX_VERIFY_CODE_COUNT = 10 NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553' diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index b712b0e70..d2ec8c367 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -53,7 +53,8 @@ def dao_get_job_by_service_id_and_job_id(service_id, job_id): def dao_get_jobs_by_service_id(service_id, limit_days=None, page=1, page_size=50, statuses=None): query_filter = [ Job.service_id == service_id, - Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'] + Job.original_file_name != current_app.config['TEST_MESSAGE_FILENAME'], + Job.original_file_name != current_app.config['ONE_OFF_MESSAGE_FILENAME'], ] if limit_days is not None: query_filter.append(cast(Job.created_at, sql_date) >= days_ago(limit_days)) diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 034048a32..3ee8e733b 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -1,5 +1,6 @@ from datetime import datetime, timedelta from functools import partial +import pytest import uuid from freezegun import freeze_time @@ -334,13 +335,24 @@ def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_ assert res.items[1].created_at == datetime(2015, 1, 1, 7) -def test_get_jobs_for_service_doesnt_return_test_messages(notify_db, notify_db_session, sample_template, sample_job): +@pytest.mark.parametrize('file_name', [ + 'Test message', + 'One-off message' +]) +def test_get_jobs_for_service_doesnt_return_test_messages( + notify_db, + notify_db_session, + sample_template, + sample_job, + file_name, +): test_job = create_job( notify_db, notify_db_session, sample_template.service, sample_template, - original_file_name='Test message') + original_file_name=file_name, + ) jobs = dao_get_jobs_by_service_id(sample_job.service_id).items From dd9fd6cf92c96df0fcce3bbc786de8c9ff143912 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 1 Jun 2017 13:13:51 +0100 Subject: [PATCH 55/92] still return RECEIVED even if we couldn't find a matching service mmg don't need to retry that message or anything. just log it. 
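Condensed to its essentials, the behaviour the diff below introduces is: look up services by the inbound number, and if there is not exactly one match, log it, bump a statsd counter and still acknowledge the message so MMG does not retry. The sketch uses the imports shown in the hunk; the helper name is illustrative only, not part of the patch.

from flask import current_app

from app import statsd_client
from app.dao.services_dao import dao_fetch_services_by_sms_sender


def service_for_inbound_number(inbound_number):
    potential_services = dao_fetch_services_by_sms_sender(inbound_number)

    if len(potential_services) != 1:
        # The broken number-to-service mapping is our problem, not MMG's,
        # so the caller should still return 'RECEIVED' with a 200.
        current_app.logger.error(
            'Inbound number "{}" not associated with exactly one service'.format(inbound_number)
        )
        statsd_client.incr('inbound.mmg.failed')
        return None

    return potential_services[0]
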
--- app/notifications/receive_notifications.py | 15 ++++++++++----- .../notifications/test_receive_notification.py | 10 +++++----- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 19542488b..a73ff8f27 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -3,6 +3,7 @@ from urllib.parse import unquote from flask import Blueprint, current_app, request from notifications_utils.recipients import normalise_phone_number +from app import statsd_client from app.dao.services_dao import dao_fetch_services_by_sms_sender from app.dao.inbound_sms_dao import dao_create_inbound_sms from app.models import InboundSms @@ -30,11 +31,15 @@ def receive_mmg_sms(): potential_services = dao_fetch_services_by_sms_sender(post_data['Number']) if len(potential_services) != 1: - current_app.logger.error('') - raise InvalidRequest( - 'Inbound number "{}" not associated with exactly one service'.format(post_data['Number']), - status_code=400 - ) + current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format( + post_data['Number'] + )) + statsd_client.incr('inbound.mmg.failed') + # since this is an issue with our service <-> number mapping, we should still tell MMG that we received + # succesfully + return 'RECEIVED', 200 + + statsd_client.incr('inbound.mmg.succesful') service = potential_services[0] diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index d4f14e5bc..50c1f9ad5 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -8,6 +8,7 @@ from app.notifications.receive_notifications import ( create_inbound_sms_object ) +from app.models import InboundSms from tests.app.db import create_service @@ -76,8 +77,7 @@ def test_receive_notification_error_if_not_single_matching_service(client, notif data=json.dumps(data), headers=[('Content-Type', 'application/json')]) - assert response.status_code == 400 - assert json.loads(response.get_data(as_text=True)) == { - 'result': 'error', - 'message': 'Inbound number "{}" not associated with exactly one service'.format(notify_number) - } + # we still return 'RECEIVED' to MMG + assert response.status_code == 200 + assert response.get_data(as_text=True) == 'RECEIVED' + assert InboundSms.query.count() == 0 From ab50a3557ea36153a9f418f4590b7df4de471788 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 1 Jun 2017 13:18:56 +0100 Subject: [PATCH 56/92] fix versions --- ...{0088_govuk_sms_sender.py => 0089_govuk_sms_sender.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename migrations/versions/{0088_govuk_sms_sender.py => 0089_govuk_sms_sender.py} (79%) diff --git a/migrations/versions/0088_govuk_sms_sender.py b/migrations/versions/0089_govuk_sms_sender.py similarity index 79% rename from migrations/versions/0088_govuk_sms_sender.py rename to migrations/versions/0089_govuk_sms_sender.py index 3d580d6ad..b69701abd 100644 --- a/migrations/versions/0088_govuk_sms_sender.py +++ b/migrations/versions/0089_govuk_sms_sender.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0088_govuk_sms_sender -Revises: 0087_scheduled_notifications +Revision ID: 0089_govuk_sms_sender +Revises: 0088_add_schedule_serv_perm Create Date: 2017-05-22 13:46:09.584801 """ # revision identifiers, used by Alembic. 
-revision = '0088_govuk_sms_sender' -down_revision = '0087_scheduled_notifications' +revision = '0089_govuk_sms_sender' +down_revision = '0088_add_schedule_serv_perm' from alembic import op From eb493f68170ed5157d535c76db3575bbd881c91f Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 1 Jun 2017 13:56:47 +0100 Subject: [PATCH 57/92] =?UTF-8?q?Rename=20=E2=80=98One-off=20message?= =?UTF-8?q?=E2=80=99=20to=20=E2=80=98Report=E2=80=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/alphagov/notifications-admin/pull/1293/commits/7915845cb58bdacb90d693fbf3073d80e57fa0d2 --- app/config.py | 2 +- tests/app/dao/test_jobs_dao.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/config.py b/app/config.py index 4bb5d030a..1cac131e2 100644 --- a/app/config.py +++ b/app/config.py @@ -107,7 +107,7 @@ class Config(object): SMS_CHAR_COUNT_LIMIT = 495 BRANDING_PATH = '/images/email-template/crests/' TEST_MESSAGE_FILENAME = 'Test message' - ONE_OFF_MESSAGE_FILENAME = 'One-off message' + ONE_OFF_MESSAGE_FILENAME = 'Report' MAX_VERIFY_CODE_COUNT = 10 NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553' diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 3ee8e733b..a9af1afda 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -337,7 +337,7 @@ def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_ @pytest.mark.parametrize('file_name', [ 'Test message', - 'One-off message' + 'Report', ]) def test_get_jobs_for_service_doesnt_return_test_messages( notify_db, From c29f95381e8ed202409e69a09c712f3f261fa116 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 1 Jun 2017 14:57:46 +0100 Subject: [PATCH 58/92] Remved test re-added as part of a merge --- tests/app/service/test_rest.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 4ce78fcee..f2b0a8247 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -2002,22 +2002,6 @@ def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sa redis_set_mock.assert_not_called() -def test_update_service_works_when_sms_sender_is_null(sample_service, client, mocker): - sample_service.sms_sender = None - data = {'name': 'new name'} - - resp = client.post( - 'service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[create_authorization_header()], - content_type='application/json' - ) - - assert resp.status_code == 200 - # make sure it wasn't changed to not-null under the hood - assert sample_service.sms_sender is None - - def test_search_for_notification_by_to_field_filters_by_status(client, notify_db, notify_db_session): create_notification = partial( create_sample_notification, From 3df868823daa987b746d7fa04d03131a8389915f Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Thu, 1 Jun 2017 15:58:15 +0100 Subject: [PATCH 59/92] Fixed DB migration order --- .../versions/{0088_inbound_sms.py => 0090_inbound_sms.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename migrations/versions/{0088_inbound_sms.py => 0090_inbound_sms.py} (88%) diff --git a/migrations/versions/0088_inbound_sms.py b/migrations/versions/0090_inbound_sms.py similarity index 88% rename from migrations/versions/0088_inbound_sms.py rename to migrations/versions/0090_inbound_sms.py index 4ef3f6613..d02690379 100644 --- 
a/migrations/versions/0088_inbound_sms.py +++ b/migrations/versions/0090_inbound_sms.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0088_inbound_sms -Revises: 0087_scheduled_notifications +Revision ID: 0090_inbound_sms +Revises: 0089_govuk_sms_sender Create Date: 2017-05-22 11:28:53.471004 """ # revision identifiers, used by Alembic. -revision = '0088_inbound_sms' -down_revision = '0087_scheduled_notifications' +revision = '0090_inbound_sms' +down_revision = '0089_govuk_sms_sender' from alembic import op import sqlalchemy as sa From dba4e2ad890ad25864882722639d9fae1ab0a594 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 1 Jun 2017 17:27:09 +0100 Subject: [PATCH 60/92] mmg spell received incorrectly, lets use that --- app/notifications/receive_notifications.py | 8 ++++---- tests/app/notifications/test_receive_notification.py | 12 ++++++------ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 1abc1c0f5..ad2dcf9d0 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -41,7 +41,7 @@ def receive_mmg_sms(): service = potential_services[0] - inbound = create_inbound_sms_object(service, post_data) + inbound = create_inbound_mmg_sms_object(service, post_data) current_app.logger.info('{} received inbound SMS with reference {}'.format(service.id, inbound.provider_reference)) @@ -52,15 +52,15 @@ def format_message(message): return unquote(message.replace('+', ' ')) -def create_inbound_sms_object(service, json): +def create_inbound_mmg_sms_object(service, json): message = format_message(json['Message']) user_number = normalise_phone_number(json['MSISDN']) inbound = InboundSms( service=service, notify_number=service.sms_sender, user_number=user_number, - provider_date=json['DateReceived'], - provider_reference=json['ID'], + provider_date=json.get('DateRecieved'), + provider_reference=json.get('ID'), content=message, ) dao_create_inbound_sms(inbound) diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index cd80e0481..5c583a17f 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -5,7 +5,7 @@ from flask import json from app.notifications.receive_notifications import ( format_message, - create_inbound_sms_object + create_inbound_mmg_sms_object ) from app.models import InboundSms @@ -19,7 +19,7 @@ def test_receive_notification_returns_received_to_mmg(client, sample_service): "Trigger": "Trigger?", "Number": "testing", "Channel": "SMS", - "DateReceived": "2012-06-27 12:33:00" + "DateRecieved": "2012-06-27 12:33:00" } response = client.post(path='/notifications/sms/receive/mmg', data=json.dumps(data), @@ -40,17 +40,17 @@ def test_format_message(message, expected_output): assert format_message(message) == expected_output -def test_create_inbound_sms_object(sample_service): +def test_create_inbound_mmg_sms_object(sample_service): sample_service.sms_sender = 'foo' data = { 'Message': 'hello+there+%F0%9F%93%A9', 'Number': 'foo', 'MSISDN': '07700 900 001', - 'DateReceived': '2017-01-02 03:04:05', + 'DateRecieved': '2017-01-02 03:04:05', 'ID': 'bar', } - inbound_sms = create_inbound_sms_object(sample_service, data) + inbound_sms = create_inbound_mmg_sms_object(sample_service, data) assert inbound_sms.service_id == sample_service.id assert inbound_sms.notify_number == 'foo' @@ -70,7 +70,7 @@ def 
test_receive_notification_error_if_not_single_matching_service(client, notif 'Message': 'hello', 'Number': notify_number, 'MSISDN': '7700900001', - 'DateReceived': '2017-01-02 03:04:05', + 'DateRecieved': '2017-01-02 03:04:05', 'ID': 'bar', } response = client.post(path='/notifications/sms/receive/mmg', From d9bdacb5cd400c1c33ba073ba723b95abea64cb8 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 2 Jun 2017 10:14:01 +0100 Subject: [PATCH 61/92] parse datetimes from mmg inbound sms the DateRecieved field from MMG comes in with +s instead of spaces, and uriencoded (the same as how they format their messages) Make sure we decode this, and then convert to a UTC timestamp --- app/notifications/receive_notifications.py | 26 +++++++++++++++---- .../test_receive_notification.py | 17 +++++++++--- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index ad2dcf9d0..cbe17e09e 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -1,7 +1,7 @@ from urllib.parse import unquote -from flask import jsonify -from flask import Blueprint, current_app, request +import iso8601 +from flask import jsonify, Blueprint, current_app, request from notifications_utils.recipients import normalise_phone_number from app import statsd_client @@ -9,6 +9,7 @@ from app.dao.services_dao import dao_fetch_services_by_sms_sender from app.dao.inbound_sms_dao import dao_create_inbound_sms from app.models import InboundSms from app.errors import register_errors +from app.utils import convert_bst_to_utc receive_notifications_blueprint = Blueprint('receive_notifications', __name__) register_errors(receive_notifications_blueprint) @@ -48,18 +49,33 @@ def receive_mmg_sms(): return 'RECEIVED', 200 -def format_message(message): +def format_mmg_message(message): return unquote(message.replace('+', ' ')) +def format_mmg_datetime(date): + """ + We expect datetimes in format 2017-05-21+11%3A56%3A11 - ie, spaces replaced with pluses, and URI encoded + (the same as UTC) + """ + orig_date = format_mmg_message(date) + parsed_datetime = iso8601.parse_date(orig_date).replace(tzinfo=None) + return convert_bst_to_utc(parsed_datetime) + + def create_inbound_mmg_sms_object(service, json): - message = format_message(json['Message']) + message = format_mmg_message(json['Message']) user_number = normalise_phone_number(json['MSISDN']) + + provider_date = json.get('DateRecieved') + if provider_date: + provider_date = format_mmg_datetime(provider_date) + inbound = InboundSms( service=service, notify_number=service.sms_sender, user_number=user_number, - provider_date=json.get('DateRecieved'), + provider_date=provider_date, provider_reference=json.get('ID'), content=message, ) diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 5c583a17f..5d93e5fd3 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -4,7 +4,8 @@ import pytest from flask import json from app.notifications.receive_notifications import ( - format_message, + format_mmg_message, + format_mmg_datetime, create_inbound_mmg_sms_object ) @@ -36,8 +37,16 @@ def test_receive_notification_returns_received_to_mmg(client, sample_service): ('%F0%9F%93%A9+%F0%9F%93%A9+%F0%9F%93%A9', '📩 📩 📩'), ('x+%2B+y', 'x + y') ]) -def test_format_message(message, expected_output): - assert format_message(message) == 
expected_output +def test_format_mmg_message(message, expected_output): + assert format_mmg_message(message) == expected_output + + +@pytest.mark.parametrize('provider_date, expected_output', [ + ('2017-01-21+11%3A56%3A11', datetime(2017, 1, 21, 11, 56, 11)), + ('2017-05-21+11%3A56%3A11', datetime(2017, 5, 21, 10, 56, 11)) +]) +def test_format_mmg_datetime(provider_date, expected_output): + assert format_mmg_datetime(provider_date) == expected_output def test_create_inbound_mmg_sms_object(sample_service): @@ -46,7 +55,7 @@ def test_create_inbound_mmg_sms_object(sample_service): 'Message': 'hello+there+%F0%9F%93%A9', 'Number': 'foo', 'MSISDN': '07700 900 001', - 'DateRecieved': '2017-01-02 03:04:05', + 'DateRecieved': '2017-01-02+03%3A04%3A05', 'ID': 'bar', } From d89cb2c120487b54f3b01d2dcbfe3511be3a848e Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 22 May 2017 15:05:05 +0100 Subject: [PATCH 62/92] add an admin_request fixture this gets rid of some boilerplate around mocking requests from the front-end --- tests/app/conftest.py | 46 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 3 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 5b1d32144..d49c527aa 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -1,11 +1,12 @@ -import requests_mock -import pytest +import json import uuid from datetime import (datetime, date, timedelta) +import requests_mock +import pytest from sqlalchemy import asc from sqlalchemy.orm.session import make_transient -from flask import current_app +from flask import current_app, url_for from app import db from app.models import ( @@ -35,6 +36,7 @@ from app.dao.invited_user_dao import save_invited_user from app.dao.provider_rates_dao import create_provider_rates from app.clients.sms.firetext import FiretextClient +from tests import create_authorization_header from tests.app.db import create_user, create_template, create_notification @@ -976,3 +978,41 @@ def restore_provider_details(notify_db, notify_db_session): notify_db.session.add_all(existing_provider_details) notify_db.session.add_all(existing_provider_details_history) notify_db.session.commit() + + +@pytest.fixture +def admin_request(client): + class AdminRequest: + + @staticmethod + def get(endpoint, endpoint_kwargs=None, expected_status=200): + resp = client.get( + url_for(endpoint, **(endpoint_kwargs or {})), + headers=[create_authorization_header()] + ) + json_resp = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == expected_status + return json_resp + + @staticmethod + def post(endpoint, endpoint_kwargs=None, data=None, expected_status=200): + resp = client.post( + url_for(endpoint, **(endpoint_kwargs or {})), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), create_authorization_header()] + ) + json_resp = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == expected_status + return json_resp + + @staticmethod + def delete(endpoint, endpoint_kwargs=None, expected_status=204): + resp = client.delete( + url_for(endpoint, **(endpoint_kwargs or {})), + headers=[create_authorization_header()] + ) + json_resp = json.loads(resp.get_data(as_text=True)) + assert resp.status_code == expected_status + return json_resp + + return AdminRequest From ef52337d851146fb8e1662f708155c9d189a63ff Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 31 May 2017 14:49:14 +0100 Subject: [PATCH 63/92] add inbound sms api two endpoints: * get all inbound sms for a service (you can limit to the X 
most recent, or filter by user's phone number [which will be normalised]) * get a summary of inbound sms for a service - returns the count of inbound sms in the database, and the date that the most recent was sent --- app/__init__.py | 4 ++ app/dao/inbound_sms_dao.py | 23 +++++++ app/inbound_sms/__init__.py | 0 app/inbound_sms/rest.py | 41 ++++++++++++ app/models.py | 12 ++++ tests/app/dao/test_inbound_sms_dao.py | 59 +++++++++++++++++ tests/app/db.py | 23 +++++++ tests/app/inbound_sms/__init__.py | 0 tests/app/inbound_sms/test_rest.py | 93 +++++++++++++++++++++++++++ 9 files changed, 255 insertions(+) create mode 100644 app/inbound_sms/__init__.py create mode 100644 app/inbound_sms/rest.py create mode 100644 tests/app/dao/test_inbound_sms_dao.py create mode 100644 tests/app/inbound_sms/__init__.py create mode 100644 tests/app/inbound_sms/test_rest.py diff --git a/app/__init__.py b/app/__init__.py index c5b509f30..f8fefbe15 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -93,6 +93,7 @@ def register_blueprint(application): from app.organisation.rest import organisation_blueprint from app.dvla_organisation.rest import dvla_organisation_blueprint from app.delivery.rest import delivery_blueprint + from app.inbound_sms.rest import inbound_sms as inbound_sms_blueprint from app.notifications.receive_notifications import receive_notifications_blueprint from app.notifications.notifications_ses_callback import ses_callback_blueprint from app.notifications.notifications_sms_callback import sms_callback_blueprint @@ -133,6 +134,9 @@ def register_blueprint(application): delivery_blueprint.before_request(requires_admin_auth) application.register_blueprint(delivery_blueprint) + inbound_sms_blueprint.before_request(requires_admin_auth) + application.register_blueprint(inbound_sms_blueprint) + accept_invite.before_request(requires_admin_auth) application.register_blueprint(accept_invite, url_prefix='/invite') diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py index 92f1c79e0..597748731 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -1,7 +1,30 @@ from app import db from app.dao.dao_utils import transactional +from app.models import InboundSms @transactional def dao_create_inbound_sms(inbound_sms): db.session.add(inbound_sms) + + +def dao_get_inbound_sms_for_service(service_id, limit=None, user_number=None): + q = InboundSms.query.filter( + InboundSms.service_id == service_id + ).order_by( + InboundSms.created_at.desc() + ) + + if user_number: + q = q.filter(InboundSms.user_number == user_number) + + if limit: + q = q.limit(limit) + + return q.all() + + +def dao_count_inbound_sms_for_service(service_id): + return InboundSms.query.filter( + InboundSms.service_id == service_id + ).count() diff --git a/app/inbound_sms/__init__.py b/app/inbound_sms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py new file mode 100644 index 000000000..0ee2b9e90 --- /dev/null +++ b/app/inbound_sms/rest.py @@ -0,0 +1,41 @@ +from flask import ( + Blueprint, + jsonify, + request +) +from notifications_utils.recipients import normalise_phone_number + +from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service +from app.errors import register_errors + +inbound_sms = Blueprint( + 'inbound_sms', + __name__, + url_prefix='/service//inbound-sms' +) + +register_errors(inbound_sms) + + +@inbound_sms.route('') +def get_inbound_sms_for_service(service_id): + limit = 
request.args.get('limit') + user_number = request.args.get('user_number') + + if user_number: + user_number = normalise_phone_number(user_number) + + results = dao_get_inbound_sms_for_service(service_id, limit, user_number) + + return jsonify(data=[row.serialize() for row in results]) + + +@inbound_sms.route('/summary') +def get_inbound_sms_summary_for_service(service_id): + count = dao_count_inbound_sms_for_service(service_id) + most_recent = dao_get_inbound_sms_for_service(service_id, limit=1) + + return jsonify( + count=count, + most_recent=most_recent[0].created_at.isoformat() if most_recent else None + ) diff --git a/app/models.py b/app/models.py index 4680b6274..29272c380 100644 --- a/app/models.py +++ b/app/models.py @@ -1175,6 +1175,18 @@ class InboundSms(db.Model): def content(self, content): self._content = encryption.encrypt(content) + def serialize(self): + return { + 'id': str(self.id), + 'created_at': self.created_at.isoformat(), + 'service_id': str(self.service_id), + 'notify_number': self.notify_number, + 'user_number': self.user_number, + 'content': self.content, + 'provider_date': self.provider_date and self.provider_date.isoformat(), + 'provider_reference': self.provider_reference + } + class LetterRate(db.Model): __tablename__ = 'letter_rates' diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py new file mode 100644 index 000000000..f0fc6d8b3 --- /dev/null +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -0,0 +1,59 @@ +from datetime import datetime + +from freezegun import freeze_time + +from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service + +from tests.app.db import create_inbound_sms, create_service + + +def test_get_all_inbound_sms(sample_service): + inbound = create_inbound_sms(sample_service) + + res = dao_get_inbound_sms_for_service(sample_service.id) + assert len(res) == 1 + assert res[0] == inbound + + +def test_get_all_inbound_sms_when_none_exist(sample_service): + res = dao_get_inbound_sms_for_service(sample_service.id) + assert len(res) == 0 + + +def test_get_all_inbound_sms_limits_and_orders(sample_service): + with freeze_time('2017-01-01'): + one = create_inbound_sms(sample_service) + with freeze_time('2017-01-03'): + three = create_inbound_sms(sample_service) + with freeze_time('2017-01-02'): + two = create_inbound_sms(sample_service) + + res = dao_get_inbound_sms_for_service(sample_service.id, limit=2) + assert len(res) == 2 + assert res[0] == three + assert res[0].created_at == datetime(2017, 1, 3) + assert res[1] == two + assert res[1].created_at == datetime(2017, 1, 2) + + +def test_get_all_inbound_sms_filters_on_service(notify_db_session): + service_one = create_service(service_name='one') + service_two = create_service(service_name='two') + + sms_one = create_inbound_sms(service_one) + sms_two = create_inbound_sms(service_two) + + res = dao_get_inbound_sms_for_service(service_one.id) + assert len(res) == 1 + assert res[0] == sms_one + + +def test_count_inbound_sms_for_service(notify_db_session): + service_one = create_service(service_name='one') + service_two = create_service(service_name='two') + + create_inbound_sms(service_one) + create_inbound_sms(service_one) + create_inbound_sms(service_two) + + assert dao_count_inbound_sms_for_service(service_one.id) == 2 diff --git a/tests/app/db.py b/tests/app/db.py index 76b458dd9..cdc0d7a79 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -4,6 +4,7 @@ import uuid from app.dao.jobs_dao import dao_create_job from 
app.models import ( + InboundSms, Service, User, Template, @@ -20,6 +21,7 @@ from app.dao.notifications_dao import dao_create_notification, dao_created_sched from app.dao.templates_dao import dao_create_template from app.dao.services_dao import dao_create_service from app.dao.service_permissions_dao import dao_add_service_permission +from app.dao.inbound_sms_dao import dao_create_inbound_sms def create_user(mobile_number="+447700900986", email="notify@digital.cabinet-office.gov.uk", state='active'): @@ -183,3 +185,24 @@ def create_service_permission(service_id, permission=EMAIL_TYPE): service_permissions = ServicePermission.query.all() return service_permissions + + +def create_inbound_sms( + service, + notify_number=None, + user_number='7700900111', + provider_date=None, + provider_reference=None, + content='Hello' +): + inbound = InboundSms( + service=service, + created_at=datetime.utcnow(), + notify_number=notify_number or service.sms_sender, + user_number=user_number, + provider_date=provider_date or datetime.utcnow(), + provider_reference=provider_reference or 'foo', + content=content, + ) + dao_create_inbound_sms(inbound) + return inbound diff --git a/tests/app/inbound_sms/__init__.py b/tests/app/inbound_sms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py new file mode 100644 index 000000000..70acaddd2 --- /dev/null +++ b/tests/app/inbound_sms/test_rest.py @@ -0,0 +1,93 @@ +from datetime import datetime + +from freezegun import freeze_time + +from tests.app.db import create_inbound_sms, create_service + + +def test_get_inbound_sms(admin_request, sample_service): + one = create_inbound_sms(sample_service) + two = create_inbound_sms(sample_service) + + json_resp = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + endpoint_kwargs={'service_id': sample_service.id} + ) + + sms = json_resp['data'] + + assert len(sms) == 2 + assert {inbound['id'] for inbound in sms} == {str(one.id), str(two.id)} + assert sms[0]['content'] == 'Hello' + assert set(sms[0].keys()) == { + 'id', + 'created_at', + 'service_id', + 'notify_number', + 'user_number', + 'content', + 'provider_date', + 'provider_reference' + } + + +def test_get_inbound_sms_limits(admin_request, sample_service): + with freeze_time('2017-01-01'): + one = create_inbound_sms(sample_service) + with freeze_time('2017-01-02'): + two = create_inbound_sms(sample_service) + + sms = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + endpoint_kwargs={'service_id': sample_service.id, 'limit': 1} + ) + + assert len(sms['data']) == 1 + assert sms['data'][0]['id'] == str(two.id) + + +def test_get_inbound_sms_filters_user_number(admin_request, sample_service): + # user_number in the db is normalised + one = create_inbound_sms(sample_service, user_number='7700900001') + two = create_inbound_sms(sample_service, user_number='7700900002') + + sms = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + endpoint_kwargs={'service_id': sample_service.id, 'user_number': '(07700) 900-001'} + ) + + assert len(sms['data']) == 1 + assert sms['data'][0]['id'] == str(one.id) + assert sms['data'][0]['user_number'] == str(one.user_number) + + +def test_get_inbound_sms_summary(admin_request, sample_service): + other_service = create_service(service_name='other_service') + with freeze_time('2017-01-01'): + create_inbound_sms(sample_service) + with freeze_time('2017-01-02'): + create_inbound_sms(sample_service) + with 
freeze_time('2017-01-03'): + create_inbound_sms(other_service) + + summary = admin_request.get( + 'inbound_sms.get_inbound_sms_summary_for_service', + endpoint_kwargs={'service_id': sample_service.id} + ) + + assert summary == { + 'count': 2, + 'most_recent': datetime(2017, 1, 2).isoformat() + } + + +def test_get_inbound_sms_summary_with_no_inbound(admin_request, sample_service): + summary = admin_request.get( + 'inbound_sms.get_inbound_sms_summary_for_service', + endpoint_kwargs={'service_id': sample_service.id} + ) + + assert summary == { + 'count': 0, + 'most_recent': None + } From bf18b179b06fdb47cad1885150a3c9b5a572b84c Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 2 Jun 2017 12:57:28 +0100 Subject: [PATCH 64/92] ensure the user_number in inbound sms is international rather than using the `normalise_phone_number` function, use the `validate_and_format_phone_number` function - this will also convert all numbers to international format, which means we won't need to worry about whether the user enters internaional or UK phone numbers when searching --- app/inbound_sms/rest.py | 5 +++-- app/notifications/receive_notifications.py | 6 +++--- tests/app/db.py | 2 +- tests/app/inbound_sms/test_rest.py | 16 +++++++++++----- .../notifications/test_receive_notification.py | 2 +- 5 files changed, 19 insertions(+), 12 deletions(-) diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index 0ee2b9e90..8ebcff298 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -3,7 +3,7 @@ from flask import ( jsonify, request ) -from notifications_utils.recipients import normalise_phone_number +from notifications_utils.recipients import validate_and_format_phone_number from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service from app.errors import register_errors @@ -23,7 +23,8 @@ def get_inbound_sms_for_service(service_id): user_number = request.args.get('user_number') if user_number: - user_number = normalise_phone_number(user_number) + # we use this to normalise to an international phone number + user_number = validate_and_format_phone_number(user_number) results = dao_get_inbound_sms_for_service(service_id, limit, user_number) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index cbe17e09e..d053a4198 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -2,7 +2,7 @@ from urllib.parse import unquote import iso8601 from flask import jsonify, Blueprint, current_app, request -from notifications_utils.recipients import normalise_phone_number +from notifications_utils.recipients import validate_and_format_phone_number from app import statsd_client from app.dao.services_dao import dao_fetch_services_by_sms_sender @@ -64,8 +64,8 @@ def format_mmg_datetime(date): def create_inbound_mmg_sms_object(service, json): - message = format_mmg_message(json['Message']) - user_number = normalise_phone_number(json['MSISDN']) + message = format_message(json['Message']) + user_number = validate_and_format_phone_number(json['MSISDN']) provider_date = json.get('DateRecieved') if provider_date: diff --git a/tests/app/db.py b/tests/app/db.py index cdc0d7a79..8fcced1b1 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -190,7 +190,7 @@ def create_service_permission(service_id, permission=EMAIL_TYPE): def create_inbound_sms( service, notify_number=None, - user_number='7700900111', + user_number='447700900111', provider_date=None, 
provider_reference=None, content='Hello' diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index 70acaddd2..25f5d7cff 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -1,5 +1,6 @@ from datetime import datetime +import pytest from freezegun import freeze_time from tests.app.db import create_inbound_sms, create_service @@ -46,14 +47,19 @@ def test_get_inbound_sms_limits(admin_request, sample_service): assert sms['data'][0]['id'] == str(two.id) -def test_get_inbound_sms_filters_user_number(admin_request, sample_service): - # user_number in the db is normalised - one = create_inbound_sms(sample_service, user_number='7700900001') - two = create_inbound_sms(sample_service, user_number='7700900002') +@pytest.mark.parametrize('user_number', [ + '(07700) 900-001', + '+4407700900001', + '447700900001', +]) +def test_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): + # user_number in the db is international and normalised + one = create_inbound_sms(sample_service, user_number='447700900001') + two = create_inbound_sms(sample_service, user_number='447700900002') sms = admin_request.get( 'inbound_sms.get_inbound_sms_for_service', - endpoint_kwargs={'service_id': sample_service.id, 'user_number': '(07700) 900-001'} + endpoint_kwargs={'service_id': sample_service.id, 'user_number': user_number} ) assert len(sms['data']) == 1 diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 5d93e5fd3..317d9452e 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -63,7 +63,7 @@ def test_create_inbound_mmg_sms_object(sample_service): assert inbound_sms.service_id == sample_service.id assert inbound_sms.notify_number == 'foo' - assert inbound_sms.user_number == '7700900001' + assert inbound_sms.user_number == '447700900001' assert inbound_sms.provider_date == datetime(2017, 1, 2, 3, 4, 5) assert inbound_sms.provider_reference == 'bar' assert inbound_sms._content != 'hello there 📩' From 69c299dd6c7a233c02d674e721643f2bc3fb21fa Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 2 Jun 2017 13:34:02 +0100 Subject: [PATCH 65/92] ensure international numbers are handled correctly the international flag semantically means 'Should we throw an error if an international number is passed in?' (and the answer is no. We should not.) 
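For context, this is the behaviour the two call sites in this patch rely on; a usage sketch only, with the expected values taken from the tests in this series rather than from the notifications_utils source:

from notifications_utils.recipients import validate_and_format_phone_number

# UK numbers come back normalised to international format either way
validate_and_format_phone_number('07700 900 001')  # '447700900001'

# with international=True a non-UK number is accepted rather than rejected,
# and is normalised in the same way
validate_and_format_phone_number('+1 (202) 555-0104', international=True)  # '12025550104'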
--- app/inbound_sms/rest.py | 2 +- app/notifications/receive_notifications.py | 4 ++-- tests/app/inbound_sms/test_rest.py | 15 +++++++++++++++ 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index 8ebcff298..d4072dea3 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -24,7 +24,7 @@ def get_inbound_sms_for_service(service_id): if user_number: # we use this to normalise to an international phone number - user_number = validate_and_format_phone_number(user_number) + user_number = validate_and_format_phone_number(user_number, international=True) results = dao_get_inbound_sms_for_service(service_id, limit, user_number) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index d053a4198..0ed644904 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -64,8 +64,8 @@ def format_mmg_datetime(date): def create_inbound_mmg_sms_object(service, json): - message = format_message(json['Message']) - user_number = validate_and_format_phone_number(json['MSISDN']) + message = format_mmg_message(json['Message']) + user_number = validate_and_format_phone_number(json['MSISDN'], international=True) provider_date = json.get('DateRecieved') if provider_date: diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index 25f5d7cff..da10ecb6b 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -67,6 +67,21 @@ def test_get_inbound_sms_filters_user_number(admin_request, sample_service, user assert sms['data'][0]['user_number'] == str(one.user_number) +def test_get_inbound_sms_filters_international_user_number(admin_request, sample_service): + # user_number in the db is international and normalised + one = create_inbound_sms(sample_service, user_number='12025550104') + two = create_inbound_sms(sample_service) + + sms = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + endpoint_kwargs={'service_id': sample_service.id, 'user_number': '+1 (202) 555-0104'} + ) + + assert len(sms['data']) == 1 + assert sms['data'][0]['id'] == str(one.id) + assert sms['data'][0]['user_number'] == str(one.user_number) + + def test_get_inbound_sms_summary(admin_request, sample_service): other_service = create_service(service_name='other_service') with freeze_time('2017-01-01'): From 3e1de2e901470dcfb98346ea70e8d1f996978de6 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 2 Jun 2017 15:58:36 +0100 Subject: [PATCH 66/92] Capture the fire text callbacks. 
Parse the form data, and stop the message --- app/notifications/receive_notifications.py | 29 ++++++- .../test_receive_notification.py | 77 ++++++++++++++++++- 2 files changed, 104 insertions(+), 2 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index cbe17e09e..58f4d7f53 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -86,7 +86,34 @@ def create_inbound_mmg_sms_object(service, json): @receive_notifications_blueprint.route('/notifications/sms/receive/firetext', methods=['POST']) def receive_firetext_sms(): post_data = request.form - current_app.logger.info("Received Firetext notification form data: {}".format(post_data)) + + potential_services = dao_fetch_services_by_sms_sender(post_data['destination']) + if len(potential_services) != 1: + current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format( + post_data['source'] + )) + statsd_client.incr('inbound.firetext.failed') + return jsonify({ + "status": "ok" + }), 200 + + service = potential_services[0] + + user_number = normalise_phone_number(post_data['source']) + message = post_data['message'] + timestamp = post_data['time'] + + dao_create_inbound_sms( + InboundSms( + service=service, + notify_number=service.sms_sender, + user_number=user_number, + provider_date=timestamp, + content=message + ) + ) + + statsd_client.incr('inbound.firetext.successful') return jsonify({ "status": "ok" diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 5d93e5fd3..8cdaa2dc2 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -1,4 +1,5 @@ from datetime import datetime +from unittest.mock import call import pytest from flask import json @@ -10,6 +11,7 @@ from app.notifications.receive_notifications import ( ) from app.models import InboundSms +from tests.app.conftest import sample_service from tests.app.db import create_service @@ -92,7 +94,11 @@ def test_receive_notification_error_if_not_single_matching_service(client, notif assert InboundSms.query.count() == 0 -def test_receive_notification_returns_received_to_firetext(client): +def test_receive_notification_returns_received_to_firetext(notify_db_session, client, mocker): + mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') + + create_service(service_name='b', sms_sender='07111111111') + data = "source=07999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00" response = client.post( @@ -103,4 +109,73 @@ def test_receive_notification_returns_received_to_firetext(client): assert response.status_code == 200 result = json.loads(response.get_data(as_text=True)) + mock.assert_has_calls([call('inbound.firetext.successful')]) + assert result['status'] == 'ok' + + +def test_receive_notification_from_firetext_persists_message(notify_db_session, client, mocker): + mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') + + service = create_service(service_name='b', sms_sender='07111111111') + + data = "source=07999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00" + + response = client.post( + path='/notifications/sms/receive/firetext', + data=data, + headers=[('Content-Type', 'application/x-www-form-urlencoded')]) + + assert response.status_code == 200 + result = 
json.loads(response.get_data(as_text=True)) + + persisted = InboundSms.query.first() + + assert result['status'] == 'ok' + assert persisted.notify_number == '07111111111' + assert persisted.user_number == '7999999999' + assert persisted.service == service + assert persisted.content == 'this is a message' + assert persisted.provider_date == datetime(2017, 1, 1, 12, 0, 0, 0) + + +def test_receive_notification_from_firetext_persists_message_with_normalized_phone(notify_db_session, client, mocker): + mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') + + create_service(service_name='b', sms_sender='07111111111') + + data = "source=(+44)7999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00" + + response = client.post( + path='/notifications/sms/receive/firetext', + data=data, + headers=[('Content-Type', 'application/x-www-form-urlencoded')]) + + assert response.status_code == 200 + result = json.loads(response.get_data(as_text=True)) + + persisted = InboundSms.query.first() + + assert result['status'] == 'ok' + assert persisted.user_number == '447999999999' + + +def test_returns_ok_to_firetext_if_mismatched_sms_sender(notify_db_session, client, mocker): + + mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') + + create_service(service_name='b', sms_sender='07111111199') + + data = "source=(+44)7999999999&destination=07111111111&message=this is a message&time=2017-01-01 12:00:00" + + response = client.post( + path='/notifications/sms/receive/firetext', + data=data, + headers=[('Content-Type', 'application/x-www-form-urlencoded')]) + + assert response.status_code == 200 + result = json.loads(response.get_data(as_text=True)) + + assert not InboundSms.query.all() + assert result['status'] == 'ok' + mock.assert_has_calls([call('inbound.firetext.failed')]) From 0631b6c988e55b495c9fd6542006c26825684992 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Fri, 2 Jun 2017 12:21:12 +0100 Subject: [PATCH 67/92] Add dao to delete inbound sms after seven days --- app/dao/inbound_sms_dao.py | 19 +++++++++++++++ app/dao/notifications_dao.py | 7 ++++-- tests/app/dao/test_inbound_sms_dao.py | 35 ++++++++++++++++++++++++--- tests/app/db.py | 24 ++++++++++-------- 4 files changed, 70 insertions(+), 15 deletions(-) diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py index 597748731..3411aeb22 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -1,6 +1,13 @@ +from datetime import ( + timedelta, + datetime +) + + from app import db from app.dao.dao_utils import transactional from app.models import InboundSms +from app.statsd_decorators import statsd @transactional @@ -28,3 +35,15 @@ def dao_count_inbound_sms_for_service(service_id): return InboundSms.query.filter( InboundSms.service_id == service_id ).count() + + +@statsd(namespace="dao") +@transactional +def delete_inbound_sms_created_more_than_a_week_ago(): + seven_days_ago = datetime.utcnow() - timedelta(days=7) + + deleted = db.session.query(InboundSms).filter( + InboundSms.created_at < seven_days_ago + ).delete(synchronize_session='fetch') + + return deleted diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f4ff2a24f..22da11966 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -2,7 +2,8 @@ import functools from datetime import ( datetime, timedelta, - date) + date +) from flask import current_app @@ -26,6 +27,7 @@ from app.models import ( NotificationHistory, 
NotificationStatistics, Template, + ScheduledNotification, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, NOTIFICATION_SENDING, @@ -35,7 +37,8 @@ from app.models import ( NOTIFICATION_PERMANENT_FAILURE, KEY_TYPE_NORMAL, KEY_TYPE_TEST, LETTER_TYPE, - NOTIFICATION_SENT, ScheduledNotification) + NOTIFICATION_SENT, +) from app.dao.dao_utils import transactional from app.statsd_decorators import statsd diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py index f0fc6d8b3..2731562c4 100644 --- a/tests/app/dao/test_inbound_sms_dao.py +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -1,11 +1,16 @@ -from datetime import datetime +from datetime import datetime, timedelta from freezegun import freeze_time -from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service - +from app.dao.inbound_sms_dao import ( + dao_get_inbound_sms_for_service, + dao_count_inbound_sms_for_service, + delete_inbound_sms_created_more_than_a_week_ago +) from tests.app.db import create_inbound_sms, create_service +from app.models import InboundSms + def test_get_all_inbound_sms(sample_service): inbound = create_inbound_sms(sample_service) @@ -57,3 +62,27 @@ def test_count_inbound_sms_for_service(notify_db_session): create_inbound_sms(service_two) assert dao_count_inbound_sms_for_service(service_one.id) == 2 + + +@freeze_time("2017-01-01 12:00:00") +def test_should_delete_inbound_sms_older_than_seven_days(sample_service): + older_than_seven_days = datetime.utcnow() - timedelta(days=7, seconds=1) + create_inbound_sms(sample_service, created_at=older_than_seven_days) + delete_inbound_sms_created_more_than_a_week_ago() + + assert len(InboundSms.query.all()) == 0 + + +@freeze_time("2017-01-01 12:00:00") +def test_should_not_delete_inbound_sms_before_seven_days(sample_service): + yesterday = datetime.utcnow() - timedelta(days=1) + just_before_seven_days = datetime.utcnow() - timedelta(days=6, hours=23, minutes=59, seconds=59) + older_than_seven_days = datetime.utcnow() - timedelta(days=7, seconds=1) + + create_inbound_sms(sample_service, created_at=yesterday) + create_inbound_sms(sample_service, created_at=just_before_seven_days) + create_inbound_sms(sample_service, created_at=older_than_seven_days) + + delete_inbound_sms_created_more_than_a_week_ago() + + assert len(InboundSms.query.all()) == 2 diff --git a/tests/app/db.py b/tests/app/db.py index 8fcced1b1..2e658023d 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -2,6 +2,7 @@ from datetime import datetime import uuid +from app.dao.inbound_sms_dao import dao_create_inbound_sms from app.dao.jobs_dao import dao_create_job from app.models import ( InboundSms, @@ -12,6 +13,7 @@ from app.models import ( ScheduledNotification, ServicePermission, Job, + InboundSms, EMAIL_TYPE, SMS_TYPE, KEY_TYPE_NORMAL, @@ -151,14 +153,15 @@ def create_notification( return notification -def create_job(template, - notification_count=1, - created_at=None, - job_status='pending', - scheduled_for=None, - processing_started=None, - original_file_name='some.csv'): - +def create_job( + template, + notification_count=1, + created_at=None, + job_status='pending', + scheduled_for=None, + processing_started=None, + original_file_name='some.csv' +): data = { 'id': uuid.uuid4(), 'service_id': template.service_id, @@ -193,11 +196,12 @@ def create_inbound_sms( user_number='447700900111', provider_date=None, provider_reference=None, - content='Hello' + content='Hello', + created_at=None ): inbound = InboundSms( service=service, - 
created_at=datetime.utcnow(), + created_at=created_at or datetime.utcnow(), notify_number=notify_number or service.sms_sender, user_number=user_number, provider_date=provider_date or datetime.utcnow(), From 56c3f3cf7c9801393e43b427ee4448b3de9c2738 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Fri, 2 Jun 2017 14:28:52 +0100 Subject: [PATCH 68/92] Add task to delete inbound sms everyday at 1am --- app/celery/scheduled_tasks.py | 19 ++++++++++++++++ app/config.py | 6 +++++ tests/app/celery/test_scheduled_tasks.py | 29 +++++++++++++++++------- 3 files changed, 46 insertions(+), 8 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 457877ed0..6a9cfd055 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -9,6 +9,7 @@ from sqlalchemy.exc import SQLAlchemyError from app.aws import s3 from app import notify_celery from app import performance_platform_client +from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago from app.dao.jobs_dao import dao_set_scheduled_jobs_to_pending, dao_get_jobs_older_than_limited_by from app.dao.notifications_dao import ( @@ -226,3 +227,21 @@ def timeout_job_statistics(): if updated: current_app.logger.info( "Timeout period reached for {} job statistics, failure count has been updated.".format(updated)) + + +@notify_celery.task(name="delete-inbound-sms") +@statsd(namespace="tasks") +def delete_inbound_sms_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_inbound_sms_created_more_than_a_week_ago() + current_app.logger.info( + "Delete inbound sms job started {} finished {} deleted {} inbound sms notifications".format( + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError as e: + current_app.logger.exception("Failed to delete inbound sms notifications") + raise diff --git a/app/config.py b/app/config.py index 1cac131e2..01aca1964 100644 --- a/app/config.py +++ b/app/config.py @@ -2,6 +2,7 @@ from datetime import timedelta from celery.schedules import crontab from kombu import Exchange, Queue import os + from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST if os.environ.get('VCAP_SERVICES'): @@ -168,6 +169,11 @@ class Config(object): 'schedule': crontab(minute=40, hour=0), 'options': {'queue': QueueNames.PERIODIC} }, + 'delete-inbound-sms': { + 'task': 'delete-inbound-sms', + 'schedule': crontab(minute=0, hour=1), + 'options': {'queue': QueueNames.PERIODIC} + }, 'send-daily-performance-platform-stats': { 'task': 'send-daily-performance-platform-stats', 'schedule': crontab(minute=0, hour=2), diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 7a347e35c..f706db28c 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -5,19 +5,23 @@ from functools import partial from flask import current_app from freezegun import freeze_time -from app.celery.scheduled_tasks import s3, timeout_job_statistics, delete_sms_notifications_older_than_seven_days, \ - delete_letter_notifications_older_than_seven_days, delete_email_notifications_older_than_seven_days, \ - send_scheduled_notifications from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( + delete_email_notifications_older_than_seven_days, + delete_inbound_sms_older_than_seven_days, + delete_invitations, + delete_notifications_created_more_than_a_week_ago_by_type, + 
delete_letter_notifications_older_than_seven_days, + delete_sms_notifications_older_than_seven_days, delete_verify_codes, remove_csv_files, - delete_notifications_created_more_than_a_week_ago_by_type, - delete_invitations, - timeout_notifications, run_scheduled_jobs, + s3, send_daily_performance_platform_stats, - switch_current_sms_provider_on_slow_delivery + send_scheduled_notifications, + switch_current_sms_provider_on_slow_delivery, + timeout_job_statistics, + timeout_notifications ) from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient from app.dao.jobs_dao import dao_get_job_by_id @@ -71,7 +75,8 @@ def prepare_current_provider(restore_provider_details): def test_should_have_decorated_tasks_functions(): assert delete_verify_codes.__wrapped__.__name__ == 'delete_verify_codes' - assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == 'delete_notifications_created_more_than_a_week_ago_by_type' # noqa + assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == \ + 'delete_notifications_created_more_than_a_week_ago_by_type' assert timeout_notifications.__wrapped__.__name__ == 'timeout_notifications' assert delete_invitations.__wrapped__.__name__ == 'delete_invitations' assert run_scheduled_jobs.__wrapped__.__name__ == 'run_scheduled_jobs' @@ -79,6 +84,8 @@ def test_should_have_decorated_tasks_functions(): assert send_daily_performance_platform_stats.__wrapped__.__name__ == 'send_daily_performance_platform_stats' assert switch_current_sms_provider_on_slow_delivery.__wrapped__.__name__ == \ 'switch_current_sms_provider_on_slow_delivery' + assert delete_inbound_sms_older_than_seven_days.__wrapped__.__name__ == \ + 'delete_inbound_sms_older_than_seven_days' def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): @@ -440,3 +447,9 @@ def test_timeout_job_statistics_called_with_notification_timeout(notify_api, moc dao_mock = mocker.patch('app.celery.scheduled_tasks.dao_timeout_job_statistics') timeout_job_statistics() dao_mock.assert_called_once_with(999) + + +def test_should_call_delete_inbound_sms_older_than_seven_days(notify_api, mocker): + mocker.patch('app.celery.scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago') + delete_inbound_sms_older_than_seven_days() + assert scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago.call_count == 1 From 012f8d2675c058af561778402469f806eb9371cc Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 2 Jun 2017 16:37:57 +0100 Subject: [PATCH 69/92] Adds provider onto the inbound sms table so we know where this came from. 
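As a brief illustration of what the new column enables (a hypothetical reporting helper, not part of this patch): once each row records its provider, inbound traffic can be attributed per provider when investigating delivery issues.

from app import db
from app.models import InboundSms

def count_inbound_sms_by_provider():
    # hypothetical query, e.g. returns [('firetext', 3), ('mmg', 12)]
    return db.session.query(
        InboundSms.provider,
        db.func.count(InboundSms.id)
    ).group_by(InboundSms.provider).all()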
--- app/models.py | 1 + app/notifications/receive_notifications.py | 8 ++++--- .../versions/0090_add_inbound_provider.py | 22 +++++++++++++++++++ .../test_receive_notification.py | 2 ++ 4 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 migrations/versions/0090_add_inbound_provider.py diff --git a/app/models.py b/app/models.py index 5bc1cf2fc..6d5bf19fb 100644 --- a/app/models.py +++ b/app/models.py @@ -1166,6 +1166,7 @@ class InboundSms(db.Model): user_number = db.Column(db.String, nullable=False) # the end user's number, that the msg was sent from provider_date = db.Column(db.DateTime) provider_reference = db.Column(db.String) + provider = db.Column(db.String, nullable=True) _content = db.Column('content', db.String, nullable=False) @property diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 58f4d7f53..5fd5ad585 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -4,7 +4,7 @@ import iso8601 from flask import jsonify, Blueprint, current_app, request from notifications_utils.recipients import normalise_phone_number -from app import statsd_client +from app import statsd_client, firetext_client, mmg_client from app.dao.services_dao import dao_fetch_services_by_sms_sender from app.dao.inbound_sms_dao import dao_create_inbound_sms from app.models import InboundSms @@ -78,6 +78,7 @@ def create_inbound_mmg_sms_object(service, json): provider_date=provider_date, provider_reference=json.get('ID'), content=message, + provider=mmg_client.name ) dao_create_inbound_sms(inbound) return inbound @@ -90,7 +91,7 @@ def receive_firetext_sms(): potential_services = dao_fetch_services_by_sms_sender(post_data['destination']) if len(potential_services) != 1: current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format( - post_data['source'] + post_data['destination'] )) statsd_client.incr('inbound.firetext.failed') return jsonify({ @@ -109,7 +110,8 @@ def receive_firetext_sms(): notify_number=service.sms_sender, user_number=user_number, provider_date=timestamp, - content=message + content=message, + provider=firetext_client.name ) ) diff --git a/migrations/versions/0090_add_inbound_provider.py b/migrations/versions/0090_add_inbound_provider.py new file mode 100644 index 000000000..c110ac193 --- /dev/null +++ b/migrations/versions/0090_add_inbound_provider.py @@ -0,0 +1,22 @@ +"""empty message + +Revision ID: 0090_add_inbound_provider +Revises: 0090_inbound_sms +Create Date: 2017-06-02 16:07:35.445423 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0090_add_inbound_provider' +down_revision = '0090_inbound_sms' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +def upgrade(): + op.add_column('inbound_sms', sa.Column('provider', sa.String(), nullable=True)) + + +def downgrade(): + op.drop_column('inbound_sms', 'provider') diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 8cdaa2dc2..c346bcf02 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -70,6 +70,7 @@ def test_create_inbound_mmg_sms_object(sample_service): assert inbound_sms.provider_reference == 'bar' assert inbound_sms._content != 'hello there 📩' assert inbound_sms.content == 'hello there 📩' + assert inbound_sms.provider == 'mmg' @pytest.mark.parametrize('notify_number', ['foo', 'baz'], ids=['two_matching_services', 'no_matching_services']) @@ -136,6 +137,7 @@ def test_receive_notification_from_firetext_persists_message(notify_db_session, assert persisted.user_number == '7999999999' assert persisted.service == service assert persisted.content == 'this is a message' + assert persisted.provider == 'firetext' assert persisted.provider_date == datetime(2017, 1, 1, 12, 0, 0, 0) From 199c43c5077b7ab91147ec473168d6ccf811e381 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Fri, 2 Jun 2017 16:49:11 +0100 Subject: [PATCH 70/92] Migration script to populate the provider. - initial build of this ONLY support MMG so we can assume that all existing entries are all MMG, so any nulls == MMG. - This PR will put in fire text so not so safe to keep doing this back and forward. --- ...ovider.py => 0091_add_inbound_provider.py} | 4 ++-- .../0092_populate_inbound_provider.py | 22 +++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) rename migrations/versions/{0090_add_inbound_provider.py => 0091_add_inbound_provider.py} (84%) create mode 100644 migrations/versions/0092_populate_inbound_provider.py diff --git a/migrations/versions/0090_add_inbound_provider.py b/migrations/versions/0091_add_inbound_provider.py similarity index 84% rename from migrations/versions/0090_add_inbound_provider.py rename to migrations/versions/0091_add_inbound_provider.py index c110ac193..a0864ddbe 100644 --- a/migrations/versions/0090_add_inbound_provider.py +++ b/migrations/versions/0091_add_inbound_provider.py @@ -1,13 +1,13 @@ """empty message -Revision ID: 0090_add_inbound_provider +Revision ID: 0091_add_inbound_provider Revises: 0090_inbound_sms Create Date: 2017-06-02 16:07:35.445423 """ # revision identifiers, used by Alembic. -revision = '0090_add_inbound_provider' +revision = '0091_add_inbound_provider' down_revision = '0090_inbound_sms' from alembic import op diff --git a/migrations/versions/0092_populate_inbound_provider.py b/migrations/versions/0092_populate_inbound_provider.py new file mode 100644 index 000000000..ba6da1f5d --- /dev/null +++ b/migrations/versions/0092_populate_inbound_provider.py @@ -0,0 +1,22 @@ +"""empty message + +Revision ID: 0092_populate_inbound_provider +Revises: 0091_add_inbound_provider +Create Date: 2017-05-22 10:23:43.939050 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0092_populate_inbound_provider' +down_revision = '0091_add_inbound_provider' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +def upgrade(): + op.execute("UPDATE inbound_sms SET provider='mmg' WHERE provider is null") + + +def downgrade(): + op.execute("UPDATE inbound_sms SET provider=null WHERE provider = 'mmg'") From b296e736f2b77c1165cae19d535cebb03fe02d69 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Mon, 5 Jun 2017 11:51:30 +0100 Subject: [PATCH 71/92] Reorder the migrations. --- app/notifications/receive_notifications.py | 4 ++-- ...d_inbound_provider.py => 0092_add_inbound_provider.py} | 8 ++++---- ...ound_provider.py => 0093_populate_inbound_provider.py} | 8 ++++---- tests/app/notifications/test_receive_notification.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) rename migrations/versions/{0091_add_inbound_provider.py => 0092_add_inbound_provider.py} (71%) rename migrations/versions/{0092_populate_inbound_provider.py => 0093_populate_inbound_provider.py} (69%) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index ffe347d95..904e78711 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -38,7 +38,7 @@ def receive_mmg_sms(): # succesfully return 'RECEIVED', 200 - statsd_client.incr('inbound.mmg.succesful') + statsd_client.incr('inbound.mmg.successful') service = potential_services[0] @@ -100,7 +100,7 @@ def receive_firetext_sms(): service = potential_services[0] - user_number = normalise_phone_number(post_data['source']) + user_number = validate_and_format_phone_number(post_data['source'], international=True) message = post_data['message'] timestamp = post_data['time'] diff --git a/migrations/versions/0091_add_inbound_provider.py b/migrations/versions/0092_add_inbound_provider.py similarity index 71% rename from migrations/versions/0091_add_inbound_provider.py rename to migrations/versions/0092_add_inbound_provider.py index a0864ddbe..f7e5f510e 100644 --- a/migrations/versions/0091_add_inbound_provider.py +++ b/migrations/versions/0092_add_inbound_provider.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0091_add_inbound_provider -Revises: 0090_inbound_sms +Revision ID: 0092_add_inbound_provider +Revises: 0091_letter_billing Create Date: 2017-06-02 16:07:35.445423 """ # revision identifiers, used by Alembic. -revision = '0091_add_inbound_provider' -down_revision = '0090_inbound_sms' +revision = '0092_add_inbound_provider' +down_revision = '0091_letter_billing' from alembic import op import sqlalchemy as sa diff --git a/migrations/versions/0092_populate_inbound_provider.py b/migrations/versions/0093_populate_inbound_provider.py similarity index 69% rename from migrations/versions/0092_populate_inbound_provider.py rename to migrations/versions/0093_populate_inbound_provider.py index ba6da1f5d..688d3f7bf 100644 --- a/migrations/versions/0092_populate_inbound_provider.py +++ b/migrations/versions/0093_populate_inbound_provider.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0092_populate_inbound_provider -Revises: 0091_add_inbound_provider +Revision ID: 0093_populate_inbound_provider +Revises: 0092_add_inbound_provider Create Date: 2017-05-22 10:23:43.939050 """ # revision identifiers, used by Alembic. 
-revision = '0092_populate_inbound_provider' -down_revision = '0091_add_inbound_provider' +revision = '0093_populate_inbound_provider' +down_revision = '0092_add_inbound_provider' from alembic import op import sqlalchemy as sa diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 6ce813354..67949104a 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -116,7 +116,7 @@ def test_receive_notification_returns_received_to_firetext(notify_db_session, cl def test_receive_notification_from_firetext_persists_message(notify_db_session, client, mocker): - mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') + mocker.patch('app.notifications.receive_notifications.statsd_client.incr') service = create_service(service_name='b', sms_sender='07111111111') @@ -134,7 +134,7 @@ def test_receive_notification_from_firetext_persists_message(notify_db_session, assert result['status'] == 'ok' assert persisted.notify_number == '07111111111' - assert persisted.user_number == '7999999999' + assert persisted.user_number == '447999999999' assert persisted.service == service assert persisted.content == 'this is a message' assert persisted.provider == 'firetext' From efb045fc68209fb951ca6d57c23a1067c64a395a Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Mon, 5 Jun 2017 11:55:13 +0100 Subject: [PATCH 72/92] Removed pre-populate column to run after migration --- .../0093_populate_inbound_provider.py | 22 ------------------- 1 file changed, 22 deletions(-) delete mode 100644 migrations/versions/0093_populate_inbound_provider.py diff --git a/migrations/versions/0093_populate_inbound_provider.py b/migrations/versions/0093_populate_inbound_provider.py deleted file mode 100644 index 688d3f7bf..000000000 --- a/migrations/versions/0093_populate_inbound_provider.py +++ /dev/null @@ -1,22 +0,0 @@ -"""empty message - -Revision ID: 0093_populate_inbound_provider -Revises: 0092_add_inbound_provider -Create Date: 2017-05-22 10:23:43.939050 - -""" - -# revision identifiers, used by Alembic. -revision = '0093_populate_inbound_provider' -down_revision = '0092_add_inbound_provider' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -def upgrade(): - op.execute("UPDATE inbound_sms SET provider='mmg' WHERE provider is null") - - -def downgrade(): - op.execute("UPDATE inbound_sms SET provider=null WHERE provider = 'mmg'") From 6b01cfd5b580354de811fa7266811bfb04e31086 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Mon, 5 Jun 2017 16:18:40 +0100 Subject: [PATCH 73/92] Add data.gov.uk to the list of organisations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We need to send an email with data.gov.uk branding. The image for the logo doesn’t exist yet, but doing this migration so we’re ready when it the logo does exist. --- migrations/versions/0092_data_gov_uk.py | 31 +++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 migrations/versions/0092_data_gov_uk.py diff --git a/migrations/versions/0092_data_gov_uk.py b/migrations/versions/0092_data_gov_uk.py new file mode 100644 index 000000000..96c07d578 --- /dev/null +++ b/migrations/versions/0092_data_gov_uk.py @@ -0,0 +1,31 @@ +"""empty message + +Revision ID: 0092_data_gov_uk +Revises: 0091_letter_billing +Create Date: 2017-06-05 16:15:17.744908 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0092_data_gov_uk' +down_revision = '0091_letter_billing' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +DATA_GOV_UK_ID = '123496d4-44cb-4324-8e0a-4187101f4bdc' + +def upgrade(): + op.execute("""INSERT INTO organisation VALUES ( + '{}', + '', + 'data_gov_uk_x2.png', + '' + )""".format(DATA_GOV_UK_ID)) + + +def downgrade(): + op.execute(""" + DELETE FROM organisation WHERE "id" = '{}' + """.format(DATA_GOV_UK_ID)) From 7a03ef3de457011f63202dcb93f5fa955cd89e3f Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Mon, 5 Jun 2017 17:25:40 +0100 Subject: [PATCH 74/92] Pseudo Code --- app/dao/notification_usage_dao.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index b3b582ed3..97fc4925e 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -176,8 +176,14 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date ) billable_units_by_rate_boundry = result.scalar() if billable_units_by_rate_boundry: + if billable_units >= 250000: + total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] + elif billable_units + billable_units_by_rate_boundry > 250000: + remaining_free_allowance = abs(250000 - billable_units) + total_cost += ((billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary) + else + total_cost += 0 billable_units += int(billable_units_by_rate_boundry) - total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] return billable_units, total_cost From d488c592f4757e1f1cebdfc87cc651c0931ea5b9 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 6 Jun 2017 10:59:01 +0100 Subject: [PATCH 75/92] Fix conflict in db migration script --- .../{0092_data_gov_uk.py => 0093_data_gov_uk.py} | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) rename migrations/versions/{0092_data_gov_uk.py => 0093_data_gov_uk.py} (79%) diff --git a/migrations/versions/0092_data_gov_uk.py b/migrations/versions/0093_data_gov_uk.py similarity index 79% rename from migrations/versions/0092_data_gov_uk.py rename to migrations/versions/0093_data_gov_uk.py index 96c07d578..fbe22d38a 100644 --- a/migrations/versions/0092_data_gov_uk.py +++ b/migrations/versions/0093_data_gov_uk.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0092_data_gov_uk -Revises: 0091_letter_billing +Revision ID: 0093_data_gov_uk +Revises: 0092_add_inbound_provider Create Date: 2017-06-05 16:15:17.744908 """ # revision identifiers, used by Alembic. -revision = '0092_data_gov_uk' -down_revision = '0091_letter_billing' +revision = '0093_data_gov_uk' +down_revision = '0092_add_inbound_provider' from alembic import op import sqlalchemy as sa @@ -16,6 +16,7 @@ from sqlalchemy.dialects import postgresql DATA_GOV_UK_ID = '123496d4-44cb-4324-8e0a-4187101f4bdc' + def upgrade(): op.execute("""INSERT INTO organisation VALUES ( '{}', From 29455b6d3bf366636af602911bd47383162ecf76 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 6 Jun 2017 11:50:30 +0100 Subject: [PATCH 76/92] Strip leading 44 from inbound SMS numbers to normalise to how we store things. 
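It is worth spelling out the two number formats in play here, as inferred from the diffs and tests in this series: the Notify-owned inbound number is matched against the service's sms_sender, which is stored in local '07...' form, while the end user's number is stored internationally ('447...'). A self-contained sketch (the helper mirrors the one added in the diff below):

from notifications_utils.recipients import validate_and_format_phone_number

def strip_leading_forty_four(number):
    # same logic as the helper added below: convert a leading country code
    # back to a leading zero so it matches the stored sms_sender
    if number.startswith('44'):
        return number.replace('44', '0', 1)
    return number

# providers post both numbers in '447...' form
destination = '447111111111'  # the Notify number the user texted
source = '447700900111'       # the end user's own number

assert strip_leading_forty_four(destination) == '07111111111'  # used for the service lookup
assert validate_and_format_phone_number(source, international=True) == '447700900111'  # stored as-is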
--- app/notifications/receive_notifications.py | 19 +++++++++++++++---- .../test_receive_notification.py | 18 +++++++++++++++--- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 904e78711..fdd1f95ec 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -27,10 +27,13 @@ def receive_mmg_sms(): } """ post_data = request.get_json() - potential_services = dao_fetch_services_by_sms_sender(post_data['Number']) + + inbound_number = strip_leading_forty_four(post_data['Number']) + + potential_services = dao_fetch_services_by_sms_sender(inbound_number) if len(potential_services) != 1: - current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format( + current_app.logger.error('Inbound number "{}" from MMG not associated with exactly one service'.format( post_data['Number'] )) statsd_client.incr('inbound.mmg.failed') @@ -88,9 +91,11 @@ def create_inbound_mmg_sms_object(service, json): def receive_firetext_sms(): post_data = request.form - potential_services = dao_fetch_services_by_sms_sender(post_data['destination']) + inbound_number = strip_leading_forty_four(post_data['destination']) + + potential_services = dao_fetch_services_by_sms_sender(inbound_number) if len(potential_services) != 1: - current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format( + current_app.logger.error('Inbound number "{}" from firetext not associated with exactly one service'.format( post_data['destination'] )) statsd_client.incr('inbound.firetext.failed') @@ -120,3 +125,9 @@ def receive_firetext_sms(): return jsonify({ "status": "ok" }), 200 + + +def strip_leading_forty_four(number): + if number.startswith('44'): + return number.replace('44', '0', 1) + return number diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 67949104a..66f743be4 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -7,11 +7,11 @@ from flask import json from app.notifications.receive_notifications import ( format_mmg_message, format_mmg_datetime, - create_inbound_mmg_sms_object + create_inbound_mmg_sms_object, + strip_leading_forty_four ) from app.models import InboundSms -from tests.app.conftest import sample_service from tests.app.db import create_service @@ -163,7 +163,6 @@ def test_receive_notification_from_firetext_persists_message_with_normalized_pho def test_returns_ok_to_firetext_if_mismatched_sms_sender(notify_db_session, client, mocker): - mock = mocker.patch('app.notifications.receive_notifications.statsd_client.incr') create_service(service_name='b', sms_sender='07111111199') @@ -181,3 +180,16 @@ def test_returns_ok_to_firetext_if_mismatched_sms_sender(notify_db_session, clie assert not InboundSms.query.all() assert result['status'] == 'ok' mock.assert_has_calls([call('inbound.firetext.failed')]) + + +@pytest.mark.parametrize( + 'number, expected', + [ + ('447123123123', '07123123123'), + ('447123123144', '07123123144'), + ('07123123123', '07123123123'), + ('447444444444', '07444444444') + ] +) +def test_strip_leading_country_code(number, expected): + assert strip_leading_forty_four(number) == expected From 18dcc10a0695458df397386af292f5ddfbc9c491 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 6 Jun 2017 14:04:11 +0100 Subject: [PATCH 77/92] Fixed typo --- 
app/dao/notification_usage_dao.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index 97fc4925e..d51e96153 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -181,7 +181,7 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date elif billable_units + billable_units_by_rate_boundry > 250000: remaining_free_allowance = abs(250000 - billable_units) total_cost += ((billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary) - else + else: total_cost += 0 billable_units += int(billable_units_by_rate_boundry) From 75bf693f444cc7a5fe903a19a4c30a75eccda169 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 6 Jun 2017 14:49:05 +0100 Subject: [PATCH 78/92] Add the yearly free limit to the service model. This allows us to reference it across the API code base and return it in the API. But not currently attached to the service DB model - a static method on the class. --- app/config.py | 2 ++ app/models.py | 4 ++++ app/schemas.py | 7 +++++-- tests/app/service/test_rest.py | 11 +++++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/app/config.py b/app/config.py index 01aca1964..07d451549 100644 --- a/app/config.py +++ b/app/config.py @@ -239,6 +239,8 @@ class Config(object): } } + FREE_SMS_TIER_FRAGMENT_COUNT = 250000 + ###################### # Config overrides ### diff --git a/app/models.py b/app/models.py index 1bcd66a19..a37959156 100644 --- a/app/models.py +++ b/app/models.py @@ -217,6 +217,10 @@ class Service(db.Model, Versioned): self.can_send_letters = LETTER_TYPE in [p.permission for p in self.permissions] self.can_send_international_sms = INTERNATIONAL_SMS_TYPE in [p.permission for p in self.permissions] + @staticmethod + def free_sms_fragment_limit(): + return current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] + @classmethod def from_json(cls, data): """ diff --git a/app/schemas.py b/app/schemas.py index b8d3ee7af..3ca8fda1a 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -25,9 +25,8 @@ from notifications_utils.recipients import ( from app import ma from app import models -from app.models import ServicePermission, INTERNATIONAL_SMS_TYPE, SMS_TYPE, LETTER_TYPE, EMAIL_TYPE +from app.models import ServicePermission, INTERNATIONAL_SMS_TYPE, LETTER_TYPE from app.dao.permissions_dao import permission_dao -from app.dao.service_permissions_dao import dao_fetch_service_permissions from app.utils import get_template_instance @@ -176,6 +175,7 @@ class ProviderDetailsHistorySchema(BaseSchema): class ServiceSchema(BaseSchema): + free_sms_fragment_limit = fields.Method('get_free_sms_fragment_limit') created_by = field_for(models.Service, 'created_by', required=True) organisation = field_for(models.Service, 'organisation') branding = field_for(models.Service, 'branding') @@ -183,6 +183,9 @@ class ServiceSchema(BaseSchema): permissions = fields.Method("service_permissions") override_flag = False + def get_free_sms_fragment_limit(selfs, service): + return service.free_sms_fragment_limit() + def service_permissions(self, service): return [p.permission for p in service.permissions] diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index f2b0a8247..2c2842177 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -147,6 +147,17 @@ def test_get_service_by_id(client, sample_service): assert json_resp['data']['sms_sender'] == current_app.config['FROM_NUMBER'] +def 
test_get_service_by_id_returns_free_sms_limit(client, sample_service): + auth_header = create_authorization_header() + resp = client.get( + '/service/{}'.format(sample_service.id), + headers=[auth_header] + ) + assert resp.status_code == 200 + json_resp = json.loads(resp.get_data(as_text=True)) + assert json_resp['data']['free_sms_fragment_limit'] == 250000 + + def test_get_service_list_has_default_permissions(client, service_factory): service_factory.get('one') service_factory.get('two') From 96d30d31b1adb671e6b58266f264946fa9904812 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 6 Jun 2017 14:55:37 +0100 Subject: [PATCH 79/92] Get existing tests to pass. Done by ensuring that the rate limit is 0, so that all messages are billable. --- app/dao/notification_usage_dao.py | 12 +++++----- tests/app/dao/test_notification_usage_dao.py | 23 ++++++++++++++++++-- 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index d51e96153..1ee3134bd 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -1,5 +1,6 @@ from datetime import datetime, timedelta +from flask import current_app from sqlalchemy import Float, Integer from sqlalchemy import func, case, cast from sqlalchemy import literal_column @@ -11,7 +12,7 @@ from app.models import (NotificationHistory, NOTIFICATION_STATUS_TYPES_BILLABLE, KEY_TYPE_TEST, SMS_TYPE, - EMAIL_TYPE) + EMAIL_TYPE, Service) from app.statsd_decorators import statsd from app.utils import get_london_month_from_utc_column @@ -158,6 +159,8 @@ def rate_multiplier(): @statsd(namespace="dao") def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date, end_date, service_id): + free_sms_limit = Service.free_sms_fragment_limit() + billable_units = 0 total_cost = 0.0 @@ -176,15 +179,14 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date ) billable_units_by_rate_boundry = result.scalar() if billable_units_by_rate_boundry: - if billable_units >= 250000: + if billable_units >= free_sms_limit: total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] - elif billable_units + billable_units_by_rate_boundry > 250000: - remaining_free_allowance = abs(250000 - billable_units) + elif billable_units + billable_units_by_rate_boundry > free_sms_limit: + remaining_free_allowance = abs(free_sms_limit - billable_units) total_cost += ((billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary) else: total_cost += 0 billable_units += int(billable_units_by_rate_boundry) - return billable_units, total_cost diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index d4aec8531..c20b76a4c 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -2,6 +2,7 @@ import uuid from datetime import datetime, timedelta import pytest +from flask import current_app from app.dao.date_util import get_financial_year from app.dao.notification_usage_dao import ( @@ -15,8 +16,7 @@ from app.models import ( Rate, NOTIFICATION_DELIVERED, NOTIFICATION_STATUS_TYPES_BILLABLE, - NOTIFICATION_STATUS_TYPES_NON_BILLABLE, - Notification) + NOTIFICATION_STATUS_TYPES_NON_BILLABLE) from tests.app.conftest import sample_notification, sample_email_template, sample_letter_template, sample_service from tests.app.db import create_notification from freezegun import freeze_time @@ -266,6 +266,8 @@ def set_up_rate(notify_db, 
start_date, value): @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db_session, sample_service): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) sample_notification( @@ -288,6 +290,8 @@ def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) sample_notification( @@ -309,6 +313,8 @@ def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notificati def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications_for_several_rates( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2) set_up_rate(notify_db, datetime(2016, 10, 1), 4) set_up_rate(notify_db, datetime(2017, 1, 1), 6) @@ -350,6 +356,8 @@ def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notificati def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2) set_up_rate(notify_db, datetime(2016, 10, 1), 4) set_up_rate(notify_db, datetime(2017, 1, 1), 6) @@ -388,6 +396,8 @@ def test_returns_total_billable_units_for_sms_notifications_for_several_rates_wh def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and_emails( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) @@ -426,6 +436,8 @@ def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and def test_returns_total_billable_units_for_sms_notifications_for_only_requested_service( notify_db, notify_db_session ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) @@ -463,6 +475,8 @@ def test_returns_total_billable_units_for_sms_notifications_for_only_requested_s def test_returns_total_billable_units_for_sms_notifications_handling_null_values( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) sample_notification( @@ -488,6 +502,9 @@ def test_returns_total_billable_units_for_sms_notifications_handling_null_values def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service, billable_units, states ): + + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) for state in states: @@ -514,6 +531,8 @@ def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notif def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service ): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) 
sample_notification( From 6b4597149f42256d51da5b9f92c3cda5ff017da3 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Tue, 6 Jun 2017 16:01:27 +0100 Subject: [PATCH 80/92] Add filter to get jobs to delete (sms, email, letter) --- app/dao/jobs_dao.py | 26 +++++++------- tests/app/conftest.py | 22 ++++++------ tests/app/dao/test_jobs_dao.py | 66 ++++++++++++++++++++++------------ 3 files changed, 69 insertions(+), 45 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index d2ec8c367..86e565619 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,17 +1,15 @@ -from datetime import datetime +from datetime import datetime, timedelta from flask import current_app from sqlalchemy import func, desc, asc, cast, Date as sql_date from app import db from app.dao import days_ago -from app.models import (Job, - Notification, - NotificationHistory, - Template, - JOB_STATUS_SCHEDULED, - JOB_STATUS_PENDING, - LETTER_TYPE, JobStatistics) +from app.models import ( + Job, JobStatistics, Notification, NotificationHistory, Template, + JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE +) from app.statsd_decorators import statsd @@ -129,10 +127,14 @@ def dao_update_job_status(job_id, status): db.session.commit() -def dao_get_jobs_older_than_limited_by(older_than=7, limit_days=2): - return Job.query.filter( - cast(Job.created_at, sql_date) < days_ago(older_than), - cast(Job.created_at, sql_date) >= days_ago(older_than + limit_days) +def dao_get_jobs_older_than_limited_by(job_types, older_than=7, limit_days=2): + end_date = datetime.utcnow() - timedelta(days=older_than) + start_date = end_date - timedelta(days=limit_days) + + return Job.query.join(Template).filter( + Job.created_at < end_date, + Job.created_at >= start_date, + Template.template_type.in_(job_types) ).order_by(desc(Job.created_at)).all() diff --git a/tests/app/conftest.py b/tests/app/conftest.py index d49c527aa..91eff00fc 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -280,16 +280,18 @@ def sample_team_api_key(notify_db, notify_db_session, service=None): @pytest.fixture(scope='function') -def sample_job(notify_db, - notify_db_session, - service=None, - template=None, - notification_count=1, - created_at=None, - job_status='pending', - scheduled_for=None, - processing_started=None, - original_file_name='some.csv'): +def sample_job( + notify_db, + notify_db_session, + service=None, + template=None, + notification_count=1, + created_at=None, + job_status='pending', + scheduled_for=None, + processing_started=None, + original_file_name='some.csv' +): if service is None: service = sample_service(notify_db, notify_db_session) if template is None: diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index a9af1afda..42f93d230 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -17,7 +17,10 @@ from app.dao.jobs_dao import ( dao_update_job_status, dao_get_all_notifications_for_job, dao_get_jobs_older_than_limited_by) -from app.models import Job, JobStatistics +from app.models import ( + Job, JobStatistics, + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE +) from tests.app.conftest import sample_notification as create_notification from tests.app.conftest import sample_job as create_job @@ -285,33 +288,30 @@ def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job): assert result.id == sample_scheduled_job.id -def test_should_get_jobs_seven_days_old(notify_db, notify_db_session): - # job runs at some point on each day - # 
shouldn't matter when, we are deleting things 7 days ago - job_run_time = '2016-10-31T10:00:00' +@freeze_time('2016-10-31 10:00:00') +def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_template): + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + seven_days_ago = datetime.utcnow() - timedelta(days=7) + within_seven_days = seven_days_ago + timedelta(seconds=1) - # running on the 31st means the previous 7 days are ignored + eight_days_ago = seven_days_ago - timedelta(days=1) - # 2 day window for delete jobs - # 7 days of files to skip includes the 30,29,28,27,26,25,24th, so the.... - last_possible_time_for_eligible_job = '2016-10-23T23:59:59' - first_possible_time_for_eligible_job = '2016-10-22T00:00:00' + nine_days_ago = eight_days_ago - timedelta(days=2) + nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - job_1 = create_job(notify_db, notify_db_session, created_at=last_possible_time_for_eligible_job) - job_2 = create_job(notify_db, notify_db_session, created_at=first_possible_time_for_eligible_job) + job = partial(create_job, notify_db, notify_db_session) + job(created_at=seven_days_ago) + job(created_at=within_seven_days) + job_to_delete = job(created_at=eight_days_ago) + job(created_at=nine_days_ago) + job(created_at=nine_days_one_second_ago) - # bookmarks for jobs that should be ignored - last_possible_time_for_ineligible_job = '2016-10-24T00:00:00' - create_job(notify_db, notify_db_session, created_at=last_possible_time_for_ineligible_job) + jobs = dao_get_jobs_older_than_limited_by(job_types=[sample_template.template_type]) - first_possible_time_for_ineligible_job = '2016-10-21T23:59:59' - create_job(notify_db, notify_db_session, created_at=first_possible_time_for_ineligible_job) - - with freeze_time(job_run_time): - jobs = dao_get_jobs_older_than_limited_by() - assert len(jobs) == 2 - assert jobs[0].id == job_1.id - assert jobs[1].id == job_2.id + assert len(jobs) == 1 + assert jobs[0].id == job_to_delete.id def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_service, sample_template): @@ -391,3 +391,23 @@ def test_dao_update_job_status(sample_job): updated_job = Job.query.get(sample_job.id) assert updated_job.job_status == 'sent to dvla' assert updated_job.updated_at + + +@freeze_time('2016-10-31 10:00:00') +def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_session): + eight_days_ago = datetime.utcnow() - timedelta(days=8) + letter_template = create_template(notify_db, notify_db_session, template_type=LETTER_TYPE) + sms_template = create_template(notify_db, notify_db_session, template_type=SMS_TYPE) + email_template = create_template(notify_db, notify_db_session, template_type=EMAIL_TYPE) + + job = partial(create_job, notify_db, notify_db_session, created_at=eight_days_ago) + job_to_remain = job(template=letter_template) + job(template=sms_template) + job(template=email_template) + + jobs = dao_get_jobs_older_than_limited_by( + job_types=[EMAIL_TYPE, SMS_TYPE] + ) + + assert len(jobs) == 2 + assert job_to_remain.id not in [job.id for job in jobs] From 74a8905be91fbb25784ce3a1bd793ce8da7a9130 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Tue, 6 Jun 2017 16:02:01 +0100 Subject: [PATCH 81/92] Seperate deletion of jobs: * Two separate jobs, one for sms&email and another for letter * Change celery task for delete to accept template type filter * General refacor of tests to make more readable --- app/celery/scheduled_tasks.py | 4 +- app/config.py | 
16 ++++-- tests/app/celery/test_scheduled_tasks.py | 63 ++++++++++++++++++------ 3 files changed, 64 insertions(+), 19 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 6a9cfd055..636522f01 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -32,8 +32,8 @@ from app.config import QueueNames @notify_celery.task(name="remove_csv_files") @statsd(namespace="tasks") -def remove_csv_files(): - jobs = dao_get_jobs_older_than_limited_by() +def remove_csv_files(job_types): + jobs = dao_get_jobs_older_than_limited_by(job_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) diff --git a/app/config.py b/app/config.py index 01aca1964..cbaa15d67 100644 --- a/app/config.py +++ b/app/config.py @@ -3,7 +3,10 @@ from celery.schedules import crontab from kombu import Exchange, Queue import os -from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST +from app.models import ( + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, + KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST +) if os.environ.get('VCAP_SERVICES'): # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate @@ -189,10 +192,17 @@ class Config(object): 'schedule': crontab(minute=0, hour=3), 'options': {'queue': QueueNames.PERIODIC} }, - 'remove_csv_files': { + 'remove_sms_email_jobs': { 'task': 'remove_csv_files', 'schedule': crontab(minute=0, hour=4), - 'options': {'queue': QueueNames.PERIODIC} + 'options': {'queue': QueueNames.PERIODIC}, + 'kwargs': {'job_types': [EMAIL_TYPE, SMS_TYPE]} + }, + 'remove_letter_jobs': { + 'task': 'remove_csv_files', + 'schedule': crontab(minute=20, hour=4), + 'options': {'queue': QueueNames.PERIODIC}, + 'kwargs': {'job_types': [LETTER_TYPE]} }, 'timeout-job-statistics': { 'task': 'timeout-job-statistics', diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index f706db28c..b3ff5d478 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -30,9 +30,12 @@ from app.dao.provider_details_dao import ( dao_update_provider_details, get_current_provider ) -from app.models import Service, Template +from app.models import ( + Service, Template, + SMS_TYPE, LETTER_TYPE +) from app.utils import get_london_midnight_in_utc -from tests.app.db import create_notification, create_service +from tests.app.db import create_notification, create_service, create_template, create_job from tests.app.conftest import ( sample_job as create_sample_job, sample_notification_history as create_notification_history, @@ -214,22 +217,33 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ ]) -def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_db_session, mocker): +@freeze_time('2016-10-18T10:00:00') +def test_will_remove_csv_files_for_jobs_older_than_seven_days( + notify_db, notify_db_session, mocker, sample_template +): mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + seven_days_ago = datetime.utcnow() - timedelta(days=7) + just_under_seven_days = seven_days_ago + timedelta(seconds=1) + eight_days_ago = seven_days_ago - timedelta(days=1) + nine_days_ago = eight_days_ago - timedelta(days=1) + just_under_nine_days = nine_days_ago + timedelta(seconds=1) + nine_days_one_second_ago = nine_days_ago 
- timedelta(seconds=1) - eligible_job_1 = datetime(2016, 10, 10, 23, 59, 59, 000) - eligible_job_2 = datetime(2016, 10, 9, 00, 00, 00, 000) - in_eligible_job_too_new = datetime(2016, 10, 11, 00, 00, 00, 000) - in_eligible_job_too_old = datetime(2016, 10, 8, 23, 59, 59, 999) + create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) + job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) + job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) + create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) + create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) - job_1 = create_sample_job(notify_db, notify_db_session, created_at=eligible_job_1) - job_2 = create_sample_job(notify_db, notify_db_session, created_at=eligible_job_2) - create_sample_job(notify_db, notify_db_session, created_at=in_eligible_job_too_new) - create_sample_job(notify_db, notify_db_session, created_at=in_eligible_job_too_old) + remove_csv_files(job_types=[sample_template.template_type]) - with freeze_time('2016-10-18T10:00:00'): - remove_csv_files() - assert s3.remove_job_from_s3.call_args_list == [call(job_1.service_id, job_1.id), call(job_2.service_id, job_2.id)] + assert s3.remove_job_from_s3.call_args_list == [ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id) + ] def test_send_daily_performance_stats_calls_does_not_send_if_inactive( @@ -453,3 +467,24 @@ def test_should_call_delete_inbound_sms_older_than_seven_days(notify_api, mocker mocker.patch('app.celery.scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago') delete_inbound_sms_older_than_seven_days() assert scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago.call_count == 1 + + +@freeze_time('2017-01-01 10:00:00') +def test_remove_csv_files_filters_by_type(mocker, sample_service): + mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) + sms_template = create_template(service=sample_service, template_type=SMS_TYPE) + + eight_days_ago = datetime.utcnow() - timedelta(days=8) + + job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) + create_job(template=sms_template, created_at=eight_days_ago) + + remove_csv_files(job_types=[LETTER_TYPE]) + + assert s3.remove_job_from_s3.call_args_list == [ + call(job_to_delete.service_id, job_to_delete.id), + ] From cad195949ac4259d0a4e996380c6d248a335abf7 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Tue, 6 Jun 2017 16:21:05 +0100 Subject: [PATCH 82/92] Ensure that the bill includes whatever free allowance is applicable. 
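
Illustrative sketch only (the helper name apply_free_allowance is made up for this note, it is not
in the codebase): how the free allowance is consumed across rate boundaries, mirroring the hunk
below; the final assertion matches the free_tier=3 case in the new parametrised test.

    def apply_free_allowance(units_per_boundary, rates, free_sms_limit):
        # Walk the rate boundaries in order, using up the free allowance
        # first and only charging for units above it.
        billable_units = 0
        total_cost = 0.0
        for units, rate in zip(units_per_boundary, rates):
            if billable_units >= free_sms_limit:
                total_cost += units * rate
            elif billable_units + units > free_sms_limit:
                remaining_free_allowance = free_sms_limit - billable_units
                total_cost += (units - remaining_free_allowance) * rate
            billable_units += units
        return billable_units, total_cost

    assert apply_free_allowance([2, 2, 2], [2, 4, 6], 3) == (6, 16.0)
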
--- app/dao/notification_usage_dao.py | 9 +-- tests/app/dao/test_notification_usage_dao.py | 76 ++++++++++++++++++++ 2 files changed, 81 insertions(+), 4 deletions(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index 1ee3134bd..995462509 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -179,14 +179,15 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date ) billable_units_by_rate_boundry = result.scalar() if billable_units_by_rate_boundry: + int_billable_units_by_rate_boundry = int(billable_units_by_rate_boundry) if billable_units >= free_sms_limit: - total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] - elif billable_units + billable_units_by_rate_boundry > free_sms_limit: + total_cost += int_billable_units_by_rate_boundry * rate_boundary['rate'] + elif billable_units + int_billable_units_by_rate_boundry > free_sms_limit: remaining_free_allowance = abs(free_sms_limit - billable_units) - total_cost += ((billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary) + total_cost += ((int_billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary['rate']) else: total_cost += 0 - billable_units += int(billable_units_by_rate_boundry) + billable_units += int_billable_units_by_rate_boundry return billable_units, total_cost diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index c20b76a4c..ef3bdfd13 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -623,3 +623,79 @@ def test_should_calculate_rate_boundaries_for_billing_query_for_three_relevant_r assert rate_boundaries[2]['start_date'] == rate_3_valid_from assert rate_boundaries[2]['end_date'] == end_date assert rate_boundaries[2]['rate'] == 0.06 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_deducts_free_tier_from_bill( + notify_db, notify_db_session +): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 1 + + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 2.5 + + +@freeze_time("2016-01-10 12:00:00.000000") +@pytest.mark.parametrize( + 'free_tier, expected_cost', + [(0, 24.0), (1, 22.0), (2, 20.0), (3, 16.0), (4, 12.0), (5, 6.0), (6, 0.0)] +) +def test_deducts_free_tier_from_bill_across_rate_boundaries( + notify_db, notify_db_session, sample_service, free_tier, expected_cost +): + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = free_tier + + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end 
= datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) + + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == expected_cost From 635fb8fe44ca611d95c96a3e769015185d7d2331 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Tue, 6 Jun 2017 16:21:57 +0100 Subject: [PATCH 83/92] Add private endpoint to get notification by ID MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We need this for the two way stuff in the admin app. We already have this as a public endpoint, but the admin app can’t use it, because the admin app auths with its own key, not that of the service it’s acting on behalf of. This endpoint makes sure that a request originating from one service can’t be used to see notifications belonging to another service. --- app/service/rest.py | 13 +++++++++++++ tests/app/service/test_rest.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/app/service/rest.py b/app/service/rest.py index 24fdcd513..1e301d0a7 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -300,6 +300,19 @@ def get_all_notifications_for_service(service_id): ), 200 +@service_blueprint.route('//notifications/', methods=['GET']) +def get_notification_for_service(service_id, notification_id): + + notification = notifications_dao.get_notification_with_personalisation( + service_id, + notification_id, + key_type=None, + ) + return jsonify( + notification_with_template_schema.dump(notification).data, + ), 200 + + def search_for_notification_by_to_field(service_id, search_term, statuses): results = notifications_dao.dao_get_notifications_by_to_field(service_id, search_term, statuses) return jsonify( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index f2b0a8247..ac8537a4f 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1248,6 +1248,39 @@ def test_get_all_notifications_for_service_in_order(notify_api, notify_db, notif assert response.status_code == 200 +def test_get_notification_for_service(client, notify_db, notify_db_session): + + service_1 = create_service(notify_db, notify_db_session, service_name="1", email_from='1') + service_2 = create_service(notify_db, notify_db_session, service_name="2", email_from='2') + + service_1_notifications = [ + create_sample_notification(notify_db, notify_db_session, service=service_1), + create_sample_notification(notify_db, notify_db_session, service=service_1), + create_sample_notification(notify_db, notify_db_session, service=service_1), + ] + + service_2_notifications = [ + create_sample_notification(notify_db, notify_db_session, service=service_2) + ] + + for notification in service_1_notifications: + response = client.get( + 
path='/service/{}/notifications/{}'.format(service_1.id, notification.id), + headers=[create_authorization_header()] + ) + resp = json.loads(response.get_data(as_text=True)) + assert str(resp['id']) == str(notification.id) + assert response.status_code == 200 + + service_2_response = client.get( + path='/service/{}/notifications/{}'.format(service_2.id, notification.id), + headers=[create_authorization_header()] + ) + assert service_2_response.status_code == 404 + service_2_response = json.loads(service_2_response.get_data(as_text=True)) + assert service_2_response == {'message': 'No result found', 'result': 'error'} + + @pytest.mark.parametrize( 'include_from_test_key, expected_count_of_notifications', [ From 23a501af1691227a9fdd30e22115bfc69468f336 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Tue, 6 Jun 2017 17:11:59 +0100 Subject: [PATCH 84/92] Add dao to get inbound sms by id --- app/dao/inbound_sms_dao.py | 4 ++++ tests/app/dao/test_inbound_sms_dao.py | 11 ++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py index 3411aeb22..d87c1b1ed 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -47,3 +47,7 @@ def delete_inbound_sms_created_more_than_a_week_ago(): ).delete(synchronize_session='fetch') return deleted + + +def dao_get_inbound_sms_by_id(inbound_id): + return InboundSms.query.filter_by(id=inbound_id).one() diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py index 2731562c4..6162b6d82 100644 --- a/tests/app/dao/test_inbound_sms_dao.py +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -5,7 +5,8 @@ from freezegun import freeze_time from app.dao.inbound_sms_dao import ( dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service, - delete_inbound_sms_created_more_than_a_week_ago + delete_inbound_sms_created_more_than_a_week_ago, + dao_get_inbound_sms_by_id ) from tests.app.db import create_inbound_sms, create_service @@ -86,3 +87,11 @@ def test_should_not_delete_inbound_sms_before_seven_days(sample_service): delete_inbound_sms_created_more_than_a_week_ago() assert len(InboundSms.query.all()) == 2 + + +def test_get_inbound_sms_by_id_returns(sample_service): + inbound = create_inbound_sms(sample_service) + + inbound_from_db = dao_get_inbound_sms_by_id(inbound.id) + + assert inbound == inbound_from_db From ee488d416a0186ddb28e47764e9860d969498489 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Tue, 6 Jun 2017 17:12:21 +0100 Subject: [PATCH 85/92] Add endpoint to get inbound by id --- app/inbound_sms/rest.py | 23 ++++++++++++++++++++++- tests/app/inbound_sms/test_rest.py | 26 ++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index d4072dea3..755f830d3 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -1,11 +1,19 @@ +import uuid + from flask import ( Blueprint, jsonify, request ) +from werkzeug.exceptions import abort + from notifications_utils.recipients import validate_and_format_phone_number -from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service +from app.dao.inbound_sms_dao import ( + dao_get_inbound_sms_for_service, + dao_count_inbound_sms_for_service, + dao_get_inbound_sms_by_id +) from app.errors import register_errors inbound_sms = Blueprint( @@ -40,3 +48,16 @@ def get_inbound_sms_summary_for_service(service_id): count=count, most_recent=most_recent[0].created_at.isoformat() if 
most_recent else None ) + + +@inbound_sms.route('/', methods=['GET']) +def get_inbound_by_id(service_id, inbound_sms_id): + # TODO: Add JSON Schema here + try: + validated_uuid = uuid.UUID(inbound_sms_id) + except (ValueError, AttributeError): + abort(400) + + inbound_sms = dao_get_inbound_sms_by_id(validated_uuid) + + return jsonify(inbound_sms.serialize()), 200 diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index da10ecb6b..4f021d03a 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -112,3 +112,29 @@ def test_get_inbound_sms_summary_with_no_inbound(admin_request, sample_service): 'count': 0, 'most_recent': None } + + +def test_get_inbound_sms_by_id_returns_200(admin_request, sample_service): + inbound = create_inbound_sms(sample_service, user_number='447700900001') + + response = admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': sample_service.id, + 'inbound_sms_id': inbound.id + } + ) + + assert response['user_number'] == '447700900001' + assert response['service_id'] == str(sample_service.id) + + +def test_get_inbound_sms_by_id_invalid_id_returns_400(admin_request, sample_service): + assert admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': sample_service.id, + 'inbound_sms_id': 'dsadsda' + }, + expected_status=400 + ) From d97c7c8e56e2a47a8cfe0bfb4d368482d747fc3d Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 7 Jun 2017 09:58:57 +0100 Subject: [PATCH 86/92] - Fix up free tier on the service object, use it only on dump not create/update in marshmallow - Ensure tests leave config as was after a test run that alters free tier quantity --- app/schemas.py | 3 +- tests/app/dao/test_notification_usage_dao.py | 525 ++++++++++--------- tests/app/service/test_rest.py | 4 +- tests/conftest.py | 1 + 4 files changed, 274 insertions(+), 259 deletions(-) diff --git a/app/schemas.py b/app/schemas.py index 3ca8fda1a..8f8395f3e 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -175,7 +175,7 @@ class ProviderDetailsHistorySchema(BaseSchema): class ServiceSchema(BaseSchema): - free_sms_fragment_limit = fields.Method('get_free_sms_fragment_limit') + free_sms_fragment_limit = fields.Method(method_name='get_free_sms_fragment_limit') created_by = field_for(models.Service, 'created_by', required=True) organisation = field_for(models.Service, 'organisation') branding = field_for(models.Service, 'branding') @@ -191,6 +191,7 @@ class ServiceSchema(BaseSchema): class Meta: model = models.Service + dump_only = ['free_sms_fragment_limit'] exclude = ( 'updated_at', 'created_at', diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index ef3bdfd13..83bfa264c 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -21,6 +21,8 @@ from tests.app.conftest import sample_notification, sample_email_template, sampl from tests.app.db import create_notification from freezegun import freeze_time +from tests.conftest import set_config + def test_get_rates_for_year(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) @@ -266,232 +268,235 @@ def set_up_rate(notify_db, start_date, value): @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db_session, sample_service): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 
'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) + set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=2, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=3, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=4, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=2, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=3, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=4, status=NOTIFICATION_DELIVERED) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 10 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 0.16 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 10 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 0.16 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications( notify_db, notify_db_session, sample_service ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=2.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=5.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=10.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=2.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=5.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=10.0, status=NOTIFICATION_DELIVERED) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) - - assert 
get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 18 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 45 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 18 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 45 def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications_for_several_rates( notify_db, notify_db_session, sample_service ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - set_up_rate(notify_db, datetime(2016, 1, 1), 2) - set_up_rate(notify_db, datetime(2016, 10, 1), 4) - set_up_rate(notify_db, datetime(2017, 1, 1), 6) + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) - eligble_rate_1 = datetime(2016, 2, 1) - eligble_rate_2 = datetime(2016, 11, 1) - eligble_rate_3 = datetime(2017, 2, 1) + eligble_rate_1 = datetime(2016, 2, 1) + eligble_rate_2 = datetime(2016, 11, 1) + eligble_rate_3 = datetime(2017, 2, 1) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=1.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_1) - - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=2.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_2) - - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=5.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_3) - - start = datetime(2016, 1, 1) - end = datetime(2018, 1, 1) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 8 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 40 - - -def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( - notify_db, notify_db_session, sample_service -): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 - - set_up_rate(notify_db, datetime(2016, 1, 1), 2) - set_up_rate(notify_db, datetime(2016, 10, 1), 4) - set_up_rate(notify_db, datetime(2017, 1, 1), 6) - - eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) - eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) - eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) - eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) - eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) - eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) - - def make_notification(created_at): sample_notification( notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED, - created_at=created_at) + created_at=eligble_rate_1) - make_notification(eligble_rate_1_start) - make_notification(eligble_rate_1_end) - make_notification(eligble_rate_2_start) - make_notification(eligble_rate_2_end) - make_notification(eligble_rate_3_start) - make_notification(eligble_rate_3_whenever) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=2.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_2) - start = datetime(2016, 1, 1) - end = datetime(2018, 1, 1) + sample_notification( + notify_db, + 
notify_db_session, + service=sample_service, + rate_multiplier=5.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_3) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 24.0 + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 8 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 40 + + +def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( + notify_db, notify_db_session, sample_service +): + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) + + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 24.0 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and_emails( notify_db, notify_db_session, sample_service ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) - letter_template = sample_letter_template(sample_service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) + letter_template = sample_letter_template(sample_service) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - template=email_template, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - template=letter_template, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED - ) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + 
template=email_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + template=letter_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_for_only_requested_service( notify_db, notify_db_session ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - sample_notification( - notify_db, - notify_db_session, - service=service_1, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=service_2, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=service_3, - billable_units=2, - status=NOTIFICATION_DELIVERED - ) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_2, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_3, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_handling_null_values( notify_db, notify_db_session, sample_service ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with 
set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=2, - rate_multiplier=None, - status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=2, + rate_multiplier=None, + status=NOTIFICATION_DELIVERED) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @pytest.mark.parametrize('billable_units, states', ([ @@ -502,62 +507,61 @@ def test_returns_total_billable_units_for_sms_notifications_handling_null_values def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service, billable_units, states ): + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + for state in states: + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=None, + status=state) - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - for state in states: - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=None, - status=state) - - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) - - assert get_total_billable_units_for_sent_sms_notifications_in_date_range( - start, end, sample_service.id - )[0] == billable_units - assert get_total_billable_units_for_sent_sms_notifications_in_date_range( - start, end, sample_service.id - )[1] == billable_units * 2.5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[0] == billable_units + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == billable_units * 2.5 @freeze_time("2016-01-10 12:00:00.000000") def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 0 + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=100), + status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=1.0, - created_at=datetime.utcnow() 
- timedelta(minutes=100), - status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=5), + status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=1.0, - created_at=datetime.utcnow() - timedelta(minutes=5), - status=NOTIFICATION_DELIVERED) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) - - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 1 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 2.5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 1 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 2.5 def test_returns_zero_if_no_matching_rows_when_returning_billable_units_for_sms_notifications( @@ -629,30 +633,34 @@ def test_should_calculate_rate_boundaries_for_billing_query_for_three_relevant_r def test_deducts_free_tier_from_bill( notify_db, notify_db_session ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 1 + start_value = current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] + try: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 1 - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - sample_notification( - notify_db, - notify_db_session, - service=service_1, - billable_units=1, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=service_1, - billable_units=1, - status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 2.5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 2.5 + finally: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = start_value @freeze_time("2016-01-10 12:00:00.000000") @@ -663,39 +671,42 @@ def test_deducts_free_tier_from_bill( def test_deducts_free_tier_from_bill_across_rate_boundaries( notify_db, notify_db_session, sample_service, free_tier, expected_cost ): - current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = free_tier + start_value = current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] + try: + 
current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = free_tier + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) - set_up_rate(notify_db, datetime(2016, 1, 1), 2) - set_up_rate(notify_db, datetime(2016, 10, 1), 4) - set_up_rate(notify_db, datetime(2017, 1, 1), 6) + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) - eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) - eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) - eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) - eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) - eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) - eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) - def make_notification(created_at): - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=1.0, - status=NOTIFICATION_DELIVERED, - created_at=created_at) + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) - make_notification(eligble_rate_1_start) - make_notification(eligble_rate_1_end) - make_notification(eligble_rate_2_start) - make_notification(eligble_rate_2_end) - make_notification(eligble_rate_3_start) - make_notification(eligble_rate_3_whenever) + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) - start = datetime(2016, 1, 1) - end = datetime(2018, 1, 1) - - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range( - start, end, sample_service.id - )[1] == expected_cost + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == expected_cost + finally: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = start_value diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 2c2842177..bc6010fa2 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -27,6 +27,7 @@ from app.models import ( ) from tests.app.db import create_user +from tests.conftest import set_config_values def test_get_service_list(client, service_factory): @@ -148,6 +149,7 @@ def test_get_service_by_id(client, sample_service): def test_get_service_by_id_returns_free_sms_limit(client, sample_service): + auth_header = create_authorization_header() resp = client.get( '/service/{}'.format(sample_service.id), @@ -2001,7 +2003,7 @@ def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sa '/service/{}/yearly-sms-billable-units?year=2016'.format(sample_service.id), headers=[create_authorization_header()] ) - 
print(response.get_data(as_text=True)) + response.get_data(as_text=True) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == { 'billable_sms_units': 50, diff --git a/tests/conftest.py b/tests/conftest.py index bf9823331..a59de1958 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -115,6 +115,7 @@ def set_config(app, name, value): old_val = app.config.get(name) app.config[name] = value yield + print(app.config) app.config[name] = old_val From 1b4097cb16ccfbe9485632d998a9f7854f1016fc Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 7 Jun 2017 11:15:05 +0100 Subject: [PATCH 87/92] Add three new columns to job_statistics for sent, delivered and failed. A job only ever has one notification type. This is the first deploy, where the columns are added and populated. Next a data migration will happen to populate these new columns for the older jobs that do not have the values set. Then we stop populating the old columns and remove them. This refactoring of the table structure will make the queries to the table much easier to handle. --- app/dao/statistics_dao.py | 17 +- app/models.py | 3 + migrations/versions/0094_job_stats_update.py | 25 +++ tests/app/dao/test_statistics_dao.py | 215 ++++++++++++++----- 4 files changed, 203 insertions(+), 57 deletions(-) create mode 100644 migrations/versions/0094_job_stats_update.py diff --git a/app/dao/statistics_dao.py b/app/dao/statistics_dao.py index baff82ba4..48d62e71c 100644 --- a/app/dao/statistics_dao.py +++ b/app/dao/statistics_dao.py @@ -60,7 +60,10 @@ def timeout_job_counts(notifications_type, timeout_start): ).update({ sent: sent_count, failed: failed_count, - delivered: delivered_count + delivered: delivered_count, + 'sent': sent_count, + 'delivered': delivered_count, + 'failed': failed_count }, synchronize_session=False) return total_updated @@ -87,11 +90,13 @@ def create_or_update_job_sending_statistics(notification): @transactional def __update_job_stats_sent_count(notification): column = columns(notification.notification_type, 'sent') + new_column = 'sent' return db.session.query(JobStatistics).filter_by( job_id=notification.job_id, ).update({ - column: column + 1 + column: column + 1, + new_column: column + 1 }) @@ -102,7 +107,8 @@ def __insert_job_stats(notification): emails_sent=1 if notification.notification_type == EMAIL_TYPE else 0, sms_sent=1 if notification.notification_type == SMS_TYPE else 0, letters_sent=1 if notification.notification_type == LETTER_TYPE else 0, - updated_at=datetime.utcnow() + updated_at=datetime.utcnow(), + sent=1 ) db.session.add(stats) @@ -131,10 +137,12 @@ def columns(notification_type, status): def update_job_stats_outcome_count(notification): if notification.status in NOTIFICATION_STATUS_TYPES_FAILED: column = columns(notification.notification_type, 'failed') + new_column = 'failed' elif notification.status in [NOTIFICATION_DELIVERED, NOTIFICATION_SENT] and notification.notification_type != LETTER_TYPE: column = columns(notification.notification_type, 'delivered') + new_column = 'delivered' else: column = None @@ -143,7 +151,8 @@ def update_job_stats_outcome_count(notification): return db.session.query(JobStatistics).filter_by( job_id=notification.job_id, ).update({ - column: column + 1 + column: column + 1, + new_column: column + 1 }) else: return 0 diff --git a/app/models.py b/app/models.py index 1bcd66a19..22d819389 100644 --- a/app/models.py +++ b/app/models.py @@ -1122,6 +1122,9 @@ class JobStatistics(db.Model): sms_failed = db.Column(db.BigInteger, 
index=False, unique=False, nullable=False, default=0) letters_sent = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) letters_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + sent = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0) + delivered = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0) + failed = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0) created_at = db.Column( db.DateTime, index=False, diff --git a/migrations/versions/0094_job_stats_update.py b/migrations/versions/0094_job_stats_update.py new file mode 100644 index 000000000..6a7f7db2a --- /dev/null +++ b/migrations/versions/0094_job_stats_update.py @@ -0,0 +1,25 @@ +"""empty message + +Revision ID: 0094_job_stats_update +Revises: 0093_data_gov_uk +Create Date: 2017-06-06 14:37:30.051647 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = '0094_job_stats_update' +down_revision = '0093_data_gov_uk' + + +def upgrade(): + op.add_column('job_statistics', sa.Column('sent', sa.BigInteger(), nullable=True)) + op.add_column('job_statistics', sa.Column('delivered', sa.BigInteger(), nullable=True)) + op.add_column('job_statistics', sa.Column('failed', sa.BigInteger(), nullable=True)) + + +def downgrade(): + op.drop_column('job_statistics', 'sent') + op.drop_column('job_statistics', 'failed') + op.drop_column('job_statistics', 'delivered') diff --git a/tests/app/dao/test_statistics_dao.py b/tests/app/dao/test_statistics_dao.py index 4425a3409..5a73ea672 100644 --- a/tests/app/dao/test_statistics_dao.py +++ b/tests/app/dao/test_statistics_dao.py @@ -199,6 +199,10 @@ def test_should_update_a_stats_entry_with_its_success_outcome_for_a_job( assert stat.sms_failed == 0 assert stat.letters_failed == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == email_count + sms_count + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_TECHNICAL_FAILURE), @@ -264,6 +268,10 @@ def test_should_update_a_stats_entry_with_its_error_outcomes_for_a_job( assert stat.emails_delivered == 0 assert stat.sms_delivered == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == 0 + assert stat.failed == email_count + sms_count + letter_count + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_DELIVERED), @@ -326,6 +334,10 @@ def test_should_update_a_stats_entry_with_its_success_outcomes_for_a_job( assert stat.emails_delivered == email_count assert stat.sms_delivered == sms_count + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == 0 if notification_type == LETTER_TYPE else 1 + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_PENDING), @@ -394,6 +406,10 @@ def test_should_not_update_job_stats_if_irrelevant_status( assert stat.emails_delivered == 0 assert stat.sms_delivered == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == 0 + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ (SMS_TYPE, 2, 1, 1), @@ -480,41 +496,52 @@ def 
test_inserting_one_type_of_notification_maintains_other_counts( assert updated_stats[0].sms_sent == sms_count assert updated_stats[0].letters_sent == letter_count + if notification_type == EMAIL_TYPE: + assert updated_stats[0].sent == email_count + elif notification_type == SMS_TYPE: + assert updated_stats[0].sent == sms_count + elif notification_type == LETTER_TYPE: + assert updated_stats[0].sent == letter_count + def test_updating_one_type_of_notification_to_success_maintains_other_counts( notify_db, notify_db_session, - sample_job, + sample_service, sample_letter_template ): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + job_1 = sample_job(notify_db, notify_db_session, service=sample_service) + job_2 = sample_job(notify_db, notify_db_session, service=sample_service) + job_3 = sample_job(notify_db, notify_db_session, service=sample_service) + + sms_template = sample_template(notify_db, notify_db_session, service=sample_service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) letter_template = sample_letter_template letter = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=letter_template, - job=sample_job, + job=job_1, status=NOTIFICATION_CREATED ) email = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=email_template, - job=sample_job, + job=job_2, status=NOTIFICATION_CREATED ) sms = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=sms_template, - job=sample_job, + job=job_3, status=NOTIFICATION_CREATED ) @@ -530,49 +557,76 @@ def test_updating_one_type_of_notification_to_success_maintains_other_counts( update_job_stats_outcome_count(email) update_job_stats_outcome_count(sms) - stats = JobStatistics.query.all() - assert len(stats) == 1 - assert stats[0].emails_sent == 1 - assert stats[0].sms_sent == 1 + stats = JobStatistics.query.order_by(JobStatistics.created_at).all() + assert len(stats) == 3 assert stats[0].letters_sent == 1 - assert stats[0].emails_delivered == 1 - assert stats[0].sms_delivered == 1 + assert stats[0].emails_sent == 0 + assert stats[0].sms_sent == 0 + assert stats[0].emails_delivered == 0 + assert stats[0].sms_delivered == 0 + + assert stats[1].letters_sent == 0 + assert stats[1].emails_sent == 1 + assert stats[1].sms_sent == 0 + assert stats[1].emails_delivered == 1 + assert stats[1].sms_delivered == 0 + + assert stats[2].letters_sent == 0 + assert stats[2].emails_sent == 0 + assert stats[2].sms_sent == 1 + assert stats[2].emails_delivered == 0 + assert stats[2].sms_delivered == 1 + + assert stats[0].sent == 1 + assert stats[0].delivered == 0 + assert stats[0].failed == 0 + + assert stats[1].sent == 1 + assert stats[1].delivered == 1 + assert stats[1].failed == 0 + + assert stats[2].sent == 1 + assert stats[2].delivered == 1 + assert stats[2].failed == 0 def test_updating_one_type_of_notification_to_error_maintains_other_counts( notify_db, notify_db_session, - sample_job, + sample_service, sample_letter_template ): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + job_1 = sample_job(notify_db, notify_db_session, service=sample_service) + 
job_2 = sample_job(notify_db, notify_db_session, service=sample_service) + job_3 = sample_job(notify_db, notify_db_session, service=sample_service) + sms_template = sample_template(notify_db, notify_db_session, service=sample_service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) letter_template = sample_letter_template letter = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=letter_template, - job=sample_job, + job=job_1, status=NOTIFICATION_CREATED ) email = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=email_template, - job=sample_job, + job=job_2, status=NOTIFICATION_CREATED ) sms = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=sms_template, - job=sample_job, + job=job_3, status=NOTIFICATION_CREATED ) @@ -588,20 +642,50 @@ def test_updating_one_type_of_notification_to_error_maintains_other_counts( update_job_stats_outcome_count(email) update_job_stats_outcome_count(sms) - stats = JobStatistics.query.all() - assert len(stats) == 1 - assert stats[0].emails_sent == 1 - assert stats[0].sms_sent == 1 + stats = JobStatistics.query.order_by(JobStatistics.created_at).all() + assert len(stats) == 3 + assert stats[0].emails_sent == 0 + assert stats[0].sms_sent == 0 assert stats[0].letters_sent == 1 assert stats[0].emails_delivered == 0 assert stats[0].sms_delivered == 0 - assert stats[0].sms_failed == 1 - assert stats[0].emails_failed == 1 + assert stats[0].sms_failed == 0 + assert stats[0].emails_failed == 0 + assert stats[0].letters_failed == 1 + + assert stats[1].emails_sent == 1 + assert stats[1].sms_sent == 0 + assert stats[1].letters_sent == 0 + assert stats[1].emails_delivered == 0 + assert stats[1].sms_delivered == 0 + assert stats[1].sms_failed == 0 + assert stats[1].emails_failed == 1 + assert stats[1].letters_failed == 0 + + assert stats[2].emails_sent == 0 + assert stats[2].sms_sent == 1 + assert stats[2].letters_sent == 0 + assert stats[2].emails_delivered == 0 + assert stats[2].sms_delivered == 0 + assert stats[2].sms_failed == 1 + assert stats[2].emails_failed == 0 + assert stats[1].letters_failed == 0 + + assert stats[0].sent == 1 + assert stats[0].delivered == 0 + assert stats[0].failed == 1 + + assert stats[1].sent == 1 + assert stats[1].delivered == 0 + assert stats[1].failed == 1 + + assert stats[2].sent == 1 + assert stats[2].delivered == 0 + assert stats[2].failed == 1 -def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, notify_db_session, sample_job): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) +def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, notify_db_session, + sample_job, sample_template): one_minute_ago = datetime.utcnow() - timedelta(minutes=1) @@ -609,43 +693,51 @@ def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, not notify_db, notify_db_session, service=sample_job.service, - template=sms_template, + template=sample_template, job=sample_job, status=NOTIFICATION_CREATED ) - email = sample_notification( + sms_2 = sample_notification( notify_db, notify_db_session, service=sample_job.service, - template=email_template, + template=sample_template, job=sample_job, status=NOTIFICATION_CREATED ) - 
create_or_update_job_sending_statistics(email) create_or_update_job_sending_statistics(sms) + create_or_update_job_sending_statistics(sms_2) JobStatistics.query.update({JobStatistics.created_at: one_minute_ago}) - intial_stats = JobStatistics.query.all() + initial_stats = JobStatistics.query.all() - assert intial_stats[0].emails_sent == 1 - assert intial_stats[0].sms_sent == 1 - assert intial_stats[0].emails_delivered == 0 - assert intial_stats[0].sms_delivered == 0 - assert intial_stats[0].sms_failed == 0 - assert intial_stats[0].emails_failed == 0 + assert initial_stats[0].emails_sent == 0 + assert initial_stats[0].sms_sent == 2 + assert initial_stats[0].emails_delivered == 0 + assert initial_stats[0].sms_delivered == 0 + assert initial_stats[0].sms_failed == 0 + assert initial_stats[0].emails_failed == 0 + + assert initial_stats[0].sent == 2 + assert initial_stats[0].delivered == 0 + assert initial_stats[0].failed == 0 dao_timeout_job_statistics(61) updated_stats = JobStatistics.query.all() - assert updated_stats[0].emails_sent == 1 - assert updated_stats[0].sms_sent == 1 + assert updated_stats[0].emails_sent == 0 + assert updated_stats[0].sms_sent == 2 assert updated_stats[0].emails_delivered == 0 assert updated_stats[0].sms_delivered == 0 assert updated_stats[0].sms_failed == 0 assert updated_stats[0].emails_failed == 0 + assert initial_stats[0].sent == 2 + assert initial_stats[0].delivered == 0 + assert initial_stats[0].failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count', [ (SMS_TYPE, 3, 0), @@ -688,6 +780,9 @@ def test_timeout_job_counts_timesout_multiple_jobs( assert stats.sms_delivered == 0 assert stats.sms_failed == 0 assert stats.emails_failed == 0 + assert stats.sent == email_count + sms_count + assert stats.delivered == 0 + assert stats.failed == 0 dao_timeout_job_statistics(1) updated_stats = JobStatistics.query.all() @@ -698,6 +793,9 @@ def test_timeout_job_counts_timesout_multiple_jobs( assert stats.sms_delivered == 0 assert stats.sms_failed == sms_count assert stats.emails_failed == email_count + assert stats.sent == email_count + sms_count + assert stats.delivered == 0 + assert stats.failed == email_count + sms_count count_notifications = len(NOTIFICATION_STATUS_TYPES) @@ -754,17 +852,13 @@ def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert initial_stats[0].sms_failed == 0 assert initial_stats[0].emails_failed == 0 - all = JobStatistics.query.all() - for a in all: - print(a) + assert initial_stats[0].sent == count_notifications + assert initial_stats[0].delivered == 0 + assert initial_stats[0].failed == 0 # timeout the notifications dao_timeout_job_statistics(1) - all = JobStatistics.query.all() - for a in all: - print(a) - # after timeout all delivered states are success and ALL other states are failed updated_stats = JobStatistics.query.filter_by(job_id=email_job.id).all() assert updated_stats[0].emails_sent == count_notifications @@ -774,6 +868,10 @@ def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert updated_stats[0].sms_failed == 0 assert updated_stats[0].emails_failed == count_error_notifications + assert initial_stats[0].sent == count_notifications + assert initial_stats[0].delivered == count_success_notifications + assert initial_stats[0].failed == count_error_notifications + sms_stats = JobStatistics.query.filter_by(job_id=sms_job.id).all() assert sms_stats[0].emails_sent == 0 assert sms_stats[0].sms_sent == 1 @@ -781,6 +879,9 @@ def 
test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert sms_stats[0].sms_delivered == 0 assert sms_stats[0].sms_failed == 1 assert sms_stats[0].emails_failed == 0 + assert sms_stats[0].sent == 1 + assert sms_stats[0].delivered == 0 + assert sms_stats[0].failed == 1 # this test is as above, but for SMS not email @@ -810,6 +911,10 @@ def test_timeout_job_sets_all_non_delivered_states_to_error( assert stats.sms_failed == 0 assert stats.emails_failed == 0 + assert stats.sent == count_notifications + assert stats.delivered == 0 + assert stats.failed == 0 + dao_timeout_job_statistics(1) updated_stats = JobStatistics.query.all() @@ -820,3 +925,7 @@ def test_timeout_job_sets_all_non_delivered_states_to_error( assert stats.sms_delivered == count_success_notifications assert stats.sms_failed == count_error_notifications assert stats.emails_failed == 0 + + assert stats.sent == count_notifications + assert stats.delivered == count_success_notifications + assert stats.failed == count_error_notifications From f1399ca7f1cffe6ec2bbbbf3e0018866538cffea Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 7 Jun 2017 11:58:10 +0100 Subject: [PATCH 88/92] Fix support URLs in Notify emails --- app/user/rest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/user/rest.py b/app/user/rest.py index cc0c6e41b..9a9cf3832 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -206,7 +206,7 @@ def send_user_confirm_new_email(user_id): personalisation={ 'name': user_to_send_to.name, 'url': _create_confirmation_url(user=user_to_send_to, email_address=email['email']), - 'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/feedback' + 'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/support' }, notification_type=EMAIL_TYPE, api_key_id=None, @@ -259,7 +259,7 @@ def send_already_registered_email(user_id): personalisation={ 'signin_url': current_app.config['ADMIN_BASE_URL'] + '/sign-in', 'forgot_password_url': current_app.config['ADMIN_BASE_URL'] + '/forgot-password', - 'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/feedback' + 'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/support' }, notification_type=EMAIL_TYPE, api_key_id=None, From 6b5451ea879c143538ffb9dc67397293e5bec679 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 7 Jun 2017 13:18:51 +0100 Subject: [PATCH 89/92] Add test for invalid UUID --- tests/app/service/test_rest.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index ac8537a4f..f65825227 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1248,6 +1248,15 @@ def test_get_all_notifications_for_service_in_order(notify_api, notify_db, notif assert response.status_code == 200 +def test_get_notification_for_service_without_uuid(client, notify_db, notify_db_session): + service_1 = create_service(notify_db, notify_db_session, service_name="1", email_from='1') + response = client.get( + path='/service/{}/notifications/{}'.format(service_1.id, 'foo'), + headers=[create_authorization_header()] + ) + assert response.status_code == 404 + + def test_get_notification_for_service(client, notify_db, notify_db_session): service_1 = create_service(notify_db, notify_db_session, service_name="1", email_from='1') From fa0d51b66c5ec6e177eed4dd2678ce5655ed8e14 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 7 Jun 2017 14:19:25 +0100 Subject: [PATCH 90/92] Added the free limit to the detailed service representation. 
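
The schema change below exposes the limit through marshmallow's fields.Method. As a rough, self-contained sketch of that pattern (not the project's code — FakeService and the 250000 value are placeholders), the named schema method is called with the object being serialised and its return value appears in the dumped output under the field name:

from marshmallow import Schema, fields


class FakeService:
    # stand-in for the real Service model; illustrative only
    name = 'service one'

    def free_sms_fragment_limit(self):
        return 250000


class DetailedServiceSketch(Schema):
    name = fields.String()
    # first positional argument is the name of the schema method to call
    free_sms_fragment_limit = fields.Method('get_free_sms_fragment_limit')

    def get_free_sms_fragment_limit(self, service):
        return service.free_sms_fragment_limit()


result = DetailedServiceSketch().dump(FakeService())
# marshmallow 2.x returns a MarshalResult (use result.data); 3.x returns the
# dict directly, e.g. {'name': 'service one', 'free_sms_fragment_limit': 250000}
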
--- app/schemas.py | 5 +++++ tests/app/service/test_rest.py | 13 +++++++++++++ 2 files changed, 18 insertions(+) diff --git a/app/schemas.py b/app/schemas.py index 8f8395f3e..7971300b7 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -260,6 +260,11 @@ class ServiceSchema(BaseSchema): class DetailedServiceSchema(BaseSchema): statistics = fields.Dict() + free_sms_fragment_limit = fields.Method(method_name='get_free_sms_fragment_limit') + + def get_free_sms_fragment_limit(selfs, service): + return service.free_sms_fragment_limit() + class Meta: model = models.Service exclude = ( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index bc6010fa2..2f16543df 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -160,7 +160,20 @@ def test_get_service_by_id_returns_free_sms_limit(client, sample_service): assert json_resp['data']['free_sms_fragment_limit'] == 250000 +def test_get_detailed_service_by_id_returns_free_sms_limit(client, sample_service): + + auth_header = create_authorization_header() + resp = client.get( + '/service/{}?detailed=True'.format(sample_service.id), + headers=[auth_header] + ) + assert resp.status_code == 200 + json_resp = json.loads(resp.get_data(as_text=True)) + assert json_resp['data']['free_sms_fragment_limit'] == 250000 + + def test_get_service_list_has_default_permissions(client, service_factory): + service_factory.get('one') service_factory.get('one') service_factory.get('two') service_factory.get('three') From 5b4ceda1c6c49fc57b3c4fc978428354ac3cdfba Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Wed, 7 Jun 2017 14:23:31 +0100 Subject: [PATCH 91/92] Refactor: * Filter inbound by service_id * Refactor to return 404 instead of 400 for consistency --- app/dao/inbound_sms_dao.py | 7 +++++-- app/inbound_sms/rest.py | 15 +++------------ tests/app/dao/test_inbound_sms_dao.py | 2 +- tests/app/inbound_sms/test_rest.py | 17 ++++++++++++++--- 4 files changed, 23 insertions(+), 18 deletions(-) diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py index d87c1b1ed..18060cced 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -49,5 +49,8 @@ def delete_inbound_sms_created_more_than_a_week_ago(): return deleted -def dao_get_inbound_sms_by_id(inbound_id): - return InboundSms.query.filter_by(id=inbound_id).one() +def dao_get_inbound_sms_by_id(service_id, inbound_id): + return InboundSms.query.filter_by( + id=inbound_id, + service_id=service_id + ).one() diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index 755f830d3..1eca4b089 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -1,11 +1,8 @@ -import uuid - from flask import ( Blueprint, jsonify, request ) -from werkzeug.exceptions import abort from notifications_utils.recipients import validate_and_format_phone_number @@ -19,7 +16,7 @@ from app.errors import register_errors inbound_sms = Blueprint( 'inbound_sms', __name__, - url_prefix='/service//inbound-sms' + url_prefix='/service//inbound-sms' ) register_errors(inbound_sms) @@ -50,14 +47,8 @@ def get_inbound_sms_summary_for_service(service_id): ) -@inbound_sms.route('/', methods=['GET']) +@inbound_sms.route('/', methods=['GET']) def get_inbound_by_id(service_id, inbound_sms_id): - # TODO: Add JSON Schema here - try: - validated_uuid = uuid.UUID(inbound_sms_id) - except (ValueError, AttributeError): - abort(400) - - inbound_sms = dao_get_inbound_sms_by_id(validated_uuid) + inbound_sms = dao_get_inbound_sms_by_id(service_id, inbound_sms_id) return 
jsonify(inbound_sms.serialize()), 200 diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py index 6162b6d82..b26dc913e 100644 --- a/tests/app/dao/test_inbound_sms_dao.py +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -92,6 +92,6 @@ def test_should_not_delete_inbound_sms_before_seven_days(sample_service): def test_get_inbound_sms_by_id_returns(sample_service): inbound = create_inbound_sms(sample_service) - inbound_from_db = dao_get_inbound_sms_by_id(inbound.id) + inbound_from_db = dao_get_inbound_sms_by_id(sample_service.id, inbound.id) assert inbound == inbound_from_db diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index 4f021d03a..b4c55cd83 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -129,12 +129,23 @@ def test_get_inbound_sms_by_id_returns_200(admin_request, sample_service): assert response['service_id'] == str(sample_service.id) -def test_get_inbound_sms_by_id_invalid_id_returns_400(admin_request, sample_service): +def test_get_inbound_sms_by_id_invalid_id_returns_404(admin_request, sample_service): assert admin_request.get( 'inbound_sms.get_inbound_by_id', endpoint_kwargs={ 'service_id': sample_service.id, - 'inbound_sms_id': 'dsadsda' + 'inbound_sms_id': 'bar' }, - expected_status=400 + expected_status=404 + ) + + +def test_get_inbound_sms_by_id_with_invalid_service_id_returns_404(admin_request, sample_service): + assert admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': 'foo', + 'inbound_sms_id': '2cfbd6a1-1575-4664-8969-f27be0ea40d9' + }, + expected_status=404 ) From d5fc02b14b5cd9aca9e7dd00350b1de799a01cd5 Mon Sep 17 00:00:00 2001 From: Martyn Inglis Date: Wed, 7 Jun 2017 15:13:48 +0100 Subject: [PATCH 92/92] removed print statement :-( --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index a59de1958..bf9823331 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -115,7 +115,6 @@ def set_config(app, name, value): old_val = app.config.get(name) app.config[name] = value yield - print(app.config) app.config[name] = old_val
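
For reference, the fixture this final commit cleans up follows a simple override-and-restore pattern, which the billing tests earlier in the series use via "with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0):". A minimal sketch of that pattern is below; it assumes only a Flask-style app.config mapping, and the try/finally is a small hardening over the fixture as written so the old value is restored even if the test body raises:

from contextlib import contextmanager


@contextmanager
def set_config(app, name, value):
    # remember the current value, apply the override, and restore on exit
    old_val = app.config.get(name)
    app.config[name] = value
    try:
        yield
    finally:
        app.config[name] = old_val


# typical usage inside a test:
#   with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0):
#       ...assertions that depend on a zero free tier...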