From fe30942889b650e958979aede949452d11bc3b8a Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Sat, 26 Aug 2017 14:56:56 +0100 Subject: [PATCH 01/42] Update psycopg2 from 2.7.3 to 2.7.3.1 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 51a4b4155..23e45450c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ jsonschema==2.6.0 marshmallow-sqlalchemy==0.13.1 marshmallow==2.13.6 monotonic==1.3 -psycopg2==2.7.3 +psycopg2==2.7.3.1 PyJWT==1.5.2 six==1.10.0 SQLAlchemy-Utils==0.32.14 From 14cf3a58a209482b301ef6f6b66814512be1241c Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 28 Aug 2017 23:18:03 +0100 Subject: [PATCH 02/42] Update flask-migrate from 2.1.0 to 2.1.1 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 51a4b4155..b9dd191fd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ docopt==0.6.2 # TODO: Upgrade flask-bcrypt in a safe way Flask-Bcrypt==0.6.2 # pyup: ignore Flask-Marshmallow==0.8.0 -Flask-Migrate==2.1.0 +Flask-Migrate==2.1.1 Flask-Script==2.0.5 Flask-SQLAlchemy==2.2 Flask==0.12.2 From caea65165c5443252763b1efaf60210b6f59f1cd Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Tue, 29 Aug 2017 15:14:18 +0100 Subject: [PATCH 03/42] Allow Notify service to send international sms MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Right now Notify restricts you to registering with a UK mobile number. This is because when we built the user registration stuff we couldn’t send to international mobiles. However we can send to international mobile numbers, and it’s totally reasonable to expect employees of the UK government to be working abroad, and have a foreign mobile phone – we’ve heard from one such user. In order for users of Notify to register with an international phone number, the Notify service needs to have the `international_sms` permission set. Which this service does, as a data migration. --- .../versions/0117_international_sms_notify.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 migrations/versions/0117_international_sms_notify.py diff --git a/migrations/versions/0117_international_sms_notify.py b/migrations/versions/0117_international_sms_notify.py new file mode 100644 index 000000000..5168e1f9c --- /dev/null +++ b/migrations/versions/0117_international_sms_notify.py @@ -0,0 +1,33 @@ +"""empty message + +Revision ID: 0117_international_sms_notify +Revises: 0116_another_letter_org +Create Date: 2017-08-29 14:09:41.042061 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0117_international_sms_notify' +down_revision = '0116_another_letter_org' + +from alembic import op +from datetime import datetime + + +NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553' + + +def upgrade(): + op.execute(""" + INSERT INTO service_permissions VALUES + ('{}', 'international_sms', '{}') + """.format(NOTIFY_SERVICE_ID, datetime.utcnow())) + + +def downgrade(): + op.execute(""" + DELETE FROM service_permissions + WHERE + service_id = '{}' AND + permission = 'international_sms' + """.format(NOTIFY_SERVICE_ID)) From 04bf566614ee077666412c4dc348094927fe5b65 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 29 Aug 2017 21:31:56 +0100 Subject: [PATCH 04/42] Update boto3 from 1.4.6 to 1.4.7 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3c277b754..071637ce2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -boto3==1.4.6 +boto3==1.4.7 celery==3.1.25 # pyup: <4 docopt==0.6.2 # ignore flask-bcrypt - when upgrading, it installs an invalid version of bcrypt that isn't removed when a different From 26f50af6e9bbdefb2daedc9b885c23cc7fef3d56 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 30 Aug 2017 10:55:18 +0100 Subject: [PATCH 05/42] Let whitelist and user have int. phone numbers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit On the frontend, we’re letting users register with international phone numbers. So we shouldn’t block users from doing this on the API side. Same thing for the whitelist, where we’re also allowing international phone numbers now. --- app/models.py | 2 +- app/schemas.py | 2 +- tests/app/dao/test_users_dao.py | 9 +++++++-- tests/app/service/test_service_whitelist.py | 5 +++-- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/app/models.py b/app/models.py index e32456f68..89e4d56b8 100644 --- a/app/models.py +++ b/app/models.py @@ -318,7 +318,7 @@ class ServiceWhitelist(db.Model): try: if recipient_type == MOBILE_TYPE: - validate_phone_number(recipient) + validate_phone_number(recipient, international=True) instance.recipient = recipient elif recipient_type == EMAIL_TYPE: validate_email_address(recipient) diff --git a/app/schemas.py b/app/schemas.py index af76eef6e..5f81821d4 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -130,7 +130,7 @@ class UserUpdateAttributeSchema(BaseSchema): @validates('mobile_number') def validate_mobile_number(self, value): try: - validate_phone_number(value) + validate_phone_number(value, international=True) except InvalidPhoneError as error: raise ValidationError('Invalid phone number: {}'.format(error)) diff --git a/tests/app/dao/test_users_dao.py b/tests/app/dao/test_users_dao.py index c815eb66b..22e1670f3 100644 --- a/tests/app/dao/test_users_dao.py +++ b/tests/app/dao/test_users_dao.py @@ -23,19 +23,24 @@ from app.models import User, VerifyCode from tests.app.db import create_user -def test_create_user(notify_db_session): +@pytest.mark.parametrize('phone_number', [ + '+447700900986', + '+1-800-555-5555', +]) +def test_create_user(notify_db_session, phone_number): email = 'notify@digital.cabinet-office.gov.uk' data = { 'name': 'Test User', 'email_address': email, 'password': 'password', - 'mobile_number': '+447700900986' + 'mobile_number': phone_number } user = User(**data) save_model_user(user) assert User.query.count() == 1 assert User.query.first().email_address == email assert User.query.first().id == user.id + assert 
User.query.first().mobile_number == phone_number assert not user.platform_admin diff --git a/tests/app/service/test_service_whitelist.py b/tests/app/service/test_service_whitelist.py index 23f6a3aa2..5edbe8e28 100644 --- a/tests/app/service/test_service_whitelist.py +++ b/tests/app/service/test_service_whitelist.py @@ -24,14 +24,15 @@ def test_get_whitelist_returns_data(client, sample_service_whitelist): def test_get_whitelist_separates_emails_and_phones(client, sample_service): dao_add_and_commit_whitelisted_contacts([ ServiceWhitelist.from_string(sample_service.id, EMAIL_TYPE, 'service@example.com'), - ServiceWhitelist.from_string(sample_service.id, MOBILE_TYPE, '07123456789') + ServiceWhitelist.from_string(sample_service.id, MOBILE_TYPE, '07123456789'), + ServiceWhitelist.from_string(sample_service.id, MOBILE_TYPE, '+1800-555-555'), ]) response = client.get('service/{}/whitelist'.format(sample_service.id), headers=[create_authorization_header()]) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == { 'email_addresses': ['service@example.com'], - 'phone_numbers': ['07123456789'] + 'phone_numbers': ['+1800-555-555', '07123456789'] } From 9478af0941a408b9da7bc40ca6bcb14dbb21906b Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Mon, 21 Aug 2017 13:36:47 +0100 Subject: [PATCH 06/42] Remove get yearly usage endpoint --- app/service/rest.py | 19 --------------- tests/app/service/test_rest.py | 44 ---------------------------------- 2 files changed, 63 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index c9f07d20f..4658c2efc 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -458,25 +458,6 @@ def get_monthly_template_stats(service_id): raise InvalidRequest('Year must be a number', status_code=400) -@service_blueprint.route('//yearly-usage') -def get_yearly_billing_usage(service_id): - try: - year = int(request.args.get('year')) - results = notification_usage_dao.get_yearly_billing_data(service_id, year) - json_result = [{ - "credits": x[0], - "billing_units": x[1], - "rate_multiplier": x[2], - "notification_type": x[3], - "international": x[4], - "rate": x[5] - } for x in results] - return json.dumps(json_result) - - except TypeError: - return jsonify(result='error', message='No valid year provided'), 400 - - @service_blueprint.route('//monthly-usage') def get_yearly_monthly_usage(service_id): try: diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index af6a815dc..ca1ceca49 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1730,50 +1730,6 @@ def test_get_template_stats_by_month_returns_error_for_incorrect_year( assert json.loads(response.get_data(as_text=True)) == expected_json -def test_get_yearly_billing_usage(client, notify_db, notify_db_session, sample_service): - rate = Rate(id=uuid.uuid4(), valid_from=datetime(2016, 3, 31, 23, 00), rate=0.0158, notification_type=SMS_TYPE) - notify_db.session.add(rate) - after_rate_created = datetime(2016, 6, 5) - notification = create_sample_notification( - notify_db, - notify_db_session, - created_at=after_rate_created, - sent_at=after_rate_created, - status='sending', - service=sample_service - ) - create_or_update_monthly_billing(sample_service.id, after_rate_created) - response = client.get( - '/service/{}/yearly-usage?year=2016'.format(notification.service_id), - headers=[create_authorization_header()] - ) - assert response.status_code == 200 - - assert json.loads(response.get_data(as_text=True)) == [{'credits': 1, - 
'billing_units': 1, - 'rate_multiplier': 1, - 'notification_type': SMS_TYPE, - 'international': False, - 'rate': 0.0158}, - {'credits': 0, - 'billing_units': 0, - 'rate_multiplier': 1, - 'notification_type': EMAIL_TYPE, - 'international': False, - 'rate': 0}] - - -def test_get_yearly_billing_usage_returns_400_if_missing_year(client, sample_service): - response = client.get( - '/service/{}/yearly-usage'.format(sample_service.id), - headers=[create_authorization_header()] - ) - assert response.status_code == 400 - assert json.loads(response.get_data(as_text=True)) == { - 'message': 'No valid year provided', 'result': 'error' - } - - def test_get_monthly_billing_usage(client, notify_db, notify_db_session, sample_service): rate = Rate(id=uuid.uuid4(), valid_from=datetime(2016, 3, 31, 23, 00), rate=0.0158, notification_type=SMS_TYPE) notify_db.session.add(rate) From 9df498106f8b6be03f7a6a93ff8c22d84ab6ec8d Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 29 Aug 2017 16:26:55 +0100 Subject: [PATCH 07/42] Add one_off argument to create_notification test function One off messages have no API key or job ID. If one_off is set to False, an API key will automatically be added. --- tests/app/db.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/app/db.py b/tests/app/db.py index 8283428c9..e61270b81 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -123,7 +123,8 @@ def create_notification( international=False, phone_prefix=None, scheduled_for=None, - normalised_to=None + normalised_to=None, + one_off=False, ): if created_at is None: created_at = datetime.utcnow() @@ -132,7 +133,7 @@ def create_notification( sent_at = sent_at or datetime.utcnow() updated_at = updated_at or datetime.utcnow() - if job is None and api_key is None: + if not one_off and (job is None and api_key is None): # we didn't specify in test - lets create it api_key = ApiKey.query.filter(ApiKey.service == template.service, ApiKey.key_type == key_type).first() if not api_key: From 132d65bc75c69d44f38f44750f544823d63112e6 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 29 Aug 2017 16:35:30 +0100 Subject: [PATCH 08/42] Add query to get processing time stats for performance platform We are only interested in API notifications, not including test messages. Letters are not included. 
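
For reference, the new dao query is roughly the following SQL (this mirrors the docstring added to notifications_dao.py below; the exact SQLAlchemy expression is in the diff):

    SELECT
        count(notifications.id),
        sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END)
    FROM notifications
    WHERE
        created_at >= 'START DATE' AND
        created_at < 'END DATE' AND
        api_key_id IS NOT NULL AND
        key_type != 'test' AND
        notification_type != 'letter';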
--- app/dao/notifications_dao.py | 36 ++++++++ tests/app/dao/notification_dao/__init__.py | 0 .../test_notification_dao.py | 0 ...t_notification_dao_performance_platform.py | 87 +++++++++++++++++++ 4 files changed, 123 insertions(+) create mode 100644 tests/app/dao/notification_dao/__init__.py rename tests/app/dao/{ => notification_dao}/test_notification_dao.py (100%) create mode 100644 tests/app/dao/notification_dao/test_notification_dao_performance_platform.py diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 4514d4310..dca73188a 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -16,6 +16,8 @@ from notifications_utils.recipients import ( from werkzeug.datastructures import MultiDict from sqlalchemy import (desc, func, or_, and_, asc) from sqlalchemy.orm import joinedload +from sqlalchemy.sql.expression import case +from sqlalchemy.sql import functions from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES from app import db, create_uuid @@ -519,3 +521,37 @@ def set_scheduled_notification_to_processed(notification_id): {'pending': False} ) db.session.commit() + + +def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, end_date): + """ + SELECT + count(notifications), + sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END) + FROM notifications + WHERE + created_at > 'START DATE' AND + created_at < 'END DATE' AND + api_key_id IS NOT NULL AND + key_type != 'test' AND + notification_type != 'letter'; + """ + under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10) + sum_column = functions.sum( + case( + [ + (under_10_secs, 1) + ], + else_=0 + ) + ) + return db.session.query( + func.count(Notification.id).label('messages_total'), + sum_column.label('messages_within_10_secs') + ).filter( + Notification.created_at >= start_date, + Notification.created_at < end_date, + Notification.api_key_id.isnot(None), + Notification.key_type != KEY_TYPE_TEST, + Notification.notification_type != LETTER_TYPE + ).one() diff --git a/tests/app/dao/notification_dao/__init__.py b/tests/app/dao/notification_dao/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/app/dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py similarity index 100% rename from tests/app/dao/test_notification_dao.py rename to tests/app/dao/notification_dao/test_notification_dao.py diff --git a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py new file mode 100644 index 000000000..d3fc254e8 --- /dev/null +++ b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py @@ -0,0 +1,87 @@ +from datetime import date, datetime, timedelta + +from freezegun import freeze_time + +from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_perfomance_platform +from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST + +from tests.app.db import create_notification + + +def test_get_total_notifications_filters_on_date(sample_template): + create_notification(sample_template, created_at=datetime(2016, 10, 17, 10, 0)) + create_notification(sample_template, created_at=datetime(2016, 10, 18, 10, 0)) + create_notification(sample_template, created_at=datetime(2016, 10, 19, 10, 0)) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), 
date(2016, 10, 19)) + assert result.messages_total == 1 + + +def test_get_total_notifications_filters_on_date_at_midnight(sample_template): + create_notification(sample_template, created_at=datetime(2016, 10, 17, 23, 59, 59)) + create_notification(sample_template, created_at=datetime(2016, 10, 18, 0, 0)) + create_notification(sample_template, created_at=datetime(2016, 10, 18, 23, 59, 59)) + create_notification(sample_template, created_at=datetime(2016, 10, 19, 0, 0)) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 2 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_only_counts_api_notifications(sample_template, sample_job, sample_api_key): + create_notification(sample_template, one_off=True) + create_notification(sample_template, one_off=True) + create_notification(sample_template, job=sample_job) + create_notification(sample_template, job=sample_job) + create_notification(sample_template, api_key=sample_api_key) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 1 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_ignores_test_keys(sample_template): + # Creating multiple templates with normal and team keys but only 1 template + # with a test key to test that the count ignores letters + create_notification(sample_template, key_type=KEY_TYPE_NORMAL) + create_notification(sample_template, key_type=KEY_TYPE_NORMAL) + create_notification(sample_template, key_type=KEY_TYPE_TEAM) + create_notification(sample_template, key_type=KEY_TYPE_TEAM) + create_notification(sample_template, key_type=KEY_TYPE_TEST) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 4 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_ignores_letters( + sample_template, + sample_email_template, + sample_letter_template +): + # Creating multiple sms and email templates but only 1 letter template to + # test that the count ignores letters + create_notification(sample_template) + create_notification(sample_template) + create_notification(sample_email_template) + create_notification(sample_email_template) + create_notification(sample_letter_template) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 4 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_counts_messages_within_10_seconds(sample_template): + created_at = datetime.utcnow() + + create_notification(sample_template, sent_at=created_at + timedelta(seconds=5)) + create_notification(sample_template, sent_at=created_at + timedelta(seconds=10)) + create_notification(sample_template, sent_at=created_at + timedelta(seconds=15)) + + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 3 + assert result.messages_within_10_secs == 2 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_counts_messages_that_have_not_sent(sample_template): + create_notification(sample_template, status='created', sent_at=None) + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 1 + assert 
result.messages_within_10_secs == 0 From a1a5fdedb117abd0234d1330e60817e955d673f0 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Wed, 30 Aug 2017 14:36:16 +0100 Subject: [PATCH 09/42] Send results of processing-time query to performance platform --- app/celery/scheduled_tasks.py | 43 +++++++++-------- app/performance_platform/processing_time.py | 36 ++++++++++++++ tests/app/celery/test_scheduled_tasks.py | 7 +-- .../test_processing_time.py | 47 +++++++++++++++++++ 4 files changed, 111 insertions(+), 22 deletions(-) create mode 100644 app/performance_platform/processing_time.py create mode 100644 tests/app/performance_platform/test_processing_time.py diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 5614c361f..bf10ceec7 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -8,7 +8,7 @@ from sqlalchemy.exc import SQLAlchemyError from app.aws import s3 from app import notify_celery -from app.performance_platform import total_sent_notifications +from app.performance_platform import total_sent_notifications, processing_time from app import performance_platform_client from app.dao.date_util import get_month_start_and_end_date_in_utc from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago @@ -176,27 +176,32 @@ def timeout_notifications(): @statsd(namespace="tasks") def send_daily_performance_platform_stats(): if performance_platform_client.active: - count_dict = total_sent_notifications.get_total_sent_notifications_yesterday() - email_sent_count = count_dict.get('email').get('count') - sms_sent_count = count_dict.get('sms').get('count') - start_date = count_dict.get('start_date') + send_total_sent_notifications_to_performance_platform() + processing_time.send_processing_time_to_performance_platform() - current_app.logger.info( - "Attempting to update performance platform for date {} with email count {} and sms count {}" - .format(start_date, email_sent_count, sms_sent_count) - ) - total_sent_notifications.send_total_notifications_sent_for_day_stats( - start_date, - 'sms', - sms_sent_count - ) +def send_total_sent_notifications_to_performance_platform(): + count_dict = total_sent_notifications.get_total_sent_notifications_yesterday() + email_sent_count = count_dict.get('email').get('count') + sms_sent_count = count_dict.get('sms').get('count') + start_date = count_dict.get('start_date') - total_sent_notifications.send_total_notifications_sent_for_day_stats( - start_date, - 'email', - email_sent_count - ) + current_app.logger.info( + "Attempting to update performance platform for date {} with email count {} and sms count {}" + .format(start_date, email_sent_count, sms_sent_count) + ) + + total_sent_notifications.send_total_notifications_sent_for_day_stats( + start_date, + 'sms', + sms_sent_count + ) + + total_sent_notifications.send_total_notifications_sent_for_day_stats( + start_date, + 'email', + email_sent_count + ) @notify_celery.task(name='switch-current-sms-provider-on-slow-delivery') diff --git a/app/performance_platform/processing_time.py b/app/performance_platform/processing_time.py new file mode 100644 index 000000000..e9811a939 --- /dev/null +++ b/app/performance_platform/processing_time.py @@ -0,0 +1,36 @@ +from datetime import datetime + +from flask import current_app + +from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc +from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_perfomance_platform +from app import performance_platform_client + + 
+def send_processing_time_to_performance_platform(): + today = datetime.utcnow() + start_date = get_midnight_for_day_before(today) + end_date = get_london_midnight_in_utc(today) + + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, end_date) + + current_app.logger.info( + 'Sending processing-time to performance platform for date {}. Total: {}, under 10 secs {}'.format( + start_date, result.messages_total, result.messages_within_10_secs + ) + ) + + send_processing_time_data(start_date, 'messages-total', result.messages_total) + send_processing_time_data(start_date, 'messages-within-10-secs', result.messages_within_10_secs) + + +def send_processing_time_data(date, status, count): + payload = performance_platform_client.format_payload( + dataset='processing-time', + date=date, + group_name='status', + group_value=status, + count=count + ) + + performance_platform_client.send_stats_to_performance_platform(payload) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index a05ea18f9..c77785e53 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -29,7 +29,8 @@ from app.celery.scheduled_tasks import ( switch_current_sms_provider_on_slow_delivery, timeout_job_statistics, timeout_notifications, - populate_monthly_billing) + populate_monthly_billing, + send_total_sent_notifications_to_performance_platform) from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient from app.config import QueueNames, TaskNames from app.dao.jobs_dao import dao_get_job_by_id @@ -285,7 +286,7 @@ def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mo @freeze_time("2016-01-11 12:30:00") -def test_send_daily_performance_stats_calls_with_correct_totals( +def test_send_total_sent_notifications_to_performance_platform_calls_with_correct_totals( notify_db, notify_db_session, sample_template, @@ -319,7 +320,7 @@ def test_send_daily_performance_stats_calls_with_correct_totals( new_callable=PropertyMock ) as mock_active: mock_active.return_value = True - send_daily_performance_platform_stats() + send_total_sent_notifications_to_performance_platform() perf_mock.assert_has_calls([ call(get_london_midnight_in_utc(yesterday), 'sms', 2), diff --git a/tests/app/performance_platform/test_processing_time.py b/tests/app/performance_platform/test_processing_time.py new file mode 100644 index 000000000..07f78859e --- /dev/null +++ b/tests/app/performance_platform/test_processing_time.py @@ -0,0 +1,47 @@ +from datetime import datetime, timedelta + +from freezegun import freeze_time + +from tests.app.db import create_notification +from app.performance_platform.processing_time import ( + send_processing_time_to_performance_platform, + send_processing_time_data +) + + +@freeze_time('2016-10-18T02:00') +def test_send_processing_time_to_performance_platform_generates_correct_calls(mocker, sample_template): + send_mock = mocker.patch('app.performance_platform.processing_time.send_processing_time_data') + + created_at = datetime.utcnow() - timedelta(days=1) + + create_notification(sample_template, created_at=created_at, sent_at=created_at + timedelta(seconds=5)) + create_notification(sample_template, created_at=created_at, sent_at=created_at + timedelta(seconds=15)) + create_notification(sample_template, created_at=datetime.utcnow() - timedelta(days=2)) + + send_processing_time_to_performance_platform() + + send_mock.assert_any_call(datetime(2016, 10, 16, 
23, 0), 'messages-total', 2) + send_mock.assert_any_call(datetime(2016, 10, 16, 23, 0), 'messages-within-10-secs', 1) + + +def test_send_processing_time_to_performance_platform_creates_correct_call_to_perf_platform(mocker): + send_stats = mocker.patch('app.performance_platform.total_sent_notifications.performance_platform_client.send_stats_to_performance_platform') # noqa + + send_processing_time_data( + date=datetime(2016, 10, 15, 23, 0, 0), + status='foo', + count=142 + ) + + assert send_stats.call_count == 1 + + request_args = send_stats.call_args[0][0] + assert request_args['dataType'] == 'processing-time' + assert request_args['service'] == 'govuk-notify' + assert request_args['period'] == 'day' + assert request_args['status'] == 'foo' + assert request_args['_timestamp'] == '2016-10-16T00:00:00' + assert request_args['count'] == 142 + expected_base64_id = 'MjAxNi0xMC0xNlQwMDowMDowMGdvdnVrLW5vdGlmeWZvb3Byb2Nlc3NpbmctdGltZWRheQ==' + assert request_args['_id'] == expected_base64_id From c92b72da6ee9231d2ae0464e1ed256d1a826e861 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Mon, 21 Aug 2017 13:46:16 +0100 Subject: [PATCH 10/42] Remove yearly billing data dao methods --- app/dao/notification_usage_dao.py | 83 +-------------- tests/app/dao/test_notification_usage_dao.py | 102 ------------------- 2 files changed, 1 insertion(+), 184 deletions(-) diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index 662857f5a..7542fb0df 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -1,4 +1,3 @@ -from collections import namedtuple from datetime import datetime, timedelta from sqlalchemy import Float, Integer @@ -17,41 +16,7 @@ from app.models import ( EMAIL_TYPE ) from app.statsd_decorators import statsd -from app.utils import get_london_month_from_utc_column, convert_utc_to_bst - - -@statsd(namespace="dao") -def get_yearly_billing_data(service_id, year): - start_date, end_date = get_financial_year(year) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - - if not rates: - return [] - - def get_valid_from(valid_from): - return start_date if valid_from < start_date else valid_from - - result = [] - for r, n in zip(rates, rates[1:]): - result.append( - sms_yearly_billing_data_query( - r.rate, - service_id, - get_valid_from(r.valid_from), - n.valid_from - ) - ) - result.append( - sms_yearly_billing_data_query( - rates[-1].rate, - service_id, - get_valid_from(rates[-1].valid_from), - end_date - ) - ) - - result.append(email_yearly_billing_data_query(service_id, start_date, end_date)) - return sum(result, []) +from app.utils import get_london_month_from_utc_column @statsd(namespace="dao") @@ -112,52 +77,6 @@ def billing_data_filter(notification_type, start_date, end_date, service_id): ] -def email_yearly_billing_data_query(service_id, start_date, end_date, rate=0): - result = db.session.query( - func.count(NotificationHistory.id), - func.count(NotificationHistory.id), - rate_multiplier(), - NotificationHistory.notification_type, - NotificationHistory.international, - cast(rate, Integer()) - ).filter( - *billing_data_filter(EMAIL_TYPE, start_date, end_date, service_id) - ).group_by( - NotificationHistory.notification_type, - rate_multiplier(), - NotificationHistory.international - ).first() - - if not result: - return [(0, 0, 1, EMAIL_TYPE, False, 0)] - else: - return [result] - - -def sms_yearly_billing_data_query(rate, service_id, start_date, end_date): - result = db.session.query( - 
cast(func.sum(NotificationHistory.billable_units * rate_multiplier()), Integer()), - func.sum(NotificationHistory.billable_units), - rate_multiplier(), - NotificationHistory.notification_type, - NotificationHistory.international, - cast(rate, Float()) - ).filter( - *billing_data_filter(SMS_TYPE, start_date, end_date, service_id) - ).group_by( - NotificationHistory.notification_type, - NotificationHistory.international, - rate_multiplier() - ).order_by( - rate_multiplier() - ).all() - - if not result: - return [(0, 0, 1, SMS_TYPE, False, rate)] - else: - return result - - def get_rates_for_daterange(start_date, end_date, notification_type): rates = Rate.query.filter(Rate.notification_type == notification_type).order_by(Rate.valid_from).all() diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index 02876e6fb..5cc72d761 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -6,7 +6,6 @@ from freezegun import freeze_time from app.dao.date_util import get_financial_year from app.dao.notification_usage_dao import ( get_rates_for_daterange, - get_yearly_billing_data, get_billing_data_for_month, get_monthly_billing_data ) @@ -103,94 +102,6 @@ def test_get_rates_for_daterange_where_daterange_is_one_month_that_falls_between assert rates[0].rate == 0.175 -def test_get_yearly_billing_data(notify_db, notify_db_session, sample_template, sample_email_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - set_up_rate(notify_db, datetime(2016, 6, 1), 0.0158) - set_up_rate(notify_db, datetime(2017, 6, 1), 0.0165) - # previous year - create_notification(template=sample_template, created_at=datetime(2016, 3, 31), sent_at=datetime(2016, 3, 31), - status='sending', billable_units=1) - # current year - create_notification(template=sample_template, created_at=datetime(2016, 4, 2), sent_at=datetime(2016, 4, 2), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2016, 5, 18), sent_at=datetime(2016, 5, 18), - status='sending', billable_units=2) - create_notification(template=sample_template, created_at=datetime(2016, 7, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=3, rate_multiplier=2, international=True, phone_prefix="1") - create_notification(template=sample_template, created_at=datetime(2016, 9, 15), sent_at=datetime(2016, 9, 15), - status='sending', billable_units=4) - create_notification(template=sample_template, created_at=datetime(2017, 3, 31), sent_at=datetime(2017, 3, 31), - status='sending', billable_units=5) - create_notification(template=sample_email_template, created_at=datetime(2016, 9, 15), sent_at=datetime(2016, 9, 15), - status='sending', billable_units=0) - create_notification(template=sample_email_template, created_at=datetime(2017, 3, 31), sent_at=datetime(2017, 3, 31), - status='sending', billable_units=0) - # next year - create_notification(template=sample_template, created_at=datetime(2017, 4, 1), sent_at=datetime(2017, 4, 1), - status='sending', billable_units=6) - results = get_yearly_billing_data(sample_template.service_id, 2016) - assert len(results) == 4 - assert results[0] == (3, 3, 1, 'sms', False, 0.014) - assert results[1] == (9, 9, 1, 'sms', False, 0.0158) - assert results[2] == (6, 3, 2, 'sms', True, 0.0158) - assert results[3] == (2, 2, 1, 'email', False, 0) - - -def test_get_future_yearly_billing_data(notify_db, notify_db_session, sample_template, sample_email_template): - 
set_up_rate(notify_db, datetime(2017, 4, 1), 0.0158) - - create_notification(template=sample_template, created_at=datetime(2017, 3, 30), sent_at=datetime(2017, 3, 30), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2017, 4, 6), sent_at=datetime(2017, 4, 6), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2017, 4, 6), sent_at=datetime(2017, 4, 6), - status='sending', billable_units=1) - - results = get_yearly_billing_data(sample_template.service_id, 2018) - assert len(results) == 2 - assert results[0] == (0, 0, 1, 'sms', False, 0.0158) - - -def test_get_yearly_billing_data_with_one_rate(notify_db, notify_db_session, sample_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - # previous year - create_notification(template=sample_template, created_at=datetime(2016, 3, 31), sent_at=datetime(2016, 3, 31), - status='sending', billable_units=1) - # current year - create_notification(template=sample_template, created_at=datetime(2016, 4, 2), sent_at=datetime(2016, 4, 2), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2016, 5, 18), sent_at=datetime(2016, 5, 18), - status='sending', billable_units=2) - create_notification(template=sample_template, created_at=datetime(2016, 7, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=3) - create_notification(template=sample_template, created_at=datetime(2016, 9, 15), sent_at=datetime(2016, 9, 15), - status='sending', billable_units=4) - create_notification(template=sample_template, created_at=datetime(2017, 3, 31, 22, 59, 59), - sent_at=datetime(2017, 3, 31), status='sending', billable_units=5) - # next year - create_notification(template=sample_template, created_at=datetime(2017, 3, 31, 23, 00, 00), - sent_at=datetime(2017, 3, 31), status='sending', billable_units=6) - create_notification(template=sample_template, created_at=datetime(2017, 4, 1), sent_at=datetime(2017, 4, 1), - status='sending', billable_units=7) - results = get_yearly_billing_data(sample_template.service_id, 2016) - assert len(results) == 2 - assert results[0] == (15, 15, 1, 'sms', False, 0.014) - assert results[1] == (0, 0, 1, 'email', False, 0) - - -def test_get_yearly_billing_data_with_no_sms_notifications(notify_db, notify_db_session, sample_email_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - create_notification(template=sample_email_template, created_at=datetime(2016, 7, 31), sent_at=datetime(2016, 3, 31), - status='sending', billable_units=0) - create_notification(template=sample_email_template, created_at=datetime(2016, 10, 2), sent_at=datetime(2016, 4, 2), - status='sending', billable_units=0) - - results = get_yearly_billing_data(sample_email_template.service_id, 2016) - assert len(results) == 2 - assert results[0] == (0, 0, 1, 'sms', False, 0.014) - assert results[1] == (2, 2, 1, 'email', False, 0) - - def test_get_monthly_billing_data(notify_db, notify_db_session, sample_template, sample_email_template): set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) # previous year @@ -271,19 +182,6 @@ def set_up_rate(notify_db, start_date, value): notify_db.session.add(rate) -def test_get_yearly_billing_data_for_start_date_before_rate_returns_empty( - sample_template -): - create_rate(datetime(2016, 4, 1), 0.014, SMS_TYPE) - - results = get_yearly_billing_data( - service_id=sample_template.service_id, - year=2015 - ) - - assert not results - - 
@freeze_time("2016-05-01") def test_get_billing_data_for_month_where_start_date_before_rate_returns_empty( sample_template From 994e797b26cbd1dc07fa787888c65f417abba144 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Mon, 21 Aug 2017 13:46:50 +0100 Subject: [PATCH 11/42] Replace 'sms' with SMS_TYPE --- tests/app/dao/test_notification_usage_dao.py | 34 ++++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index 5cc72d761..24f09e026 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -20,7 +20,7 @@ def test_get_rates_for_daterange(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) set_up_rate(notify_db, datetime(2017, 3, 31, 23), 0.0158) start_date, end_date = get_financial_year(2017) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates) == 1 assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-03-31 23:00:00" assert rates[0].rate == 0.0158 @@ -31,7 +31,7 @@ def test_get_rates_for_daterange_multiple_result_per_year(notify_db, notify_db_s set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) set_up_rate(notify_db, datetime(2017, 4, 1), 0.0158) start_date, end_date = get_financial_year(2016) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates) == 2 assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-04-01 00:00:00" assert rates[0].rate == 0.015 @@ -44,7 +44,7 @@ def test_get_rates_for_daterange_returns_correct_rates(notify_db, notify_db_sess set_up_rate(notify_db, datetime(2016, 9, 1), 0.016) set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) start_date, end_date = get_financial_year(2017) - rates_2017 = get_rates_for_daterange(start_date, end_date, 'sms') + rates_2017 = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates_2017) == 2 assert datetime.strftime(rates_2017[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-09-01 00:00:00" assert rates_2017[0].rate == 0.016 @@ -56,7 +56,7 @@ def test_get_rates_for_daterange_in_the_future(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) start_date, end_date = get_financial_year(2018) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-06-01 00:00:00" assert rates[0].rate == 0.0175 @@ -65,7 +65,7 @@ def test_get_rates_for_daterange_returns_empty_list_if_year_is_before_earliest_r set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) start_date, end_date = get_financial_year(2015) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert rates == [] @@ -75,7 +75,7 @@ def test_get_rates_for_daterange_early_rate(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 9, 1), 0.016) set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) start_date, end_date = get_financial_year(2016) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates) 
== 3 @@ -83,7 +83,7 @@ def test_get_rates_for_daterange_edge_case(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 3, 31, 23, 00), 0.015) set_up_rate(notify_db, datetime(2017, 3, 31, 23, 00), 0.0175) start_date, end_date = get_financial_year(2016) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates) == 1 assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-03-31 23:00:00" assert rates[0].rate == 0.015 @@ -96,7 +96,7 @@ def test_get_rates_for_daterange_where_daterange_is_one_month_that_falls_between set_up_rate(notify_db, datetime(2017, 3, 31), 0.123) start_date = datetime(2017, 2, 1, 00, 00, 00) end_date = datetime(2017, 2, 28, 23, 59, 59, 99999) - rates = get_rates_for_daterange(start_date, end_date, 'sms') + rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) assert len(rates) == 1 assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-01-01 00:00:00" assert rates[0].rate == 0.175 @@ -131,10 +131,10 @@ def test_get_monthly_billing_data(notify_db, notify_db_session, sample_template, results = get_monthly_billing_data(sample_template.service_id, 2016) assert len(results) == 4 # (billable_units, rate_multiplier, international, type, rate) - assert results[0] == ('April', 1, 1, False, 'sms', 0.014) - assert results[1] == ('May', 2, 1, False, 'sms', 0.014) - assert results[2] == ('July', 7, 1, False, 'sms', 0.014) - assert results[3] == ('July', 6, 2, False, 'sms', 0.014) + assert results[0] == ('April', 1, 1, False, SMS_TYPE, 0.014) + assert results[1] == ('May', 2, 1, False, SMS_TYPE, 0.014) + assert results[2] == ('July', 7, 1, False, SMS_TYPE, 0.014) + assert results[3] == ('July', 6, 2, False, SMS_TYPE, 0.014) def test_get_monthly_billing_data_with_multiple_rates(notify_db, notify_db_session, sample_template, @@ -165,10 +165,10 @@ def test_get_monthly_billing_data_with_multiple_rates(notify_db, notify_db_sessi sent_at=datetime(2017, 3, 31), status='sending', billable_units=6) results = get_monthly_billing_data(sample_template.service_id, 2016) assert len(results) == 4 - assert results[0] == ('April', 1, 1, False, 'sms', 0.014) - assert results[1] == ('May', 2, 1, False, 'sms', 0.014) - assert results[2] == ('June', 3, 1, False, 'sms', 0.014) - assert results[3] == ('June', 4, 1, False, 'sms', 0.0175) + assert results[0] == ('April', 1, 1, False, SMS_TYPE, 0.014) + assert results[1] == ('May', 2, 1, False, SMS_TYPE, 0.014) + assert results[2] == ('June', 3, 1, False, SMS_TYPE, 0.014) + assert results[3] == ('June', 4, 1, False, SMS_TYPE, 0.0175) def test_get_monthly_billing_data_with_no_notifications_for_daterange(notify_db, notify_db_session, sample_template): @@ -178,7 +178,7 @@ def test_get_monthly_billing_data_with_no_notifications_for_daterange(notify_db, def set_up_rate(notify_db, start_date, value): - rate = Rate(id=uuid.uuid4(), valid_from=start_date, rate=value, notification_type='sms') + rate = Rate(id=uuid.uuid4(), valid_from=start_date, rate=value, notification_type=SMS_TYPE) notify_db.session.add(rate) From e1ef0e85c66592b99eb4c8fcdf28c6ec2941c815 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Mon, 21 Aug 2017 14:16:25 +0100 Subject: [PATCH 12/42] Remove monthly usage dao methods --- app/service/rest.py | 18 --------- tests/app/service/test_rest.py | 72 +--------------------------------- 2 files changed, 1 insertion(+), 89 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index 
4658c2efc..ca818aa46 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -458,24 +458,6 @@ def get_monthly_template_stats(service_id): raise InvalidRequest('Year must be a number', status_code=400) -@service_blueprint.route('//monthly-usage') -def get_yearly_monthly_usage(service_id): - try: - year = int(request.args.get('year')) - results = notification_usage_dao.get_monthly_billing_data(service_id, year) - json_results = [{ - "month": x[0], - "billing_units": x[1], - "rate_multiplier": x[2], - "international": x[3], - "notification_type": x[4], - "rate": x[5] - } for x in results] - return json.dumps(json_results) - except TypeError: - return jsonify(result='error', message='No valid year provided'), 400 - - @service_blueprint.route('//inbound-api', methods=['POST']) def create_service_inbound_api(service_id): data = request.get_json() diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index ca1ceca49..eaf34219a 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -8,12 +8,11 @@ import pytest from flask import url_for, current_app from freezegun import freeze_time -from app.dao.monthly_billing_dao import create_or_update_monthly_billing from app.dao.services_dao import dao_remove_user_from_service from app.dao.templates_dao import dao_redact_template from app.dao.users_dao import save_model_user from app.models import ( - User, Organisation, Rate, Service, ServicePermission, Notification, + User, Organisation, Service, ServicePermission, Notification, DVLA_ORG_LAND_REGISTRY, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INBOUND_SMS_TYPE, @@ -1730,75 +1729,6 @@ def test_get_template_stats_by_month_returns_error_for_incorrect_year( assert json.loads(response.get_data(as_text=True)) == expected_json -def test_get_monthly_billing_usage(client, notify_db, notify_db_session, sample_service): - rate = Rate(id=uuid.uuid4(), valid_from=datetime(2016, 3, 31, 23, 00), rate=0.0158, notification_type=SMS_TYPE) - notify_db.session.add(rate) - notification = create_sample_notification(notify_db, notify_db_session, created_at=datetime(2016, 6, 5), - sent_at=datetime(2016, 6, 5), - status='sending') - create_sample_notification(notify_db, notify_db_session, created_at=datetime(2016, 6, 5), - sent_at=datetime(2016, 6, 5), - status='sending', rate_multiplier=2) - create_sample_notification(notify_db, notify_db_session, created_at=datetime(2016, 7, 5), - sent_at=datetime(2016, 7, 5), - status='sending') - - template = create_template(sample_service, template_type=EMAIL_TYPE) - create_sample_notification(notify_db, notify_db_session, created_at=datetime(2016, 6, 5), - sent_at=datetime(2016, 6, 5), - status='sending', - template=template) - response = client.get( - '/service/{}/monthly-usage?year=2016'.format(notification.service_id), - headers=[create_authorization_header()] - ) - assert response.status_code == 200 - actual = json.loads(response.get_data(as_text=True)) - assert len(actual) == 3 - assert actual == [{'month': 'June', - 'international': False, - 'rate_multiplier': 1, - 'notification_type': SMS_TYPE, - 'rate': 0.0158, - 'billing_units': 1}, - {'month': 'June', - 'international': False, - 'rate_multiplier': 2, - 'notification_type': SMS_TYPE, - 'rate': 0.0158, - 'billing_units': 1}, - {'month': 'July', - 'international': False, - 'rate_multiplier': 1, - 'notification_type': SMS_TYPE, - 'rate': 0.0158, - 'billing_units': 1}] - - -def 
test_get_monthly_billing_usage_returns_400_if_missing_year(client, sample_service): - response = client.get( - '/service/{}/monthly-usage'.format(sample_service.id), - headers=[create_authorization_header()] - ) - assert response.status_code == 400 - assert json.loads(response.get_data(as_text=True)) == { - 'message': 'No valid year provided', 'result': 'error' - } - - -def test_get_monthly_billing_usage_returns_empty_list_if_no_notifications(client, notify_db, sample_service): - rate = Rate(id=uuid.uuid4(), valid_from=datetime(2016, 3, 31, 23, 00), rate=0.0158, notification_type=SMS_TYPE) - notify_db.session.add(rate) - response = client.get( - '/service/{}/monthly-usage?year=2016'.format(sample_service.id), - headers=[create_authorization_header()] - ) - assert response.status_code == 200 - - results = json.loads(response.get_data(as_text=True)) - assert results == [] - - def test_search_for_notification_by_to_field(client, notify_db, notify_db_session): create_notification = partial(create_sample_notification, notify_db, notify_db_session) notification1 = create_notification(to_field='+447700900855', normalised_to='447700900855') From af54d98faa8ad9395d9119235eeef1ac58600a03 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Mon, 21 Aug 2017 14:41:49 +0100 Subject: [PATCH 13/42] Remove unused imports --- app/dao/monthly_billing_dao.py | 2 -- tests/app/dao/test_monthly_billing.py | 1 - 2 files changed, 3 deletions(-) diff --git a/app/dao/monthly_billing_dao.py b/app/dao/monthly_billing_dao.py index 4efef0e7d..606c02cc9 100644 --- a/app/dao/monthly_billing_dao.py +++ b/app/dao/monthly_billing_dao.py @@ -1,7 +1,5 @@ from datetime import datetime -from sqlalchemy import func - from app import db from app.dao.dao_utils import transactional diff --git a/tests/app/dao/test_monthly_billing.py b/tests/app/dao/test_monthly_billing.py index 24359af3a..23c445f92 100644 --- a/tests/app/dao/test_monthly_billing.py +++ b/tests/app/dao/test_monthly_billing.py @@ -3,7 +3,6 @@ from dateutil.relativedelta import relativedelta from freezegun import freeze_time from functools import partial -from app import db from app.dao.monthly_billing_dao import ( create_or_update_monthly_billing, get_monthly_billing_entry, From 49a6bfc06bf9f31e5142adf7befbebc1135c9317 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Wed, 30 Aug 2017 16:02:30 +0100 Subject: [PATCH 14/42] Send '0', not 'null', to perf platform if no notifications are sent --- app/dao/notifications_dao.py | 6 +++--- .../test_notification_dao_performance_platform.py | 7 +++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index dca73188a..c1b22301c 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -527,7 +527,7 @@ def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, """ SELECT count(notifications), - sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END) + coalesce(sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END), 0) FROM notifications WHERE created_at > 'START DATE' AND @@ -537,14 +537,14 @@ def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, notification_type != 'letter'; """ under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10) - sum_column = functions.sum( + sum_column = functions.coalesce(functions.sum( case( [ (under_10_secs, 1) ], else_=0 ) - ) + ), 0) return db.session.query( 
func.count(Notification.id).label('messages_total'), sum_column.label('messages_within_10_secs') diff --git a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py index d3fc254e8..6d3e6839a 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py +++ b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py @@ -85,3 +85,10 @@ def test_get_total_notifications_counts_messages_that_have_not_sent(sample_templ result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 1 assert result.messages_within_10_secs == 0 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_returns_zero_if_no_data(notify_db_session): + result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 0 + assert result.messages_within_10_secs == 0 From 8f82642422288612b4e7cc0793fe26ea7ade49c8 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 25 Aug 2017 16:07:30 +0100 Subject: [PATCH 15/42] Prevent v2 notifications POST when in trial mode --- app/v2/notifications/post_notifications.py | 3 +++ .../test_post_letter_notifications.py | 23 ++++++++++++++++--- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index c27ca8dc5..c1bcd168e 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -150,6 +150,9 @@ def process_letter_notification(*, letter_data, api_key, template): if api_key.key_type == KEY_TYPE_TEAM: raise BadRequestError(message='Cannot send letters with a team api key', status_code=403) + if api_key.service.restricted: + raise BadRequestError(message='Cannot send letters when service is in trial mode', status_code=403) + job = create_letter_api_job(template) notification = create_letter_notification(letter_data, job, api_key) diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index 429e53eb8..cb9d4bb29 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -17,13 +17,12 @@ from app.variables import LETTER_TEST_API_FILENAME from app.variables import LETTER_API_FILENAME from tests import create_authorization_header -from tests.app.db import create_service -from tests.app.db import create_template +from tests.app.db import create_service, create_template def letter_request(client, data, service_id, key_type=KEY_TYPE_NORMAL, _expected_status=201): resp = client.post( - url_for('v2_notifications.post_notification', notification_type='letter'), + url_for('v2_notifications.post_notification', notification_type=LETTER_TYPE), data=json.dumps(data), headers=[ ('Content-Type', 'application/json'), @@ -232,3 +231,21 @@ def test_post_letter_notification_doesnt_accept_team_key(client, sample_letter_t assert error_json['status_code'] == 403 assert error_json['errors'] == [{'error': 'BadRequestError', 'message': 'Cannot send letters with a team api key'}] + + +def test_post_letter_notification_doesnt_send_in_trial(client, sample_trial_letter_template): + data = { + 'template_id': str(sample_trial_letter_template.id), + 'personalisation': {'address_line_1': 'Foo', 
'address_line_2': 'Bar', 'postcode': 'Baz'} + } + + error_json = letter_request( + client, + data, + sample_trial_letter_template.service_id, + _expected_status=403 + ) + + assert error_json['status_code'] == 403 + assert error_json['errors'] == [ + {'error': 'BadRequestError', 'message': 'Cannot send letters when service is in trial mode'}] From 41a71c703b1b5000b30de1e68303fed7a9100c37 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 25 Aug 2017 16:10:41 +0100 Subject: [PATCH 16/42] Refactor test --- tests/app/celery/test_scheduled_tasks.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index a05ea18f9..0929291b2 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -680,9 +680,7 @@ def test_populate_monthly_billing_updates_correct_month_in_bst(sample_template): def test_run_letter_jobs(client, mocker, sample_letter_template): - jobs = [create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND), - create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND)] - job_ids = [str(job.id) for job in jobs] + job_ids = [str(uuid.uuid4()), str(uuid.uuid4())] mocker.patch( "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", return_value=job_ids From 7e70b4411361de52eba01f0e989e94677e5a04f5 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 25 Aug 2017 16:11:39 +0100 Subject: [PATCH 17/42] Error in task when letter template and in trial mode --- app/celery/tasks.py | 7 +++++++ tests/app/celery/test_tasks.py | 23 ++++++++++++++++++++++- tests/app/conftest.py | 6 ++++++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 008b0fd5e..0581d346b 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -75,6 +75,13 @@ def process_job(job_id): db_template = dao_get_template_by_id(job.template_id, job.template_version) + if db_template.template_type == LETTER_TYPE and service.restricted: + job.job_status = JOB_STATUS_ERROR + dao_update_job(job) + current_app.logger.warn( + "Job {} has been set to error, service {} is in trial mode".format(job_id, service.id)) + return + TemplateClass = get_template_class(db_template.template_type) template = TemplateClass(db_template.__dict__) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 717dc7b82..e2fde2d17 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -38,7 +38,8 @@ from app.models import ( SMS_TYPE, EMAIL_TYPE, LETTER_TYPE, - Job) + Job, + JOB_STATUS_ERROR) from tests.app import load_example_csv from tests.conftest import set_config @@ -977,6 +978,26 @@ def test_should_cancel_job_if_service_is_inactive(sample_service, mock_dvla_file_task.assert_not_called() +def test_should_error_job_if_service_is_restricted_and_letter_template_type( + sample_service, + sample_letter_job, + mocker +): + sample_service.restricted = True + + mocker.patch('app.celery.tasks.s3.get_job_from_s3') + mocker.patch('app.celery.tasks.process_row') + mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file') + + process_job(sample_letter_job.id) + + job = jobs_dao.dao_get_job_by_id(sample_letter_job.id) + assert job.job_status == JOB_STATUS_ERROR + s3.get_job_from_s3.assert_not_called() + tasks.process_row.assert_not_called() + mock_dvla_file_task.assert_not_called() + + @pytest.mark.parametrize('template_type, expected_class', [ (SMS_TYPE, 
SMSMessageTemplate), (EMAIL_TYPE, WithSubjectTemplate), diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 86437e7a1..13cc64ea3 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -271,6 +271,12 @@ def sample_letter_template(sample_service_full_permissions): return create_template(sample_service_full_permissions, template_type=LETTER_TYPE) +@pytest.fixture +def sample_trial_letter_template(sample_service_full_permissions): + sample_service_full_permissions.restricted = True + return create_template(sample_service_full_permissions, template_type=LETTER_TYPE) + + @pytest.fixture(scope='function') def sample_email_template_with_placeholders(notify_db, notify_db_session): return sample_email_template( From ff860ec24231b83a4215a1f722dee8848dafc152 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Fri, 25 Aug 2017 16:12:26 +0100 Subject: [PATCH 18/42] 403 when creating a letter job in trial mode --- app/job/rest.py | 5 ++++- tests/app/job/test_rest.py | 23 +++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/app/job/rest.py b/app/job/rest.py index 6a8a86ee4..8b2165760 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -32,7 +32,7 @@ from app.schemas import ( from app.celery.tasks import process_job -from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED +from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED, LETTER_TYPE from app.utils import pagination_links @@ -190,6 +190,9 @@ def create_job(service_id): }) template = dao_get_template_by_id(data['template']) + if template.template_type == LETTER_TYPE and service.restricted: + raise InvalidRequest("Create letter job is not allowed for service in trial mode ", 403) + errors = unarchived_template_schema.validate({'archived': template.archived}) if errors: diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 7ce45b51e..920bc3397 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -186,6 +186,29 @@ def test_create_job_returns_403_if_service_is_not_active(client, fake_uuid, samp mock_job_dao.assert_not_called() +def test_create_job_returns_403_if_letter_template_type_and_service_in_trial( + client, fake_uuid, sample_service, sample_trial_letter_template, mocker): + data = { + 'id': fake_uuid, + 'service': str(sample_trial_letter_template.service.id), + 'template': str(sample_trial_letter_template.id), + 'original_file_name': 'thisisatest.csv', + 'notification_count': 1, + 'created_by': str(sample_trial_letter_template.created_by.id) + } + mock_job_dao = mocker.patch("app.dao.jobs_dao.dao_create_job") + auth_header = create_authorization_header() + response = client.post('/service/{}/job'.format(sample_service.id), + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header]) + + assert response.status_code == 403 + resp_json = json.loads(response.get_data(as_text=True)) + assert resp_json['result'] == 'error' + assert resp_json['message'] == "Create letter job is not allowed for service in trial mode " + mock_job_dao.assert_not_called() + + def test_should_not_create_scheduled_job_more_then_24_hours_hence(notify_api, sample_template, mocker, fake_uuid): with notify_api.test_request_context(): with notify_api.test_client() as client: From 01830b7e596b75c2d58e710c5fd774f880fd63ad Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Tue, 29 Aug 2017 12:24:17 +0100 Subject: [PATCH 19/42] Push letter job to research queue in research mode --- app/celery/tasks.py | 6 
++++-- tests/app/celery/test_tasks.py | 38 ++++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 0581d346b..52f2fcde9 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -93,9 +93,11 @@ def process_job(job_id): process_row(row_number, recipient, personalisation, template, job, service) if template.template_type == LETTER_TYPE: - build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) + build_dvla_file.apply_async( + [str(job.id)], queue=QueueNames.JOBS if not service.research_mode else QueueNames.RESEARCH_MODE) # temporary logging - current_app.logger.info("send job {} to build-dvla-file in the process-job queue".format(job_id)) + current_app.logger.info("send job {} to build-dvla-file in the {} queue".format( + job_id, QueueNames.JOBS if not service.research_mode else QueueNames.RESEARCH_MODE)) else: job.job_status = JOB_STATUS_FINISHED diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index e2fde2d17..20d961f51 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -29,6 +29,7 @@ from app.celery.tasks import ( update_letter_notifications_statuses, process_updates_from_file, send_inbound_sms_to_service) +from app.config import QueueNames from app.dao import jobs_dao, services_dao from app.models import ( Notification, @@ -609,6 +610,43 @@ def test_should_put_send_email_task_in_research_mode_queue_if_research_mode_serv ) +@freeze_time("2017-08-29 17:30:00") +def test_should_build_dvla_file_in_research_mode_for_letter_job( + notify_db, notify_db_session, mocker, sample_service, sample_letter_job, fake_uuid +): + test_encrypted_data = 'some encrypted data' + sample_service.research_mode = True + + csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name + A1,A2,A3,A4,A_POST,Alice + """ + s3_mock = mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=csv) + # mocker.patch('app.celery.tasks.process_row') + mock_persist_letter = mocker.patch('app.celery.tasks.persist_letter.apply_async') + mocker.patch('app.celery.tasks.create_uuid', return_value=fake_uuid) + mocker.patch('app.celery.tasks.encryption.encrypt', return_value=test_encrypted_data) + mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file.apply_async') + + process_job(sample_letter_job.id) + + job = jobs_dao.dao_get_job_by_id(sample_letter_job.id) + + persist_letter.apply_async.assert_called_once_with( + ( + str(sample_service.id), + fake_uuid, + test_encrypted_data, + datetime.utcnow().strftime(DATETIME_FORMAT) + ), + queue=QueueNames.RESEARCH_MODE + ) + + build_dvla_file.apply_async.assert_called_once_with( + [str(job.id)], + queue=QueueNames.RESEARCH_MODE + ) + + def test_should_send_sms_template_to_and_persist_with_job_id(sample_job, sample_api_key, mocker): notification = _notification_json( sample_job.template, From 225c85832c3822d32e220a90fafc9cedeb4a0697 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Tue, 29 Aug 2017 18:15:38 +0100 Subject: [PATCH 20/42] Refactored to update job status and not build dvla file --- app/celery/tasks.py | 6 ++++-- tests/app/celery/test_tasks.py | 37 +++++++++++++++++++++++++--------- 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 52f2fcde9..ed7811ddb 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -93,8 +93,10 @@ def process_job(job_id): process_row(row_number, recipient, personalisation, template, job, 
service) if template.template_type == LETTER_TYPE: - build_dvla_file.apply_async( - [str(job.id)], queue=QueueNames.JOBS if not service.research_mode else QueueNames.RESEARCH_MODE) + if service.research_mode: + update_job_to_sent_to_dvla.apply_async([str(job.id)], queue=QueueNames.RESEARCH_MODE) + else: + build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) # temporary logging current_app.logger.info("send job {} to build-dvla-file in the {} queue".format( job_id, QueueNames.JOBS if not service.research_mode else QueueNames.RESEARCH_MODE)) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 20d961f51..344ac00ab 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -610,8 +610,7 @@ def test_should_put_send_email_task_in_research_mode_queue_if_research_mode_serv ) -@freeze_time("2017-08-29 17:30:00") -def test_should_build_dvla_file_in_research_mode_for_letter_job( +def test_should_not_build_dvla_file_in_research_mode_for_letter_job( notify_db, notify_db_session, mocker, sample_service, sample_letter_job, fake_uuid ): test_encrypted_data = 'some encrypted data' @@ -620,9 +619,31 @@ def test_should_build_dvla_file_in_research_mode_for_letter_job( csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name A1,A2,A3,A4,A_POST,Alice """ - s3_mock = mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=csv) - # mocker.patch('app.celery.tasks.process_row') - mock_persist_letter = mocker.patch('app.celery.tasks.persist_letter.apply_async') + mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=csv) + mocker.patch('app.celery.tasks.update_job_to_sent_to_dvla.apply_async') + mocker.patch('app.celery.tasks.persist_letter.apply_async') + mocker.patch('app.celery.tasks.create_uuid', return_value=fake_uuid) + mocker.patch('app.celery.tasks.encryption.encrypt', return_value=test_encrypted_data) + mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file.apply_async') + + process_job(sample_letter_job.id) + + assert not mock_dvla_file_task.called + + +@freeze_time("2017-08-29 17:30:00") +def test_should_update_job_to_sent_to_dvla_in_research_mode_for_letter_job( + notify_db, notify_db_session, mocker, sample_service, sample_letter_job, fake_uuid +): + test_encrypted_data = 'some encrypted data' + sample_service.research_mode = True + + csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name + A1,A2,A3,A4,A_POST,Alice + """ + mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=csv) + mocker.patch('app.celery.tasks.update_job_to_sent_to_dvla.apply_async') + mocker.patch('app.celery.tasks.persist_letter.apply_async') mocker.patch('app.celery.tasks.create_uuid', return_value=fake_uuid) mocker.patch('app.celery.tasks.encryption.encrypt', return_value=test_encrypted_data) mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file.apply_async') @@ -641,10 +662,8 @@ def test_should_build_dvla_file_in_research_mode_for_letter_job( queue=QueueNames.RESEARCH_MODE ) - build_dvla_file.apply_async.assert_called_once_with( - [str(job.id)], - queue=QueueNames.RESEARCH_MODE - ) + update_job_to_sent_to_dvla.apply_async.assert_called_once_with( + [str(job.id)], queue=QueueNames.RESEARCH_MODE) def test_should_send_sms_template_to_and_persist_with_job_id(sample_job, sample_api_key, mocker): From d39191967770595883aab11177a14cdd59f36260 Mon Sep 17 00:00:00 2001 From: Ken Tsang Date: Wed, 30 Aug 2017 16:03:05 +0100 Subject: [PATCH 21/42] Refactored to 
check trial when running scheduled job --- app/celery/scheduled_tasks.py | 19 +++++---- app/celery/tasks.py | 8 ++-- app/letters/rest.py | 7 +++- app/utils.py | 30 ++++++++++++++- tests/app/celery/test_scheduled_tasks.py | 45 +++++++++++++++++++++- tests/app/letters/test_send_letter_jobs.py | 8 +++- 6 files changed, 100 insertions(+), 17 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 5614c361f..9c4dbb5c5 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -39,7 +39,7 @@ from app.notifications.process_notifications import send_notification_to_queue from app.statsd_decorators import statsd from app.celery.tasks import process_job from app.config import QueueNames, TaskNames -from app.utils import convert_utc_to_bst +from app.utils import convert_utc_to_bst, get_unrestricted_letter_ids @notify_celery.task(name="remove_csv_files") @@ -308,9 +308,14 @@ def populate_monthly_billing(): @statsd(namespace="tasks") def run_letter_jobs(): job_ids = dao_get_letter_job_ids_by_status(JOB_STATUS_READY_TO_SEND) - notify_celery.send_task( - name=TaskNames.DVLA_FILES, - args=(job_ids,), - queue=QueueNames.PROCESS_FTP - ) - current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(job_ids), QueueNames.PROCESS_FTP)) + + unrestricted_job_ids = get_unrestricted_letter_ids(job_ids) + + if unrestricted_job_ids: + notify_celery.send_task( + name=TaskNames.DVLA_FILES, + args=(unrestricted_job_ids,), + queue=QueueNames.PROCESS_FTP + ) + current_app.logger.info( + "Queued {} ready letter job ids onto {}".format(len(unrestricted_job_ids), QueueNames.PROCESS_FTP)) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index ed7811ddb..a5e9f98d6 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -75,7 +75,7 @@ def process_job(job_id): db_template = dao_get_template_by_id(job.template_id, job.template_version) - if db_template.template_type == LETTER_TYPE and service.restricted: + if db_template.template_type == LETTER_TYPE and job.service.restricted: job.job_status = JOB_STATUS_ERROR dao_update_job(job) current_app.logger.warn( @@ -97,9 +97,9 @@ def process_job(job_id): update_job_to_sent_to_dvla.apply_async([str(job.id)], queue=QueueNames.RESEARCH_MODE) else: build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) - # temporary logging - current_app.logger.info("send job {} to build-dvla-file in the {} queue".format( - job_id, QueueNames.JOBS if not service.research_mode else QueueNames.RESEARCH_MODE)) + # temporary logging + current_app.logger.info("send job {} to build-dvla-file in the {} queue".format( + job_id, QueueNames.JOBS)) else: job.job_status = JOB_STATUS_FINISHED diff --git a/app/letters/rest.py b/app/letters/rest.py index b9ce7bf35..458611a80 100644 --- a/app/letters/rest.py +++ b/app/letters/rest.py @@ -8,6 +8,7 @@ from app.schemas import job_schema from app.v2.errors import register_errors from app.letters.letter_schemas import letter_job_ids from app.schema_validation import validate +from app.utils import get_unrestricted_letter_ids letter_job = Blueprint("letter-job", __name__) register_errors(letter_job) @@ -16,7 +17,11 @@ register_errors(letter_job) @letter_job.route('/send-letter-jobs', methods=['POST']) def send_letter_jobs(): job_ids = validate(request.get_json(), letter_job_ids) - notify_celery.send_task(name=TaskNames.DVLA_FILES, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP) + + unrestricted_job_ids = get_unrestricted_letter_ids(job_ids['job_ids']) + + 
notify_celery.send_task( + name=TaskNames.DVLA_FILES, args=(unrestricted_job_ids,), queue=QueueNames.PROCESS_FTP) return jsonify(data={"response": "Task created to send files to DVLA"}), 201 diff --git a/app/utils.py b/app/utils.py index cf4c28f06..538673fb3 100644 --- a/app/utils.py +++ b/app/utils.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta import pytz -from flask import url_for +from flask import current_app, url_for from sqlalchemy import func from notifications_utils.template import SMSMessageTemplate, PlainTextEmailTemplate, LetterPreviewTemplate @@ -86,3 +86,31 @@ def get_public_notify_type_text(notify_type, plural=False): notify_type_text = 'text message' return '{}{}'.format(notify_type_text, 's' if plural else '') + + +def get_unrestricted_letter_ids(job_ids): + from app.dao.jobs_dao import ( + dao_get_job_by_id, + dao_update_job + ) + + from app.models import (LETTER_TYPE, JOB_STATUS_ERROR) + from app.dao.templates_dao import dao_get_template_by_id + + unrestricted_job_ids = [] + + for job_id in job_ids: + job = dao_get_job_by_id(job_id) + + template = dao_get_template_by_id(job.template_id, job.template_version) + + if template.template_type == LETTER_TYPE: + if job.service.restricted: + job.job_status = JOB_STATUS_ERROR + dao_update_job(job) + current_app.logger.warn( + "Job {} has been set to error, service {} is in trial mode".format(job.id, job.service.id)) + else: + unrestricted_job_ids.append(job_id) + + return unrestricted_job_ids diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 0929291b2..cfcd386dd 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -41,7 +41,7 @@ from app.dao.provider_details_dao import ( from app.models import ( Service, Template, SMS_TYPE, LETTER_TYPE, - JOB_STATUS_READY_TO_SEND, + JOB_STATUS_READY_TO_SEND, JOB_STATUS_ERROR, MonthlyBilling) from app.utils import get_london_midnight_in_utc, convert_utc_to_bst from tests.app.db import create_notification, create_service, create_template, create_job, create_rate @@ -680,7 +680,9 @@ def test_populate_monthly_billing_updates_correct_month_in_bst(sample_template): def test_run_letter_jobs(client, mocker, sample_letter_template): - job_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + jobs = [create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND), + create_job(template=sample_letter_template, job_status=JOB_STATUS_READY_TO_SEND)] + job_ids = [str(j.id) for j in jobs] mocker.patch( "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", return_value=job_ids @@ -692,3 +694,42 @@ def test_run_letter_jobs(client, mocker, sample_letter_template): mock_celery.assert_called_once_with(name=TaskNames.DVLA_FILES, args=(job_ids,), queue=QueueNames.PROCESS_FTP) + + +def test_run_letter_jobs_in_trial_sets_job_to_error(client, mocker, sample_letter_template): + sample_letter_template.service.restricted = True + job = create_job(sample_letter_template) + job_ids = [str(job.id)] + mocker.patch( + "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", + return_value=job_ids + ) + mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + + run_letter_jobs() + + assert not mock_celery.called + assert job.job_status == JOB_STATUS_ERROR + + +def test_run_letter_jobs_in_trial_sets_job_to_error_and_process_live_services( + client, mocker, sample_letter_template): + live_job = create_job(sample_letter_template) + + service = 
create_service(service_name="Sample service 2", restricted=True) + template = create_template(service, template_type=LETTER_TYPE) + trial_job = create_job(template) + + job_ids = [str(live_job.id), str(trial_job.id)] + mocker.patch( + "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", + return_value=job_ids + ) + mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + + run_letter_jobs() + + mock_celery.assert_called_once_with(name=TaskNames.DVLA_FILES, + args=([str(live_job.id)],), + queue=QueueNames.PROCESS_FTP) + assert trial_job.job_status == JOB_STATUS_ERROR diff --git a/tests/app/letters/test_send_letter_jobs.py b/tests/app/letters/test_send_letter_jobs.py index d38e24c8f..d899387b6 100644 --- a/tests/app/letters/test_send_letter_jobs.py +++ b/tests/app/letters/test_send_letter_jobs.py @@ -5,11 +5,15 @@ from flask import json from app.variables import LETTER_TEST_API_FILENAME from tests import create_authorization_header +from tests.app.db import create_job -def test_send_letter_jobs(client, mocker): +def test_send_letter_jobs(client, mocker, sample_letter_template): mock_celery = mocker.patch("app.letters.rest.notify_celery.send_task") - job_ids = {"job_ids": [str(uuid.uuid4()), str(uuid.uuid4()), str(uuid.uuid4())]} + job_1 = create_job(sample_letter_template) + job_2 = create_job(sample_letter_template) + job_3 = create_job(sample_letter_template) + job_ids = {"job_ids": [str(job_1.id), str(job_2.id), str(job_3.id)]} auth_header = create_authorization_header() From ea2f838510a90d4ac3b5ee50138972723a1580e6 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Thu, 31 Aug 2017 11:10:54 +0100 Subject: [PATCH 22/42] Fix typo --- app/performance_platform/processing_time.py | 4 ++-- ...st_notification_dao_performance_platform.py | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/app/performance_platform/processing_time.py b/app/performance_platform/processing_time.py index e9811a939..320c0d454 100644 --- a/app/performance_platform/processing_time.py +++ b/app/performance_platform/processing_time.py @@ -3,7 +3,7 @@ from datetime import datetime from flask import current_app from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc -from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_perfomance_platform +from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_performance_platform from app import performance_platform_client @@ -12,7 +12,7 @@ def send_processing_time_to_performance_platform(): start_date = get_midnight_for_day_before(today) end_date = get_london_midnight_in_utc(today) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, end_date) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(start_date, end_date) current_app.logger.info( 'Sending processing-time to performance platform for date {}. 
Total: {}, under 10 secs {}'.format( diff --git a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py index 6d3e6839a..38d7096fe 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py +++ b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py @@ -2,7 +2,7 @@ from datetime import date, datetime, timedelta from freezegun import freeze_time -from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_perfomance_platform +from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_performance_platform from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST from tests.app.db import create_notification @@ -12,7 +12,7 @@ def test_get_total_notifications_filters_on_date(sample_template): create_notification(sample_template, created_at=datetime(2016, 10, 17, 10, 0)) create_notification(sample_template, created_at=datetime(2016, 10, 18, 10, 0)) create_notification(sample_template, created_at=datetime(2016, 10, 19, 10, 0)) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 1 @@ -21,7 +21,7 @@ def test_get_total_notifications_filters_on_date_at_midnight(sample_template): create_notification(sample_template, created_at=datetime(2016, 10, 18, 0, 0)) create_notification(sample_template, created_at=datetime(2016, 10, 18, 23, 59, 59)) create_notification(sample_template, created_at=datetime(2016, 10, 19, 0, 0)) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 2 @@ -32,7 +32,7 @@ def test_get_total_notifications_only_counts_api_notifications(sample_template, create_notification(sample_template, job=sample_job) create_notification(sample_template, job=sample_job) create_notification(sample_template, api_key=sample_api_key) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 1 @@ -45,7 +45,7 @@ def test_get_total_notifications_ignores_test_keys(sample_template): create_notification(sample_template, key_type=KEY_TYPE_TEAM) create_notification(sample_template, key_type=KEY_TYPE_TEAM) create_notification(sample_template, key_type=KEY_TYPE_TEST) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 4 @@ -62,7 +62,7 @@ def test_get_total_notifications_ignores_letters( create_notification(sample_email_template) create_notification(sample_email_template) create_notification(sample_letter_template) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 4 
@@ -74,7 +74,7 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa create_notification(sample_template, sent_at=created_at + timedelta(seconds=10)) create_notification(sample_template, sent_at=created_at + timedelta(seconds=15)) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 3 assert result.messages_within_10_secs == 2 @@ -82,13 +82,13 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa @freeze_time('2016-10-18T10:00') def test_get_total_notifications_counts_messages_that_have_not_sent(sample_template): create_notification(sample_template, status='created', sent_at=None) - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 1 assert result.messages_within_10_secs == 0 @freeze_time('2016-10-18T10:00') def test_get_total_notifications_returns_zero_if_no_data(notify_db_session): - result = dao_get_total_notifications_sent_per_day_for_perfomance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) assert result.messages_total == 0 assert result.messages_within_10_secs == 0 From 72de309b2676333ac4b46367d7fd52d9bd951d57 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Thu, 31 Aug 2017 12:44:06 +0100 Subject: [PATCH 23/42] Make perf platform processing stats query the NotificationHistory table --- app/dao/notifications_dao.py | 22 ++++----- tests/app/conftest.py | 26 ++++++++-- ...t_notification_dao_performance_platform.py | 48 +++++++++++++++++++ 3 files changed, 81 insertions(+), 15 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index c1b22301c..17e388422 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -44,7 +44,6 @@ from app.models import ( from app.dao.dao_utils import transactional from app.statsd_decorators import statsd -from app.utils import get_london_month_from_utc_column def dao_get_notification_statistics_for_service_and_day(service_id, day): @@ -523,12 +522,12 @@ def set_scheduled_notification_to_processed(notification_id): db.session.commit() -def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, end_date): +def dao_get_total_notifications_sent_per_day_for_performance_platform(start_date, end_date): """ SELECT - count(notifications), + count(notification_history), coalesce(sum(CASE WHEN sent_at - created_at <= interval '10 seconds' THEN 1 ELSE 0 END), 0) - FROM notifications + FROM notification_history WHERE created_at > 'START DATE' AND created_at < 'END DATE' AND @@ -536,7 +535,7 @@ def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, key_type != 'test' AND notification_type != 'letter'; """ - under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10) + under_10_secs = NotificationHistory.sent_at - NotificationHistory.created_at <= timedelta(seconds=10) sum_column = functions.coalesce(functions.sum( case( [ @@ -545,13 +544,14 @@ def dao_get_total_notifications_sent_per_day_for_perfomance_platform(start_date, else_=0 ) ), 0) + return 
db.session.query( - func.count(Notification.id).label('messages_total'), + func.count(NotificationHistory.id).label('messages_total'), sum_column.label('messages_within_10_secs') ).filter( - Notification.created_at >= start_date, - Notification.created_at < end_date, - Notification.api_key_id.isnot(None), - Notification.key_type != KEY_TYPE_TEST, - Notification.notification_type != LETTER_TYPE + NotificationHistory.created_at >= start_date, + NotificationHistory.created_at < end_date, + NotificationHistory.api_key_id.isnot(None), + NotificationHistory.key_type != KEY_TYPE_TEST, + NotificationHistory.notification_type != LETTER_TYPE ).one() diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 86437e7a1..91954dc1b 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -136,24 +136,30 @@ def sample_service( restricted=False, limit=1000, email_from=None, - permissions=[SMS_TYPE, EMAIL_TYPE] + permissions=[SMS_TYPE, EMAIL_TYPE], + research_mode=None ): if user is None: user = create_user() if email_from is None: email_from = service_name.lower().replace(' ', '.') + data = { 'name': service_name, 'message_limit': limit, 'restricted': restricted, 'email_from': email_from, 'created_by': user, - 'letter_contact_block': 'London,\nSW1A 1AA', + 'letter_contact_block': 'London,\nSW1A 1AA' } service = Service.query.filter_by(name=service_name).first() if not service: service = Service(**data) dao_create_service(service, user, service_permissions=permissions) + + if research_mode: + service.research_mode = research_mode + else: if user not in service.users: dao_add_user_to_service(service, user) @@ -206,6 +212,7 @@ def sample_template( }) template = Template(**data) dao_create_template(template) + return template @@ -631,14 +638,22 @@ def sample_notification_history( status='created', created_at=None, notification_type=None, - key_type=KEY_TYPE_NORMAL + key_type=KEY_TYPE_NORMAL, + sent_at=None, + api_key=None ): if created_at is None: created_at = datetime.utcnow() + if sent_at is None: + sent_at = datetime.utcnow() + if notification_type is None: notification_type = sample_template.template_type + if not api_key: + api_key = create_api_key(sample_template.service, key_type=key_type) + notification_history = NotificationHistory( id=uuid.uuid4(), service=sample_template.service, @@ -647,7 +662,10 @@ def sample_notification_history( status=status, created_at=created_at, notification_type=notification_type, - key_type=key_type + key_type=key_type, + api_key=api_key, + api_key_id=api_key and api_key.id, + sent_at=sent_at ) notify_db.session.add(notification_history) notify_db.session.commit() diff --git a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py index 38d7096fe..dc11ac8e2 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py +++ b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py @@ -6,13 +6,20 @@ from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_f from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST from tests.app.db import create_notification +from tests.app.conftest import ( + sample_notification_history, + sample_service, + sample_template +) def test_get_total_notifications_filters_on_date(sample_template): create_notification(sample_template, created_at=datetime(2016, 10, 17, 10, 0)) create_notification(sample_template, created_at=datetime(2016, 10, 18, 10, 0)) 
create_notification(sample_template, created_at=datetime(2016, 10, 19, 10, 0)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 1 @@ -21,7 +28,9 @@ def test_get_total_notifications_filters_on_date_at_midnight(sample_template): create_notification(sample_template, created_at=datetime(2016, 10, 18, 0, 0)) create_notification(sample_template, created_at=datetime(2016, 10, 18, 23, 59, 59)) create_notification(sample_template, created_at=datetime(2016, 10, 19, 0, 0)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 2 @@ -32,7 +41,9 @@ def test_get_total_notifications_only_counts_api_notifications(sample_template, create_notification(sample_template, job=sample_job) create_notification(sample_template, job=sample_job) create_notification(sample_template, api_key=sample_api_key) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 1 @@ -45,7 +56,9 @@ def test_get_total_notifications_ignores_test_keys(sample_template): create_notification(sample_template, key_type=KEY_TYPE_TEAM) create_notification(sample_template, key_type=KEY_TYPE_TEAM) create_notification(sample_template, key_type=KEY_TYPE_TEST) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 4 @@ -62,7 +75,9 @@ def test_get_total_notifications_ignores_letters( create_notification(sample_email_template) create_notification(sample_email_template) create_notification(sample_letter_template) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 4 @@ -75,6 +90,7 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa create_notification(sample_template, sent_at=created_at + timedelta(seconds=15)) result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 3 assert result.messages_within_10_secs == 2 @@ -82,7 +98,9 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa @freeze_time('2016-10-18T10:00') def test_get_total_notifications_counts_messages_that_have_not_sent(sample_template): create_notification(sample_template, status='created', sent_at=None) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 1 assert result.messages_within_10_secs == 0 @@ -90,5 +108,35 @@ def test_get_total_notifications_counts_messages_that_have_not_sent(sample_templ @freeze_time('2016-10-18T10:00') def test_get_total_notifications_returns_zero_if_no_data(notify_db_session): result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + assert result.messages_total == 0 assert result.messages_within_10_secs == 0 + + +@freeze_time('2016-10-18T10:00') +def test_get_total_notifications_counts_ignores_research_mode(notify_db, notify_db_session): + created_at = datetime.utcnow() + service = sample_service(notify_db, notify_db_session, research_mode=True) + template = sample_template(notify_db, notify_db_session, service=service) + + create_notification(template, status='created', 
sent_at=None) + + sample_notification_history( + notify_db, + notify_db_session, + template, + notification_type='email', + sent_at=created_at + timedelta(seconds=5) + ) + sample_notification_history( + notify_db, + notify_db_session, + template, + notification_type='sms', + sent_at=created_at + timedelta(seconds=5) + ) + + result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + + assert result.messages_total == 2 + assert result.messages_within_10_secs == 2 From 96443f5d6e11a8df7e17da86acbb335cd964c720 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Thu, 31 Aug 2017 12:50:11 +0100 Subject: [PATCH 24/42] Combine test to query notifications within date range instead --- ...t_notification_dao_performance_platform.py | 33 ++++++++----------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py index dc11ac8e2..125223e2a 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py +++ b/tests/app/dao/notification_dao/test_notification_dao_performance_platform.py @@ -12,24 +12,17 @@ from tests.app.conftest import ( sample_template ) - -def test_get_total_notifications_filters_on_date(sample_template): - create_notification(sample_template, created_at=datetime(2016, 10, 17, 10, 0)) - create_notification(sample_template, created_at=datetime(2016, 10, 18, 10, 0)) - create_notification(sample_template, created_at=datetime(2016, 10, 19, 10, 0)) - - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) - - assert result.messages_total == 1 +BEGINNING_OF_DAY = date(2016, 10, 18) +END_OF_DAY = date(2016, 10, 19) -def test_get_total_notifications_filters_on_date_at_midnight(sample_template): +def test_get_total_notifications_filters_on_date_within_date_range(sample_template): create_notification(sample_template, created_at=datetime(2016, 10, 17, 23, 59, 59)) - create_notification(sample_template, created_at=datetime(2016, 10, 18, 0, 0)) + create_notification(sample_template, created_at=BEGINNING_OF_DAY) create_notification(sample_template, created_at=datetime(2016, 10, 18, 23, 59, 59)) - create_notification(sample_template, created_at=datetime(2016, 10, 19, 0, 0)) + create_notification(sample_template, created_at=END_OF_DAY) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 2 @@ -42,7 +35,7 @@ def test_get_total_notifications_only_counts_api_notifications(sample_template, create_notification(sample_template, job=sample_job) create_notification(sample_template, api_key=sample_api_key) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 1 @@ -57,7 +50,7 @@ def test_get_total_notifications_ignores_test_keys(sample_template): create_notification(sample_template, key_type=KEY_TYPE_TEAM) create_notification(sample_template, key_type=KEY_TYPE_TEST) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = 
dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 4 @@ -76,7 +69,7 @@ def test_get_total_notifications_ignores_letters( create_notification(sample_email_template) create_notification(sample_letter_template) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 4 @@ -89,7 +82,7 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa create_notification(sample_template, sent_at=created_at + timedelta(seconds=10)) create_notification(sample_template, sent_at=created_at + timedelta(seconds=15)) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 3 assert result.messages_within_10_secs == 2 @@ -99,7 +92,7 @@ def test_get_total_notifications_counts_messages_within_10_seconds(sample_templa def test_get_total_notifications_counts_messages_that_have_not_sent(sample_template): create_notification(sample_template, status='created', sent_at=None) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 1 assert result.messages_within_10_secs == 0 @@ -107,7 +100,7 @@ def test_get_total_notifications_counts_messages_that_have_not_sent(sample_templ @freeze_time('2016-10-18T10:00') def test_get_total_notifications_returns_zero_if_no_data(notify_db_session): - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 0 assert result.messages_within_10_secs == 0 @@ -136,7 +129,7 @@ def test_get_total_notifications_counts_ignores_research_mode(notify_db, notify_ sent_at=created_at + timedelta(seconds=5) ) - result = dao_get_total_notifications_sent_per_day_for_performance_platform(date(2016, 10, 18), date(2016, 10, 19)) + result = dao_get_total_notifications_sent_per_day_for_performance_platform(BEGINNING_OF_DAY, END_OF_DAY) assert result.messages_total == 2 assert result.messages_within_10_secs == 2 From 9fcd54c12bc3160ff4f92b365df32e2c7c1ba466 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 31 Aug 2017 14:28:44 +0100 Subject: [PATCH 25/42] unindent old fn --- app/commands.py | 60 ++++++++++++++++++++++++------------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/app/commands.py b/app/commands.py index 051cd7fc8..fba155288 100644 --- a/app/commands.py +++ b/app/commands.py @@ -1,7 +1,7 @@ import uuid -from datetime import datetime +from datetime import datetime, timedelta from decimal import Decimal -from flask_script import Command, Manager, Option +from flask_script import Command, Option from app import db from app.dao.monthly_billing_dao import ( @@ -49,7 +49,7 @@ class PurgeFunctionalTestDataCommand(Command): Option('-u', '-user-email-prefix', dest='user_email_prefix', help="Functional test user email prefix."), ) - def run(self, service_name_prefix=None, 
user_email_prefix=None): + def run(self, user_email_prefix=None): if user_email_prefix: users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() for usr in users: @@ -167,33 +167,33 @@ class CustomDbScript(Command): class PopulateMonthlyBilling(Command): - option_list = ( - Option('-y', '-year', dest="year", help="Use for integer value for year, e.g. 2017"), + option_list = ( + Option('-y', '-year', dest="year", help="Use for integer value for year, e.g. 2017"), + ) + + def run(self, year): + service_ids = get_service_ids_that_need_billing_populated( + start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) ) + start, end = 1, 13 + if year == '2016': + start = 4 - def run(self, year): - service_ids = get_service_ids_that_need_billing_populated( - start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) - ) - start, end = 1, 13 - if year == '2016': - start = 4 + for service_id in service_ids: + print('Starting to populate data for service {}'.format(str(service_id))) + print('Starting populating monthly billing for {}'.format(year)) + for i in range(start, end): + print('Population for {}-{}'.format(i, year)) + self.populate(service_id, year, i) - for service_id in service_ids: - print('Starting to populate data for service {}'.format(str(service_id))) - print('Starting populating monthly billing for {}'.format(year)) - for i in range(start, end): - print('Population for {}-{}'.format(i, year)) - self.populate(service_id, year, i) - - def populate(self, service_id, year, month): - create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1)) - sms_res = get_monthly_billing_by_notification_type( - service_id, datetime(int(year), int(month), 1), SMS_TYPE - ) - email_res = get_monthly_billing_by_notification_type( - service_id, datetime(int(year), int(month), 1), EMAIL_TYPE - ) - print("Finished populating data for {} for service id {}".format(month, str(service_id))) - print('SMS: {}'.format(sms_res.monthly_totals)) - print('Email: {}'.format(email_res.monthly_totals)) + def populate(self, service_id, year, month): + create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1)) + sms_res = get_monthly_billing_by_notification_type( + service_id, datetime(int(year), int(month), 1), SMS_TYPE + ) + email_res = get_monthly_billing_by_notification_type( + service_id, datetime(int(year), int(month), 1), EMAIL_TYPE + ) + print("Finished populating data for {} for service id {}".format(month, str(service_id))) + print('SMS: {}'.format(sms_res.monthly_totals)) + print('Email: {}'.format(email_res.monthly_totals)) From 378b131c599b3f756b5801125750c7cdabf2748a Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 31 Aug 2017 14:29:13 +0100 Subject: [PATCH 26/42] add batch task to backfill processing time data give it a start date and an end date, and it'll send data to the performance platform for all dates in that (inclusive) --- app/commands.py | 31 +++++++++++++++++++++ app/performance_platform/processing_time.py | 4 +++ tests/app/test_commands.py | 13 +++++++++ 3 files changed, 48 insertions(+) create mode 100644 tests/app/test_commands.py diff --git a/app/commands.py b/app/commands.py index fba155288..a27f08d1b 100644 --- a/app/commands.py +++ b/app/commands.py @@ -16,6 +16,8 @@ from app.dao.services_dao import ( ) from app.dao.provider_rates_dao import create_provider_rates from app.dao.users_dao import (delete_model_user, delete_user_verify_codes) +from app.utils import get_midnight_for_day_before, 
get_london_midnight_in_utc +from app.performance_platform.processing_time import send_processing_time_for_start_and_end class CreateProviderRateCommand(Command): @@ -197,3 +199,32 @@ class PopulateMonthlyBilling(Command): print("Finished populating data for {} for service id {}".format(month, str(service_id))) print('SMS: {}'.format(sms_res.monthly_totals)) print('Email: {}'.format(email_res.monthly_totals)) + + +class BackfillProcessingTime(Command): + option_list = ( + Option('-s', '--start_date', dest='start_date', help="Date (%Y-%m-%d) start date inclusive"), + Option('-e', '--end_date', dest='end_date', help="Date (%Y-%m-%d) end date inclusive"), + ) + + def run(self, start_date, end_date): + start_date = datetime.strptime(start_date, '%Y-%m-%d') + end_date = datetime.strptime(end_date, '%Y-%m-%d') + + delta = end_date - start_date + + print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date)) + + for i in range(delta.days + 1): + # because the tz conversion funcs talk about midnight, and the midnight before last, + # we want to pretend we're running this from the next morning, so add one. + process_date = start_date + timedelta(days=i + 1) + + process_start_date = get_midnight_for_day_before(process_date) + process_end_date = get_london_midnight_in_utc(process_date) + + print('Sending notification processing-time for {} - {}'.format( + process_start_date.isoformat(), + process_end_date.isoformat() + )) + send_processing_time_for_start_and_end(process_start_date, process_end_date) diff --git a/app/performance_platform/processing_time.py b/app/performance_platform/processing_time.py index 320c0d454..b901bde84 100644 --- a/app/performance_platform/processing_time.py +++ b/app/performance_platform/processing_time.py @@ -12,6 +12,10 @@ def send_processing_time_to_performance_platform(): start_date = get_midnight_for_day_before(today) end_date = get_london_midnight_in_utc(today) + send_processing_time_for_start_and_end(start_date, end_date) + + +def send_processing_time_for_start_and_end(start_date, end_date): result = dao_get_total_notifications_sent_per_day_for_performance_platform(start_date, end_date) current_app.logger.info( diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py new file mode 100644 index 000000000..e44655241 --- /dev/null +++ b/tests/app/test_commands.py @@ -0,0 +1,13 @@ +from datetime import datetime + +from app.commands import BackfillProcessingTime + +def test_backfill_processing_time_works_for_correct_dates(mocker): + send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end') + + BackfillProcessingTime().run('2017-08-01', '2017-08-03') + + assert send_mock.call_count == 3 + send_mock.assert_any_call(datetime(2017, 7, 31, 23, 0), datetime(2017, 8, 1, 23, 0)) + send_mock.assert_any_call(datetime(2017, 8, 1, 23, 0), datetime(2017, 8, 2, 23, 0)) + send_mock.assert_any_call(datetime(2017, 8, 2, 23, 0), datetime(2017, 8, 3, 23, 0)) From b2436b3e02e977aae4ad18833f6792dd19fe078b Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 31 Aug 2017 14:45:01 +0100 Subject: [PATCH 27/42] add command to application.py --- application.py | 1 + tests/app/test_commands.py | 1 + 2 files changed, 2 insertions(+) diff --git a/application.py b/application.py index 0cb5377d8..3127f52af 100644 --- a/application.py +++ b/application.py @@ -17,6 +17,7 @@ manager.add_command('create_provider_rate', commands.CreateProviderRateCommand) manager.add_command('purge_functional_test_data', 
commands.PurgeFunctionalTestDataCommand) manager.add_command('custom_db_script', commands.CustomDbScript) manager.add_command('populate_monthly_billing', commands.PopulateMonthlyBilling) +manager.add_command('backfill_processing_time', commands.BackfillProcessingTime) @manager.command diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index e44655241..1c5ceedd9 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -2,6 +2,7 @@ from datetime import datetime from app.commands import BackfillProcessingTime + def test_backfill_processing_time_works_for_correct_dates(mocker): send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end') From 19f964a90baead41e461f8f878a1404c162896af Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 4 Sep 2017 17:24:41 +0100 Subject: [PATCH 28/42] Added a check that the call is not using a test api key. Removed the tests for trial mode service for the scheduled tasks and the process job. Having the validation in the POST notification and create job endpoint is enough. Updated the test_service_whitelist test because the order of the array is not gaurenteed. --- app/celery/scheduled_tasks.py | 19 +++----- app/celery/tasks.py | 11 +---- app/letters/rest.py | 7 +-- app/utils.py | 32 +------------ app/v2/notifications/post_notifications.py | 6 +-- tests/app/celery/test_scheduled_tasks.py | 45 +------------------ tests/app/celery/test_tasks.py | 20 --------- tests/app/service/test_service_whitelist.py | 7 ++- .../test_post_letter_notifications.py | 16 +++++++ 9 files changed, 34 insertions(+), 129 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 9c4dbb5c5..5614c361f 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -39,7 +39,7 @@ from app.notifications.process_notifications import send_notification_to_queue from app.statsd_decorators import statsd from app.celery.tasks import process_job from app.config import QueueNames, TaskNames -from app.utils import convert_utc_to_bst, get_unrestricted_letter_ids +from app.utils import convert_utc_to_bst @notify_celery.task(name="remove_csv_files") @@ -308,14 +308,9 @@ def populate_monthly_billing(): @statsd(namespace="tasks") def run_letter_jobs(): job_ids = dao_get_letter_job_ids_by_status(JOB_STATUS_READY_TO_SEND) - - unrestricted_job_ids = get_unrestricted_letter_ids(job_ids) - - if unrestricted_job_ids: - notify_celery.send_task( - name=TaskNames.DVLA_FILES, - args=(unrestricted_job_ids,), - queue=QueueNames.PROCESS_FTP - ) - current_app.logger.info( - "Queued {} ready letter job ids onto {}".format(len(unrestricted_job_ids), QueueNames.PROCESS_FTP)) + notify_celery.send_task( + name=TaskNames.DVLA_FILES, + args=(job_ids,), + queue=QueueNames.PROCESS_FTP + ) + current_app.logger.info("Queued {} ready letter job ids onto {}".format(len(job_ids), QueueNames.PROCESS_FTP)) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index a5e9f98d6..e621350ce 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -75,13 +75,6 @@ def process_job(job_id): db_template = dao_get_template_by_id(job.template_id, job.template_version) - if db_template.template_type == LETTER_TYPE and job.service.restricted: - job.job_status = JOB_STATUS_ERROR - dao_update_job(job) - current_app.logger.warn( - "Job {} has been set to error, service {} is in trial mode".format(job_id, service.id)) - return - TemplateClass = get_template_class(db_template.template_type) template = TemplateClass(db_template.__dict__) @@ 
-97,9 +90,7 @@ def process_job(job_id): update_job_to_sent_to_dvla.apply_async([str(job.id)], queue=QueueNames.RESEARCH_MODE) else: build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) - # temporary logging - current_app.logger.info("send job {} to build-dvla-file in the {} queue".format( - job_id, QueueNames.JOBS)) + current_app.logger.info("send job {} to build-dvla-file in the {} queue".format(job_id, QueueNames.JOBS)) else: job.job_status = JOB_STATUS_FINISHED diff --git a/app/letters/rest.py b/app/letters/rest.py index 458611a80..b9ce7bf35 100644 --- a/app/letters/rest.py +++ b/app/letters/rest.py @@ -8,7 +8,6 @@ from app.schemas import job_schema from app.v2.errors import register_errors from app.letters.letter_schemas import letter_job_ids from app.schema_validation import validate -from app.utils import get_unrestricted_letter_ids letter_job = Blueprint("letter-job", __name__) register_errors(letter_job) @@ -17,11 +16,7 @@ register_errors(letter_job) @letter_job.route('/send-letter-jobs', methods=['POST']) def send_letter_jobs(): job_ids = validate(request.get_json(), letter_job_ids) - - unrestricted_job_ids = get_unrestricted_letter_ids(job_ids['job_ids']) - - notify_celery.send_task( - name=TaskNames.DVLA_FILES, args=(unrestricted_job_ids,), queue=QueueNames.PROCESS_FTP) + notify_celery.send_task(name=TaskNames.DVLA_FILES, args=(job_ids['job_ids'],), queue=QueueNames.PROCESS_FTP) return jsonify(data={"response": "Task created to send files to DVLA"}), 201 diff --git a/app/utils.py b/app/utils.py index 538673fb3..49f6d61dc 100644 --- a/app/utils.py +++ b/app/utils.py @@ -1,9 +1,9 @@ from datetime import datetime, timedelta import pytz -from flask import current_app, url_for +from flask import url_for from sqlalchemy import func -from notifications_utils.template import SMSMessageTemplate, PlainTextEmailTemplate, LetterPreviewTemplate +from notifications_utils.template import SMSMessageTemplate, PlainTextEmailTemplate local_timezone = pytz.timezone("Europe/London") @@ -86,31 +86,3 @@ def get_public_notify_type_text(notify_type, plural=False): notify_type_text = 'text message' return '{}{}'.format(notify_type_text, 's' if plural else '') - - -def get_unrestricted_letter_ids(job_ids): - from app.dao.jobs_dao import ( - dao_get_job_by_id, - dao_update_job - ) - - from app.models import (LETTER_TYPE, JOB_STATUS_ERROR) - from app.dao.templates_dao import dao_get_template_by_id - - unrestricted_job_ids = [] - - for job_id in job_ids: - job = dao_get_job_by_id(job_id) - - template = dao_get_template_by_id(job.template_id, job.template_version) - - if template.template_type == LETTER_TYPE: - if job.service.restricted: - job.job_status = JOB_STATUS_ERROR - dao_update_job(job) - current_app.logger.warn( - "Job {} has been set to error, service {} is in trial mode".format(job.id, job.service.id)) - else: - unrestricted_job_ids.append(job_id) - - return unrestricted_job_ids diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index c1bcd168e..db69a5758 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -150,18 +150,16 @@ def process_letter_notification(*, letter_data, api_key, template): if api_key.key_type == KEY_TYPE_TEAM: raise BadRequestError(message='Cannot send letters with a team api key', status_code=403) - if api_key.service.restricted: - raise BadRequestError(message='Cannot send letters when service is in trial mode', status_code=403) + if api_key.service.restricted 
and api_key.key_type != KEY_TYPE_TEST: + raise BadRequestError(message='Cannot send letters when service is in trial mode', status_code=403) job = create_letter_api_job(template) notification = create_letter_notification(letter_data, job, api_key) if api_key.service.research_mode or api_key.key_type == KEY_TYPE_TEST: - # distinguish real API jobs from test jobs by giving the test jobs a different filename job.original_file_name = LETTER_TEST_API_FILENAME dao_update_job(job) - update_job_to_sent_to_dvla.apply_async([str(job.id)], queue=QueueNames.RESEARCH_MODE) else: build_dvla_file.apply_async([str(job.id)], queue=QueueNames.JOBS) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index cfcd386dd..2308e7247 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -1,5 +1,3 @@ -import uuid - from datetime import datetime, timedelta from functools import partial from unittest.mock import call, patch, PropertyMock @@ -41,9 +39,9 @@ from app.dao.provider_details_dao import ( from app.models import ( Service, Template, SMS_TYPE, LETTER_TYPE, - JOB_STATUS_READY_TO_SEND, JOB_STATUS_ERROR, + JOB_STATUS_READY_TO_SEND, MonthlyBilling) -from app.utils import get_london_midnight_in_utc, convert_utc_to_bst +from app.utils import get_london_midnight_in_utc from tests.app.db import create_notification, create_service, create_template, create_job, create_rate from tests.app.conftest import ( sample_job as create_sample_job, @@ -694,42 +692,3 @@ def test_run_letter_jobs(client, mocker, sample_letter_template): mock_celery.assert_called_once_with(name=TaskNames.DVLA_FILES, args=(job_ids,), queue=QueueNames.PROCESS_FTP) - - -def test_run_letter_jobs_in_trial_sets_job_to_error(client, mocker, sample_letter_template): - sample_letter_template.service.restricted = True - job = create_job(sample_letter_template) - job_ids = [str(job.id)] - mocker.patch( - "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", - return_value=job_ids - ) - mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") - - run_letter_jobs() - - assert not mock_celery.called - assert job.job_status == JOB_STATUS_ERROR - - -def test_run_letter_jobs_in_trial_sets_job_to_error_and_process_live_services( - client, mocker, sample_letter_template): - live_job = create_job(sample_letter_template) - - service = create_service(service_name="Sample service 2", restricted=True) - template = create_template(service, template_type=LETTER_TYPE) - trial_job = create_job(template) - - job_ids = [str(live_job.id), str(trial_job.id)] - mocker.patch( - "app.celery.scheduled_tasks.dao_get_letter_job_ids_by_status", - return_value=job_ids - ) - mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") - - run_letter_jobs() - - mock_celery.assert_called_once_with(name=TaskNames.DVLA_FILES, - args=([str(live_job.id)],), - queue=QueueNames.PROCESS_FTP) - assert trial_job.job_status == JOB_STATUS_ERROR diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 344ac00ab..feca346e1 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -1035,26 +1035,6 @@ def test_should_cancel_job_if_service_is_inactive(sample_service, mock_dvla_file_task.assert_not_called() -def test_should_error_job_if_service_is_restricted_and_letter_template_type( - sample_service, - sample_letter_job, - mocker -): - sample_service.restricted = True - - mocker.patch('app.celery.tasks.s3.get_job_from_s3') - 
mocker.patch('app.celery.tasks.process_row') - mock_dvla_file_task = mocker.patch('app.celery.tasks.build_dvla_file') - - process_job(sample_letter_job.id) - - job = jobs_dao.dao_get_job_by_id(sample_letter_job.id) - assert job.job_status == JOB_STATUS_ERROR - s3.get_job_from_s3.assert_not_called() - tasks.process_row.assert_not_called() - mock_dvla_file_task.assert_not_called() - - @pytest.mark.parametrize('template_type, expected_class', [ (SMS_TYPE, SMSMessageTemplate), (EMAIL_TYPE, WithSubjectTemplate), diff --git a/tests/app/service/test_service_whitelist.py b/tests/app/service/test_service_whitelist.py index 5edbe8e28..18c0595f8 100644 --- a/tests/app/service/test_service_whitelist.py +++ b/tests/app/service/test_service_whitelist.py @@ -30,10 +30,9 @@ def test_get_whitelist_separates_emails_and_phones(client, sample_service): response = client.get('service/{}/whitelist'.format(sample_service.id), headers=[create_authorization_header()]) assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == { - 'email_addresses': ['service@example.com'], - 'phone_numbers': ['+1800-555-555', '07123456789'] - } + json_resp = json.loads(response.get_data(as_text=True)) + assert json_resp['email_addresses'] == ['service@example.com'] + assert sorted(json_resp['phone_numbers']) == sorted(['+1800-555-555', '07123456789']) def test_get_whitelist_404s_with_unknown_service_id(client): diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index cb9d4bb29..aa4433a59 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -249,3 +249,19 @@ def test_post_letter_notification_doesnt_send_in_trial(client, sample_trial_lett assert error_json['status_code'] == 403 assert error_json['errors'] == [ {'error': 'BadRequestError', 'message': 'Cannot send letters when service is in trial mode'}] + + +def test_post_letter_notification_calls_update_job_sent_to_dvla_when_service_is_in_trial_mode_but_using_test_key( + client, sample_trial_letter_template, mocker): + build_dvla_task = mocker.patch('app.celery.tasks.build_dvla_file.apply_async') + update_job_task = mocker.patch('app.celery.tasks.update_job_to_sent_to_dvla.apply_async') + + data = { + "template_id": sample_trial_letter_template.id, + "personalisation": {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'Baz'} + } + letter_request(client, data=data, service_id=sample_trial_letter_template.service_id, + key_type=KEY_TYPE_TEST) + job = Job.query.one() + update_job_task.assert_called_once_with([str(job.id)], queue='research-mode-tasks') + assert not build_dvla_task.called From dbeec6c88b28dcc4371bb1ffe29d59813c6d6894 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 5 Sep 2017 11:50:39 +0100 Subject: [PATCH 29/42] Remove unused fixtures from tests --- tests/app/celery/test_tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index feca346e1..a12e6c050 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -611,7 +611,7 @@ def test_should_put_send_email_task_in_research_mode_queue_if_research_mode_serv def test_should_not_build_dvla_file_in_research_mode_for_letter_job( - notify_db, notify_db_session, mocker, sample_service, sample_letter_job, fake_uuid + mocker, sample_service, sample_letter_job, fake_uuid ): test_encrypted_data = 
'some encrypted data' sample_service.research_mode = True @@ -633,7 +633,7 @@ def test_should_not_build_dvla_file_in_research_mode_for_letter_job( @freeze_time("2017-08-29 17:30:00") def test_should_update_job_to_sent_to_dvla_in_research_mode_for_letter_job( - notify_db, notify_db_session, mocker, sample_service, sample_letter_job, fake_uuid + mocker, sample_service, sample_letter_job, fake_uuid ): test_encrypted_data = 'some encrypted data' sample_service.research_mode = True From 33becb9b721c4a74809a6725b29fa202ca9b8150 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 5 Sep 2017 17:38:17 +0100 Subject: [PATCH 30/42] Update sqlalchemy from 1.1.13 to 1.1.14 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fa34786d4..3a1073f1c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ psycopg2==2.7.3.1 PyJWT==1.5.2 six==1.10.0 SQLAlchemy-Utils==0.32.14 -SQLAlchemy==1.1.13 +SQLAlchemy==1.1.14 statsd==3.2.1 notifications-python-client==4.3.1 From 86929fd6b69e1103031eaa72dfe603f10429a66a Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 5 Sep 2017 17:53:47 +0100 Subject: [PATCH 31/42] Create a table for service_sms_senders. Migration from inbound_number to service_sms_senders. May need to pull out the migration into another PR. --- app/models.py | 15 +++++ .../versions/0118_service_sms_senders.py | 39 +++++++++++++ .../0119_insert_service_sms_senders.py | 55 +++++++++++++++++++ 3 files changed, 109 insertions(+) create mode 100644 migrations/versions/0118_service_sms_senders.py create mode 100644 migrations/versions/0119_insert_service_sms_senders.py diff --git a/app/models.py b/app/models.py index 89e4d56b8..304a4b4e1 100644 --- a/app/models.py +++ b/app/models.py @@ -278,6 +278,21 @@ class InboundNumber(db.Model): } +class ServiceSmsSender(db.Model): + __tablename__ = "service_sms_senders" + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + sms_sender = db.Column(db.String(11), nullable=False) + service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), unique=True, index=True, nullable=False) + service = db.relationship(Service, backref=db.backref("service_sms_senders", uselist=False)) + is_default = db.Column(db.Boolean, nullable=False, default=True) + inbound_number_id = db.Column(UUID(as_uuid=True), db.ForeignKey('inbound_numbers.id'), + unique=True, index=True, nullable=True) + inbound_number = db.relationship(InboundNumber, backref=db.backref("inbound_number", uselist=False)) + created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False) + updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) + + class ServicePermission(db.Model): __tablename__ = "service_permissions" diff --git a/migrations/versions/0118_service_sms_senders.py b/migrations/versions/0118_service_sms_senders.py new file mode 100644 index 000000000..db0c1724f --- /dev/null +++ b/migrations/versions/0118_service_sms_senders.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: 0118_service_sms_senders +Revises: 0117_international_sms_notify +Create Date: 2017-09-05 17:29:38.921045 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0118_service_sms_senders' +down_revision = '0117_international_sms_notify' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +def upgrade(): + op.create_table('service_sms_senders', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('sms_sender', sa.String(length=11), nullable=False), + sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('is_default', sa.Boolean(), nullable=False), + sa.Column('inbound_number_id', postgresql.UUID(as_uuid=True), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['inbound_number_id'], ['inbound_numbers.id'], ), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_service_sms_senders_inbound_number_id'), 'service_sms_senders', ['inbound_number_id'], + unique=True) + op.create_index(op.f('ix_service_sms_senders_service_id'), 'service_sms_senders', ['service_id'], unique=True) + + +def downgrade(): + op.drop_index(op.f('ix_service_sms_senders_service_id'), table_name='service_sms_senders') + op.drop_index(op.f('ix_service_sms_senders_inbound_number_id'), table_name='service_sms_senders') + op.drop_table('service_sms_senders') diff --git a/migrations/versions/0119_insert_service_sms_senders.py b/migrations/versions/0119_insert_service_sms_senders.py new file mode 100644 index 000000000..d7de69ac3 --- /dev/null +++ b/migrations/versions/0119_insert_service_sms_senders.py @@ -0,0 +1,55 @@ +"""empty message + +Revision ID: 0119_insert_service_sms_senders +Revises: 0118_service_sms_senders +Create Date: 2017-09-05 17:21:14.816199 + +""" +import uuid + +from alembic import op + +revision = '0119_insert_service_sms_senders' +down_revision = '0118_service_sms_senders' + + +def upgrade(): + query = """SELECT id, number, service_id + FROM inbound_numbers + WHERE service_id is not null""" + + conn = op.get_bind() + results = conn.execute(query) + res = results.fetchall() + + for x in res: + op.execute( + """ + INSERT INTO service_sms_senders + ( + id, + sms_sender, + service_id, + inbound_number_id, + is_default, + created_at + ) + VALUES + ( + '{}', + '{}', + '{}', + '{}', + True, + now() + ) + """.format(uuid.uuid4(), x.number, x.service_id, x.id) + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM service_sms_senders + """ + ) From b0c8f13199507a039b4b28cfa5c2ad09c0712c00 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Wed, 6 Sep 2017 11:17:12 +0100 Subject: [PATCH 32/42] Update notifications-python-client from 4.3.1 to 4.4.0 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fa34786d4..a69576ab2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,7 @@ SQLAlchemy-Utils==0.32.14 SQLAlchemy==1.1.13 statsd==3.2.1 -notifications-python-client==4.3.1 +notifications-python-client==4.4.0 # PaaS awscli>=1.11,<1.12 From 3b115cc79f96539aacc454a2d38f0db4e08d0c2f Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 6 Sep 2017 15:33:09 +0100 Subject: [PATCH 33/42] Remove the or current_app['FROM_NUMBER'] since sms_sender is a required field. 
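
Since sms_sender is a required field on the service, the config fallback is
dead code and the sender lookup can simply return the column value. A minimal
sketch of the resulting logic on the Service model (the method name below is
illustrative; the hunk only shows the class context):

    def get_sms_sender(self):  # name illustrative, not visible in the hunk
        if self.inbound_number and self.inbound_number.active:
            return self.inbound_number.number
        else:
            return self.sms_sender
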
--- app/models.py | 2 +- .../0119_insert_service_sms_senders.py | 55 ------------------- 2 files changed, 1 insertion(+), 56 deletions(-) delete mode 100644 migrations/versions/0119_insert_service_sms_senders.py diff --git a/app/models.py b/app/models.py index 304a4b4e1..cdefb2171 100644 --- a/app/models.py +++ b/app/models.py @@ -245,7 +245,7 @@ class Service(db.Model, Versioned): if self.inbound_number and self.inbound_number.active: return self.inbound_number.number else: - return self.sms_sender or current_app.config['FROM_NUMBER'] + return self.sms_sender class InboundNumber(db.Model): diff --git a/migrations/versions/0119_insert_service_sms_senders.py b/migrations/versions/0119_insert_service_sms_senders.py deleted file mode 100644 index d7de69ac3..000000000 --- a/migrations/versions/0119_insert_service_sms_senders.py +++ /dev/null @@ -1,55 +0,0 @@ -"""empty message - -Revision ID: 0119_insert_service_sms_senders -Revises: 0118_service_sms_senders -Create Date: 2017-09-05 17:21:14.816199 - -""" -import uuid - -from alembic import op - -revision = '0119_insert_service_sms_senders' -down_revision = '0118_service_sms_senders' - - -def upgrade(): - query = """SELECT id, number, service_id - FROM inbound_numbers - WHERE service_id is not null""" - - conn = op.get_bind() - results = conn.execute(query) - res = results.fetchall() - - for x in res: - op.execute( - """ - INSERT INTO service_sms_senders - ( - id, - sms_sender, - service_id, - inbound_number_id, - is_default, - created_at - ) - VALUES - ( - '{}', - '{}', - '{}', - '{}', - True, - now() - ) - """.format(uuid.uuid4(), x.number, x.service_id, x.id) - ) - - -def downgrade(): - op.execute( - """ - DELETE FROM service_sms_senders - """ - ) From b85ff4e6a68dd7dadf4d93b358e44c8a7404f989 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Thu, 7 Sep 2017 15:41:04 +0100 Subject: [PATCH 34/42] Update the alembic generator template to be PEP friendly --- migrations/script.py.mako | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/migrations/script.py.mako b/migrations/script.py.mako index 95702017e..e17366786 100644 --- a/migrations/script.py.mako +++ b/migrations/script.py.mako @@ -1,19 +1,18 @@ -"""${message} +""" Revision ID: ${up_revision} Revises: ${down_revision} Create Date: ${create_date} """ - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - from alembic import op import sqlalchemy as sa ${imports if imports else ""} +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} + + def upgrade(): ${upgrades if upgrades else "pass"} From d997eb3af9408d19b9ec6e691899bfc791f1e233 Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Thu, 7 Sep 2017 15:41:23 +0100 Subject: [PATCH 35/42] Create ServiceEmailReplyTo table --- app/models.py | 14 ++++++++ .../versions/0119_add_email_reply_to.py | 34 +++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 migrations/versions/0119_add_email_reply_to.py diff --git a/app/models.py b/app/models.py index cdefb2171..588c95d48 100644 --- a/app/models.py +++ b/app/models.py @@ -1329,3 +1329,17 @@ class MonthlyBilling(db.Model): def __repr__(self): return str(self.serialized()) + + +class ServiceEmailReplyTo(db.Model): + __tablename__ = "service_email_reply_to" + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + + service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), unique=False, index=True, nullable=False) + service = db.relationship(Service, backref=db.backref("reply_to_email_addresses", uselist=False)) + + email_address = db.Column(db.Text, nullable=False, index=False, unique=False) + is_default = db.Column(db.Boolean, nullable=False, default=True) + created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) + updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) diff --git a/migrations/versions/0119_add_email_reply_to.py b/migrations/versions/0119_add_email_reply_to.py new file mode 100644 index 000000000..1b5ba0a52 --- /dev/null +++ b/migrations/versions/0119_add_email_reply_to.py @@ -0,0 +1,34 @@ +""" + +Revision ID: 0119_add_email_reply_to +Revises: 0118_service_sms_senders +Create Date: 2017-09-07 15:29:49.087143 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0119_add_email_reply_to' +down_revision = '0118_service_sms_senders' + + +def upgrade(): + op.create_table('service_email_reply_to', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('email_address', sa.Text(), nullable=False), + sa.Column('is_default', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index( + op.f('ix_service_email_reply_to_service_id'), 'service_email_reply_to', ['service_id'], unique=False + ) + + +def downgrade(): + op.drop_index(op.f('ix_service_email_reply_to_service_id'), table_name='service_email_reply_to') + op.drop_table('service_email_reply_to') From 772c03e2e5b6694ad7017368bd89bac0bc314f0f Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 7 Sep 2017 18:58:14 +0100 Subject: [PATCH 36/42] Update pytest from 3.2.1 to 3.2.2 --- requirements_for_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_for_test.txt b/requirements_for_test.txt index b6567ea5b..619e882c0 100644 --- a/requirements_for_test.txt +++ b/requirements_for_test.txt @@ -1,6 +1,6 @@ -r requirements.txt pycodestyle==2.3.1 -pytest==3.2.1 +pytest==3.2.2 pytest-mock==1.6.2 pytest-cov==2.5.1 pytest-xdist==1.20.0 From 
154c8a098e54efb69dab3d66d0166de69b39ddae Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 7 Sep 2017 20:18:15 +0100 Subject: [PATCH 37/42] Update moto from 1.1.1 to 1.1.2 --- requirements_for_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_for_test.txt b/requirements_for_test.txt index b6567ea5b..29f89c6e6 100644 --- a/requirements_for_test.txt +++ b/requirements_for_test.txt @@ -5,7 +5,7 @@ pytest-mock==1.6.2 pytest-cov==2.5.1 pytest-xdist==1.20.0 coveralls==1.2.0 -moto==1.1.1 +moto==1.1.2 freezegun==0.3.9 requests-mock==1.3.0 strict-rfc3339==0.7 From 4f57f2c5a54dec0d906a5cd326d835a4c1a92141 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 8 Sep 2017 09:56:46 +0100 Subject: [PATCH 38/42] Change test to use json loads so that the test will pass consistently --- tests/app/test_cloudfoundry_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/app/test_cloudfoundry_config.py b/tests/app/test_cloudfoundry_config.py index 72b2f2865..8f90df90d 100644 --- a/tests/app/test_cloudfoundry_config.py +++ b/tests/app/test_cloudfoundry_config.py @@ -213,7 +213,7 @@ def test_sms_inbound_config(): def test_performance_platform_config(): extract_cloudfoundry_config() - assert os.environ['PERFORMANCE_PLATFORM_ENDPOINTS'] == json.dumps({ + assert json.loads(os.environ['PERFORMANCE_PLATFORM_ENDPOINTS']) == { 'foo': 'my_token', 'bar': 'other_token' - }) + } From 01eef6c7f53489ffca7ee605549c701b04b395fc Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Fri, 8 Sep 2017 11:14:26 +0100 Subject: [PATCH 39/42] Add service_email_reply_to DAO with an upsert method --- app/dao/service_email_reply_to_dao.py | 32 +++++++++++++++ .../dao/test_service_email_reply_to_dao.py | 41 +++++++++++++++++++ tests/app/db.py | 20 ++++++++- 3 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 app/dao/service_email_reply_to_dao.py create mode 100644 tests/app/dao/test_service_email_reply_to_dao.py diff --git a/app/dao/service_email_reply_to_dao.py b/app/dao/service_email_reply_to_dao.py new file mode 100644 index 000000000..5aaf9ee81 --- /dev/null +++ b/app/dao/service_email_reply_to_dao.py @@ -0,0 +1,32 @@ +from app import db +from app.dao.dao_utils import transactional +from app.models import ServiceEmailReplyTo + + +def create_or_update_email_reply_to(service_id, email_address): + reply_to = dao_get_reply_to_by_service_id(service_id) + if reply_to: + reply_to.email_address = email_address + dao_update_reply_to_email(reply_to) + else: + reply_to = ServiceEmailReplyTo(service_id=service_id, email_address=email_address) + dao_create_reply_to_email_address(reply_to) + + +@transactional +def dao_create_reply_to_email_address(reply_to_email): + db.session.add(reply_to_email) + + +def dao_get_reply_to_by_service_id(service_id): + reply_to = db.session.query( + ServiceEmailReplyTo + ).filter( + ServiceEmailReplyTo.service_id == service_id + ).first() + return reply_to + + +@transactional +def dao_update_reply_to_email(reply_to): + db.session.add(reply_to) diff --git a/tests/app/dao/test_service_email_reply_to_dao.py b/tests/app/dao/test_service_email_reply_to_dao.py new file mode 100644 index 000000000..e1de4ff7e --- /dev/null +++ b/tests/app/dao/test_service_email_reply_to_dao.py @@ -0,0 +1,41 @@ +from app.dao.service_email_reply_to_dao import ( + create_or_update_email_reply_to, + dao_get_reply_to_by_service_id +) +from app.models import ServiceEmailReplyTo +from tests.app.db import create_reply_to_email, create_service + + +def 
test_create_or_update_email_reply_to_does_not_create_another_entry(notify_db_session): + service = create_service() + create_reply_to_email(service, 'test@mail.com') + + create_or_update_email_reply_to(service.id, 'different@mail.com') + + reply_to = dao_get_reply_to_by_service_id(service.id) + + assert ServiceEmailReplyTo.query.count() == 1 + + +def test_create_or_update_email_reply_to_updates_existing_entry(notify_db_session): + service = create_service() + create_reply_to_email(service, 'test@mail.com') + + create_or_update_email_reply_to(service.id, 'different@mail.com') + + reply_to = dao_get_reply_to_by_service_id(service.id) + + assert reply_to.service.id == service.id + assert reply_to.email_address == 'different@mail.com' + + +def test_create_or_update_email_reply_to_creates_new_entry(notify_db_session): + service = create_service() + + create_or_update_email_reply_to(service.id, 'test@mail.com') + + reply_to = dao_get_reply_to_by_service_id(service.id) + + assert ServiceEmailReplyTo.query.count() == 1 + assert reply_to.service.id == service.id + assert reply_to.email_address == 'test@mail.com' diff --git a/tests/app/db.py b/tests/app/db.py index e61270b81..a1323be7d 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -25,7 +25,9 @@ from app.models import ( SMS_TYPE, INBOUND_SMS_TYPE, KEY_TYPE_NORMAL, - ServiceInboundApi) + ServiceInboundApi, + ServiceEmailReplyTo +) from app.dao.users_dao import save_model_user from app.dao.notifications_dao import dao_create_notification, dao_created_scheduled_notification from app.dao.templates_dao import dao_create_template @@ -327,3 +329,19 @@ def create_monthly_billing_entry( db.session.commit() return entry + + +def create_reply_to_email( + service, + email_address +): + data = { + 'service': service, + 'email_address': email_address, + } + reply_to = ServiceEmailReplyTo(**data) + + db.session.add(reply_to) + db.session.commit() + + return reply_to From 6d0ad75a60b45bae2c1519f8801f495080f7786d Mon Sep 17 00:00:00 2001 From: Imdad Ahad Date: Fri, 8 Sep 2017 12:09:45 +0100 Subject: [PATCH 40/42] Upsert to new email reply to table when updating a service --- app/service/rest.py | 10 +++++++--- tests/app/service/test_rest.py | 15 +++++++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index ca818aa46..98058774f 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -1,5 +1,4 @@ import itertools -import json from datetime import datetime from flask import ( @@ -11,7 +10,7 @@ from flask import ( from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm.exc import NoResultFound -from app.dao import notification_usage_dao, notifications_dao +from app.dao import notifications_dao from app.dao.dao_utils import dao_rollback from app.dao.api_key_dao import ( save_model_api_key, @@ -46,6 +45,7 @@ from app.dao.service_whitelist_dao import ( dao_add_and_commit_whitelisted_contacts, dao_remove_service_whitelist ) +from app.dao.service_email_reply_to_dao import create_or_update_email_reply_to from app.dao.provider_statistics_dao import get_fragment_count from app.dao.users_dao import get_user_by_id from app.errors import ( @@ -132,9 +132,13 @@ def create_service(): @service_blueprint.route('/', methods=['POST']) def update_service(service_id): + req_json = request.get_json() fetched_service = dao_fetch_service_by_id(service_id) # Capture the status change here as Marshmallow changes this later - service_going_live = fetched_service.restricted and not 
request.get_json().get('restricted', True) + service_going_live = fetched_service.restricted and not req_json.get('restricted', True) + + if 'reply_to_email_address' in req_json: + create_or_update_email_reply_to(fetched_service.id, req_json['reply_to_email_address']) current_data = dict(service_schema.dump(fetched_service).data.items()) current_data.update(request.get_json()) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index eaf34219a..0d8e3fea2 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -2123,3 +2123,18 @@ def test_is_service_name_unique_returns_400_when_name_does_not_exist(client): json_resp = json.loads(response.get_data(as_text=True)) assert json_resp["message"][0]["name"] == ["Can't be empty"] assert json_resp["message"][1]["email_from"] == ["Can't be empty"] + + +def test_update_service_reply_to_email_address_upserts_email_reply_to(mocker, admin_request, sample_service): + update_mock = mocker.patch('app.service.rest.create_or_update_email_reply_to') + + admin_request.post( + 'service.update_service', + service_id=sample_service.id, + _data={ + 'reply_to_email_address': 'new@mail.com' + }, + _expected_status=200 + ) + + assert update_mock.called From 2492061c63d1192f53068c313e0d8efeee196ee7 Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Fri, 8 Sep 2017 16:12:47 +0100 Subject: [PATCH 41/42] Bump notifications-utils version to 21.0.0 This removes the PID from the log files --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a1ef0f9eb..05ac008e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -29,6 +29,6 @@ notifications-python-client==4.4.0 awscli>=1.11,<1.12 awscli-cwlogs>=1.4,<1.5 -git+https://github.com/alphagov/notifications-utils.git@20.0.0#egg=notifications-utils==20.0.0 +git+https://github.com/alphagov/notifications-utils.git@21.0.0#egg=notifications-utils==21.0.0 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 From 4b88ec639f692cf11c2696ac7e679e4c72f57d4b Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Mon, 11 Sep 2017 11:11:06 +0100 Subject: [PATCH 42/42] Remove * from logs matching pattern Now we are only going to have one log file that we're interested in --- scripts/run_app_paas.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_app_paas.sh b/scripts/run_app_paas.sh index b7bed184f..90205fd73 100755 --- a/scripts/run_app_paas.sh +++ b/scripts/run_app_paas.sh @@ -26,7 +26,7 @@ function configure_aws_logs { state_file = /home/vcap/logs/awslogs-state [/home/vcap/logs/app.log] -file = /home/vcap/logs/app.log*.json +file = /home/vcap/logs/app.log.json log_group_name = paas-${CW_APP_NAME}-application log_stream_name = {hostname} EOF
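
For reference, a sketch of the [/home/vcap/logs/app.log] stanza that
configure_aws_logs writes after this change, reconstructed from the context
lines in the hunk above:

    [/home/vcap/logs/app.log]
    file = /home/vcap/logs/app.log.json
    log_group_name = paas-${CW_APP_NAME}-application
    log_stream_name = {hostname}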