From a2aa9c7e325ae170a1986597835632c2ce7564b7 Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Thu, 23 Nov 2017 14:35:36 +0000 Subject: [PATCH 01/15] Bump staging rate limit in preparation for Load Tests --- app/config.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/app/config.py b/app/config.py index bac6db38f..3199fbe93 100644 --- a/app/config.py +++ b/app/config.py @@ -389,6 +389,21 @@ class Staging(Config): API_RATE_LIMIT_ENABLED = True CHECK_PROXY_HEADER = True + API_KEY_LIMITS = { + KEY_TYPE_TEAM: { + "limit": 21000, + "interval": 60 + }, + KEY_TYPE_NORMAL: { + "limit": 21000, + "interval": 60 + }, + KEY_TYPE_TEST: { + "limit": 21000, + "interval": 60 + } + } + class Live(Config): NOTIFY_EMAIL_DOMAIN = 'notifications.service.gov.uk' From b2ba2afbcd896d35bdcd5078067f69f228f96594 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 23 Nov 2017 11:37:35 +0000 Subject: [PATCH 02/15] List templates in alphabetical order MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Currently templates are ordered by the newest created first. This made sense when, after creating a new template, you were landed on the page that listed all the templates. In other words, you needed to see confirmation of the thing that you’ve just done. Now (since https://github.com/alphagov/notifications-admin/pull/1195) you get landed on the page for just that template. So you always see the template you’ve just created, no matter how the list of templates is ordered. So we are free to change the order of the templates. Ordering by time created is not great, because it gives users no control over which templates appear first. For example, our research reported this from one team: > One frustration they have is that when you add a new template it > automatically goes to the top of the list. To get round this, whenever > they add a new template they delete all of the existing ones and start > again because they want to keep their templates in numerical order. > They'd like to be able to control the order of templates in the list. We _could_ give people some sort of drag-and-drop template ordering thing. But this feels like overkill. I think that alphabetical order is better because: - it’s easily discoverable – anyone who wants to know how a list is ordered can quickly tell just by looking at it - it’s universal – everyone knows how alphabetical ordering works - it’s familiar – this is how people documents on their computer are ordered; there’s no new UI to learn - it’s what users are doing already – from the same service as above: > They number their templates 1,2a, 2b, 3a etc So this commit changes the ordering from newest created first to alphabetical. Previous changes to template order and navigation: - https://github.com/alphagov/notifications-admin/pull/1163 - https://github.com/alphagov/notifications-admin/pull/1195 - https://github.com/alphagov/notifications-admin/pull/1330 Implementation notes --- I refactored some of the tests here. I still don’t think they’re great tests, but they’re a little more Pythonic now at least. I also added a sort by template type, so that the order is deterministic when you have, for example, an email template and a text message template with the same name. If you have two text message templates with the same name you’re on your own. 
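For illustration, the new order is just a two-key sort on (name, template_type), so two templates with the same name still come back in a deterministic order. A pure-Python sketch with made-up data (the real change is the order_by in dao_get_all_templates_for_service, shown in the diff below):

    templates = [
        {'name': 'Reminder', 'template_type': 'sms'},
        {'name': 'Reminder', 'template_type': 'email'},
        {'name': 'Appointment', 'template_type': 'sms'},
    ]
    ordered = sorted(templates, key=lambda t: (t['name'], t['template_type']))
    # -> Appointment (sms), Reminder (email), Reminder (sms)
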
--- app/dao/templates_dao.py | 8 ++- tests/app/dao/test_templates_dao.py | 12 ++-- tests/app/db.py | 3 +- tests/app/template/test_rest.py | 4 +- tests/app/v2/templates/test_get_templates.py | 62 +++++++++++--------- 5 files changed, 48 insertions(+), 41 deletions(-) diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py index 6cae74c0e..5343f511c 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -1,7 +1,7 @@ from datetime import datetime import uuid -from sqlalchemy import desc +from sqlalchemy import asc, desc from sqlalchemy.sql.expression import bindparam from app import db @@ -65,14 +65,16 @@ def dao_get_all_templates_for_service(service_id, template_type=None): template_type=template_type, archived=False ).order_by( - desc(Template.created_at) + asc(Template.name), + asc(Template.template_type), ).all() return Template.query.filter_by( service_id=service_id, archived=False ).order_by( - desc(Template.created_at) + asc(Template.name), + asc(Template.template_type), ).all() diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index 607330097..62f5c8a8f 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -163,7 +163,7 @@ def test_get_all_templates_for_service(notify_db, notify_db_session, service_fac assert len(dao_get_all_templates_for_service(service_2.id)) == 2 -def test_get_all_templates_for_service_shows_newest_created_first(notify_db, notify_db_session, sample_service): +def test_get_all_templates_for_service_is_alphabetised(notify_db, notify_db_session, sample_service): template_1 = create_sample_template( notify_db, notify_db_session, @@ -190,14 +190,14 @@ def test_get_all_templates_for_service_shows_newest_created_first(notify_db, not ) assert Template.query.count() == 3 - assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 3' + assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 1' assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 2' - assert dao_get_all_templates_for_service(sample_service.id)[2].name == 'Sample Template 1' + assert dao_get_all_templates_for_service(sample_service.id)[2].name == 'Sample Template 3' - template_2.name = 'Sample Template 2 (updated)' + template_2.name = 'AAAAA Sample Template 2' dao_update_template(template_2) - assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 3' - assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 2 (updated)' + assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'AAAAA Sample Template 2' + assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 1' def test_get_all_returns_empty_list_if_no_templates(sample_service): diff --git a/tests/app/db.py b/tests/app/db.py index 0457b3c0a..32ccc3d7a 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -120,12 +120,13 @@ def create_service_with_defined_sms_sender( def create_template( service, template_type=SMS_TYPE, + template_name=None, subject='Template subject', content='Dear Sir/Madam, Hello. 
Yours Truly, The Government.', template_id=None ): data = { - 'name': '{} Template Name'.format(template_type), + 'name': template_name or '{} Template Name'.format(template_type), 'template_type': template_type, 'content': content, 'service': service, diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index da72c3dff..6d666bfa3 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -301,10 +301,10 @@ def test_should_be_able_to_get_all_templates_for_a_service(client, sample_user, assert response.status_code == 200 update_json_resp = json.loads(response.get_data(as_text=True)) - assert update_json_resp['data'][0]['name'] == 'my template 2' + assert update_json_resp['data'][0]['name'] == 'my template 1' assert update_json_resp['data'][0]['version'] == 1 assert update_json_resp['data'][0]['created_at'] - assert update_json_resp['data'][1]['name'] == 'my template 1' + assert update_json_resp['data'][1]['name'] == 'my template 2' assert update_json_resp['data'][1]['version'] == 1 assert update_json_resp['data'][1]['created_at'] diff --git a/tests/app/v2/templates/test_get_templates.py b/tests/app/v2/templates/test_get_templates.py index 6bc4aabde..1a0f1eacc 100644 --- a/tests/app/v2/templates/test_get_templates.py +++ b/tests/app/v2/templates/test_get_templates.py @@ -1,6 +1,7 @@ import pytest from flask import json +from itertools import product from app.models import TEMPLATE_TYPES, EMAIL_TYPE from tests import create_authorization_header @@ -8,12 +9,15 @@ from tests.app.db import create_template def test_get_all_templates_returns_200(client, sample_service): - num_templates = 3 - templates = [] - for i in range(num_templates): - for tmp_type in TEMPLATE_TYPES: - subject = 'subject_{}'.format(i) if tmp_type == EMAIL_TYPE else '' - templates.append(create_template(sample_service, template_type=tmp_type, subject=subject)) + templates = [ + create_template( + sample_service, + template_type=tmp_type, + subject='subject_{}'.format(name) if tmp_type == EMAIL_TYPE else '', + template_name=name, + ) + for name, tmp_type in product(('A', 'B', 'C'), TEMPLATE_TYPES) + ] auth_header = create_authorization_header(service_id=sample_service.id) @@ -25,25 +29,27 @@ def test_get_all_templates_returns_200(client, sample_service): json_response = json.loads(response.get_data(as_text=True)) - assert len(json_response['templates']) == num_templates * len(TEMPLATE_TYPES) + assert len(json_response['templates']) == len(templates) - # need to reverse index as get all templates returns list sorted by descending date - for i in range(len(json_response['templates'])): - reverse_index = len(json_response['templates']) - 1 - i - assert json_response['templates'][reverse_index]['id'] == str(templates[i].id) - assert json_response['templates'][reverse_index]['body'] == templates[i].content - assert json_response['templates'][reverse_index]['type'] == templates[i].template_type - if templates[i].template_type == EMAIL_TYPE: - assert json_response['templates'][reverse_index]['subject'] == templates[i].subject + for index, template in enumerate(json_response['templates']): + assert template['id'] == str(templates[index].id) + assert template['body'] == templates[index].content + assert template['type'] == templates[index].template_type + if templates[index].template_type == EMAIL_TYPE: + assert template['subject'] == templates[index].subject @pytest.mark.parametrize("tmp_type", TEMPLATE_TYPES) def test_get_all_templates_for_valid_type_returns_200(client, 
sample_service, tmp_type): - num_templates = 3 - templates = [] - for i in range(num_templates): - subject = 'subject_{}'.format(i) if tmp_type == EMAIL_TYPE else '' - templates.append(create_template(sample_service, template_type=tmp_type, subject=subject)) + templates = [ + create_template( + sample_service, + template_type=tmp_type, + template_name='Template {}'.format(i), + subject='subject_{}'.format(i) if tmp_type == EMAIL_TYPE else '' + ) + for i in range(3) + ] auth_header = create_authorization_header(service_id=sample_service.id) @@ -55,16 +61,14 @@ def test_get_all_templates_for_valid_type_returns_200(client, sample_service, tm json_response = json.loads(response.get_data(as_text=True)) - assert len(json_response['templates']) == num_templates + assert len(json_response['templates']) == len(templates) - # need to reverse index as get all templates returns list sorted by descending date - for i in range(len(json_response['templates'])): - reverse_index = len(json_response['templates']) - 1 - i - assert json_response['templates'][reverse_index]['id'] == str(templates[i].id) - assert json_response['templates'][reverse_index]['body'] == templates[i].content - assert json_response['templates'][reverse_index]['type'] == tmp_type - if templates[i].template_type == EMAIL_TYPE: - assert json_response['templates'][reverse_index]['subject'] == templates[i].subject + for index, template in enumerate(json_response['templates']): + assert template['id'] == str(templates[index].id) + assert template['body'] == templates[index].content + assert template['type'] == tmp_type + if templates[index].template_type == EMAIL_TYPE: + assert template['subject'] == templates[index].subject @pytest.mark.parametrize("tmp_type", TEMPLATE_TYPES) From f3d129b0d89f659e5a761ed5d90fe8f08b5d9039 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 23 Nov 2017 15:22:18 +0000 Subject: [PATCH 03/15] fix 500s when inbound msgs sent from alphanumerics rather than normal phone numbers --- app/celery/tasks.py | 1 + app/notifications/receive_notifications.py | 8 +++++-- requirements.txt | 2 +- .../test_receive_notification.py | 21 +++++++++++++++++++ 4 files changed, 29 insertions(+), 3 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 436def14d..e96d7e3d8 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -486,6 +486,7 @@ def send_inbound_sms_to_service(self, inbound_sms_id, service_id): inbound_id=inbound_sms_id) data = { "id": str(inbound_sms.id), + # TODO: should we be validating and formatting the phone number here? 
"source_number": inbound_sms.user_number, "destination_number": inbound_sms.notify_number, "message": inbound_sms.content, diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 3167134e3..22a7fe61e 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -2,7 +2,7 @@ from urllib.parse import unquote import iso8601 from flask import jsonify, Blueprint, current_app, request, abort -from notifications_utils.recipients import validate_and_format_phone_number +from notifications_utils.recipients import try_validate_and_format_phone_number from app import statsd_client, firetext_client, mmg_client from app.celery import tasks @@ -109,7 +109,11 @@ def format_mmg_datetime(date): def create_inbound_sms_object(service, content, from_number, provider_ref, date_received, provider_name): - user_number = validate_and_format_phone_number(from_number, international=True) + user_number = try_validate_and_format_phone_number( + from_number, + international=True, + log_msg='Invalid from_number received' + ) provider_date = date_received if provider_date: diff --git a/requirements.txt b/requirements.txt index 8d69902c5..8b5cb9f35 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,6 +26,6 @@ notifications-python-client==4.6.0 awscli>=1.11,<1.12 awscli-cwlogs>=1.4,<1.5 -git+https://github.com/alphagov/notifications-utils.git@23.0.1#egg=notifications-utils==23.0.1 +git+https://github.com/alphagov/notifications-utils.git@23.1.0#egg=notifications-utils==23.1.0 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 7f784135c..fbd190a50 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -413,3 +413,24 @@ def test_firetext_inbound_sms_auth(notify_db_session, notify_api, client, mocker with set_config(notify_api, 'FIRETEXT_INBOUND_SMS_AUTH', keys): response = firetext_post(client, data, auth=bool(auth), password=auth) assert response.status_code == status_code + + +def test_create_inbound_sms_object_works_with_alphanumeric_sender(sample_service_full_permissions): + data = { + 'Message': 'hello', + 'Number': sample_service_full_permissions.get_inbound_number(), + 'MSISDN': 'ALPHANUM3R1C', + 'DateRecieved': '2017-01-02+03%3A04%3A05', + 'ID': 'bar', + } + + inbound_sms = create_inbound_sms_object( + service=sample_service_full_permissions, + content=format_mmg_message(data["Message"]), + from_number='ALPHANUM3R1C', + provider_ref='foo', + date_received=None, + provider_name="mmg" + ) + + assert inbound_sms.user_number == 'ALPHANUM3R1C' From 3daf039fbe58e8f517537ce49534e23cc10e6bc5 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 23 Nov 2017 16:46:39 +0000 Subject: [PATCH 04/15] get_inbound_sms queries from the admin should also allow alphanumerics. Refactored the call to make the POST and the GET versions of the method much more distinct. 
--- app/inbound_sms/inbound_sms_schemas.py | 2 +- app/inbound_sms/rest.py | 47 +++++---- tests/app/inbound_sms/test_rest.py | 136 +++++++++++-------------- 3 files changed, 89 insertions(+), 96 deletions(-) diff --git a/app/inbound_sms/inbound_sms_schemas.py b/app/inbound_sms/inbound_sms_schemas.py index c0e644d68..24e642f49 100644 --- a/app/inbound_sms/inbound_sms_schemas.py +++ b/app/inbound_sms/inbound_sms_schemas.py @@ -3,7 +3,7 @@ get_inbound_sms_for_service_schema = { "description": "schema for parameters allowed when searching for to field=", "type": "object", "properties": { - "phone_number": {"type": "string", "format": "phone_number"}, + "phone_number": {"type": "string"}, "limit": {"type": ["integer", "null"], "minimum": 1} } } diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index a78e3c237..3e67e4ee1 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -1,11 +1,10 @@ from flask import ( Blueprint, jsonify, - request, - current_app, json) -from jsonschema import ValidationError + request +) -from notifications_utils.recipients import validate_and_format_phone_number +from notifications_utils.recipients import try_validate_and_format_phone_number from app.dao.inbound_sms_dao import ( dao_get_inbound_sms_for_service, @@ -26,25 +25,31 @@ inbound_sms = Blueprint( register_errors(inbound_sms) -@inbound_sms.route('', methods=['POST', 'GET']) -def get_inbound_sms_for_service(service_id): - - if request.method == 'GET': - limit = request.args.get('limit') - user_number = request.args.get('user_number') - - if user_number: - # we use this to normalise to an international phone number - user_number = validate_and_format_phone_number(user_number, international=True) - - results = dao_get_inbound_sms_for_service(service_id, limit, user_number) - - return jsonify(data=[row.serialize() for row in results]) +@inbound_sms.route('', methods=['POST']) +def post_query_inbound_sms_for_service(service_id): + form = validate(request.get_json(), get_inbound_sms_for_service_schema) + if 'phone_number' in form: + # we use this to normalise to an international phone number - but this may fail if it's an alphanumeric + user_number = try_validate_and_format_phone_number(form['phone_number'], international=True) else: - form = validate(request.get_json(), get_inbound_sms_for_service_schema) - results = dao_get_inbound_sms_for_service(service_id, form.get('limit'), form.get('phone_number')) + user_number = None + results = dao_get_inbound_sms_for_service(service_id, form.get('limit'), user_number) - return jsonify(data=[row.serialize() for row in results]) + return jsonify(data=[row.serialize() for row in results]) + + +@inbound_sms.route('', methods=['GET']) +def get_inbound_sms_for_service(service_id): + limit = request.args.get('limit') + user_number = request.args.get('user_number') + + if user_number: + # we use this to normalise to an international phone number - but this may fail if it's an alphanumeric + user_number = try_validate_and_format_phone_number(user_number, international=True) + + results = dao_get_inbound_sms_for_service(service_id, limit, user_number) + + return jsonify(data=[row.serialize() for row in results]) @inbound_sms.route('/summary') diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index f278251be..a0a6d612b 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -1,28 +1,20 @@ from datetime import datetime import pytest -from flask import json from freezegun import freeze_time 
-from tests import create_authorization_header from tests.app.db import create_inbound_sms, create_service, create_service_with_inbound_number -def test_get_inbound_sms_with_no_params(client, sample_service): +def test_post_to_get_inbound_sms_with_no_params(admin_request, sample_service): one = create_inbound_sms(sample_service) two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - - data = {} - - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data={} + )['data'] assert len(sms) == 2 assert {inbound['id'] for inbound in sms} == {str(one.id), str(two.id)} @@ -37,40 +29,34 @@ def test_get_inbound_sms_with_no_params(client, sample_service): } -def test_get_inbound_sms_with_limit(client, sample_service): +def test_post_to_get_inbound_sms_with_limit(admin_request, sample_service): with freeze_time('2017-01-01'): one = create_inbound_sms(sample_service) with freeze_time('2017-01-02'): two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - data = {'limit': 1} - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] assert len(sms) == 1 assert sms[0]['id'] == str(two.id) -def test_get_inbound_sms_should_error_with_invalid_limit(client, sample_service): - - auth_header = create_authorization_header() - +def test_post_to_get_inbound_sms_should_error_with_invalid_limit(admin_request, sample_service): data = {'limit': 'limit'} - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + error_resp = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data, + _expected_status=400 + ) - error_resp = json.loads(response.get_data(as_text=True)) assert error_resp['status_code'] == 400 assert error_resp['errors'] == [{ 'error': 'ValidationError', @@ -78,73 +64,61 @@ def test_get_inbound_sms_should_error_with_invalid_limit(client, sample_service) }] -def test_get_inbound_sms_should_error_with_invalid_phone_number(client, sample_service): - - auth_header = create_authorization_header() - - data = {'phone_number': 'invalid phone number'} - - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - error_resp = json.loads(response.get_data(as_text=True)) - assert error_resp['status_code'] == 400 - assert error_resp['errors'] == [{ - 'error': 'ValidationError', - 'message': "phone_number Must not contain letters or symbols" - }] - - @pytest.mark.parametrize('user_number', [ '(07700) 900-001', '+4407700900001', '447700900001', ]) -def test_get_inbound_sms_filters_user_number(client, sample_service, user_number): +def test_post_to_get_inbound_sms_filters_user_number(admin_request, sample_service, 
user_number): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='447700900001') two = create_inbound_sms(sample_service, user_number='447700900002') - auth_header = create_authorization_header() - data = { 'limit': 1, 'phone_number': user_number } - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] assert len(sms) == 1 assert sms[0]['id'] == str(one.id) assert sms[0]['user_number'] == str(one.user_number) -def test_get_inbound_sms_filters_international_user_number(admin_request, sample_service): +def test_post_to_get_inbound_sms_filters_international_user_number(admin_request, sample_service): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='12025550104') two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - data = { 'limit': 1, 'phone_number': '+1 (202) 555-0104' } - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + assert len(sms) == 1 + assert sms[0]['id'] == str(one.id) + assert sms[0]['user_number'] == str(one.user_number) + + +def test_post_to_get_inbound_sms_allows_badly_formatted_number(admin_request, sample_service): + one = create_inbound_sms(sample_service, user_number='ALPHANUM3R1C') + + sms = admin_request.post( + 'inbound_sms.get_inbound_sms_for_service', + service_id=sample_service.id, + _data={'phone_number': 'ALPHANUM3R1C'} + )['data'] assert len(sms) == 1 assert sms[0]['id'] == str(one.id) @@ -156,7 +130,7 @@ def test_get_inbound_sms_filters_international_user_number(admin_request, sample ############################################################## -def test_get_inbound_sms(admin_request, sample_service): +def test_old_get_inbound_sms(admin_request, sample_service): one = create_inbound_sms(sample_service) two = create_inbound_sms(sample_service) @@ -180,7 +154,7 @@ def test_get_inbound_sms(admin_request, sample_service): } -def test_get_inbound_sms_limits(admin_request, sample_service): +def test_old_get_inbound_sms_limits(admin_request, sample_service): with freeze_time('2017-01-01'): one = create_inbound_sms(sample_service) with freeze_time('2017-01-02'): @@ -201,7 +175,7 @@ def test_get_inbound_sms_limits(admin_request, sample_service): '+4407700900001', '447700900001', ]) -def test_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): +def test_old_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='447700900001') two = create_inbound_sms(sample_service, user_number='447700900002') @@ -217,7 +191,7 @@ def test_get_inbound_sms_filters_user_number(admin_request, sample_service, user assert sms['data'][0]['user_number'] == str(one.user_number) -def 
test_get_inbound_sms_filters_international_user_number(admin_request, sample_service): +def test_old_get_inbound_sms_filters_international_user_number(admin_request, sample_service): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='12025550104') two = create_inbound_sms(sample_service) @@ -233,6 +207,20 @@ def test_get_inbound_sms_filters_international_user_number(admin_request, sample assert sms['data'][0]['user_number'] == str(one.user_number) +def test_old_get_inbound_sms_allows_badly_formatted_number(admin_request, sample_service): + one = create_inbound_sms(sample_service, user_number='ALPHANUM3R1C') + + sms = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + service_id=sample_service.id, + user_number='ALPHANUM3R1C', + ) + + assert len(sms['data']) == 1 + assert sms['data'][0]['id'] == str(one.id) + assert sms['data'][0]['user_number'] == str(one.user_number) + + ############################## # End delete section ############################## From 9f56dccdee1d801f803d3c12dab0745749374e70 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 6 Nov 2017 12:34:29 +0000 Subject: [PATCH 05/15] Remove flask-script, move commands to click click (http://click.pocoo.org/) is used by flask to run its cli args. In removing flask_script (it's unmaintained), we had to migrate all our commands to use click. This is a change for the better in my eyes - you don't need to define the command in several places, and it makes managing options a bit easier. View diff with whitespace turned off unless you're a masochist. --- README.md | 3 + app/__ | 0 app/__init__.py | 14 +- app/commands.py | 559 ++++++++++++++++++++++----------------------- application.py | 38 +-- requirements.txt | 1 - run_celery.py | 6 +- scripts/run_app.sh | 2 +- server_commands.py | 8 +- 9 files changed, 300 insertions(+), 331 deletions(-) create mode 100644 app/__ diff --git a/README.md b/README.md index 8252c9bbf..c5737e8d8 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,9 @@ export FIRETEXT_API_KEY='FIRETEXT_ACTUAL_KEY' export STATSD_PREFIX='YOU_OWN_PREFIX' export NOTIFICATION_QUEUE_PREFIX='YOUR_OWN_PREFIX' export REDIS_URL="redis://localhost:6379/0" +export FLASK_APP=application.py +export FLASK_DEBUG=1 +export WERKZEUG_DEBUG_PIN=off "> environment.sh ``` diff --git a/app/__ b/app/__ new file mode 100644 index 000000000..e69de29bb diff --git a/app/__init__.py b/app/__init__.py index d4e9c8136..a1172736e 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -6,6 +6,7 @@ import uuid from flask import Flask, _request_ctx_stack, request, g, jsonify from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow +from flask_migrate import Migrate from monotonic import monotonic from notifications_utils.clients.statsd.statsd_client import StatsdClient from notifications_utils.clients.redis.redis_client import RedisClient @@ -25,6 +26,7 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" db = SQLAlchemy() +migrate = Migrate() ma = Marshmallow() notify_celery = NotifyCelery() firetext_client = FiretextClient() @@ -42,21 +44,19 @@ api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user) authenticated_service = LocalProxy(lambda: _request_ctx_stack.top.authenticated_service) -def create_app(app_name=None): - application = Flask(__name__) - +def create_app(application): from app.config import configs notify_environment = os.environ['NOTIFY_ENVIRONMENT'] application.config.from_object(configs[notify_environment]) - if 
app_name: - application.config['NOTIFY_APP_NAME'] = app_name + application.config['NOTIFY_APP_NAME'] = application.name init_app(application) request_helper.init_app(application) db.init_app(application) + migrate.init_app(application, db=db) ma.init_app(application) statsd_client.init_app(application) logging.init_app(application, statsd_client) @@ -73,6 +73,10 @@ def create_app(app_name=None): register_blueprint(application) register_v2_blueprints(application) + # avoid circular imports by importing this file later 😬 + from app.commands import setup_commands + setup_commands(application) + return application diff --git a/app/commands.py b/app/commands.py index f1e125e01..d4be9d23a 100644 --- a/app/commands.py +++ b/app/commands.py @@ -1,7 +1,9 @@ import uuid from datetime import datetime, timedelta from decimal import Decimal -from flask_script import Command, Option + +from flask import current_app +import click from app import db from app.dao.monthly_billing_dao import ( @@ -14,181 +16,152 @@ from app.dao.services_dao import ( delete_service_and_all_associated_db_objects, dao_fetch_all_services_by_user ) -from app.dao.provider_rates_dao import create_provider_rates +from app.dao.provider_rates_dao import create_provider_rates as dao_create_provider_rates from app.dao.users_dao import (delete_model_user, delete_user_verify_codes) from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc from app.performance_platform.processing_time import send_processing_time_for_start_and_end -class CreateProviderRateCommand(Command): +commands = click.Group(name='commands', help='Additional commands') - option_list = ( - Option('-p', '--provider_name', dest="provider_name", help='Provider name'), - Option('-c', '--cost', dest="cost", help='Cost (pence) per message including decimals'), - Option('-d', '--valid_from', dest="valid_from", help="Date (%Y-%m-%dT%H:%M:%S) valid from") - ) - def run(self, provider_name, cost, valid_from): - if provider_name not in PROVIDERS: - raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS))) +@commands.command() +@click.option('-p', '--provider_name', required=True, help='Provider name') +@click.option('-c', '--cost', required=True, help='Cost (pence) per message including decimals') +@click.option('-d', '--valid_from', required=True, help="Date (%Y-%m-%dT%H:%M:%S) valid from") +def create_provider_rates(provider_name, cost, valid_from): + if provider_name not in PROVIDERS: + raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS))) + try: + cost = Decimal(cost) + except: + raise Exception("Invalid cost value.") + + try: + valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from) + except: + raise Exception("Invalid valid_from date. Use the format %Y-%m-%dT%H:%M:%S") + + dao_create_provider_rates(provider_name, valid_from, cost) + + +@commands.command() +@click.option('-u', '--user_email_prefix', required=True, help="Functional test user email prefix.") +def purge_functional_test_data(user_email_prefix): + users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() + for usr in users: + # Make sure the full email includes a uuid in it + # Just in case someone decides to use a similar email address. try: - cost = Decimal(cost) - except: - raise Exception("Invalid cost value.") - - try: - valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from) - except: - raise Exception("Invalid valid_from date. 
Use the format %Y-%m-%dT%H:%M:%S") - - create_provider_rates(provider_name, valid_from, cost) - - -class PurgeFunctionalTestDataCommand(Command): - - option_list = ( - Option('-u', '-user-email-prefix', dest='user_email_prefix', help="Functional test user email prefix."), - ) - - def run(self, user_email_prefix=None): - if user_email_prefix: - users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() - for usr in users: - # Make sure the full email includes a uuid in it - # Just in case someone decides to use a similar email address. - try: - uuid.UUID(usr.email_address.split("@")[0].split('+')[1]) - except ValueError: - print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address)) - else: - services = dao_fetch_all_services_by_user(usr.id) - if services: - for service in services: - delete_service_and_all_associated_db_objects(service) - else: - delete_user_verify_codes(usr) - delete_model_user(usr) - - -class CustomDbScript(Command): - - option_list = ( - Option('-n', '-name-of-db-function', dest='name_of_db_function', help="Function name of the DB script to run"), - ) - - def run(self, name_of_db_function): - db_function = getattr(self, name_of_db_function, None) - if callable(db_function): - db_function() + uuid.UUID(usr.email_address.split("@")[0].split('+')[1]) + except ValueError: + print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address)) else: - print('The specified function does not exist.') + services = dao_fetch_all_services_by_user(usr.id) + if services: + for service in services: + delete_service_and_all_associated_db_objects(service) + else: + delete_user_verify_codes(usr) + delete_model_user(usr) - def backfill_notification_statuses(self): - """ - This will be used to populate the new `Notification._status_fkey` with the old - `Notification._status_enum` - """ - LIMIT = 250000 - subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT) - update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq) + +@commands.command() +def backfill_notification_statuses(): + """ + This will be used to populate the new `Notification._status_fkey` with the old + `Notification._status_enum` + """ + LIMIT = 250000 + subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT) + update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('commit {} updates at {}'.format(LIMIT, datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('commit {} updates at {}'.format(LIMIT, datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() - def update_notification_international_flag(self): - # 250,000 rows takes 30 seconds to update. - subq = "select id from notifications where international is null limit 250000" - update = "update notifications set international = False where id in ({})".format(subq) +@commands.command() +def update_notification_international_flag(): + # 250,000 rows takes 30 seconds to update. 
+ subq = "select id from notifications where international is null limit 250000" + update = "update notifications set international = False where id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('commit 250000 updates at {}'.format(datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('commit 250000 updates at {}'.format(datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() - - # Now update notification_history - subq_history = "select id from notification_history where international is null limit 250000" - update_history = "update notification_history set international = False where id in ({})".format(subq_history) + # Now update notification_history + subq_history = "select id from notification_history where international is null limit 250000" + update_history = "update notification_history set international = False where id in ({})".format(subq_history) + result_history = db.session.execute(subq_history).fetchall() + while len(result_history) > 0: + db.session.execute(update_history) + print('commit 250000 updates at {}'.format(datetime.utcnow())) + db.session.commit() result_history = db.session.execute(subq_history).fetchall() - while len(result_history) > 0: - db.session.execute(update_history) - print('commit 250000 updates at {}'.format(datetime.utcnow())) - db.session.commit() - result_history = db.session.execute(subq_history).fetchall() - def fix_notification_statuses_not_in_sync(self): - """ - This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey - became out of sync. See 979e90a. - Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with - these values. - """ - MAX = 10000 +@commands.command() +def fix_notification_statuses_not_in_sync(): + """ + This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey + became out of sync. See 979e90a. - subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX) - update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq) + Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with + these values. 
+ """ + MAX = 10000 + + subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX) + update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() + subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \ + .format(MAX) + update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist) + result = db.session.execute(subq_hist).fetchall() - subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \ - .format(MAX) - update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist) + while len(result) > 0: + db.session.execute(update) + print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) + db.session.commit() result = db.session.execute(subq_hist).fetchall() - while len(result) > 0: - db.session.execute(update) - print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq_hist).fetchall() - def link_inbound_numbers_to_service(self): - update = """ - UPDATE inbound_numbers SET - service_id = services.id, - updated_at = now() - FROM services - WHERE services.sms_sender = inbound_numbers.number AND - inbound_numbers.service_id is null - """ - result = db.session.execute(update) - db.session.commit() +@commands.command() +def link_inbound_numbers_to_service(): + update = """ + UPDATE inbound_numbers SET + service_id = services.id, + updated_at = now() + FROM services + WHERE services.sms_sender = inbound_numbers.number AND + inbound_numbers.service_id is null + """ + result = db.session.execute(update) + db.session.commit() - print("Linked {} inbound numbers to service".format(result.rowcount)) + print("Linked {} inbound numbers to service".format(result.rowcount)) -class PopulateMonthlyBilling(Command): - option_list = ( - Option('-y', '-year', dest="year", help="Use for integer value for year, e.g. 2017"), - ) - - def run(self, year): - service_ids = get_service_ids_that_need_billing_populated( - start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) - ) - start, end = 1, 13 - if year == '2016': - start = 4 - - for service_id in service_ids: - print('Starting to populate data for service {}'.format(str(service_id))) - print('Starting populating monthly billing for {}'.format(year)) - for i in range(start, end): - print('Population for {}-{}'.format(i, year)) - self.populate(service_id, year, i) - - def populate(self, service_id, year, month): +@commands.command() +@click.option('-y', '--year', required=True, help="Use for integer value for year, e.g. 
2017") +def populate_monthly_billing(year): + def populate(service_id, year, month): create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1)) sms_res = get_monthly_billing_by_notification_type( service_id, datetime(int(year), int(month), 1), SMS_TYPE @@ -200,165 +173,181 @@ class PopulateMonthlyBilling(Command): print('SMS: {}'.format(sms_res.monthly_totals)) print('Email: {}'.format(email_res.monthly_totals)) - -class BackfillProcessingTime(Command): - option_list = ( - Option('-s', '--start_date', dest='start_date', help="Date (%Y-%m-%d) start date inclusive"), - Option('-e', '--end_date', dest='end_date', help="Date (%Y-%m-%d) end date inclusive"), + service_ids = get_service_ids_that_need_billing_populated( + start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) ) + start, end = 1, 13 - def run(self, start_date, end_date): - start_date = datetime.strptime(start_date, '%Y-%m-%d') - end_date = datetime.strptime(end_date, '%Y-%m-%d') + if year == '2016': + start = 4 - delta = end_date - start_date - - print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date)) - - for i in range(delta.days + 1): - # because the tz conversion funcs talk about midnight, and the midnight before last, - # we want to pretend we're running this from the next morning, so add one. - process_date = start_date + timedelta(days=i + 1) - - process_start_date = get_midnight_for_day_before(process_date) - process_end_date = get_london_midnight_in_utc(process_date) - - print('Sending notification processing-time for {} - {}'.format( - process_start_date.isoformat(), - process_end_date.isoformat() - )) - send_processing_time_for_start_and_end(process_start_date, process_end_date) + for service_id in service_ids: + print('Starting to populate data for service {}'.format(str(service_id))) + print('Starting populating monthly billing for {}'.format(year)) + for i in range(start, end): + print('Population for {}-{}'.format(i, year)) + populate(service_id, year, i) -class PopulateServiceEmailReplyTo(Command): +@commands.command() +@click.option('-s', '--start_date', required=True, help="Date (%Y-%m-%d) start date inclusive") +@click.option('-e', '--end_date', required=True, help="Date (%Y-%m-%d) end date inclusive") +def backfill_processing_time(start_date, end_date): + start_date = datetime.strptime(start_date, '%Y-%m-%d') + end_date = datetime.strptime(end_date, '%Y-%m-%d') - def run(self): - services_to_update = """ - INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}' - FROM services - WHERE reply_to_email_address IS NOT NULL - AND id NOT IN( - SELECT service_id - FROM service_email_reply_to - ) - """.format(datetime.utcnow()) + delta = end_date - start_date - result = db.session.execute(services_to_update) - db.session.commit() + print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date)) - print("Populated email reply to addresses for {}".format(result.rowcount)) + for i in range(delta.days + 1): + # because the tz conversion funcs talk about midnight, and the midnight before last, + # we want to pretend we're running this from the next morning, so add one. 
+ process_date = start_date + timedelta(days=i + 1) + + process_start_date = get_midnight_for_day_before(process_date) + process_end_date = get_london_midnight_in_utc(process_date) + + print('Sending notification processing-time for {} - {}'.format( + process_start_date.isoformat(), + process_end_date.isoformat() + )) + send_processing_time_for_start_and_end(process_start_date, process_end_date) -class PopulateServiceSmsSender(Command): +@commands.command() +def populate_service_email_reply_to(): + services_to_update = """ + INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}' + FROM services + WHERE reply_to_email_address IS NOT NULL + AND id NOT IN( + SELECT service_id + FROM service_email_reply_to + ) + """.format(datetime.utcnow()) - def run(self): - services_to_update = """ - INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}' - FROM inbound_numbers - WHERE service_id NOT IN( - SELECT service_id - FROM service_sms_senders - ) - """.format(datetime.utcnow()) + result = db.session.execute(services_to_update) + db.session.commit() - services_to_update_from_services = """ - INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}' + print("Populated email reply to addresses for {}".format(result.rowcount)) + + +@commands.command() +def populate_service_sms_sender(): + services_to_update = """ + INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}' + FROM inbound_numbers + WHERE service_id NOT IN( + SELECT service_id + FROM service_sms_senders + ) + """.format(datetime.utcnow()) + + services_to_update_from_services = """ + INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}' + FROM services + WHERE id NOT IN( + SELECT service_id + FROM service_sms_senders + ) + """.format(datetime.utcnow()) + + result = db.session.execute(services_to_update) + second_result = db.session.execute(services_to_update_from_services) + db.session.commit() + + services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0] + + service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0] + + print("Populated sms sender {} services from inbound_numbers".format(result.rowcount)) + print("Populated sms sender {} services from services".format(second_result.rowcount)) + print("{} services in table".format(services_count_query)) + print("{} service_sms_senders".format(service_sms_sender_count_query)) + + +@commands.command() +def populate_service_letter_contact(): + services_to_update = """ + INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}' + FROM services + WHERE letter_contact_block IS NOT NULL + AND id NOT IN( + SELECT service_id + FROM service_letter_contacts + ) + 
""".format(datetime.utcnow()) + + result = db.session.execute(services_to_update) + db.session.commit() + + print("Populated letter contacts for {} services".format(result.rowcount)) + + +@commands.command() +def populate_service_and_service_history_free_sms_fragment_limit(): + services_to_update = """ + UPDATE services + SET free_sms_fragment_limit = 250000 + WHERE free_sms_fragment_limit IS NULL + """ + + services_history_to_update = """ + UPDATE services_history + SET free_sms_fragment_limit = 250000 + WHERE free_sms_fragment_limit IS NULL + """ + + services_result = db.session.execute(services_to_update) + services_history_result = db.session.execute(services_history_to_update) + + db.session.commit() + + print("Populated free sms fragment limits for {} services".format(services_result.rowcount)) + print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount)) + + +@commands.command() +def populate_annual_billing(): + financial_year = [2016, 2017, 2018] + + for fy in financial_year: + populate_data = """ + INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start, + created_at, updated_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}' FROM services WHERE id NOT IN( SELECT service_id - FROM service_sms_senders - ) - """.format(datetime.utcnow()) + FROM annual_billing + WHERE financial_year_start={}) + """.format(fy, datetime.utcnow(), datetime.utcnow(), fy) - result = db.session.execute(services_to_update) - second_result = db.session.execute(services_to_update_from_services) + services_result1 = db.session.execute(populate_data) db.session.commit() - services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0] - - service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0] - - print("Populated sms sender {} services from inbound_numbers".format(result.rowcount)) - print("Populated sms sender {} services from services".format(second_result.rowcount)) - print("{} services in table".format(services_count_query)) - print("{} service_sms_senders".format(service_sms_sender_count_query)) + print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount)) -class PopulateServiceLetterContact(Command): - - def run(self): - services_to_update = """ - INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}' - FROM services - WHERE letter_contact_block IS NOT NULL - AND id NOT IN( - SELECT service_id - FROM service_letter_contacts - ) - """.format(datetime.utcnow()) - - result = db.session.execute(services_to_update) - db.session.commit() - - print("Populated letter contacts for {} services".format(result.rowcount)) +@commands.command() +@click.option('-j', '--job_id', required=True, help="Enter the job id to rebuild the dvla file for") +def re_run_build_dvla_file_for_job(job_id): + from app.celery.tasks import build_dvla_file + from app.config import QueueNames + build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) -class PopulateServiceAndServiceHistoryFreeSmsFragmentLimit(Command): - - def run(self): - services_to_update = """ - UPDATE services - SET free_sms_fragment_limit = 250000 - WHERE free_sms_fragment_limit IS NULL - """ - - services_history_to_update = """ - UPDATE services_history - SET free_sms_fragment_limit = 250000 - 
WHERE free_sms_fragment_limit IS NULL - """ - - services_result = db.session.execute(services_to_update) - services_history_result = db.session.execute(services_history_to_update) - - db.session.commit() - - print("Populated free sms fragment limits for {} services".format(services_result.rowcount)) - print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount)) +@commands.command() +def list_routes(): + """List URLs of all application routes.""" + for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule): + print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) -class PopulateAnnualBilling(Command): - def run(self): - financial_year = [2016, 2017, 2018] - - for fy in financial_year: - populate_data = """ - INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start, - created_at, updated_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}' - FROM services - WHERE id NOT IN( - SELECT service_id - FROM annual_billing - WHERE financial_year_start={}) - """.format(fy, datetime.utcnow(), datetime.utcnow(), fy) - - services_result1 = db.session.execute(populate_data) - db.session.commit() - - print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount)) - - -class ReRunBuildDvlaFileForJob(Command): - option_list = ( - Option('-j', '--job_id', dest='job_id', help="Enter the job id to rebuild the dvla file for"), - ) - - def run(self, job_id): - from app.celery.tasks import build_dvla_file - from app.config import QueueNames - build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) +def setup_commands(application): + application.cli.add_command(commands) diff --git a/application.py b/application.py index b606aee81..6960fd08b 100644 --- a/application.py +++ b/application.py @@ -1,38 +1,10 @@ -#!/usr/bin/env python - +##!/usr/bin/env python from __future__ import print_function -import os -from flask_script import Manager, Server -from flask_migrate import Migrate, MigrateCommand -from app import (create_app, db, commands) -application = create_app() -manager = Manager(application) -port = int(os.environ.get('PORT', 6011)) -manager.add_command("runserver", Server(host='0.0.0.0', port=port)) +from flask import Flask -migrate = Migrate(application, db) -manager.add_command('db', MigrateCommand) -manager.add_command('create_provider_rate', commands.CreateProviderRateCommand) -manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) -manager.add_command('custom_db_script', commands.CustomDbScript) -manager.add_command('populate_monthly_billing', commands.PopulateMonthlyBilling) -manager.add_command('backfill_processing_time', commands.BackfillProcessingTime) -manager.add_command('populate_service_email_reply_to', commands.PopulateServiceEmailReplyTo) -manager.add_command('populate_service_sms_sender', commands.PopulateServiceSmsSender) -manager.add_command('populate_service_letter_contact', commands.PopulateServiceLetterContact) -manager.add_command('populate_service_and_service_history_free_sms_fragment_limit', - commands.PopulateServiceAndServiceHistoryFreeSmsFragmentLimit) -manager.add_command('populate_annual_billing', commands.PopulateAnnualBilling) -manager.add_command('rerun_build_dvla_file', commands.ReRunBuildDvlaFileForJob) +from app import create_app +app = Flask('app') -@manager.command -def list_routes(): - """List URLs of all application routes.""" - for 
rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule): - print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) - - -if __name__ == '__main__': - manager.run() +create_app(app) diff --git a/requirements.txt b/requirements.txt index 8d69902c5..37351cacb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ docopt==0.6.2 Flask-Bcrypt==0.7.1 Flask-Marshmallow==0.8.0 Flask-Migrate==2.1.1 -Flask-Script==2.0.5 Flask-SQLAlchemy==2.3.2 Flask==0.12.2 gunicorn==19.7.1 diff --git a/run_celery.py b/run_celery.py index 013499615..4fb28ae08 100644 --- a/run_celery.py +++ b/run_celery.py @@ -1,6 +1,10 @@ #!/usr/bin/env python # notify_celery is referenced from manifest_delivery_base.yml, and cannot be removed +from flask import Flask + from app import notify_celery, create_app -application = create_app('delivery') + +application = Flask('delivery') +create_app(application) application.app_context().push() diff --git a/scripts/run_app.sh b/scripts/run_app.sh index e8ab00bb6..9997d1072 100755 --- a/scripts/run_app.sh +++ b/scripts/run_app.sh @@ -3,4 +3,4 @@ set -e source environment.sh -python3 application.py runserver +flask run -p 6011 diff --git a/server_commands.py b/server_commands.py index 9f0ce8a60..a3c78bf78 100644 --- a/server_commands.py +++ b/server_commands.py @@ -1,4 +1,3 @@ -from flask_script import Manager, Server from flask_migrate import Migrate, MigrateCommand from app import (create_app, db, commands) import os @@ -16,11 +15,10 @@ os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment] application = create_app() -manager = Manager(application) migrate = Migrate(application, db) -manager.add_command('db', MigrateCommand) -manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) -manager.add_command('custom_db_script', commands.CustomDbScript) +application.add_command('db', MigrateCommand) +application.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) +application.add_command('custom_db_script', commands.CustomDbScript) if __name__ == '__main__': manager.run() From e2e9db8c97d5fcaf0f2e53a57e97d77d228cc6fe Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 15:01:32 +0000 Subject: [PATCH 06/15] add docstrings for all custom commands --- app/commands.py | 47 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/app/commands.py b/app/commands.py index d4be9d23a..ac1e36866 100644 --- a/app/commands.py +++ b/app/commands.py @@ -30,6 +30,9 @@ commands = click.Group(name='commands', help='Additional commands') @click.option('-c', '--cost', required=True, help='Cost (pence) per message including decimals') @click.option('-d', '--valid_from', required=True, help="Date (%Y-%m-%dT%H:%M:%S) valid from") def create_provider_rates(provider_name, cost, valid_from): + """ + Backfill rates for a given provider + """ if provider_name not in PROVIDERS: raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS))) @@ -47,8 +50,15 @@ def create_provider_rates(provider_name, cost, valid_from): @commands.command() -@click.option('-u', '--user_email_prefix', required=True, help="Functional test user email prefix.") +@click.option('-u', '--user_email_prefix', required=True, help=""" + Functional test user email prefix. eg "notify-test-preview" +""") # noqa def purge_functional_test_data(user_email_prefix): + """ + Remove non-seeded functional test data + + users, services, etc. 
Give an email prefix. Probably "notify-test-preview". + """ users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() for usr in users: # Make sure the full email includes a uuid in it @@ -70,6 +80,8 @@ def purge_functional_test_data(user_email_prefix): @commands.command() def backfill_notification_statuses(): """ + DEPRECATED. Populates notification_status. + This will be used to populate the new `Notification._status_fkey` with the old `Notification._status_enum` """ @@ -87,6 +99,9 @@ def backfill_notification_statuses(): @commands.command() def update_notification_international_flag(): + """ + DEPRECATED. Set notifications.international=false. + """ # 250,000 rows takes 30 seconds to update. subq = "select id from notifications where international is null limit 250000" update = "update notifications set international = False where id in ({})".format(subq) @@ -112,6 +127,7 @@ def update_notification_international_flag(): @commands.command() def fix_notification_statuses_not_in_sync(): """ + DEPRECATED. This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey became out of sync. See 979e90a. @@ -144,6 +160,11 @@ def fix_notification_statuses_not_in_sync(): @commands.command() def link_inbound_numbers_to_service(): + """ + DEPRECATED. + + Matches inbound numbers and service ids based on services.sms_sender + """ update = """ UPDATE inbound_numbers SET service_id = services.id, @@ -161,6 +182,9 @@ def link_inbound_numbers_to_service(): @commands.command() @click.option('-y', '--year', required=True, help="Use for integer value for year, e.g. 2017") def populate_monthly_billing(year): + """ + Populate monthly billing table for all services for a given year. + """ def populate(service_id, year, month): create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1)) sms_res = get_monthly_billing_by_notification_type( @@ -193,6 +217,9 @@ def populate_monthly_billing(year): @click.option('-s', '--start_date', required=True, help="Date (%Y-%m-%d) start date inclusive") @click.option('-e', '--end_date', required=True, help="Date (%Y-%m-%d) end date inclusive") def backfill_processing_time(start_date, end_date): + """ + Send historical performance platform stats. + """ start_date = datetime.strptime(start_date, '%Y-%m-%d') end_date = datetime.strptime(end_date, '%Y-%m-%d') @@ -217,6 +244,9 @@ def backfill_processing_time(start_date, end_date): @commands.command() def populate_service_email_reply_to(): + """ + Migrate reply to emails. + """ services_to_update = """ INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at) SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}' @@ -236,6 +266,9 @@ def populate_service_email_reply_to(): @commands.command() def populate_service_sms_sender(): + """ + Migrate sms senders. Must be called when working on a fresh db! + """ services_to_update = """ INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}' @@ -272,6 +305,9 @@ def populate_service_sms_sender(): @commands.command() def populate_service_letter_contact(): + """ + Migrates letter contact blocks. 
+ """ services_to_update = """ INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at) SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}' @@ -291,6 +327,9 @@ def populate_service_letter_contact(): @commands.command() def populate_service_and_service_history_free_sms_fragment_limit(): + """ + DEPRECATED. Set services to have 250k sms limit. + """ services_to_update = """ UPDATE services SET free_sms_fragment_limit = 250000 @@ -314,6 +353,9 @@ def populate_service_and_service_history_free_sms_fragment_limit(): @commands.command() def populate_annual_billing(): + """ + add annual_billing for 2016, 2017 and 2018. + """ financial_year = [2016, 2017, 2018] for fy in financial_year: @@ -337,6 +379,9 @@ def populate_annual_billing(): @commands.command() @click.option('-j', '--job_id', required=True, help="Enter the job id to rebuild the dvla file for") def re_run_build_dvla_file_for_job(job_id): + """ + Rebuild dvla file for a job. + """ from app.celery.tasks import build_dvla_file from app.config import QueueNames build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) From 6d45a887c540713176662652d724204f02343d9f Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 15:01:53 +0000 Subject: [PATCH 07/15] remove wsgi --- manifest-api-base.yml | 2 +- wsgi.py | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) delete mode 100644 wsgi.py diff --git a/manifest-api-base.yml b/manifest-api-base.yml index 28cf8a8cd..b3c874f91 100644 --- a/manifest-api-base.yml +++ b/manifest-api-base.yml @@ -1,7 +1,7 @@ --- buildpack: python_buildpack -command: scripts/run_app_paas.sh gunicorn -c /home/vcap/app/gunicorn_config.py --error-logfile /home/vcap/logs/gunicorn_error.log -w 5 -b 0.0.0.0:$PORT wsgi +command: scripts/run_app_paas.sh gunicorn -c /home/vcap/app/gunicorn_config.py --error-logfile /home/vcap/logs/gunicorn_error.log -w 5 -b 0.0.0.0:$PORT application services: - notify-aws - notify-config diff --git a/wsgi.py b/wsgi.py deleted file mode 100644 index 9fbeb28ac..000000000 --- a/wsgi.py +++ /dev/null @@ -1,7 +0,0 @@ -from app import create_app - - -application = create_app() - -if __name__ == "__main__": - application.run() From cd6f85281c82ded0375591d783d056121659f670 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 16:29:51 +0000 Subject: [PATCH 08/15] any apps that use current_app need to be in a context to achieve this, the decorator flask.cli.with_appcontext is used. 
Not sure why it hasn't applied by default /shrug --- app/commands.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/app/commands.py b/app/commands.py index ac1e36866..522aea87e 100644 --- a/app/commands.py +++ b/app/commands.py @@ -2,6 +2,7 @@ import uuid from datetime import datetime, timedelta from decimal import Decimal +import flask from flask import current_app import click @@ -22,7 +23,9 @@ from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc from app.performance_platform.processing_time import send_processing_time_for_start_and_end -commands = click.Group(name='commands', help='Additional commands') +@click.group(name='command', help='Additional commands') +def commands(): + pass @commands.command() @@ -387,7 +390,8 @@ def re_run_build_dvla_file_for_job(job_id): build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) -@commands.command() +@commands.command(name='list-routes') +@flask.cli.with_appcontext def list_routes(): """List URLs of all application routes.""" for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule): From b727f53836bb06a6c58ad537bce3cb4ed3b1e721 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 16:31:04 +0000 Subject: [PATCH 09/15] ensure app can run on paas instead of using wsgi, we now use "application" - this tells gunicorn to look inside the python module application (application.py) for a wsgi app - and by default that is also called application so rename the variable. Also, when running tasks, we're not using gunicorn so need to set the flask variable in the manifest so that `flask command ...` finds the app properly. Remove server_commands as it's not used any more. --- application.py | 4 ++-- manifest-api-base.yml | 2 ++ server_commands.py | 24 ------------------------ 3 files changed, 4 insertions(+), 26 deletions(-) delete mode 100644 server_commands.py diff --git a/application.py b/application.py index 6960fd08b..8d94e6d0c 100644 --- a/application.py +++ b/application.py @@ -5,6 +5,6 @@ from flask import Flask from app import create_app -app = Flask('app') +application = Flask('app') -create_app(app) +create_app(application) diff --git a/manifest-api-base.yml b/manifest-api-base.yml index b3c874f91..9afb9f860 100644 --- a/manifest-api-base.yml +++ b/manifest-api-base.yml @@ -14,6 +14,8 @@ services: env: NOTIFY_APP_NAME: public-api CW_APP_NAME: api + # required by cf run-task + FLASK_APP: application.py instances: 1 memory: 1G diff --git a/server_commands.py b/server_commands.py deleted file mode 100644 index a3c78bf78..000000000 --- a/server_commands.py +++ /dev/null @@ -1,24 +0,0 @@ -from flask_migrate import Migrate, MigrateCommand -from app import (create_app, db, commands) -import os - -default_env_file = '/home/ubuntu/environment' -environment = 'live' - -if os.path.isfile(default_env_file): - with open(default_env_file, 'r') as environment_file: - environment = environment_file.readline().strip() - -from app.config import configs - -os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment] - -application = create_app() - -migrate = Migrate(application, db) -application.add_command('db', MigrateCommand) -application.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) -application.add_command('custom_db_script', commands.CustomDbScript) - -if __name__ == '__main__': - manager.run() From d30a8b83c13841aabd12723702d5e298432b1839 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 16:36:09 +0000 Subject: [PATCH 
10/15] update readme and ensure makefile up to date --- Makefile | 3 +-- README.md | 13 ++++++++----- migrations/README | 9 --------- migrations/README.md | 9 +++++++++ 4 files changed, 18 insertions(+), 16 deletions(-) delete mode 100644 migrations/README create mode 100644 migrations/README.md diff --git a/Makefile b/Makefile index 732d669a4..dff7a7b69 100644 --- a/Makefile +++ b/Makefile @@ -287,7 +287,7 @@ cf-deploy-api-db-migration: cf unbind-service notify-api-db-migration notify-config cf unbind-service notify-api-db-migration notify-aws cf push notify-api-db-migration -f manifest-api-${CF_SPACE}.yml - cf run-task notify-api-db-migration "python db.py db upgrade" --name api_db_migration + cf run-task notify-api-db-migration "flask db upgrade" --name api_db_migration .PHONY: cf-check-api-db-migration-task cf-check-api-db-migration-task: ## Get the status for the last notify-api-db-migration task @@ -310,4 +310,3 @@ cf-push: .PHONY: check-if-migrations-to-run check-if-migrations-to-run: @echo $(shell python3 scripts/check_if_new_migration.py) - diff --git a/README.md b/README.md index c5737e8d8..dce3dbad6 100644 --- a/README.md +++ b/README.md @@ -105,17 +105,20 @@ That will run pycodestyle for code analysis and our unit test suite. If you wish -## To remove functional test data +## To run one off tasks -NOTE: There is assumption that both the server name prefix and user name prefix are followed by a uuid. -The script will search for all services/users with that prefix and only remove it if the prefix is followed by a uuid otherwise it will be skipped. +Tasks are run through the `flask` command - run `flask --help` for more information. There are two sections we need to +care about: `flask db` contains alembic migration commands, and `flask command` contains all of our custom commands. For +example, to purge all dynamically generated functional test data, do the following: Locally ``` -python application.py purge_functional_test_data -u # Remove the user and associated services. +flask command purge_functional_test_data -u ``` On the server ``` -python server_commands.py purge_functional_test_data -u # Remove the user and associated services. +cf run-task notify-api "flask command purge_functional_test_data -u " ``` + +All commands and command options have a --help command if you need more information. diff --git a/migrations/README b/migrations/README deleted file mode 100644 index 7c44eae44..000000000 --- a/migrations/README +++ /dev/null @@ -1,9 +0,0 @@ -Generic single-database configuration. - -python application.py db migrate to generate migration script. - -python application.py db upgrade to upgrade db with script. - -python application.py db downgrade to rollback db changes. - -python application.py db current to show current script. diff --git a/migrations/README.md b/migrations/README.md new file mode 100644 index 000000000..b2977a1cf --- /dev/null +++ b/migrations/README.md @@ -0,0 +1,9 @@ +Generic single-database configuration. + +flask db migrate to generate migration script. + +flask db upgrade to upgrade db with script. + +flask db downgrade to rollback db changes. + +flask db current to show current script. 
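Editor's note (not part of the patches): this series swaps Flask-Script's Manager for Flask's built-in click CLI. Below is a minimal, self-contained sketch of the resulting wiring; the group name 'command', the use of flask.cli.with_appcontext and the app.cli registration mirror app/commands.py and application.py above, while the `greet` command and its `--name` option are invented purely for illustration.

```
import click
import flask
from flask import Flask


@click.group(name='command', help='Additional commands')
def command_group():
    pass


@command_group.command(name='greet')
@click.option('-n', '--name', required=True, help='Who to greet')
@flask.cli.with_appcontext
def greet(name):
    """Toy command that runs inside the app context, like the real ones."""
    click.echo('hello {} from {}'.format(name, flask.current_app.name))


def setup_commands(application):
    # same idea as app.commands.setup_commands: hang the group off the flask CLI
    application.cli.add_command(command_group)


application = Flask('app')
setup_commands(application)
```

With FLASK_APP=application.py exported (as the manifest now sets for cf run-task), this would be invoked as `flask command greet -n Notify`; the `flask db ...` commands documented above come from Flask-Migrate's own click group in the same way.
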
From 5466b7cd3cde59a220b81af511362cf2817f407d Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 22 Nov 2017 17:54:59 +0000 Subject: [PATCH 11/15] fix test create_app invocation --- tests/app/test_commands.py | 4 ++-- tests/conftest.py | 8 +++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 1c5ceedd9..2ee188d4a 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -1,12 +1,12 @@ from datetime import datetime -from app.commands import BackfillProcessingTime +from app.commands import backfill_processing_time def test_backfill_processing_time_works_for_correct_dates(mocker): send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end') - BackfillProcessingTime().run('2017-08-01', '2017-08-03') + backfill_processing_time.callback('2017-08-01', '2017-08-03') assert send_mock.call_count == 3 send_mock.assert_any_call(datetime(2017, 7, 31, 23, 0), datetime(2017, 8, 1, 23, 0)) diff --git a/tests/conftest.py b/tests/conftest.py index 321d063a0..266346ba9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,9 @@ from contextlib import contextmanager import os +from flask import Flask from alembic.command import upgrade from alembic.config import Config -from flask_migrate import Migrate, MigrateCommand -from flask_script import Manager import boto3 import pytest import sqlalchemy @@ -14,7 +13,8 @@ from app import create_app, db @pytest.fixture(scope='session') def notify_api(): - app = create_app() + app = Flask('test') + create_app(app) # deattach server-error error handlers - error_handler_spec looks like: # {'blueprint_name': { @@ -76,8 +76,6 @@ def notify_db(notify_api, worker_id): current_app.config['SQLALCHEMY_DATABASE_URI'] += '_{}'.format(worker_id) create_test_db(current_app.config['SQLALCHEMY_DATABASE_URI']) - Migrate(notify_api, db) - Manager(db, MigrateCommand) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations') config = Config(ALEMBIC_CONFIG + '/alembic.ini') From 61b28ca5233f2034fdcb7c63f0896da5d6b6b78c Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 23 Nov 2017 17:12:11 +0000 Subject: [PATCH 12/15] update bootstrap to use new command --- scripts/bootstrap.sh | 2 +- scripts/check_if_new_migration.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 9ee35d24b..c46d03aa6 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -36,4 +36,4 @@ createdb notification_api # Upgrade databases source environment.sh -python application.py db upgrade +flask db upgrade diff --git a/scripts/check_if_new_migration.py b/scripts/check_if_new_migration.py index 2cd1614d0..2878086f6 100644 --- a/scripts/check_if_new_migration.py +++ b/scripts/check_if_new_migration.py @@ -8,7 +8,7 @@ def get_latest_db_migration_to_apply(): project_dir = dirname(dirname(abspath(__file__))) # Get the main project directory migrations_dir = '{}/migrations/versions/'.format(project_dir) migration_files = [migration_file for migration_file in os.listdir(migrations_dir) if migration_file.endswith('py')] - # sometimes there's a trailing underscore, if script was created with `python app.py db migrate --rev-id=...` + # sometimes there's a trailing underscore, if script was created with `flask db migrate --rev-id=...` latest_file = sorted(migration_files, reverse=True)[0].replace('_.py', '').replace('.py', '') return latest_file From 
e813f1ff87c58d5521868a02f08f1c1ad81303b8 Mon Sep 17 00:00:00 2001
From: Alexey Bezhan
Date: Fri, 17 Nov 2017 13:58:44 +0000
Subject: [PATCH 13/15] Add a script to fix migration ordering conflicts

When generating a new migration we give it a number that increments the
latest existing migration on master. This means that when there are multiple
PRs open containing a migration and one of them gets merged, the others need
to be updated to move their migration files to apply on top of the recently
merged one. This requires renaming the file and changing migration references
for both the migration revision and down_revision. If a PR introduced more
than 1 migration they all need to be updated one after another since each one
needs to be renamed.

This adds a script to simplify this process. `./scripts/fix_migrations.py`
will check for any branch points. If it finds exactly one (which should be
the common case), it asks which migration should be moved and renames /
updates references to move the selected branch on top of the other one.

It won't resolve any conflicts within migrations themselves (eg if both
branches modified the same column) and it won't try to resolve cases with
more than 1 branch.
---
 scripts/fix_migrations.py | 87 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 87 insertions(+)
 create mode 100755 scripts/fix_migrations.py

diff --git a/scripts/fix_migrations.py b/scripts/fix_migrations.py
new file mode 100755
index 000000000..3965ed411
--- /dev/null
+++ b/scripts/fix_migrations.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+import os
+import sys
+
+from alembic.script import ScriptDirectory
+
+sys.path.append('.')
+
+
+def get_branch_points(migrations):
+    return [m for m in migrations.walk_revisions() if m.is_branch_point]
+
+
+def get_branches(migrations, branch_point, heads):
+    return [list(migrations.iterate_revisions(m, branch_point.revision))[::-1]
+            for m in heads]
+
+
+def choice(prompt, options, option_fmt=lambda x: x):
+    print("{}:\n".format(prompt))
+    for i, option in enumerate(options):
+        print("{}. {}".format(i + 1, option_fmt(option)))
+
+    print()
+    choice = input("Option> ")
+
+    return options[int(choice) - 1]
+
+
+def rename_revision(current_revision, new_base):
+    new_id = int(new_base[:4]) + 1
+    return "{:04d}{}".format(new_id, current_revision[4:])
+
+
+def reorder_revisions(revisions, old_base, new_base):
+    if not revisions:
+        return
+
+    head, *tail = revisions
+    new_revision_id = rename_revision(head.revision, new_base)
+
+    print("Moving {} to {}".format(head.revision, new_revision_id))
+    with open(head.path, 'r') as rev_file:
+        file_data = rev_file.read()
+
+    file_data = file_data.replace(head.revision, new_revision_id).replace(old_base, new_base)
+
+    with open(head.path.replace(head.revision, new_revision_id), 'w') as rev_file:
+        rev_file.write(file_data)
+
+    print("Removing {}".format(head.path))
+    os.remove(head.path)
+
+    reorder_revisions(tail, head.revision, new_revision_id)
+
+
+def fix_branch_point(migrations, branch_point, heads):
+    print("Migrations directory has a branch point at {}".format(branch_point.revision))
+
+    branches = get_branches(migrations, branch_point, heads)
+    move_branch = choice("Select migrations to move", branches,
+                         lambda x: " -> ".join(m.revision for m in x))
+    branches.remove(move_branch)
+
+    reorder_revisions(move_branch, branch_point.revision, branches[0][-1].revision)
+
+
+def main(migrations_path):
+    migrations = ScriptDirectory(migrations_path)
+
+    branch_points = get_branch_points(migrations)
+    heads = migrations.get_heads()
+
+    if not branch_points:
+        print("Migrations are ordered")
+    elif len(branch_points) == 1 and len(heads) == 2:
+        fix_branch_point(migrations, branch_points[0], heads)
+    else:
+        print("Found {} branch points and {} heads, can't fix automatically".format(
+            [bp.revision for bp in branch_points], heads))
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main('migrations/')

From 4d75f032c6895eb863a8aaee27a9c6e755e38412 Mon Sep 17 00:00:00 2001
From: Leo Hemsted
Date: Fri, 24 Nov 2017 10:53:16 +0000
Subject: [PATCH 14/15] remove cf stop to try and improve deploy robustness

Rationale:

Sometimes, when deploying, we've seen errors while stopping the old apps:
"(psycopg2.ProgrammingError) permission denied for relation notifications".

When you call cf stop, it may not be entirely synchronous. Under the hood,
cloudfoundry has to do a whole bunch of things when you stop an app - it has
its own internal db of what app states are, and also has to remove it from
any load balancers etc, and also it has to actually stop the app. We're not
sure if the `cf stop` command guarantees that your process has already
terminated by the time that the command returns.

In our Makefile, we call `cf stop`, followed by `cf delete`. One possible
theory is that the process is still running when `cf stop` exits, and then
`cf delete` unbinds that service from the database, removing all of its
users' permissions.

This isn't confirmed; however, this commit removes the `cf stop` command to
see if it solves the issue. The PaaS team confirmed that it's redundant -
`cf delete` will carry out the same tasks under the hood.
--- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index dff7a7b69..8ac0f2429 100644 --- a/Makefile +++ b/Makefile @@ -277,7 +277,6 @@ cf-deploy: ## Deploys the app to Cloud Foundry cf rename ${CF_APP} ${CF_APP}-rollback cf push ${CF_APP} -f ${CF_MANIFEST_FILE} cf scale -i $$(cf curl /v2/apps/$$(cf app --guid ${CF_APP}-rollback) | jq -r ".entity.instances" 2>/dev/null || echo "1") ${CF_APP} - cf stop ${CF_APP}-rollback cf delete -f ${CF_APP}-rollback .PHONY: cf-deploy-api-db-migration From 4c14e3279f8372e9a3283e8645d57a28a97a1df0 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 24 Nov 2017 12:01:28 +0000 Subject: [PATCH 15/15] ensure the app context is included in every single flask command --- app/commands.py | 55 +++++++++++++++++++++++++------------- tests/app/test_commands.py | 6 +++-- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/app/commands.py b/app/commands.py index 522aea87e..1c7918315 100644 --- a/app/commands.py +++ b/app/commands.py @@ -1,6 +1,7 @@ import uuid from datetime import datetime, timedelta from decimal import Decimal +import functools import flask from flask import current_app @@ -24,11 +25,30 @@ from app.performance_platform.processing_time import send_processing_time_for_st @click.group(name='command', help='Additional commands') -def commands(): +def command_group(): pass -@commands.command() +class notify_command: + def __init__(self, name=None): + self.name = name + + def __call__(self, func): + # we need to call the flask with_appcontext decorator to ensure the config is loaded, db connected etc etc. + # we also need to use functools.wraps to carry through the names and docstrings etc of the functions. + # Then we need to turn it into a click.Command - that's what command_group.add_command expects. + @click.command(name=self.name) + @functools.wraps(func) + @flask.cli.with_appcontext + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + command_group.add_command(wrapper) + + return wrapper + + +@notify_command() @click.option('-p', '--provider_name', required=True, help='Provider name') @click.option('-c', '--cost', required=True, help='Cost (pence) per message including decimals') @click.option('-d', '--valid_from', required=True, help="Date (%Y-%m-%dT%H:%M:%S) valid from") @@ -52,7 +72,7 @@ def create_provider_rates(provider_name, cost, valid_from): dao_create_provider_rates(provider_name, valid_from, cost) -@commands.command() +@notify_command() @click.option('-u', '--user_email_prefix', required=True, help=""" Functional test user email prefix. eg "notify-test-preview" """) # noqa @@ -80,7 +100,7 @@ def purge_functional_test_data(user_email_prefix): delete_model_user(usr) -@commands.command() +@notify_command() def backfill_notification_statuses(): """ DEPRECATED. Populates notification_status. @@ -100,7 +120,7 @@ def backfill_notification_statuses(): result = db.session.execute(subq).fetchall() -@commands.command() +@notify_command() def update_notification_international_flag(): """ DEPRECATED. Set notifications.international=false. @@ -127,7 +147,7 @@ def update_notification_international_flag(): result_history = db.session.execute(subq_history).fetchall() -@commands.command() +@notify_command() def fix_notification_statuses_not_in_sync(): """ DEPRECATED. @@ -161,7 +181,7 @@ def fix_notification_statuses_not_in_sync(): result = db.session.execute(subq_hist).fetchall() -@commands.command() +@notify_command() def link_inbound_numbers_to_service(): """ DEPRECATED. 
@@ -182,7 +202,7 @@ def link_inbound_numbers_to_service(): print("Linked {} inbound numbers to service".format(result.rowcount)) -@commands.command() +@notify_command() @click.option('-y', '--year', required=True, help="Use for integer value for year, e.g. 2017") def populate_monthly_billing(year): """ @@ -216,7 +236,7 @@ def populate_monthly_billing(year): populate(service_id, year, i) -@commands.command() +@notify_command() @click.option('-s', '--start_date', required=True, help="Date (%Y-%m-%d) start date inclusive") @click.option('-e', '--end_date', required=True, help="Date (%Y-%m-%d) end date inclusive") def backfill_processing_time(start_date, end_date): @@ -245,7 +265,7 @@ def backfill_processing_time(start_date, end_date): send_processing_time_for_start_and_end(process_start_date, process_end_date) -@commands.command() +@notify_command() def populate_service_email_reply_to(): """ Migrate reply to emails. @@ -267,7 +287,7 @@ def populate_service_email_reply_to(): print("Populated email reply to addresses for {}".format(result.rowcount)) -@commands.command() +@notify_command() def populate_service_sms_sender(): """ Migrate sms senders. Must be called when working on a fresh db! @@ -306,7 +326,7 @@ def populate_service_sms_sender(): print("{} service_sms_senders".format(service_sms_sender_count_query)) -@commands.command() +@notify_command() def populate_service_letter_contact(): """ Migrates letter contact blocks. @@ -328,7 +348,7 @@ def populate_service_letter_contact(): print("Populated letter contacts for {} services".format(result.rowcount)) -@commands.command() +@notify_command() def populate_service_and_service_history_free_sms_fragment_limit(): """ DEPRECATED. Set services to have 250k sms limit. @@ -354,7 +374,7 @@ def populate_service_and_service_history_free_sms_fragment_limit(): print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount)) -@commands.command() +@notify_command() def populate_annual_billing(): """ add annual_billing for 2016, 2017 and 2018. 
@@ -379,7 +399,7 @@ def populate_annual_billing(): print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount)) -@commands.command() +@notify_command() @click.option('-j', '--job_id', required=True, help="Enter the job id to rebuild the dvla file for") def re_run_build_dvla_file_for_job(job_id): """ @@ -390,8 +410,7 @@ def re_run_build_dvla_file_for_job(job_id): build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) -@commands.command(name='list-routes') -@flask.cli.with_appcontext +@notify_command(name='list-routes') def list_routes(): """List URLs of all application routes.""" for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule): @@ -399,4 +418,4 @@ def list_routes(): def setup_commands(application): - application.cli.add_command(commands) + application.cli.add_command(command_group) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 2ee188d4a..d5f6e75e1 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -3,10 +3,12 @@ from datetime import datetime from app.commands import backfill_processing_time -def test_backfill_processing_time_works_for_correct_dates(mocker): +def test_backfill_processing_time_works_for_correct_dates(mocker, notify_api): send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end') - backfill_processing_time.callback('2017-08-01', '2017-08-03') + # backfill_processing_time is a click.Command object - if you try invoking the callback on its own, it + # throws a `RuntimeError: There is no active click context.` - so get at the original function using __wrapped__ + backfill_processing_time.callback.__wrapped__('2017-08-01', '2017-08-03') assert send_mock.call_count == 3 send_mock.assert_any_call(datetime(2017, 7, 31, 23, 0), datetime(2017, 8, 1, 23, 0))
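Editor's note (not part of the patch): a self-contained sketch of why the notify_command wrapper above keeps click options and docstrings working. click.option stores its parameters on the decorated function as __click_params__, and functools.wraps copies that attribute, together with __name__ and __doc__, onto the wrapper, so the outermost click.command still picks them up. The stand-in below omits flask.cli.with_appcontext so it can run without the Notify app; the `hello` command is invented for illustration.

```
import functools

import click


@click.group(name='command')
def command_group():
    pass


class notify_command:
    """Simplified stand-in for the decorator added in this patch."""

    def __init__(self, name=None):
        self.name = name

    def __call__(self, func):
        # click.command reads wrapper.__click_params__, which functools.wraps
        # copied across from func along with its name and docstring
        @click.command(name=self.name)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        command_group.add_command(wrapper)
        return wrapper


@notify_command(name='hello')
@click.option('-n', '--name', required=True)
def hello(name):
    """Toy command."""
    click.echo('hello {}'.format(name))


if __name__ == '__main__':
    command_group()  # e.g. python sketch.py hello -n world
```

The real decorator also applies flask.cli.with_appcontext to the inner wrapper, which is why the test above reaches for callback.__wrapped__ (the original undecorated function) rather than invoking the command's callback directly.
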