diff --git a/Makefile b/Makefile index 732d669a4..8ac0f2429 100644 --- a/Makefile +++ b/Makefile @@ -277,7 +277,6 @@ cf-deploy: ## Deploys the app to Cloud Foundry cf rename ${CF_APP} ${CF_APP}-rollback cf push ${CF_APP} -f ${CF_MANIFEST_FILE} cf scale -i $$(cf curl /v2/apps/$$(cf app --guid ${CF_APP}-rollback) | jq -r ".entity.instances" 2>/dev/null || echo "1") ${CF_APP} - cf stop ${CF_APP}-rollback cf delete -f ${CF_APP}-rollback .PHONY: cf-deploy-api-db-migration @@ -287,7 +286,7 @@ cf-deploy-api-db-migration: cf unbind-service notify-api-db-migration notify-config cf unbind-service notify-api-db-migration notify-aws cf push notify-api-db-migration -f manifest-api-${CF_SPACE}.yml - cf run-task notify-api-db-migration "python db.py db upgrade" --name api_db_migration + cf run-task notify-api-db-migration "flask db upgrade" --name api_db_migration .PHONY: cf-check-api-db-migration-task cf-check-api-db-migration-task: ## Get the status for the last notify-api-db-migration task @@ -310,4 +309,3 @@ cf-push: .PHONY: check-if-migrations-to-run check-if-migrations-to-run: @echo $(shell python3 scripts/check_if_new_migration.py) - diff --git a/README.md b/README.md index 8252c9bbf..dce3dbad6 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,9 @@ export FIRETEXT_API_KEY='FIRETEXT_ACTUAL_KEY' export STATSD_PREFIX='YOU_OWN_PREFIX' export NOTIFICATION_QUEUE_PREFIX='YOUR_OWN_PREFIX' export REDIS_URL="redis://localhost:6379/0" +export FLASK_APP=application.py +export FLASK_DEBUG=1 +export WERKZEUG_DEBUG_PIN=off "> environment.sh ``` @@ -102,17 +105,20 @@ That will run pycodestyle for code analysis and our unit test suite. If you wish -## To remove functional test data +## To run one-off tasks -NOTE: There is assumption that both the server name prefix and user name prefix are followed by a uuid. -The script will search for all services/users with that prefix and only remove it if the prefix is followed by a uuid otherwise it will be skipped. +Tasks are run through the `flask` command - run `flask --help` for more information. There are two sections we need to +care about: `flask db` contains alembic migration commands, and `flask command` contains all of our custom commands. For +example, to purge all dynamically generated functional test data, do the following: Locally ``` -python application.py purge_functional_test_data -u # Remove the user and associated services. +flask command purge_functional_test_data -u ``` On the server ``` -python server_commands.py purge_functional_test_data -u # Remove the user and associated services. +cf run-task notify-api "flask command purge_functional_test_data -u " ``` + +All commands and command options have a `--help` flag if you need more information.
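The switch away from Flask-Script means each maintenance task above is now a click command hung off the `flask` entry point. Below is a minimal sketch of the registration pattern the new app/commands.py adopts (simplified: the real decorator is a small class that also accepts an optional command name, and `setup_commands` is called from `create_app`); the `purge_functional_test_data` body here is a stand-in, not the real implementation.

```
import functools

import click
import flask
from flask import Flask


@click.group(name='command', help='Additional commands')
def command_group():
    pass


def notify_command(func):
    # flask.cli.with_appcontext loads config and initialises the db before the
    # command body runs; functools.wraps carries the name and docstring through
    # so that click's --help output stays meaningful.
    @click.command(name=func.__name__)
    @functools.wraps(func)
    @flask.cli.with_appcontext
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    command_group.add_command(wrapper)
    return wrapper


@notify_command
@click.option('-u', '--user_email_prefix', required=True)
def purge_functional_test_data(user_email_prefix):
    """Remove non-seeded functional test data."""
    print('would purge users whose email starts with {}'.format(user_email_prefix))


app = Flask('example')
app.cli.add_command(command_group)  # what setup_commands(application) does
```

With `FLASK_APP=application.py` exported as in the README snippet above, that surfaces as `flask command purge_functional_test_data -u notify-test-preview`.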
diff --git a/app/__ b/app/__ new file mode 100644 index 000000000..e69de29bb diff --git a/app/__init__.py b/app/__init__.py index d4e9c8136..a1172736e 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -6,6 +6,7 @@ import uuid from flask import Flask, _request_ctx_stack, request, g, jsonify from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow +from flask_migrate import Migrate from monotonic import monotonic from notifications_utils.clients.statsd.statsd_client import StatsdClient from notifications_utils.clients.redis.redis_client import RedisClient @@ -25,6 +26,7 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" db = SQLAlchemy() +migrate = Migrate() ma = Marshmallow() notify_celery = NotifyCelery() firetext_client = FiretextClient() @@ -42,21 +44,19 @@ api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user) authenticated_service = LocalProxy(lambda: _request_ctx_stack.top.authenticated_service) -def create_app(app_name=None): - application = Flask(__name__) - +def create_app(application): from app.config import configs notify_environment = os.environ['NOTIFY_ENVIRONMENT'] application.config.from_object(configs[notify_environment]) - if app_name: - application.config['NOTIFY_APP_NAME'] = app_name + application.config['NOTIFY_APP_NAME'] = application.name init_app(application) request_helper.init_app(application) db.init_app(application) + migrate.init_app(application, db=db) ma.init_app(application) statsd_client.init_app(application) logging.init_app(application, statsd_client) @@ -73,6 +73,10 @@ def create_app(app_name=None): register_blueprint(application) register_v2_blueprints(application) + # avoid circular imports by importing this file later 😬 + from app.commands import setup_commands + setup_commands(application) + return application diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 67d553fcd..4dd51828c 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -488,6 +488,7 @@ def send_inbound_sms_to_service(self, inbound_sms_id, service_id): inbound_id=inbound_sms_id) data = { "id": str(inbound_sms.id), + # TODO: should we be validating and formatting the phone number here? 
"source_number": inbound_sms.user_number, "destination_number": inbound_sms.notify_number, "message": inbound_sms.content, diff --git a/app/commands.py b/app/commands.py index f1e125e01..1c7918315 100644 --- a/app/commands.py +++ b/app/commands.py @@ -1,7 +1,11 @@ import uuid from datetime import datetime, timedelta from decimal import Decimal -from flask_script import Command, Option +import functools + +import flask +from flask import current_app +import click from app import db from app.dao.monthly_billing_dao import ( @@ -14,181 +18,197 @@ from app.dao.services_dao import ( delete_service_and_all_associated_db_objects, dao_fetch_all_services_by_user ) -from app.dao.provider_rates_dao import create_provider_rates +from app.dao.provider_rates_dao import create_provider_rates as dao_create_provider_rates from app.dao.users_dao import (delete_model_user, delete_user_verify_codes) from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc from app.performance_platform.processing_time import send_processing_time_for_start_and_end -class CreateProviderRateCommand(Command): +@click.group(name='command', help='Additional commands') +def command_group(): + pass - option_list = ( - Option('-p', '--provider_name', dest="provider_name", help='Provider name'), - Option('-c', '--cost', dest="cost", help='Cost (pence) per message including decimals'), - Option('-d', '--valid_from', dest="valid_from", help="Date (%Y-%m-%dT%H:%M:%S) valid from") - ) - def run(self, provider_name, cost, valid_from): - if provider_name not in PROVIDERS: - raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS))) +class notify_command: + def __init__(self, name=None): + self.name = name + def __call__(self, func): + # we need to call the flask with_appcontext decorator to ensure the config is loaded, db connected etc etc. + # we also need to use functools.wraps to carry through the names and docstrings etc of the functions. + # Then we need to turn it into a click.Command - that's what command_group.add_command expects. + @click.command(name=self.name) + @functools.wraps(func) + @flask.cli.with_appcontext + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + command_group.add_command(wrapper) + + return wrapper + + +@notify_command() +@click.option('-p', '--provider_name', required=True, help='Provider name') +@click.option('-c', '--cost', required=True, help='Cost (pence) per message including decimals') +@click.option('-d', '--valid_from', required=True, help="Date (%Y-%m-%dT%H:%M:%S) valid from") +def create_provider_rates(provider_name, cost, valid_from): + """ + Backfill rates for a given provider + """ + if provider_name not in PROVIDERS: + raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS))) + + try: + cost = Decimal(cost) + except: + raise Exception("Invalid cost value.") + + try: + valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from) + except: + raise Exception("Invalid valid_from date. Use the format %Y-%m-%dT%H:%M:%S") + + dao_create_provider_rates(provider_name, valid_from, cost) + + +@notify_command() +@click.option('-u', '--user_email_prefix', required=True, help=""" + Functional test user email prefix. eg "notify-test-preview" +""") # noqa +def purge_functional_test_data(user_email_prefix): + """ + Remove non-seeded functional test data + + users, services, etc. Give an email prefix. Probably "notify-test-preview". 
+ """ + users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() + for usr in users: + # Make sure the full email includes a uuid in it + # Just in case someone decides to use a similar email address. try: - cost = Decimal(cost) - except: - raise Exception("Invalid cost value.") - - try: - valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from) - except: - raise Exception("Invalid valid_from date. Use the format %Y-%m-%dT%H:%M:%S") - - create_provider_rates(provider_name, valid_from, cost) - - -class PurgeFunctionalTestDataCommand(Command): - - option_list = ( - Option('-u', '-user-email-prefix', dest='user_email_prefix', help="Functional test user email prefix."), - ) - - def run(self, user_email_prefix=None): - if user_email_prefix: - users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all() - for usr in users: - # Make sure the full email includes a uuid in it - # Just in case someone decides to use a similar email address. - try: - uuid.UUID(usr.email_address.split("@")[0].split('+')[1]) - except ValueError: - print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address)) - else: - services = dao_fetch_all_services_by_user(usr.id) - if services: - for service in services: - delete_service_and_all_associated_db_objects(service) - else: - delete_user_verify_codes(usr) - delete_model_user(usr) - - -class CustomDbScript(Command): - - option_list = ( - Option('-n', '-name-of-db-function', dest='name_of_db_function', help="Function name of the DB script to run"), - ) - - def run(self, name_of_db_function): - db_function = getattr(self, name_of_db_function, None) - if callable(db_function): - db_function() + uuid.UUID(usr.email_address.split("@")[0].split('+')[1]) + except ValueError: + print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address)) else: - print('The specified function does not exist.') + services = dao_fetch_all_services_by_user(usr.id) + if services: + for service in services: + delete_service_and_all_associated_db_objects(service) + else: + delete_user_verify_codes(usr) + delete_model_user(usr) - def backfill_notification_statuses(self): - """ - This will be used to populate the new `Notification._status_fkey` with the old - `Notification._status_enum` - """ - LIMIT = 250000 - subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT) - update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq) + +@notify_command() +def backfill_notification_statuses(): + """ + DEPRECATED. Populates notification_status. + + This will be used to populate the new `Notification._status_fkey` with the old + `Notification._status_enum` + """ + LIMIT = 250000 + subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT) + update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('commit {} updates at {}'.format(LIMIT, datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('commit {} updates at {}'.format(LIMIT, datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() - def update_notification_international_flag(self): - # 250,000 rows takes 30 seconds to update. 
- subq = "select id from notifications where international is null limit 250000" - update = "update notifications set international = False where id in ({})".format(subq) +@notify_command() +def update_notification_international_flag(): + """ + DEPRECATED. Set notifications.international=false. + """ + # 250,000 rows takes 30 seconds to update. + subq = "select id from notifications where international is null limit 250000" + update = "update notifications set international = False where id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('commit 250000 updates at {}'.format(datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('commit 250000 updates at {}'.format(datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() - - # Now update notification_history - subq_history = "select id from notification_history where international is null limit 250000" - update_history = "update notification_history set international = False where id in ({})".format(subq_history) + # Now update notification_history + subq_history = "select id from notification_history where international is null limit 250000" + update_history = "update notification_history set international = False where id in ({})".format(subq_history) + result_history = db.session.execute(subq_history).fetchall() + while len(result_history) > 0: + db.session.execute(update_history) + print('commit 250000 updates at {}'.format(datetime.utcnow())) + db.session.commit() result_history = db.session.execute(subq_history).fetchall() - while len(result_history) > 0: - db.session.execute(update_history) - print('commit 250000 updates at {}'.format(datetime.utcnow())) - db.session.commit() - result_history = db.session.execute(subq_history).fetchall() - def fix_notification_statuses_not_in_sync(self): - """ - This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey - became out of sync. See 979e90a. - Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with - these values. - """ - MAX = 10000 +@notify_command() +def fix_notification_statuses_not_in_sync(): + """ + DEPRECATED. + This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey + became out of sync. See 979e90a. - subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX) - update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq) + Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with + these values. 
+ """ + MAX = 10000 + + subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX) + update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq) + result = db.session.execute(subq).fetchall() + + while len(result) > 0: + db.session.execute(update) + print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) + db.session.commit() result = db.session.execute(subq).fetchall() - while len(result) > 0: - db.session.execute(update) - print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq).fetchall() + subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \ + .format(MAX) + update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist) + result = db.session.execute(subq_hist).fetchall() - subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \ - .format(MAX) - update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist) + while len(result) > 0: + db.session.execute(update) + print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) + db.session.commit() result = db.session.execute(subq_hist).fetchall() - while len(result) > 0: - db.session.execute(update) - print('Committed {} updates at {}'.format(len(result), datetime.utcnow())) - db.session.commit() - result = db.session.execute(subq_hist).fetchall() - def link_inbound_numbers_to_service(self): - update = """ - UPDATE inbound_numbers SET - service_id = services.id, - updated_at = now() - FROM services - WHERE services.sms_sender = inbound_numbers.number AND - inbound_numbers.service_id is null - """ - result = db.session.execute(update) - db.session.commit() +@notify_command() +def link_inbound_numbers_to_service(): + """ + DEPRECATED. - print("Linked {} inbound numbers to service".format(result.rowcount)) + Matches inbound numbers and service ids based on services.sms_sender + """ + update = """ + UPDATE inbound_numbers SET + service_id = services.id, + updated_at = now() + FROM services + WHERE services.sms_sender = inbound_numbers.number AND + inbound_numbers.service_id is null + """ + result = db.session.execute(update) + db.session.commit() + + print("Linked {} inbound numbers to service".format(result.rowcount)) -class PopulateMonthlyBilling(Command): - option_list = ( - Option('-y', '-year', dest="year", help="Use for integer value for year, e.g. 2017"), - ) - - def run(self, year): - service_ids = get_service_ids_that_need_billing_populated( - start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) - ) - start, end = 1, 13 - if year == '2016': - start = 4 - - for service_id in service_ids: - print('Starting to populate data for service {}'.format(str(service_id))) - print('Starting populating monthly billing for {}'.format(year)) - for i in range(start, end): - print('Population for {}-{}'.format(i, year)) - self.populate(service_id, year, i) - - def populate(self, service_id, year, month): +@notify_command() +@click.option('-y', '--year', required=True, help="Use for integer value for year, e.g. 2017") +def populate_monthly_billing(year): + """ + Populate monthly billing table for all services for a given year. 
+ """ + def populate(service_id, year, month): create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1)) sms_res = get_monthly_billing_by_notification_type( service_id, datetime(int(year), int(month), 1), SMS_TYPE @@ -200,165 +220,202 @@ class PopulateMonthlyBilling(Command): print('SMS: {}'.format(sms_res.monthly_totals)) print('Email: {}'.format(email_res.monthly_totals)) - -class BackfillProcessingTime(Command): - option_list = ( - Option('-s', '--start_date', dest='start_date', help="Date (%Y-%m-%d) start date inclusive"), - Option('-e', '--end_date', dest='end_date', help="Date (%Y-%m-%d) end date inclusive"), + service_ids = get_service_ids_that_need_billing_populated( + start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16) ) + start, end = 1, 13 - def run(self, start_date, end_date): - start_date = datetime.strptime(start_date, '%Y-%m-%d') - end_date = datetime.strptime(end_date, '%Y-%m-%d') + if year == '2016': + start = 4 - delta = end_date - start_date - - print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date)) - - for i in range(delta.days + 1): - # because the tz conversion funcs talk about midnight, and the midnight before last, - # we want to pretend we're running this from the next morning, so add one. - process_date = start_date + timedelta(days=i + 1) - - process_start_date = get_midnight_for_day_before(process_date) - process_end_date = get_london_midnight_in_utc(process_date) - - print('Sending notification processing-time for {} - {}'.format( - process_start_date.isoformat(), - process_end_date.isoformat() - )) - send_processing_time_for_start_and_end(process_start_date, process_end_date) + for service_id in service_ids: + print('Starting to populate data for service {}'.format(str(service_id))) + print('Starting populating monthly billing for {}'.format(year)) + for i in range(start, end): + print('Population for {}-{}'.format(i, year)) + populate(service_id, year, i) -class PopulateServiceEmailReplyTo(Command): +@notify_command() +@click.option('-s', '--start_date', required=True, help="Date (%Y-%m-%d) start date inclusive") +@click.option('-e', '--end_date', required=True, help="Date (%Y-%m-%d) end date inclusive") +def backfill_processing_time(start_date, end_date): + """ + Send historical performance platform stats. + """ + start_date = datetime.strptime(start_date, '%Y-%m-%d') + end_date = datetime.strptime(end_date, '%Y-%m-%d') - def run(self): - services_to_update = """ - INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}' - FROM services - WHERE reply_to_email_address IS NOT NULL - AND id NOT IN( - SELECT service_id - FROM service_email_reply_to - ) - """.format(datetime.utcnow()) + delta = end_date - start_date - result = db.session.execute(services_to_update) - db.session.commit() + print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date)) - print("Populated email reply to addresses for {}".format(result.rowcount)) + for i in range(delta.days + 1): + # because the tz conversion funcs talk about midnight, and the midnight before last, + # we want to pretend we're running this from the next morning, so add one. 
+ process_date = start_date + timedelta(days=i + 1) + + process_start_date = get_midnight_for_day_before(process_date) + process_end_date = get_london_midnight_in_utc(process_date) + + print('Sending notification processing-time for {} - {}'.format( + process_start_date.isoformat(), + process_end_date.isoformat() + )) + send_processing_time_for_start_and_end(process_start_date, process_end_date) -class PopulateServiceSmsSender(Command): +@notify_command() +def populate_service_email_reply_to(): + """ + Migrate reply to emails. + """ + services_to_update = """ + INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}' + FROM services + WHERE reply_to_email_address IS NOT NULL + AND id NOT IN( + SELECT service_id + FROM service_email_reply_to + ) + """.format(datetime.utcnow()) - def run(self): - services_to_update = """ - INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}' - FROM inbound_numbers - WHERE service_id NOT IN( - SELECT service_id - FROM service_sms_senders - ) - """.format(datetime.utcnow()) + result = db.session.execute(services_to_update) + db.session.commit() - services_to_update_from_services = """ - INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}' + print("Populated email reply to addresses for {}".format(result.rowcount)) + + +@notify_command() +def populate_service_sms_sender(): + """ + Migrate sms senders. Must be called when working on a fresh db! + """ + services_to_update = """ + INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}' + FROM inbound_numbers + WHERE service_id NOT IN( + SELECT service_id + FROM service_sms_senders + ) + """.format(datetime.utcnow()) + + services_to_update_from_services = """ + INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}' + FROM services + WHERE id NOT IN( + SELECT service_id + FROM service_sms_senders + ) + """.format(datetime.utcnow()) + + result = db.session.execute(services_to_update) + second_result = db.session.execute(services_to_update_from_services) + db.session.commit() + + services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0] + + service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0] + + print("Populated sms sender {} services from inbound_numbers".format(result.rowcount)) + print("Populated sms sender {} services from services".format(second_result.rowcount)) + print("{} services in table".format(services_count_query)) + print("{} service_sms_senders".format(service_sms_sender_count_query)) + + +@notify_command() +def populate_service_letter_contact(): + """ + Migrates letter contact blocks. 
+ """ + services_to_update = """ + INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}' + FROM services + WHERE letter_contact_block IS NOT NULL + AND id NOT IN( + SELECT service_id + FROM service_letter_contacts + ) + """.format(datetime.utcnow()) + + result = db.session.execute(services_to_update) + db.session.commit() + + print("Populated letter contacts for {} services".format(result.rowcount)) + + +@notify_command() +def populate_service_and_service_history_free_sms_fragment_limit(): + """ + DEPRECATED. Set services to have 250k sms limit. + """ + services_to_update = """ + UPDATE services + SET free_sms_fragment_limit = 250000 + WHERE free_sms_fragment_limit IS NULL + """ + + services_history_to_update = """ + UPDATE services_history + SET free_sms_fragment_limit = 250000 + WHERE free_sms_fragment_limit IS NULL + """ + + services_result = db.session.execute(services_to_update) + services_history_result = db.session.execute(services_history_to_update) + + db.session.commit() + + print("Populated free sms fragment limits for {} services".format(services_result.rowcount)) + print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount)) + + +@notify_command() +def populate_annual_billing(): + """ + add annual_billing for 2016, 2017 and 2018. + """ + financial_year = [2016, 2017, 2018] + + for fy in financial_year: + populate_data = """ + INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start, + created_at, updated_at) + SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}' FROM services WHERE id NOT IN( SELECT service_id - FROM service_sms_senders - ) - """.format(datetime.utcnow()) + FROM annual_billing + WHERE financial_year_start={}) + """.format(fy, datetime.utcnow(), datetime.utcnow(), fy) - result = db.session.execute(services_to_update) - second_result = db.session.execute(services_to_update_from_services) + services_result1 = db.session.execute(populate_data) db.session.commit() - services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0] - - service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0] - - print("Populated sms sender {} services from inbound_numbers".format(result.rowcount)) - print("Populated sms sender {} services from services".format(second_result.rowcount)) - print("{} services in table".format(services_count_query)) - print("{} service_sms_senders".format(service_sms_sender_count_query)) + print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount)) -class PopulateServiceLetterContact(Command): - - def run(self): - services_to_update = """ - INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}' - FROM services - WHERE letter_contact_block IS NOT NULL - AND id NOT IN( - SELECT service_id - FROM service_letter_contacts - ) - """.format(datetime.utcnow()) - - result = db.session.execute(services_to_update) - db.session.commit() - - print("Populated letter contacts for {} services".format(result.rowcount)) +@notify_command() +@click.option('-j', '--job_id', required=True, help="Enter the job id to rebuild the dvla file for") +def 
re_run_build_dvla_file_for_job(job_id): + """ + Rebuild dvla file for a job. + """ + from app.celery.tasks import build_dvla_file + from app.config import QueueNames + build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) -class PopulateServiceAndServiceHistoryFreeSmsFragmentLimit(Command): - - def run(self): - services_to_update = """ - UPDATE services - SET free_sms_fragment_limit = 250000 - WHERE free_sms_fragment_limit IS NULL - """ - - services_history_to_update = """ - UPDATE services_history - SET free_sms_fragment_limit = 250000 - WHERE free_sms_fragment_limit IS NULL - """ - - services_result = db.session.execute(services_to_update) - services_history_result = db.session.execute(services_history_to_update) - - db.session.commit() - - print("Populated free sms fragment limits for {} services".format(services_result.rowcount)) - print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount)) +@notify_command(name='list-routes') +def list_routes(): + """List URLs of all application routes.""" + for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule): + print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) -class PopulateAnnualBilling(Command): - def run(self): - financial_year = [2016, 2017, 2018] - - for fy in financial_year: - populate_data = """ - INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start, - created_at, updated_at) - SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}' - FROM services - WHERE id NOT IN( - SELECT service_id - FROM annual_billing - WHERE financial_year_start={}) - """.format(fy, datetime.utcnow(), datetime.utcnow(), fy) - - services_result1 = db.session.execute(populate_data) - db.session.commit() - - print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount)) - - -class ReRunBuildDvlaFileForJob(Command): - option_list = ( - Option('-j', '--job_id', dest='job_id', help="Enter the job id to rebuild the dvla file for"), - ) - - def run(self, job_id): - from app.celery.tasks import build_dvla_file - from app.config import QueueNames - build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS) +def setup_commands(application): + application.cli.add_command(command_group) diff --git a/app/config.py b/app/config.py index bac6db38f..3199fbe93 100644 --- a/app/config.py +++ b/app/config.py @@ -389,6 +389,21 @@ class Staging(Config): API_RATE_LIMIT_ENABLED = True CHECK_PROXY_HEADER = True + API_KEY_LIMITS = { + KEY_TYPE_TEAM: { + "limit": 21000, + "interval": 60 + }, + KEY_TYPE_NORMAL: { + "limit": 21000, + "interval": 60 + }, + KEY_TYPE_TEST: { + "limit": 21000, + "interval": 60 + } + } + class Live(Config): NOTIFY_EMAIL_DOMAIN = 'notifications.service.gov.uk' diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py index 6cae74c0e..5343f511c 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -1,7 +1,7 @@ from datetime import datetime import uuid -from sqlalchemy import desc +from sqlalchemy import asc, desc from sqlalchemy.sql.expression import bindparam from app import db @@ -65,14 +65,16 @@ def dao_get_all_templates_for_service(service_id, template_type=None): template_type=template_type, archived=False ).order_by( - desc(Template.created_at) + asc(Template.name), + asc(Template.template_type), ).all() return Template.query.filter_by( service_id=service_id, archived=False ).order_by( - desc(Template.created_at) + 
asc(Template.name), + asc(Template.template_type), ).all() diff --git a/app/inbound_sms/inbound_sms_schemas.py b/app/inbound_sms/inbound_sms_schemas.py index c0e644d68..24e642f49 100644 --- a/app/inbound_sms/inbound_sms_schemas.py +++ b/app/inbound_sms/inbound_sms_schemas.py @@ -3,7 +3,7 @@ get_inbound_sms_for_service_schema = { "description": "schema for parameters allowed when searching for to field=", "type": "object", "properties": { - "phone_number": {"type": "string", "format": "phone_number"}, + "phone_number": {"type": "string"}, "limit": {"type": ["integer", "null"], "minimum": 1} } } diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index a78e3c237..3e67e4ee1 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ -1,11 +1,10 @@ from flask import ( Blueprint, jsonify, - request, - current_app, json) -from jsonschema import ValidationError + request +) -from notifications_utils.recipients import validate_and_format_phone_number +from notifications_utils.recipients import try_validate_and_format_phone_number from app.dao.inbound_sms_dao import ( dao_get_inbound_sms_for_service, @@ -26,25 +25,31 @@ inbound_sms = Blueprint( register_errors(inbound_sms) -@inbound_sms.route('', methods=['POST', 'GET']) -def get_inbound_sms_for_service(service_id): - - if request.method == 'GET': - limit = request.args.get('limit') - user_number = request.args.get('user_number') - - if user_number: - # we use this to normalise to an international phone number - user_number = validate_and_format_phone_number(user_number, international=True) - - results = dao_get_inbound_sms_for_service(service_id, limit, user_number) - - return jsonify(data=[row.serialize() for row in results]) +@inbound_sms.route('', methods=['POST']) +def post_query_inbound_sms_for_service(service_id): + form = validate(request.get_json(), get_inbound_sms_for_service_schema) + if 'phone_number' in form: + # we use this to normalise to an international phone number - but this may fail if it's an alphanumeric + user_number = try_validate_and_format_phone_number(form['phone_number'], international=True) else: - form = validate(request.get_json(), get_inbound_sms_for_service_schema) - results = dao_get_inbound_sms_for_service(service_id, form.get('limit'), form.get('phone_number')) + user_number = None + results = dao_get_inbound_sms_for_service(service_id, form.get('limit'), user_number) - return jsonify(data=[row.serialize() for row in results]) + return jsonify(data=[row.serialize() for row in results]) + + +@inbound_sms.route('', methods=['GET']) +def get_inbound_sms_for_service(service_id): + limit = request.args.get('limit') + user_number = request.args.get('user_number') + + if user_number: + # we use this to normalise to an international phone number - but this may fail if it's an alphanumeric + user_number = try_validate_and_format_phone_number(user_number, international=True) + + results = dao_get_inbound_sms_for_service(service_id, limit, user_number) + + return jsonify(data=[row.serialize() for row in results]) @inbound_sms.route('/summary') diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py index 3167134e3..22a7fe61e 100644 --- a/app/notifications/receive_notifications.py +++ b/app/notifications/receive_notifications.py @@ -2,7 +2,7 @@ from urllib.parse import unquote import iso8601 from flask import jsonify, Blueprint, current_app, request, abort -from notifications_utils.recipients import validate_and_format_phone_number +from 
notifications_utils.recipients import try_validate_and_format_phone_number from app import statsd_client, firetext_client, mmg_client from app.celery import tasks @@ -109,7 +109,11 @@ def format_mmg_datetime(date): def create_inbound_sms_object(service, content, from_number, provider_ref, date_received, provider_name): - user_number = validate_and_format_phone_number(from_number, international=True) + user_number = try_validate_and_format_phone_number( + from_number, + international=True, + log_msg='Invalid from_number received' + ) provider_date = date_received if provider_date: diff --git a/application.py b/application.py index b606aee81..8d94e6d0c 100644 --- a/application.py +++ b/application.py @@ -1,38 +1,10 @@ -#!/usr/bin/env python - +##!/usr/bin/env python from __future__ import print_function -import os -from flask_script import Manager, Server -from flask_migrate import Migrate, MigrateCommand -from app import (create_app, db, commands) -application = create_app() -manager = Manager(application) -port = int(os.environ.get('PORT', 6011)) -manager.add_command("runserver", Server(host='0.0.0.0', port=port)) +from flask import Flask -migrate = Migrate(application, db) -manager.add_command('db', MigrateCommand) -manager.add_command('create_provider_rate', commands.CreateProviderRateCommand) -manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) -manager.add_command('custom_db_script', commands.CustomDbScript) -manager.add_command('populate_monthly_billing', commands.PopulateMonthlyBilling) -manager.add_command('backfill_processing_time', commands.BackfillProcessingTime) -manager.add_command('populate_service_email_reply_to', commands.PopulateServiceEmailReplyTo) -manager.add_command('populate_service_sms_sender', commands.PopulateServiceSmsSender) -manager.add_command('populate_service_letter_contact', commands.PopulateServiceLetterContact) -manager.add_command('populate_service_and_service_history_free_sms_fragment_limit', - commands.PopulateServiceAndServiceHistoryFreeSmsFragmentLimit) -manager.add_command('populate_annual_billing', commands.PopulateAnnualBilling) -manager.add_command('rerun_build_dvla_file', commands.ReRunBuildDvlaFileForJob) +from app import create_app +application = Flask('app') -@manager.command -def list_routes(): - """List URLs of all application routes.""" - for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule): - print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) - - -if __name__ == '__main__': - manager.run() +create_app(application) diff --git a/manifest-api-base.yml b/manifest-api-base.yml index 28cf8a8cd..9afb9f860 100644 --- a/manifest-api-base.yml +++ b/manifest-api-base.yml @@ -1,7 +1,7 @@ --- buildpack: python_buildpack -command: scripts/run_app_paas.sh gunicorn -c /home/vcap/app/gunicorn_config.py --error-logfile /home/vcap/logs/gunicorn_error.log -w 5 -b 0.0.0.0:$PORT wsgi +command: scripts/run_app_paas.sh gunicorn -c /home/vcap/app/gunicorn_config.py --error-logfile /home/vcap/logs/gunicorn_error.log -w 5 -b 0.0.0.0:$PORT application services: - notify-aws - notify-config @@ -14,6 +14,8 @@ services: env: NOTIFY_APP_NAME: public-api CW_APP_NAME: api + # required by cf run-task + FLASK_APP: application.py instances: 1 memory: 1G diff --git a/migrations/README b/migrations/README deleted file mode 100644 index 7c44eae44..000000000 --- a/migrations/README +++ /dev/null @@ -1,9 +0,0 @@ -Generic single-database configuration. 
- -python application.py db migrate to generate migration script. - -python application.py db upgrade to upgrade db with script. - -python application.py db downgrade to rollback db changes. - -python application.py db current to show current script. diff --git a/migrations/README.md b/migrations/README.md new file mode 100644 index 000000000..b2977a1cf --- /dev/null +++ b/migrations/README.md @@ -0,0 +1,9 @@ +Generic single-database configuration. + +flask db migrate to generate migration script. + +flask db upgrade to upgrade db with script. + +flask db downgrade to rollback db changes. + +flask db current to show current script. diff --git a/requirements.txt b/requirements.txt index 8d69902c5..93743ed58 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ docopt==0.6.2 Flask-Bcrypt==0.7.1 Flask-Marshmallow==0.8.0 Flask-Migrate==2.1.1 -Flask-Script==2.0.5 Flask-SQLAlchemy==2.3.2 Flask==0.12.2 gunicorn==19.7.1 @@ -26,6 +25,6 @@ notifications-python-client==4.6.0 awscli>=1.11,<1.12 awscli-cwlogs>=1.4,<1.5 -git+https://github.com/alphagov/notifications-utils.git@23.0.1#egg=notifications-utils==23.0.1 +git+https://github.com/alphagov/notifications-utils.git@23.1.0#egg=notifications-utils==23.1.0 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/run_celery.py b/run_celery.py index 013499615..4fb28ae08 100644 --- a/run_celery.py +++ b/run_celery.py @@ -1,6 +1,10 @@ #!/usr/bin/env python # notify_celery is referenced from manifest_delivery_base.yml, and cannot be removed +from flask import Flask + from app import notify_celery, create_app -application = create_app('delivery') + +application = Flask('delivery') +create_app(application) application.app_context().push() diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 9ee35d24b..c46d03aa6 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -36,4 +36,4 @@ createdb notification_api # Upgrade databases source environment.sh -python application.py db upgrade +flask db upgrade diff --git a/scripts/check_if_new_migration.py b/scripts/check_if_new_migration.py index 2cd1614d0..2878086f6 100644 --- a/scripts/check_if_new_migration.py +++ b/scripts/check_if_new_migration.py @@ -8,7 +8,7 @@ def get_latest_db_migration_to_apply(): project_dir = dirname(dirname(abspath(__file__))) # Get the main project directory migrations_dir = '{}/migrations/versions/'.format(project_dir) migration_files = [migration_file for migration_file in os.listdir(migrations_dir) if migration_file.endswith('py')] - # sometimes there's a trailing underscore, if script was created with `python app.py db migrate --rev-id=...` + # sometimes there's a trailing underscore, if script was created with `flask db migrate --rev-id=...` latest_file = sorted(migration_files, reverse=True)[0].replace('_.py', '').replace('.py', '') return latest_file diff --git a/scripts/fix_migrations.py b/scripts/fix_migrations.py new file mode 100755 index 000000000..3965ed411 --- /dev/null +++ b/scripts/fix_migrations.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# encoding: utf-8 + +import os +import sys + +from alembic.script import ScriptDirectory + +sys.path.append('.') + + +def get_branch_points(migrations): + return [m for m in migrations.walk_revisions() if m.is_branch_point] + + +def get_branches(migrations, branch_point, heads): + return [list(migrations.iterate_revisions(m, branch_point.revision))[::-1] + for m in heads] + + +def choice(prompt, options, option_fmt=lambda x: x): + print("{}:\n".format(prompt)) + for i, 
option in enumerate(options): + print("{}. {}".format(i + 1, option_fmt(option))) + + print() + choice = input("Option> ") + + return options[int(choice) - 1] + + +def rename_revision(current_revision, new_base): + new_id = int(new_base[:4]) + 1 + return "{:04d}{}".format(new_id, current_revision[4:]) + + +def reorder_revisions(revisions, old_base, new_base): + if not revisions: + return + + head, *tail = revisions + new_revision_id = rename_revision(head.revision, new_base) + + print("Moving {} to {}".format(head.revision, new_revision_id)) + with open(head.path, 'r') as rev_file: + file_data = rev_file.read() + + file_data = file_data.replace(head.revision, new_revision_id).replace(old_base, new_base) + + with open(head.path.replace(head.revision, new_revision_id), 'w') as rev_file: + rev_file.write(file_data) + + print("Removing {}".format(head.path)) + os.remove(head.path) + + reorder_revisions(tail, head.revision, new_revision_id) + + +def fix_branch_point(migrations, branch_point, heads): + print("Migrations directory has a branch point at {}".format(branch_point.revision)) + + branches = get_branches(migrations, branch_point, heads) + move_branch = choice("Select migrations to move", branches, + lambda x: " -> ".join(m.revision for m in x)) + branches.remove(move_branch) + + reorder_revisions(move_branch, branch_point.revision, branches[0][-1].revision) + + +def main(migrations_path): + migrations = ScriptDirectory(migrations_path) + + branch_points = get_branch_points(migrations) + heads = migrations.get_heads() + + if not branch_points: + print("Migrations are ordered") + elif len(branch_points) == 1 and len(heads) == 2: + fix_branch_point(migrations, branch_points[0], heads) + else: + print("Found {} branch points and {} heads, can't fix automatically".format( + [bp.revision for bp in branch_points], heads)) + sys.exit(1) + + +if __name__ == '__main__': + main('migrations/') diff --git a/scripts/run_app.sh b/scripts/run_app.sh index e8ab00bb6..9997d1072 100755 --- a/scripts/run_app.sh +++ b/scripts/run_app.sh @@ -3,4 +3,4 @@ set -e source environment.sh -python3 application.py runserver +flask run -p 6011 diff --git a/server_commands.py b/server_commands.py deleted file mode 100644 index 9f0ce8a60..000000000 --- a/server_commands.py +++ /dev/null @@ -1,26 +0,0 @@ -from flask_script import Manager, Server -from flask_migrate import Migrate, MigrateCommand -from app import (create_app, db, commands) -import os - -default_env_file = '/home/ubuntu/environment' -environment = 'live' - -if os.path.isfile(default_env_file): - with open(default_env_file, 'r') as environment_file: - environment = environment_file.readline().strip() - -from app.config import configs - -os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment] - -application = create_app() - -manager = Manager(application) -migrate = Migrate(application, db) -manager.add_command('db', MigrateCommand) -manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand) -manager.add_command('custom_db_script', commands.CustomDbScript) - -if __name__ == '__main__': - manager.run() diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index 607330097..62f5c8a8f 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -163,7 +163,7 @@ def test_get_all_templates_for_service(notify_db, notify_db_session, service_fac assert len(dao_get_all_templates_for_service(service_2.id)) == 2 -def 
test_get_all_templates_for_service_shows_newest_created_first(notify_db, notify_db_session, sample_service): +def test_get_all_templates_for_service_is_alphabetised(notify_db, notify_db_session, sample_service): template_1 = create_sample_template( notify_db, notify_db_session, @@ -190,14 +190,14 @@ def test_get_all_templates_for_service_shows_newest_created_first(notify_db, not ) assert Template.query.count() == 3 - assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 3' + assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 1' assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 2' - assert dao_get_all_templates_for_service(sample_service.id)[2].name == 'Sample Template 1' + assert dao_get_all_templates_for_service(sample_service.id)[2].name == 'Sample Template 3' - template_2.name = 'Sample Template 2 (updated)' + template_2.name = 'AAAAA Sample Template 2' dao_update_template(template_2) - assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'Sample Template 3' - assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 2 (updated)' + assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'AAAAA Sample Template 2' + assert dao_get_all_templates_for_service(sample_service.id)[1].name == 'Sample Template 1' def test_get_all_returns_empty_list_if_no_templates(sample_service): diff --git a/tests/app/db.py b/tests/app/db.py index 687b456c0..1da012e0a 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -121,12 +121,13 @@ def create_service_with_defined_sms_sender( def create_template( service, template_type=SMS_TYPE, + template_name=None, subject='Template subject', content='Dear Sir/Madam, Hello. 
Yours Truly, The Government.', template_id=None ): data = { - 'name': '{} Template Name'.format(template_type), + 'name': template_name or '{} Template Name'.format(template_type), 'template_type': template_type, 'content': content, 'service': service, diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index f278251be..a0a6d612b 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -1,28 +1,20 @@ from datetime import datetime import pytest -from flask import json from freezegun import freeze_time -from tests import create_authorization_header from tests.app.db import create_inbound_sms, create_service, create_service_with_inbound_number -def test_get_inbound_sms_with_no_params(client, sample_service): +def test_post_to_get_inbound_sms_with_no_params(admin_request, sample_service): one = create_inbound_sms(sample_service) two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - - data = {} - - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data={} + )['data'] assert len(sms) == 2 assert {inbound['id'] for inbound in sms} == {str(one.id), str(two.id)} @@ -37,40 +29,34 @@ def test_get_inbound_sms_with_no_params(client, sample_service): } -def test_get_inbound_sms_with_limit(client, sample_service): +def test_post_to_get_inbound_sms_with_limit(admin_request, sample_service): with freeze_time('2017-01-01'): one = create_inbound_sms(sample_service) with freeze_time('2017-01-02'): two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - data = {'limit': 1} - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] assert len(sms) == 1 assert sms[0]['id'] == str(two.id) -def test_get_inbound_sms_should_error_with_invalid_limit(client, sample_service): - - auth_header = create_authorization_header() - +def test_post_to_get_inbound_sms_should_error_with_invalid_limit(admin_request, sample_service): data = {'limit': 'limit'} - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + error_resp = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data, + _expected_status=400 + ) - error_resp = json.loads(response.get_data(as_text=True)) assert error_resp['status_code'] == 400 assert error_resp['errors'] == [{ 'error': 'ValidationError', @@ -78,73 +64,61 @@ def test_get_inbound_sms_should_error_with_invalid_limit(client, sample_service) }] -def test_get_inbound_sms_should_error_with_invalid_phone_number(client, sample_service): - - auth_header = create_authorization_header() - - data = {'phone_number': 'invalid phone number'} - - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - 
headers=[('Content-Type', 'application/json'), auth_header]) - - error_resp = json.loads(response.get_data(as_text=True)) - assert error_resp['status_code'] == 400 - assert error_resp['errors'] == [{ - 'error': 'ValidationError', - 'message': "phone_number Must not contain letters or symbols" - }] - - @pytest.mark.parametrize('user_number', [ '(07700) 900-001', '+4407700900001', '447700900001', ]) -def test_get_inbound_sms_filters_user_number(client, sample_service, user_number): +def test_post_to_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='447700900001') two = create_inbound_sms(sample_service, user_number='447700900002') - auth_header = create_authorization_header() - data = { 'limit': 1, 'phone_number': user_number } - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] assert len(sms) == 1 assert sms[0]['id'] == str(one.id) assert sms[0]['user_number'] == str(one.user_number) -def test_get_inbound_sms_filters_international_user_number(admin_request, sample_service): +def test_post_to_get_inbound_sms_filters_international_user_number(admin_request, sample_service): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='12025550104') two = create_inbound_sms(sample_service) - auth_header = create_authorization_header() - data = { 'limit': 1, 'phone_number': '+1 (202) 555-0104' } - response = client.post( - path='/service/{}/inbound-sms'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) + sms = admin_request.post( + 'inbound_sms.post_query_inbound_sms_for_service', + service_id=sample_service.id, + _data=data + )['data'] - json_resp = json.loads(response.get_data(as_text=True)) - sms = json_resp['data'] + assert len(sms) == 1 + assert sms[0]['id'] == str(one.id) + assert sms[0]['user_number'] == str(one.user_number) + + +def test_post_to_get_inbound_sms_allows_badly_formatted_number(admin_request, sample_service): + one = create_inbound_sms(sample_service, user_number='ALPHANUM3R1C') + + sms = admin_request.post( + 'inbound_sms.get_inbound_sms_for_service', + service_id=sample_service.id, + _data={'phone_number': 'ALPHANUM3R1C'} + )['data'] assert len(sms) == 1 assert sms[0]['id'] == str(one.id) @@ -156,7 +130,7 @@ def test_get_inbound_sms_filters_international_user_number(admin_request, sample ############################################################## -def test_get_inbound_sms(admin_request, sample_service): +def test_old_get_inbound_sms(admin_request, sample_service): one = create_inbound_sms(sample_service) two = create_inbound_sms(sample_service) @@ -180,7 +154,7 @@ def test_get_inbound_sms(admin_request, sample_service): } -def test_get_inbound_sms_limits(admin_request, sample_service): +def test_old_get_inbound_sms_limits(admin_request, sample_service): with freeze_time('2017-01-01'): one = create_inbound_sms(sample_service) with freeze_time('2017-01-02'): @@ -201,7 +175,7 @@ def test_get_inbound_sms_limits(admin_request, sample_service): '+4407700900001', '447700900001', ]) -def 
test_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): +def test_old_get_inbound_sms_filters_user_number(admin_request, sample_service, user_number): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='447700900001') two = create_inbound_sms(sample_service, user_number='447700900002') @@ -217,7 +191,7 @@ def test_get_inbound_sms_filters_user_number(admin_request, sample_service, user assert sms['data'][0]['user_number'] == str(one.user_number) -def test_get_inbound_sms_filters_international_user_number(admin_request, sample_service): +def test_old_get_inbound_sms_filters_international_user_number(admin_request, sample_service): # user_number in the db is international and normalised one = create_inbound_sms(sample_service, user_number='12025550104') two = create_inbound_sms(sample_service) @@ -233,6 +207,20 @@ def test_get_inbound_sms_filters_international_user_number(admin_request, sample assert sms['data'][0]['user_number'] == str(one.user_number) +def test_old_get_inbound_sms_allows_badly_formatted_number(admin_request, sample_service): + one = create_inbound_sms(sample_service, user_number='ALPHANUM3R1C') + + sms = admin_request.get( + 'inbound_sms.get_inbound_sms_for_service', + service_id=sample_service.id, + user_number='ALPHANUM3R1C', + ) + + assert len(sms['data']) == 1 + assert sms['data'][0]['id'] == str(one.id) + assert sms['data'][0]['user_number'] == str(one.user_number) + + ############################## # End delete section ############################## diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 7f784135c..fbd190a50 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -413,3 +413,24 @@ def test_firetext_inbound_sms_auth(notify_db_session, notify_api, client, mocker with set_config(notify_api, 'FIRETEXT_INBOUND_SMS_AUTH', keys): response = firetext_post(client, data, auth=bool(auth), password=auth) assert response.status_code == status_code + + +def test_create_inbound_sms_object_works_with_alphanumeric_sender(sample_service_full_permissions): + data = { + 'Message': 'hello', + 'Number': sample_service_full_permissions.get_inbound_number(), + 'MSISDN': 'ALPHANUM3R1C', + 'DateRecieved': '2017-01-02+03%3A04%3A05', + 'ID': 'bar', + } + + inbound_sms = create_inbound_sms_object( + service=sample_service_full_permissions, + content=format_mmg_message(data["Message"]), + from_number='ALPHANUM3R1C', + provider_ref='foo', + date_received=None, + provider_name="mmg" + ) + + assert inbound_sms.user_number == 'ALPHANUM3R1C' diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index da72c3dff..6d666bfa3 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -301,10 +301,10 @@ def test_should_be_able_to_get_all_templates_for_a_service(client, sample_user, assert response.status_code == 200 update_json_resp = json.loads(response.get_data(as_text=True)) - assert update_json_resp['data'][0]['name'] == 'my template 2' + assert update_json_resp['data'][0]['name'] == 'my template 1' assert update_json_resp['data'][0]['version'] == 1 assert update_json_resp['data'][0]['created_at'] - assert update_json_resp['data'][1]['name'] == 'my template 1' + assert update_json_resp['data'][1]['name'] == 'my template 2' assert update_json_resp['data'][1]['version'] == 1 assert 
diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py
index da72c3dff..6d666bfa3 100644
--- a/tests/app/template/test_rest.py
+++ b/tests/app/template/test_rest.py
@@ -301,10 +301,10 @@ def test_should_be_able_to_get_all_templates_for_a_service(client, sample_user,
     assert response.status_code == 200
 
     update_json_resp = json.loads(response.get_data(as_text=True))
-    assert update_json_resp['data'][0]['name'] == 'my template 2'
+    assert update_json_resp['data'][0]['name'] == 'my template 1'
     assert update_json_resp['data'][0]['version'] == 1
     assert update_json_resp['data'][0]['created_at']
-    assert update_json_resp['data'][1]['name'] == 'my template 1'
+    assert update_json_resp['data'][1]['name'] == 'my template 2'
     assert update_json_resp['data'][1]['version'] == 1
     assert update_json_resp['data'][1]['created_at']
diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py
index 1c5ceedd9..d5f6e75e1 100644
--- a/tests/app/test_commands.py
+++ b/tests/app/test_commands.py
@@ -1,12 +1,14 @@
 from datetime import datetime
 
-from app.commands import BackfillProcessingTime
+from app.commands import backfill_processing_time
 
 
-def test_backfill_processing_time_works_for_correct_dates(mocker):
+def test_backfill_processing_time_works_for_correct_dates(mocker, notify_api):
     send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end')
 
-    BackfillProcessingTime().run('2017-08-01', '2017-08-03')
+    # backfill_processing_time is a click.Command object - if you try invoking the callback on its own, it
+    # throws a `RuntimeError: There is no active click context.` - so get at the original function using __wrapped__
+    backfill_processing_time.callback.__wrapped__('2017-08-01', '2017-08-03')
 
     assert send_mock.call_count == 3
     send_mock.assert_any_call(datetime(2017, 7, 31, 23, 0), datetime(2017, 8, 1, 23, 0))
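The `__wrapped__` trick above reaches underneath the click wrapper to call the undecorated function directly, which is why no click context is needed. An alternative is to drive the command through Flask's CLI test runner so the context is created for you. The sketch below assumes the command is reachable on the CLI as `command backfill_processing_time` (the registered group and argument style depend on `setup_commands`, which is not part of this diff) and that the installed Flask version provides `test_cli_runner()`.

```
# Sketch only: the CLI group/command names and argument style are assumptions.
def test_backfill_processing_time_via_cli_runner(mocker, notify_api):
    send_mock = mocker.patch('app.commands.send_processing_time_for_start_and_end')

    # notify_api is the Flask app; the CLI runner invokes the command with a real
    # click context instead of calling the callback directly.
    runner = notify_api.test_cli_runner()
    result = runner.invoke(args=['command', 'backfill_processing_time', '2017-08-01', '2017-08-03'])

    assert result.exit_code == 0
    assert send_mock.call_count == 3
```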
diff --git a/tests/app/v2/templates/test_get_templates.py b/tests/app/v2/templates/test_get_templates.py
index 6bc4aabde..1a0f1eacc 100644
--- a/tests/app/v2/templates/test_get_templates.py
+++ b/tests/app/v2/templates/test_get_templates.py
@@ -1,6 +1,7 @@
 import pytest
 from flask import json
+from itertools import product
 
 from app.models import TEMPLATE_TYPES, EMAIL_TYPE
 from tests import create_authorization_header
@@ -8,12 +9,15 @@ from tests.app.db import create_template
 
 
 def test_get_all_templates_returns_200(client, sample_service):
-    num_templates = 3
-    templates = []
-    for i in range(num_templates):
-        for tmp_type in TEMPLATE_TYPES:
-            subject = 'subject_{}'.format(i) if tmp_type == EMAIL_TYPE else ''
-            templates.append(create_template(sample_service, template_type=tmp_type, subject=subject))
+    templates = [
+        create_template(
+            sample_service,
+            template_type=tmp_type,
+            subject='subject_{}'.format(name) if tmp_type == EMAIL_TYPE else '',
+            template_name=name,
+        )
+        for name, tmp_type in product(('A', 'B', 'C'), TEMPLATE_TYPES)
+    ]
 
     auth_header = create_authorization_header(service_id=sample_service.id)
 
@@ -25,25 +29,27 @@ def test_get_all_templates_returns_200(client, sample_service):
 
     json_response = json.loads(response.get_data(as_text=True))
 
-    assert len(json_response['templates']) == num_templates * len(TEMPLATE_TYPES)
+    assert len(json_response['templates']) == len(templates)
 
-    # need to reverse index as get all templates returns list sorted by descending date
-    for i in range(len(json_response['templates'])):
-        reverse_index = len(json_response['templates']) - 1 - i
-        assert json_response['templates'][reverse_index]['id'] == str(templates[i].id)
-        assert json_response['templates'][reverse_index]['body'] == templates[i].content
-        assert json_response['templates'][reverse_index]['type'] == templates[i].template_type
-        if templates[i].template_type == EMAIL_TYPE:
-            assert json_response['templates'][reverse_index]['subject'] == templates[i].subject
+    for index, template in enumerate(json_response['templates']):
+        assert template['id'] == str(templates[index].id)
+        assert template['body'] == templates[index].content
+        assert template['type'] == templates[index].template_type
+        if templates[index].template_type == EMAIL_TYPE:
+            assert template['subject'] == templates[index].subject
 
 
 @pytest.mark.parametrize("tmp_type", TEMPLATE_TYPES)
 def test_get_all_templates_for_valid_type_returns_200(client, sample_service, tmp_type):
-    num_templates = 3
-    templates = []
-    for i in range(num_templates):
-        subject = 'subject_{}'.format(i) if tmp_type == EMAIL_TYPE else ''
-        templates.append(create_template(sample_service, template_type=tmp_type, subject=subject))
+    templates = [
+        create_template(
+            sample_service,
+            template_type=tmp_type,
+            template_name='Template {}'.format(i),
+            subject='subject_{}'.format(i) if tmp_type == EMAIL_TYPE else ''
+        )
+        for i in range(3)
+    ]
 
     auth_header = create_authorization_header(service_id=sample_service.id)
 
@@ -55,16 +61,14 @@ def test_get_all_templates_for_valid_type_returns_200(client, sample_service, tm
 
     json_response = json.loads(response.get_data(as_text=True))
 
-    assert len(json_response['templates']) == num_templates
+    assert len(json_response['templates']) == len(templates)
 
-    # need to reverse index as get all templates returns list sorted by descending date
-    for i in range(len(json_response['templates'])):
-        reverse_index = len(json_response['templates']) - 1 - i
-        assert json_response['templates'][reverse_index]['id'] == str(templates[i].id)
-        assert json_response['templates'][reverse_index]['body'] == templates[i].content
-        assert json_response['templates'][reverse_index]['type'] == tmp_type
-        if templates[i].template_type == EMAIL_TYPE:
-            assert json_response['templates'][reverse_index]['subject'] == templates[i].subject
+    for index, template in enumerate(json_response['templates']):
+        assert template['id'] == str(templates[index].id)
+        assert template['body'] == templates[index].content
+        assert template['type'] == tmp_type
+        if templates[index].template_type == EMAIL_TYPE:
+            assert template['subject'] == templates[index].subject
 
 
 @pytest.mark.parametrize("tmp_type", TEMPLATE_TYPES)
diff --git a/tests/conftest.py b/tests/conftest.py
index 321d063a0..266346ba9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,10 +1,9 @@
 from contextlib import contextmanager
 import os
 
+from flask import Flask
 from alembic.command import upgrade
 from alembic.config import Config
-from flask_migrate import Migrate, MigrateCommand
-from flask_script import Manager
 import boto3
 import pytest
 import sqlalchemy
@@ -14,7 +13,8 @@ from app import create_app, db
 
 @pytest.fixture(scope='session')
 def notify_api():
-    app = create_app()
+    app = Flask('test')
+    create_app(app)
 
     # deattach server-error error handlers - error_handler_spec looks like:
     #   {'blueprint_name': {
@@ -76,8 +76,6 @@ def notify_db(notify_api, worker_id):
         current_app.config['SQLALCHEMY_DATABASE_URI'] += '_{}'.format(worker_id)
         create_test_db(current_app.config['SQLALCHEMY_DATABASE_URI'])
 
-    Migrate(notify_api, db)
-    Manager(db, MigrateCommand)
     BASE_DIR = os.path.dirname(os.path.dirname(__file__))
     ALEMBIC_CONFIG = os.path.join(BASE_DIR, 'migrations')
    config = Config(ALEMBIC_CONFIG + '/alembic.ini')
diff --git a/wsgi.py b/wsgi.py
deleted file mode 100644
index 9fbeb28ac..000000000
--- a/wsgi.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from app import create_app
-
-
-application = create_app()
-
-if __name__ == "__main__":
-    application.run()
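With `wsgi.py` deleted and `create_app` now taking an already-constructed Flask instance (the conftest change above does `app = Flask('test')` followed by `create_app(app)`), the production entry point has to build the app object itself before handing it to `create_app`. The module that does this is not shown in this part of the diff; the following is only a guess at its likely shape, not the file added in this PR.

```
# Hypothetical entry-point sketch (an application.py-style module) - an assumption,
# not the actual file from this change.
from flask import Flask

from app import create_app

application = Flask('app')
create_app(application)

if __name__ == "__main__":
    application.run()
```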