mirror of
https://github.com/GSA/notifications-api.git
synced 2025-12-14 01:02:09 -05:00
Remove flask-script, move commands to click
Click (http://click.pocoo.org/) is the library Flask uses to run its CLI commands. In removing flask_script (which is unmaintained), we had to migrate all of our commands to click. This is a change for the better in my eyes: you no longer need to define each command in several places, and it makes managing options a bit easier. View the diff with whitespace turned off unless you're a masochist.
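For anyone skimming, the overall shape of the migration is roughly the sketch below (illustrative only — the `hello` command and its option are made up; the real commands are in app/commands.py further down the diff): a flask_script `Command` class with an `option_list` becomes a plain function decorated with `@click.option`, collected under a `click.Group` that gets attached to the app's CLI.

```python
# Illustrative sketch of the flask_script -> click pattern used in this commit.
# The `hello` command and its option are made up; the real commands live in app/commands.py.
import click
from flask import Flask

# A click group collects the custom commands under one namespace.
commands = click.Group(name='commands', help='Additional commands')


@commands.command()
@click.option('-n', '--name', required=True, help='Who to greet')
def hello(name):
    # With click, the option declaration and the handler live in one place.
    print('Hello {}'.format(name))


def setup_commands(application):
    # Registering the group on app.cli makes it available as `flask commands ...`.
    application.cli.add_command(commands)


app = Flask(__name__)
setup_commands(app)
```

Once the group is registered, each command is invoked as `flask commands <name>` instead of going through flask_script's Manager.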
@@ -44,6 +44,9 @@ export FIRETEXT_API_KEY='FIRETEXT_ACTUAL_KEY'
 export STATSD_PREFIX='YOU_OWN_PREFIX'
 export NOTIFICATION_QUEUE_PREFIX='YOUR_OWN_PREFIX'
 export REDIS_URL="redis://localhost:6379/0"
+export FLASK_APP=application.py
+export FLASK_DEBUG=1
+export WERKZEUG_DEBUG_PIN=off
 "> environment.sh
 ```
@@ -6,6 +6,7 @@ import uuid
 from flask import Flask, _request_ctx_stack, request, g, jsonify
 from flask_sqlalchemy import SQLAlchemy
 from flask_marshmallow import Marshmallow
+from flask_migrate import Migrate
 from monotonic import monotonic
 from notifications_utils.clients.statsd.statsd_client import StatsdClient
 from notifications_utils.clients.redis.redis_client import RedisClient
@@ -25,6 +26,7 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
 DATE_FORMAT = "%Y-%m-%d"

 db = SQLAlchemy()
+migrate = Migrate()
 ma = Marshmallow()
 notify_celery = NotifyCelery()
 firetext_client = FiretextClient()
@@ -42,21 +44,19 @@ api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user)
 authenticated_service = LocalProxy(lambda: _request_ctx_stack.top.authenticated_service)


-def create_app(app_name=None):
-    application = Flask(__name__)
-
+def create_app(application):
     from app.config import configs

     notify_environment = os.environ['NOTIFY_ENVIRONMENT']

     application.config.from_object(configs[notify_environment])

-    if app_name:
-        application.config['NOTIFY_APP_NAME'] = app_name
+    application.config['NOTIFY_APP_NAME'] = application.name

     init_app(application)
     request_helper.init_app(application)
     db.init_app(application)
+    migrate.init_app(application, db=db)
     ma.init_app(application)
     statsd_client.init_app(application)
     logging.init_app(application, statsd_client)
@@ -73,6 +73,10 @@ def create_app(app_name=None):
     register_blueprint(application)
     register_v2_blueprints(application)

+    # avoid circular imports by importing this file later 😬
+    from app.commands import setup_commands
+    setup_commands(application)
+
     return application
app/commands.py (559 lines changed)
@@ -1,7 +1,9 @@
 import uuid
 from datetime import datetime, timedelta
 from decimal import Decimal
-from flask_script import Command, Option
+
+from flask import current_app
+import click

 from app import db
 from app.dao.monthly_billing_dao import (
@@ -14,181 +16,152 @@ from app.dao.services_dao import (
     delete_service_and_all_associated_db_objects,
     dao_fetch_all_services_by_user
 )
-from app.dao.provider_rates_dao import create_provider_rates
+from app.dao.provider_rates_dao import create_provider_rates as dao_create_provider_rates
 from app.dao.users_dao import (delete_model_user, delete_user_verify_codes)
 from app.utils import get_midnight_for_day_before, get_london_midnight_in_utc
 from app.performance_platform.processing_time import send_processing_time_for_start_and_end


-class CreateProviderRateCommand(Command):
-
-    option_list = (
-        Option('-p', '--provider_name', dest="provider_name", help='Provider name'),
-        Option('-c', '--cost', dest="cost", help='Cost (pence) per message including decimals'),
-        Option('-d', '--valid_from', dest="valid_from", help="Date (%Y-%m-%dT%H:%M:%S) valid from")
-    )
-
-    def run(self, provider_name, cost, valid_from):
-        if provider_name not in PROVIDERS:
-            raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS)))
-
-        try:
-            cost = Decimal(cost)
-        except:
-            raise Exception("Invalid cost value.")
-
-        try:
-            valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from)
-        except:
-            raise Exception("Invalid valid_from date. Use the format %Y-%m-%dT%H:%M:%S")
-
-        create_provider_rates(provider_name, valid_from, cost)
-
-
-class PurgeFunctionalTestDataCommand(Command):
-
-    option_list = (
-        Option('-u', '-user-email-prefix', dest='user_email_prefix', help="Functional test user email prefix."),
-    )
-
-    def run(self, user_email_prefix=None):
-        if user_email_prefix:
-            users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all()
-            for usr in users:
-                # Make sure the full email includes a uuid in it
-                # Just in case someone decides to use a similar email address.
-                try:
-                    uuid.UUID(usr.email_address.split("@")[0].split('+')[1])
-                except ValueError:
-                    print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address))
-                else:
-                    services = dao_fetch_all_services_by_user(usr.id)
-                    if services:
-                        for service in services:
-                            delete_service_and_all_associated_db_objects(service)
-                    else:
-                        delete_user_verify_codes(usr)
-                        delete_model_user(usr)
-
-
-class CustomDbScript(Command):
-
-    option_list = (
-        Option('-n', '-name-of-db-function', dest='name_of_db_function', help="Function name of the DB script to run"),
-    )
-
-    def run(self, name_of_db_function):
-        db_function = getattr(self, name_of_db_function, None)
-        if callable(db_function):
-            db_function()
-        else:
-            print('The specified function does not exist.')
-
-    def backfill_notification_statuses(self):
-        """
-        This will be used to populate the new `Notification._status_fkey` with the old
-        `Notification._status_enum`
-        """
-        LIMIT = 250000
-        subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT)
-        update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq)
-        result = db.session.execute(subq).fetchall()
-
-        while len(result) > 0:
-            db.session.execute(update)
-            print('commit {} updates at {}'.format(LIMIT, datetime.utcnow()))
-            db.session.commit()
-            result = db.session.execute(subq).fetchall()
-
-    def update_notification_international_flag(self):
-        # 250,000 rows takes 30 seconds to update.
-        subq = "select id from notifications where international is null limit 250000"
-        update = "update notifications set international = False where id in ({})".format(subq)
-        result = db.session.execute(subq).fetchall()
-
-        while len(result) > 0:
-            db.session.execute(update)
-            print('commit 250000 updates at {}'.format(datetime.utcnow()))
-            db.session.commit()
-            result = db.session.execute(subq).fetchall()
-
-        # Now update notification_history
-        subq_history = "select id from notification_history where international is null limit 250000"
-        update_history = "update notification_history set international = False where id in ({})".format(subq_history)
-        result_history = db.session.execute(subq_history).fetchall()
-        while len(result_history) > 0:
-            db.session.execute(update_history)
-            print('commit 250000 updates at {}'.format(datetime.utcnow()))
-            db.session.commit()
-            result_history = db.session.execute(subq_history).fetchall()
-
-    def fix_notification_statuses_not_in_sync(self):
-        """
-        This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey
-        became out of sync. See 979e90a.
-
-        Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with
-        these values.
-        """
-        MAX = 10000
-
-        subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX)
-        update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq)
-        result = db.session.execute(subq).fetchall()
-
-        while len(result) > 0:
-            db.session.execute(update)
-            print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))
-            db.session.commit()
-            result = db.session.execute(subq).fetchall()
-
-        subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \
-            .format(MAX)
-        update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist)
-        result = db.session.execute(subq_hist).fetchall()
-
-        while len(result) > 0:
-            db.session.execute(update)
-            print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))
-            db.session.commit()
-            result = db.session.execute(subq_hist).fetchall()
-
-    def link_inbound_numbers_to_service(self):
-        update = """
-            UPDATE inbound_numbers SET
-                service_id = services.id,
-                updated_at = now()
-            FROM services
-            WHERE services.sms_sender = inbound_numbers.number AND
-                inbound_numbers.service_id is null
-        """
-        result = db.session.execute(update)
-        db.session.commit()
-
-        print("Linked {} inbound numbers to service".format(result.rowcount))
-
-
-class PopulateMonthlyBilling(Command):
-    option_list = (
-        Option('-y', '-year', dest="year", help="Use for integer value for year, e.g. 2017"),
-    )
-
-    def run(self, year):
-        service_ids = get_service_ids_that_need_billing_populated(
-            start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16)
-        )
-        start, end = 1, 13
-        if year == '2016':
-            start = 4
-
-        for service_id in service_ids:
-            print('Starting to populate data for service {}'.format(str(service_id)))
-            print('Starting populating monthly billing for {}'.format(year))
-            for i in range(start, end):
-                print('Population for {}-{}'.format(i, year))
-                self.populate(service_id, year, i)
-
-    def populate(self, service_id, year, month):
+commands = click.Group(name='commands', help='Additional commands')
+
+
+@commands.command()
+@click.option('-p', '--provider_name', required=True, help='Provider name')
+@click.option('-c', '--cost', required=True, help='Cost (pence) per message including decimals')
+@click.option('-d', '--valid_from', required=True, help="Date (%Y-%m-%dT%H:%M:%S) valid from")
+def create_provider_rates(provider_name, cost, valid_from):
+    if provider_name not in PROVIDERS:
+        raise Exception("Invalid provider name, must be one of ({})".format(', '.join(PROVIDERS)))
+
+    try:
+        cost = Decimal(cost)
+    except:
+        raise Exception("Invalid cost value.")
+
+    try:
+        valid_from = datetime.strptime('%Y-%m-%dT%H:%M:%S', valid_from)
+    except:
+        raise Exception("Invalid valid_from date. Use the format %Y-%m-%dT%H:%M:%S")
+
+    dao_create_provider_rates(provider_name, valid_from, cost)
+
+
+@commands.command()
+@click.option('-u', '--user_email_prefix', required=True, help="Functional test user email prefix.")
+def purge_functional_test_data(user_email_prefix):
+    users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all()
+    for usr in users:
+        # Make sure the full email includes a uuid in it
+        # Just in case someone decides to use a similar email address.
+        try:
+            uuid.UUID(usr.email_address.split("@")[0].split('+')[1])
+        except ValueError:
+            print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address))
+        else:
+            services = dao_fetch_all_services_by_user(usr.id)
+            if services:
+                for service in services:
+                    delete_service_and_all_associated_db_objects(service)
+            else:
+                delete_user_verify_codes(usr)
+                delete_model_user(usr)
+
+
+@commands.command()
+def backfill_notification_statuses():
+    """
+    This will be used to populate the new `Notification._status_fkey` with the old
+    `Notification._status_enum`
+    """
+    LIMIT = 250000
+    subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT)
+    update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq)
+    result = db.session.execute(subq).fetchall()
+
+    while len(result) > 0:
+        db.session.execute(update)
+        print('commit {} updates at {}'.format(LIMIT, datetime.utcnow()))
+        db.session.commit()
+        result = db.session.execute(subq).fetchall()
+
+
+@commands.command()
+def update_notification_international_flag():
+    # 250,000 rows takes 30 seconds to update.
+    subq = "select id from notifications where international is null limit 250000"
+    update = "update notifications set international = False where id in ({})".format(subq)
+    result = db.session.execute(subq).fetchall()
+
+    while len(result) > 0:
+        db.session.execute(update)
+        print('commit 250000 updates at {}'.format(datetime.utcnow()))
+        db.session.commit()
+        result = db.session.execute(subq).fetchall()
+
+    # Now update notification_history
+    subq_history = "select id from notification_history where international is null limit 250000"
+    update_history = "update notification_history set international = False where id in ({})".format(subq_history)
+    result_history = db.session.execute(subq_history).fetchall()
+    while len(result_history) > 0:
+        db.session.execute(update_history)
+        print('commit 250000 updates at {}'.format(datetime.utcnow()))
+        db.session.commit()
+        result_history = db.session.execute(subq_history).fetchall()
+
+
+@commands.command()
+def fix_notification_statuses_not_in_sync():
+    """
+    This will be used to correct an issue where Notification._status_enum and NotificationHistory._status_fkey
+    became out of sync. See 979e90a.
+
+    Notification._status_enum is the source of truth so NotificationHistory._status_fkey will be updated with
+    these values.
+    """
+    MAX = 10000
+
+    subq = "SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}".format(MAX)
+    update = "UPDATE notifications SET notification_status = status WHERE id in ({})".format(subq)
+    result = db.session.execute(subq).fetchall()
+
+    while len(result) > 0:
+        db.session.execute(update)
+        print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))
+        db.session.commit()
+        result = db.session.execute(subq).fetchall()
+
+    subq_hist = "SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}" \
+        .format(MAX)
+    update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq_hist)
+    result = db.session.execute(subq_hist).fetchall()
+
+    while len(result) > 0:
+        db.session.execute(update)
+        print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))
+        db.session.commit()
+        result = db.session.execute(subq_hist).fetchall()
+
+
+@commands.command()
+def link_inbound_numbers_to_service():
+    update = """
+        UPDATE inbound_numbers SET
+            service_id = services.id,
+            updated_at = now()
+        FROM services
+        WHERE services.sms_sender = inbound_numbers.number AND
+            inbound_numbers.service_id is null
+    """
+    result = db.session.execute(update)
+    db.session.commit()

+    print("Linked {} inbound numbers to service".format(result.rowcount))
+
+
+@commands.command()
+@click.option('-y', '--year', required=True, help="Use for integer value for year, e.g. 2017")
+def populate_monthly_billing(year):
+    def populate(service_id, year, month):
         create_or_update_monthly_billing(service_id, datetime(int(year), int(month), 1))
         sms_res = get_monthly_billing_by_notification_type(
             service_id, datetime(int(year), int(month), 1), SMS_TYPE
@@ -200,165 +173,181 @@ class PopulateMonthlyBilling(Command):
         print('SMS: {}'.format(sms_res.monthly_totals))
         print('Email: {}'.format(email_res.monthly_totals))

-
-class BackfillProcessingTime(Command):
-    option_list = (
-        Option('-s', '--start_date', dest='start_date', help="Date (%Y-%m-%d) start date inclusive"),
-        Option('-e', '--end_date', dest='end_date', help="Date (%Y-%m-%d) end date inclusive"),
-    )
-
-    def run(self, start_date, end_date):
-        start_date = datetime.strptime(start_date, '%Y-%m-%d')
-        end_date = datetime.strptime(end_date, '%Y-%m-%d')
-
-        delta = end_date - start_date
-
-        print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date))
-
-        for i in range(delta.days + 1):
-            # because the tz conversion funcs talk about midnight, and the midnight before last,
-            # we want to pretend we're running this from the next morning, so add one.
-            process_date = start_date + timedelta(days=i + 1)
-
-            process_start_date = get_midnight_for_day_before(process_date)
-            process_end_date = get_london_midnight_in_utc(process_date)
-
-            print('Sending notification processing-time for {} - {}'.format(
-                process_start_date.isoformat(),
-                process_end_date.isoformat()
-            ))
-            send_processing_time_for_start_and_end(process_start_date, process_end_date)
-
-
-class PopulateServiceEmailReplyTo(Command):
-
-    def run(self):
-        services_to_update = """
-            INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at)
-            SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}'
-            FROM services
-            WHERE reply_to_email_address IS NOT NULL
-            AND id NOT IN(
-                SELECT service_id
-                FROM service_email_reply_to
-            )
-        """.format(datetime.utcnow())
-
-        result = db.session.execute(services_to_update)
-        db.session.commit()
-
-        print("Populated email reply to addresses for {}".format(result.rowcount))
-
-
-class PopulateServiceSmsSender(Command):
-
-    def run(self):
-        services_to_update = """
-            INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at)
-            SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}'
-            FROM inbound_numbers
-            WHERE service_id NOT IN(
-                SELECT service_id
-                FROM service_sms_senders
-            )
-        """.format(datetime.utcnow())
-
-        services_to_update_from_services = """
-            INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at)
-            SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}'
-            FROM services
-            WHERE id NOT IN(
-                SELECT service_id
-                FROM service_sms_senders
-            )
-        """.format(datetime.utcnow())
-
-        result = db.session.execute(services_to_update)
-        second_result = db.session.execute(services_to_update_from_services)
-        db.session.commit()
-
-        services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0]
-
-        service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0]
-
-        print("Populated sms sender {} services from inbound_numbers".format(result.rowcount))
-        print("Populated sms sender {} services from services".format(second_result.rowcount))
-        print("{} services in table".format(services_count_query))
-        print("{} service_sms_senders".format(service_sms_sender_count_query))
-
-
-class PopulateServiceLetterContact(Command):
-    def run(self):
-        services_to_update = """
-            INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at)
-            SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}'
-            FROM services
-            WHERE letter_contact_block IS NOT NULL
-            AND id NOT IN(
-                SELECT service_id
-                FROM service_letter_contacts
-            )
-        """.format(datetime.utcnow())
-
-        result = db.session.execute(services_to_update)
-        db.session.commit()
-
-        print("Populated letter contacts for {} services".format(result.rowcount))
-
-
-class PopulateServiceAndServiceHistoryFreeSmsFragmentLimit(Command):
-    def run(self):
-        services_to_update = """
-            UPDATE services
-            SET free_sms_fragment_limit = 250000
-            WHERE free_sms_fragment_limit IS NULL
-        """
-
-        services_history_to_update = """
-            UPDATE services_history
-            SET free_sms_fragment_limit = 250000
-            WHERE free_sms_fragment_limit IS NULL
-        """
-
-        services_result = db.session.execute(services_to_update)
-        services_history_result = db.session.execute(services_history_to_update)
-
-        db.session.commit()
-
-        print("Populated free sms fragment limits for {} services".format(services_result.rowcount))
-        print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount))
-
-
-class PopulateAnnualBilling(Command):
-    def run(self):
-        financial_year = [2016, 2017, 2018]
-
-        for fy in financial_year:
-            populate_data = """
-                INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start,
-                created_at, updated_at)
-                SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}'
-                FROM services
-                WHERE id NOT IN(
-                SELECT service_id
-                FROM annual_billing
-                WHERE financial_year_start={})
-            """.format(fy, datetime.utcnow(), datetime.utcnow(), fy)
-
-            services_result1 = db.session.execute(populate_data)
-            db.session.commit()
-
-            print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount))
-
-
-class ReRunBuildDvlaFileForJob(Command):
-    option_list = (
-        Option('-j', '--job_id', dest='job_id', help="Enter the job id to rebuild the dvla file for"),
-    )
-
-    def run(self, job_id):
-        from app.celery.tasks import build_dvla_file
-        from app.config import QueueNames
-        build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS)
+    service_ids = get_service_ids_that_need_billing_populated(
+        start_date=datetime(2016, 5, 1), end_date=datetime(2017, 8, 16)
+    )
+    start, end = 1, 13
+
+    if year == '2016':
+        start = 4
+
+    for service_id in service_ids:
+        print('Starting to populate data for service {}'.format(str(service_id)))
+        print('Starting populating monthly billing for {}'.format(year))
+        for i in range(start, end):
+            print('Population for {}-{}'.format(i, year))
+            populate(service_id, year, i)
+
+
+@commands.command()
+@click.option('-s', '--start_date', required=True, help="Date (%Y-%m-%d) start date inclusive")
+@click.option('-e', '--end_date', required=True, help="Date (%Y-%m-%d) end date inclusive")
+def backfill_processing_time(start_date, end_date):
+    start_date = datetime.strptime(start_date, '%Y-%m-%d')
+    end_date = datetime.strptime(end_date, '%Y-%m-%d')
+
+    delta = end_date - start_date
+
+    print('Sending notification processing-time data for all days between {} and {}'.format(start_date, end_date))
+
+    for i in range(delta.days + 1):
+        # because the tz conversion funcs talk about midnight, and the midnight before last,
+        # we want to pretend we're running this from the next morning, so add one.
+        process_date = start_date + timedelta(days=i + 1)
+
+        process_start_date = get_midnight_for_day_before(process_date)
+        process_end_date = get_london_midnight_in_utc(process_date)
+
+        print('Sending notification processing-time for {} - {}'.format(
+            process_start_date.isoformat(),
+            process_end_date.isoformat()
+        ))
+        send_processing_time_for_start_and_end(process_start_date, process_end_date)
+
+
+@commands.command()
+def populate_service_email_reply_to():
+    services_to_update = """
+        INSERT INTO service_email_reply_to(id, service_id, email_address, is_default, created_at)
+        SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, reply_to_email_address, true, '{}'
+        FROM services
+        WHERE reply_to_email_address IS NOT NULL
+        AND id NOT IN(
+            SELECT service_id
+            FROM service_email_reply_to
+        )
+    """.format(datetime.utcnow())
+
+    result = db.session.execute(services_to_update)
+    db.session.commit()
+
+    print("Populated email reply to addresses for {}".format(result.rowcount))
+
+
+@commands.command()
+def populate_service_sms_sender():
+    services_to_update = """
+        INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at)
+        SELECT uuid_in(md5(random()::text || now()::text)::cstring), service_id, number, id, true, '{}'
+        FROM inbound_numbers
+        WHERE service_id NOT IN(
+            SELECT service_id
+            FROM service_sms_senders
+        )
+    """.format(datetime.utcnow())
+
+    services_to_update_from_services = """
+        INSERT INTO service_sms_senders(id, service_id, sms_sender, inbound_number_id, is_default, created_at)
+        SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, sms_sender, null, true, '{}'
+        FROM services
+        WHERE id NOT IN(
+            SELECT service_id
+            FROM service_sms_senders
+        )
+    """.format(datetime.utcnow())
+
+    result = db.session.execute(services_to_update)
+    second_result = db.session.execute(services_to_update_from_services)
+    db.session.commit()
+
+    services_count_query = db.session.execute("Select count(*) from services").fetchall()[0][0]
+
+    service_sms_sender_count_query = db.session.execute("Select count(*) from service_sms_senders").fetchall()[0][0]
+
+    print("Populated sms sender {} services from inbound_numbers".format(result.rowcount))
+    print("Populated sms sender {} services from services".format(second_result.rowcount))
+    print("{} services in table".format(services_count_query))
+    print("{} service_sms_senders".format(service_sms_sender_count_query))
+
+
+@commands.command()
+def populate_service_letter_contact():
+    services_to_update = """
+        INSERT INTO service_letter_contacts(id, service_id, contact_block, is_default, created_at)
+        SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, letter_contact_block, true, '{}'
+        FROM services
+        WHERE letter_contact_block IS NOT NULL
+        AND id NOT IN(
+            SELECT service_id
+            FROM service_letter_contacts
+        )
+    """.format(datetime.utcnow())
+
+    result = db.session.execute(services_to_update)
+    db.session.commit()
+
+    print("Populated letter contacts for {} services".format(result.rowcount))
+
+
+@commands.command()
+def populate_service_and_service_history_free_sms_fragment_limit():
+    services_to_update = """
+        UPDATE services
+        SET free_sms_fragment_limit = 250000
+        WHERE free_sms_fragment_limit IS NULL
+    """
+
+    services_history_to_update = """
+        UPDATE services_history
+        SET free_sms_fragment_limit = 250000
+        WHERE free_sms_fragment_limit IS NULL
+    """
+
+    services_result = db.session.execute(services_to_update)
+    services_history_result = db.session.execute(services_history_to_update)
+
+    db.session.commit()
+
+    print("Populated free sms fragment limits for {} services".format(services_result.rowcount))
+    print("Populated free sms fragment limits for {} services history".format(services_history_result.rowcount))
+
+
+@commands.command()
+def populate_annual_billing():
+    financial_year = [2016, 2017, 2018]
+
+    for fy in financial_year:
+        populate_data = """
+            INSERT INTO annual_billing(id, service_id, free_sms_fragment_limit, financial_year_start,
+                created_at, updated_at)
+            SELECT uuid_in(md5(random()::text || now()::text)::cstring), id, 250000, {}, '{}', '{}'
+            FROM services
+            WHERE id NOT IN(
+                SELECT service_id
+                FROM annual_billing
+                WHERE financial_year_start={})
+        """.format(fy, datetime.utcnow(), datetime.utcnow(), fy)
+
+        services_result1 = db.session.execute(populate_data)
+        db.session.commit()
+
+        print("Populated annual billing {} for {} services".format(fy, services_result1.rowcount))
+
+
+@commands.command()
+@click.option('-j', '--job_id', required=True, help="Enter the job id to rebuild the dvla file for")
+def re_run_build_dvla_file_for_job(job_id):
+    from app.celery.tasks import build_dvla_file
+    from app.config import QueueNames
+    build_dvla_file.apply_async([job_id], queue=QueueNames.JOBS)
+
+
+@commands.command()
+def list_routes():
+    """List URLs of all application routes."""
+    for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
+        print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
+
+
+def setup_commands(application):
+    application.cli.add_command(commands)
@@ -1,38 +1,10 @@
 #!/usr/bin/env python

 from __future__ import print_function
-import os
-from flask_script import Manager, Server
-from flask_migrate import Migrate, MigrateCommand
-from app import (create_app, db, commands)
-
-application = create_app()
-manager = Manager(application)
-port = int(os.environ.get('PORT', 6011))
-manager.add_command("runserver", Server(host='0.0.0.0', port=port))
-
-migrate = Migrate(application, db)
-manager.add_command('db', MigrateCommand)
-manager.add_command('create_provider_rate', commands.CreateProviderRateCommand)
-manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand)
-manager.add_command('custom_db_script', commands.CustomDbScript)
-manager.add_command('populate_monthly_billing', commands.PopulateMonthlyBilling)
-manager.add_command('backfill_processing_time', commands.BackfillProcessingTime)
-manager.add_command('populate_service_email_reply_to', commands.PopulateServiceEmailReplyTo)
-manager.add_command('populate_service_sms_sender', commands.PopulateServiceSmsSender)
-manager.add_command('populate_service_letter_contact', commands.PopulateServiceLetterContact)
-manager.add_command('populate_service_and_service_history_free_sms_fragment_limit',
-                    commands.PopulateServiceAndServiceHistoryFreeSmsFragmentLimit)
-manager.add_command('populate_annual_billing', commands.PopulateAnnualBilling)
-manager.add_command('rerun_build_dvla_file', commands.ReRunBuildDvlaFileForJob)
-
-
-@manager.command
-def list_routes():
-    """List URLs of all application routes."""
-    for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
-        print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
-
-
-if __name__ == '__main__':
-    manager.run()
+
+from flask import Flask
+
+from app import create_app
+
+app = Flask('app')
+
+create_app(app)
@@ -5,7 +5,6 @@ docopt==0.6.2
 Flask-Bcrypt==0.7.1
 Flask-Marshmallow==0.8.0
 Flask-Migrate==2.1.1
-Flask-Script==2.0.5
 Flask-SQLAlchemy==2.3.2
 Flask==0.12.2
 gunicorn==19.7.1
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 # notify_celery is referenced from manifest_delivery_base.yml, and cannot be removed
+from flask import Flask
+
 from app import notify_celery, create_app

-application = create_app('delivery')
+application = Flask('delivery')
+create_app(application)
+
 application.app_context().push()
@@ -3,4 +3,4 @@
 set -e

 source environment.sh
-python3 application.py runserver
+flask run -p 6011
@@ -1,4 +1,3 @@
-from flask_script import Manager, Server
 from flask_migrate import Migrate, MigrateCommand
 from app import (create_app, db, commands)
 import os
@@ -16,11 +15,10 @@ os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]

 application = create_app()

-manager = Manager(application)
 migrate = Migrate(application, db)
-manager.add_command('db', MigrateCommand)
-manager.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand)
-manager.add_command('custom_db_script', commands.CustomDbScript)
+application.add_command('db', MigrateCommand)
+application.add_command('purge_functional_test_data', commands.PurgeFunctionalTestDataCommand)
+application.add_command('custom_db_script', commands.CustomDbScript)

 if __name__ == '__main__':
     manager.run()
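A closing note on how this gets exercised: with FLASK_APP=application.py exported (see the environment.sh hunk above), `flask run -p 6011` replaces the old runserver command, and the custom commands hang off the `commands` group registered in setup_commands. Below is a rough, self-contained sketch of poking a click group of the same shape with click's built-in test runner — the group, command name, and values here are stand-ins, not the repo's actual code.

```python
# Rough sketch: exercising a click group shaped like app/commands.py
# with click's built-in test runner. Names and values are stand-ins.
import click
from click.testing import CliRunner

commands = click.Group(name='commands', help='Additional commands')


@commands.command('create-provider-rates')
@click.option('-p', '--provider_name', required=True, help='Provider name')
def create_provider_rates(provider_name):
    # The real command validates the provider and writes a rate row via the DAO.
    click.echo('would create rates for {}'.format(provider_name))


runner = CliRunner()
result = runner.invoke(commands, ['create-provider-rates', '-p', 'mmg'])
print(result.output)     # -> would create rates for mmg
print(result.exit_code)  # -> 0
```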