Merge branch 'master' into check-service-is-active

Rebecca Law
2017-01-31 12:00:30 +00:00
11 changed files with 393 additions and 41 deletions

View File

@@ -66,6 +66,7 @@ def create_app(app_name=None):
     notify_celery.init_app(application)
     encryption.init_app(application)
     redis_store.init_app(application)
+    performance_platform_client.init_app(application)
     clients.init_app(sms_clients=[firetext_client, mmg_client, loadtest_client], email_clients=[aws_ses_client])
     register_blueprint(application)
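The new client follows the usual Flask extension pattern: it is constructed once and then configured via init_app(). A minimal sketch of the configuration it reads, assuming a plain Flask config object; the values are illustrative placeholders, not real credentials or endpoints:

from flask import Flask

# Minimal sketch: the three settings performance_platform_client.init_app() reads.
# PERFORMANCE_PLATFORM_ENABLED gates all behaviour; token and URL only matter when it is True.
application = Flask(__name__)
application.config['PERFORMANCE_PLATFORM_ENABLED'] = True
application.config['PERFORMANCE_PLATFORM_TOKEN'] = 'example-bearer-token'       # placeholder
application.config['PERFORMANCE_PLATFORM_URL'] = 'https://performance.example/data'  # placeholder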

View File

@@ -116,20 +116,28 @@ def timeout_notifications():
 @notify_celery.task(name='send-daily-performance-platform-stats')
 @statsd(namespace="tasks")
-def send_daily_performance_stats():
-    count_dict = performance_platform_client.get_total_sent_notifications_yesterday()
-    start_date = count_dict.get('start_date')
+def send_daily_performance_platform_stats():
+    if performance_platform_client.active:
+        count_dict = performance_platform_client.get_total_sent_notifications_yesterday()
+        email_sent_count = count_dict.get('email').get('count')
+        sms_sent_count = count_dict.get('sms').get('count')
+        start_date = count_dict.get('start_date')

-    performance_platform_client.send_performance_stats(
-        start_date,
-        'sms',
-        count_dict.get('sms').get('count'),
-        'day'
-    )
+        current_app.logger.info(
+            "Attempting to update performance platform for date {} with email count {} and sms count {}"
+            .format(start_date, email_sent_count, sms_sent_count)
+        )

-    performance_platform_client.send_performance_stats(
-        start_date,
-        'email',
-        count_dict.get('email').get('count'),
-        'day'
-    )
+        performance_platform_client.send_performance_stats(
+            start_date,
+            'sms',
+            sms_sent_count,
+            'day'
+        )
+        performance_platform_client.send_performance_stats(
+            start_date,
+            'email',
+            email_sent_count,
+            'day'
+        )
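Alongside the rename, the whole task becomes a no-op whenever the client is inactive. A rough standalone sketch of that gating pattern, using a mock in place of the real performance_platform_client:

from unittest.mock import MagicMock

# Stand-in for performance_platform_client; the real client exposes `active`
# as a property backed by the PERFORMANCE_PLATFORM_ENABLED config flag.
client = MagicMock()
client.active = False

def send_daily_performance_platform_stats():
    # Mirrors the task's structure: everything sits inside the `active` check.
    if client.active:
        counts = client.get_total_sent_notifications_yesterday()
        client.send_performance_stats(counts['start_date'], 'sms', counts['sms']['count'], 'day')
        client.send_performance_stats(counts['start_date'], 'email', counts['email']['count'], 'day')

send_daily_performance_platform_stats()
assert not client.get_total_sent_notifications_yesterday.called  # disabled client: no calls made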

View File

@@ -13,16 +13,24 @@ from app.utils import (
 class PerformancePlatformClient:

+    @property
+    def active(self):
+        return self._active
+
+    @active.setter
+    def active(self, value):
+        self._active = value
+
     def init_app(self, app):
-        self.active = app.config.get('PERFORMANCE_PLATFORM_ENABLED')
+        self._active = app.config.get('PERFORMANCE_PLATFORM_ENABLED')
         if self.active:
             self.bearer_token = app.config.get('PERFORMANCE_PLATFORM_TOKEN')
-            self.performance_platform_url = current_app.config.get('PERFORMANCE_PLATFORM_URL')
+            self.performance_platform_url = app.config.get('PERFORMANCE_PLATFORM_URL')

     def send_performance_stats(self, date, channel, count, period):
         if self.active:
             payload = {
-                '_timestamp': date,
+                '_timestamp': str(date),
                 'service': 'govuk-notify',
                 'channel': channel,
                 'count': count,

@@ -61,9 +69,14 @@ class PerformancePlatformClient:
             headers=headers
         )

-        if resp.status_code != 200:
+        if resp.status_code == 200:
+            current_app.logger.info(
+                "Updated performance platform successfully with payload {}".format(json.dumps(payload))
+            )
+        else:
             current_app.logger.error(
-                "Performance platform update request failed with {} '{}'".format(
+                "Performance platform update request failed for payload with response details: {} '{}'".format(
                     json.dumps(payload),
                     resp.status_code,
                     resp.json())
             )
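For context, the request the client ends up sending looks roughly like this. This is a sketch using requests with a placeholder URL and token; only the payload keys visible in the diff are included (the real client may attach extra fields), and the bearer-style Authorization header is an assumption based on the bearer_token config above:

import json
import requests

# Placeholders standing in for PERFORMANCE_PLATFORM_URL and PERFORMANCE_PLATFORM_TOKEN.
url = 'https://performance.example/data'
token = 'example-bearer-token'

payload = {
    '_timestamp': '2017-01-30T00:00:00',  # dates are stringified before sending
    'service': 'govuk-notify',
    'channel': 'sms',
    'count': 142,
}

resp = requests.post(
    url,
    data=json.dumps(payload),
    headers={
        'Content-Type': 'application/json',
        'Authorization': 'Bearer {}'.format(token),  # assumption: token sent as a bearer header
    },
)
print(resp.status_code)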

View File

@@ -73,7 +73,7 @@ def dao_fetch_all_services_by_user(user_id, only_active=False):
 @version_class(Service)
 @version_class(Template, TemplateHistory)
 @version_class(ApiKey)
-def dao_deactive_service(service_id):
+def dao_archive_service(service_id):
     # have to eager load templates and api keys so that we don't flush when we loop through them
     # to ensure that db.session still contains the models when it comes to creating history objects
     service = Service.query.options(

@@ -291,3 +291,27 @@ def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_fro
         query = query.filter(NotificationHistory.key_type != KEY_TYPE_TEST)

     return query.all()
+
+
+@transactional
+@version_class(Service)
+@version_class(ApiKey)
+def dao_suspend_service(service_id):
+    # have to eager load api keys so that we don't flush when we loop through them
+    # to ensure that db.session still contains the models when it comes to creating history objects
+    service = Service.query.options(
+        joinedload('api_keys'),
+    ).filter(Service.id == service_id).one()
+
+    service.active = False
+    for api_key in service.api_keys:
+        if not api_key.expiry_date:
+            api_key.expiry_date = datetime.utcnow()
+
+
+@transactional
+@version_class(Service)
+def dao_resume_service(service_id):
+    service = Service.query.get(service_id)
+    service.active = True
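Note that dao_suspend_service only stamps an expiry date on keys that do not already have one, so previously revoked keys keep their original expiry. A tiny standalone illustration of that guard, using plain objects instead of the real ApiKey model:

from datetime import datetime

class FakeApiKey:
    def __init__(self, expiry_date=None):
        self.expiry_date = expiry_date

already_revoked = FakeApiKey(expiry_date=datetime(2016, 12, 1))
live_key = FakeApiKey()

# Same guard as dao_suspend_service: only keys without an expiry date get one now.
for key in (already_revoked, live_key):
    if not key.expiry_date:
        key.expiry_date = datetime.utcnow()

assert already_revoked.expiry_date == datetime(2016, 12, 1)  # original expiry untouched
assert live_key.expiry_date is not None                      # revoked as part of suspension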

View File

@@ -27,9 +27,10 @@ from app.dao.services_dao import (
dao_fetch_todays_stats_for_service,
dao_fetch_weekly_historical_stats_for_service,
dao_fetch_todays_stats_for_all_services,
dao_deactive_service,
fetch_stats_by_date_range_for_all_services
)
dao_archive_service,
fetch_stats_by_date_range_for_all_services,
dao_suspend_service,
dao_resume_service)
from app.dao.service_whitelist_dao import (
dao_fetch_service_whitelist,
dao_add_and_commit_whitelisted_contacts,
@@ -204,7 +205,7 @@ def get_service_provider_aggregate_statistics(service_id):
 # tables. This is so product owner can pass stories as done
 @service_blueprint.route('/<uuid:service_id>/history', methods=['GET'])
 def get_service_history(service_id):
-    from app.models import (Service, ApiKey, Template, TemplateHistory, Event)
+    from app.models import (Service, ApiKey, TemplateHistory, Event)
     from app.schemas import (
         service_history_schema,
         api_key_history_schema,

@@ -329,12 +330,13 @@ def update_whitelist(service_id):
         current_app.logger.exception(e)
         dao_rollback()
         msg = '{} is not a valid email address or phone number'.format(str(e))
-        return jsonify(result='error', message=msg), 400
+        raise InvalidRequest(msg, 400)
     else:
         dao_add_and_commit_whitelisted_contacts(whitelist_objs)
         return '', 204


+# Renaming this endpoint to archive
 @service_blueprint.route('/<uuid:service_id>/deactivate', methods=['POST'])
 def deactivate_service(service_id):
     service = dao_fetch_service_by_id(service_id)

@@ -343,7 +345,54 @@ def deactivate_service(service_id):
         # assume already inactive, don't change service name
         return '', 204

-    dao_deactive_service(service.id)
+    dao_archive_service(service.id)

     return '', 204
+
+
+@service_blueprint.route('/<uuid:service_id>/archive', methods=['POST'])
+def archive_service(service_id):
+    """
+    When a service is archived the service is made inactive, templates are archived and api keys are revoked.
+    There is no coming back from this operation.
+    :param service_id:
+    :return:
+    """
+    service = dao_fetch_service_by_id(service_id)
+
+    if service.active:
+        dao_archive_service(service.id)
+
+    return '', 204
+
+
+@service_blueprint.route('/<uuid:service_id>/suspend', methods=['POST'])
+def suspend_service(service_id):
+    """
+    Suspending a service will mark the service as inactive and revoke API keys.
+    :param service_id:
+    :return:
+    """
+    service = dao_fetch_service_by_id(service_id)
+
+    if service.active:
+        dao_suspend_service(service.id)
+
+    return '', 204
+
+
+@service_blueprint.route('/<uuid:service_id>/resume', methods=['POST'])
+def resume_service(service_id):
+    """
+    Resuming a service that has been suspended will mark the service as active.
+    The service will need to re-create API keys.
+    :param service_id:
+    :return:
+    """
+    service = dao_fetch_service_by_id(service_id)
+
+    if not service.active:
+        dao_resume_service(service.id)
+
+    return '', 204
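Taken together, the new routes give a reversible suspend/resume pair alongside the irreversible archive. A rough usage sketch with requests; the base URL, port, and service id are placeholders, and real deployments will sit behind authentication:

import requests

# Placeholders: adjust base_url to wherever the service blueprint is mounted.
base_url = 'http://localhost:6011/service'
service_id = '11111111-1111-1111-1111-111111111111'

# Suspend: marks the service inactive and revokes its API keys. Returns 204 even if already inactive.
requests.post('{}/{}/suspend'.format(base_url, service_id))

# Resume: re-activates a suspended service; new API keys must be created afterwards.
requests.post('{}/{}/resume'.format(base_url, service_id))

# Archive: irreversible - service made inactive, templates archived, API keys revoked.
requests.post('{}/{}/archive'.format(base_url, service_id))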