Merge branch 'master' into stats-db-updates

Conflicts:
	tests/app/conftest.py
This commit is contained in:
Martyn Inglis
2016-08-03 11:46:40 +01:00
21 changed files with 465 additions and 268 deletions

View File

@@ -3,7 +3,7 @@ language: python
cache:
pip: true
directories:
- ~/.pip-accel
- ~/.pip-accel
python:
- '3.4'
addons:
@@ -26,53 +26,53 @@ notifications:
secure: vCmpAjdXkprqGzSnQzcco1lNiFuj/nRQdlVTGSnvg2HEqZUM5vzmWtQYw3WNS/bcgbVxV/nLgza17cdsmFqOuaZll4kDM0swnmk17eynImAHPFF/flGsNcw4oRV7WzWrq7j1TvGzJzxJkHYOjJFk0iMLM7+P76sWHI6Oa5NHQYH2l7a4hqzqmuUKaloHeZX/Ro2RmFE+/t/SfNr0pmtTRO4CfLfPUdUs9D8Mx0cyc1Z9Ke4Cq+R2Lsp8BwH0nye6pb9cI4h1YmUY/Xt8O1Z0QbncHZviWojSB2vV0V5WQPzqjjseAznJ1depK/LPOlRHlqddkEbGK28rDwsxbzY1q3DCZZiZZTXBD80HyNr/J4rfRAXhoCafwR57cqqIQ7G/fb5A/ckUM8TFKqUPh7wvu67OAiSWJmO78C8fumF3HxcoGp7DQOQPFLtWGuCfh2zjI9WbUhiHQwWl9/9qpgMEuXH7LOJAzgQbEp1A7XRmT2GupYEkGdFMwT8ojp8OXds08diEy1FNonWAtFZs8pO92urK862pCRB8WRQZWMjpcnif1Ht4QtwYFO9iVp5ygReTEoFuiQPMAypCVCzKgHe/W8LX55xzSnlUQqDU2NzN37LfC4ctS1h3BfIIU5KP9PmdgkKRUWcrFE3eMXPG36OdU4hRj4FMURzmAOZYhaSXuP8=
deploy:
- provider: s3
access_key_id: AKIAJQPPNM6P6V53SWKA
access_key_id: AKIAJLWYN4T4D5WU4APA
secret_access_key: &1
secure: irusc9JdTbXhKPMYUEHIOPXk7cCCleRsXXW/ZOAEWcHtdtuF6YNACsN3pywwyZ9UU+MqZb8lD3UH77o3u7BtuyPPv5JDy7cB7AoIX6EAaZIdxAS4v2tTpLPh6oS2QtfI1U5giI2g2C5IkRZIIrqiWtT63y0eReL5z5r5x7kEY5U7UOMrIL0w2m/frate0a4Q380ZbjRA1X0PcQ3IeavxQbTEYug+XEaKcpjqIFFwxeNYu814Ox7fMm+bqpG7TuwDF2i1+jAlX3NZSiuUBBYpHqgK6EARnCNwi8Yr2+CBO6ykXY1lcVn13+cSsFXDP7lgkK/BJUewWIrkQ6EL28ntTaG3cpZhSMB2fc21VRNnVzNDfzf9R+bI/Xy45fyraTVZ2Xun8j262SAM+F8D6Ivt1Y+n8aRHN0U8P//Yhfx2TMTFoHLhGUp076FDPKmcWRWpYikMzmTR/oVu6yE5OacntYBp8dE1QGrVSR//rRHK6QkQeDbnp/cSbOiZY92PeYeDjCLbZMrqASONWtzR6V0CPu/++iLw0mODd2xRlpsnFcpF8ygxFtAgOUrKXbAdu9JNdNE1pPE2FMDb6DKAuIxrRQT8ceUwhEChR7UGERTtbr2deyc+4f+MpmUK59uaqdG6y5q0KsiXhUTyzmKVLGxr/dyA5cATtsFDCx8caM0Qx6I=
secure: j89/yB4NUSUwgG4UKVBMuzkGW8UwFS3Fp9W/7sxFh0zXndMwEN8yl9OtaJ/K9gKds8FAKYCwdSJ90wztdEuDO7c1k60WtD08vbXx1TIEvk9N+VRCOAo5dKy3ZpoF8vQJYNP/luQyU5Ev8zm69nzW+40KaHosyS0b6/dvxy4luZnBiiX9bmR+12Mg3qT/NN7qf4SsImxc0OKl37NdUc03ygCPRPLhDD1bOgfLhDaZNby7DNbP3rKSB81T5cww8Ib2PkwjR9UTurD5JpIBJmZXhqVDtZ9cYUjrrmRPtHtzWBVt3QR7pQeqGupdLTlljV7o6FkAbwjYcEo72cMlwYcoDvBwGc0nyZCgQsQDkHRo8ez5WbXuDq3QvNNOCUh+HPCYSze3uWRIwY/Sb48dwLI1Y5ieQ5b+u2dusxRQbOHajPAO6I9Zl2IRuV6/lHhtfKfTF2BIKT2hfRdkMCIlTBGSPF6wZVhBd3dSTXJxwsarphDvfVilHI0tzz3gfnbmwAz/Z5KAASlCE73oX+0dvZDke1tNIURCM8MzfmK3BmRzh0V0zdET1wzRvhSzyvHSfOL6qRjxDfbvRRPQXD2EkFuW3RJlguOrX5k2eJ5eG0Da9hcI2XZ7VI7cK/UdLgRYjad8l6pKtYIQcxRw4EPb4OtsnMlzpzV9uFpObIZSgUgOoC4=
local_dir: dpl_cd_upload
skip_cleanup: true
region: eu-west-1
bucket: notifications-api-codedeploy
wait-until-deployed: true
on:
on: &2
repo: alphagov/notifications-api
bucket: notify.works-notifications-api-codedeploy
- provider: codedeploy
access_key_id: AKIAJQPPNM6P6V53SWKA
access_key_id: AKIAJLWYN4T4D5WU4APA
secret_access_key: *1
bucket: notifications-api-codedeploy
bucket: notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: notifications_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJQPPNM6P6V53SWKA
secret_access_key: *1
bucket: notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
application: api
deployment_group: notifications_admin_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJQPPNM6P6V53SWKA
access_key_id: AKIAJLWYN4T4D5WU4APA
secret_access_key: *1
bucket: notifications-api-codedeploy
bucket: notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
application: api
deployment_group: notifications_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJLWYN4T4D5WU4APA
secret_access_key: *1
bucket: notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: api
deployment_group: notifications_delivery_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: s3
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
access_key_id: AKIAJBHLXFS5UHJ6PXLA
secret_access_key: &1
secure: daC1bCHXqLRK+iIZ8P699KCnTh77lwV4KxrZxL1yd6cstgfptyd/rg1WgRwE6QdxOCT9gQvKWUZFCzFy7M6E/Ih8EUHqEXTzC5M4oAye8rhePIBMQwqkgfYyIoZ3LdDMMP5JfBhiz0zS3Vj7HerL2qIu12adJBjkRJx3XAGimCrFOMQ0xUXQAKDjL6Xmv+gVz2f/ISLy6icKY4KNGt3cQV+8pa5aMF34C9R2udA9N67EWlXlh7hJbFtmY+0Zqpo8Rr6wKRb5MA0xEcTVLORSz1aa6GkxUCbzaIH99p7z3Ghz0qW2bUi9ZcDrvg0GLbVe1T+1HXhfktJfW8wnzw6A/2U/CIIFDQZ/qk0w/DkEwpQinXow99Zl49CcEU+v8llKhg5nM3LmAZCQg1c/iZyP/d90AwAMoMA/VTDD72M93IqTJQH18eC8g02DwE0hNDD6aos5wzeuDeiH/6BG+Tq0pDl0y0aWCcHf3vGRlo/5GlWfpE0vMQEC+qnEOWOUqSprCdSypgD2Aip9mCC98w4BkqKKvGNHPZolA7rxf7E9hTK+BNPRATpYsHR1X/1Xl0TMc/pHhjU1yNXzWnI/kOlNV2CRq3slEtcWihaEo8oDHJ+BhGT49Ps3Je7UB2xO/jXXFPhwJotPMOacTcnUkGqVJSlK1g6TIn4t9nTVSY8KFUs=
secure: wYEw/AMHxcfddKP7Hc3r08onLtkZyYBnm+xhxMum3rQjR6Otj4rQ9pROw9V3xOzPBq/nKPTkO6G+4UVrHlPwBmplfpeyK/PXLll8gcNPV0qWxyKs7uVBW6KAgsXm5utuAyLNSWINWpbinjtHCTg3g+JwVFj32/RTUYTKfqzKl/M91Cd1Ezs0olrUIqkONTWRbM3fwCce//77XpRRYIK6baZK4I/qXXJ4P52cj9iqFVF1x684082cxcROtREC0Aeucldvft4WuNtUGZlczcG00PhTP9+pkKhgsNB9q/ICc3aBtbhjdKtJjweNfaNzbL1f/UgG+xJdf7zl4LEpTdSjklJeelHh93UlPObOUsm+BUU9ZRLa8xDpuWxvOxDpSIwBAORkYTl1kxfwb2GJVACZHrmF3zl9Z+btPYGyu6gCSf+3ez0HWFR76DfpsZfMyFUF9mxrBfpQR6d7hYAOdOggOpTB3jCkzMtkDeeYedYErUnfP6CA0rrN5voTqq5RuOWjJPMnuL7BMyM2ZQ2z5QJaUP7FV8z20YV89QiHN83GpZBBqm4t0ai+OLE5tqxGGztfoYpashqPsU6131lkA6X1eSCi9k4TlB40Ll2vcLoW+NhQTYtRr2TcdUiu23CPzzzsi6DOabJFu6PyvZ9IC5nISud/eegbJyDczZfiHETJuBQ=
local_dir: dpl_cd_upload
skip_cleanup: true
region: eu-west-1
@@ -80,87 +80,42 @@ deploy:
on: &2
repo: alphagov/notifications-api
branch: staging
bucket: staging-notifications-api-codedeploy
bucket: staging-notify.works-notifications-api-codedeploy
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
access_key_id: AKIAJBHLXFS5UHJ6PXLA
secret_access_key: *1
bucket: staging-notifications-api-codedeploy
bucket: staging-notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: staging_delivery_api_deployment_group
application: api
deployment_group: notifications_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
access_key_id: AKIAJBHLXFS5UHJ6PXLA
secret_access_key: *1
bucket: staging-notifications-api-codedeploy
bucket: staging-notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: staging_public_api_deployment_group
application: api
deployment_group: notifications_admin_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
access_key_id: AKIAJBHLXFS5UHJ6PXLA
secret_access_key: *1
bucket: staging-notifications-api-codedeploy
bucket: staging-notify.works-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: staging_admin_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: s3
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
secret_access_key: &1
secure: daC1bCHXqLRK+iIZ8P699KCnTh77lwV4KxrZxL1yd6cstgfptyd/rg1WgRwE6QdxOCT9gQvKWUZFCzFy7M6E/Ih8EUHqEXTzC5M4oAye8rhePIBMQwqkgfYyIoZ3LdDMMP5JfBhiz0zS3Vj7HerL2qIu12adJBjkRJx3XAGimCrFOMQ0xUXQAKDjL6Xmv+gVz2f/ISLy6icKY4KNGt3cQV+8pa5aMF34C9R2udA9N67EWlXlh7hJbFtmY+0Zqpo8Rr6wKRb5MA0xEcTVLORSz1aa6GkxUCbzaIH99p7z3Ghz0qW2bUi9ZcDrvg0GLbVe1T+1HXhfktJfW8wnzw6A/2U/CIIFDQZ/qk0w/DkEwpQinXow99Zl49CcEU+v8llKhg5nM3LmAZCQg1c/iZyP/d90AwAMoMA/VTDD72M93IqTJQH18eC8g02DwE0hNDD6aos5wzeuDeiH/6BG+Tq0pDl0y0aWCcHf3vGRlo/5GlWfpE0vMQEC+qnEOWOUqSprCdSypgD2Aip9mCC98w4BkqKKvGNHPZolA7rxf7E9hTK+BNPRATpYsHR1X/1Xl0TMc/pHhjU1yNXzWnI/kOlNV2CRq3slEtcWihaEo8oDHJ+BhGT49Ps3Je7UB2xO/jXXFPhwJotPMOacTcnUkGqVJSlK1g6TIn4t9nTVSY8KFUs=
local_dir: dpl_cd_upload
skip_cleanup: true
region: eu-west-1
wait-until-deployed: true
on: &2
repo: alphagov/notifications-api
branch: live
bucket: live-notifications-api-codedeploy
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
secret_access_key: *1
bucket: live-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: live_delivery_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
secret_access_key: *1
bucket: live-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: live_public_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
- provider: codedeploy
access_key_id: AKIAJ5MKF6G3P2JQP4QQ
secret_access_key: *1
bucket: live-notifications-api-codedeploy
key: notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
bundle_type: zip
application: notifications-api
deployment_group: live_admin_api_deployment_group
application: api
deployment_group: notifications_delivery_api_deployment_group
region: eu-west-1
wait-until-deployed: true
on: *2
before_deploy:
- ./scripts/update_version_file.sh
- zip -r --exclude=*__pycache__* notifications-api *
- mkdir -p dpl_cd_upload
- mv notifications-api.zip dpl_cd_upload/notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip
- ./scripts/update_version_file.sh
- zip -r --exclude=*__pycache__* notifications-api *
- mkdir -p dpl_cd_upload
- mv notifications-api.zip dpl_cd_upload/notifications-api-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER-$TRAVIS_COMMIT.zip

View File

@@ -17,6 +17,7 @@ from app.clients.email.aws_ses import AwsSesClient
from app.clients.statsd.statsd_client import StatsdClient
from app.encryption import Encryption
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
DATE_FORMAT = "%Y-%m-%d"
@@ -38,7 +39,8 @@ api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user)
def create_app(app_name=None):
application = Flask(__name__)
application.config.from_object(os.environ['NOTIFY_API_ENVIRONMENT'])
from config import configs
application.config.from_object(configs[os.environ['NOTIFY_ENVIRONMENT']])
if app_name:
application.config['NOTIFY_APP_NAME'] = app_name

View File

@@ -157,3 +157,21 @@ def _stats_for_service_query(service_id):
Notification.notification_type,
Notification.status,
)
def dao_fetch_weekly_historical_stats_for_service(service_id):
monday_of_notification_week = func.date_trunc('week', NotificationHistory.created_at).label('week_start')
return db.session.query(
NotificationHistory.notification_type,
NotificationHistory.status,
monday_of_notification_week,
func.count(NotificationHistory.id).label('count')
).filter(
NotificationHistory.service_id == service_id
).group_by(
NotificationHistory.notification_type,
NotificationHistory.status,
monday_of_notification_week
).order_by(
asc(monday_of_notification_week), NotificationHistory.status
).all()

View File

@@ -1,4 +1,4 @@
from datetime import date
from datetime import date, timedelta
from flask import (
jsonify,
@@ -8,7 +8,6 @@ from flask import (
)
from sqlalchemy.orm.exc import NoResultFound
from app.models import EMAIL_TYPE, SMS_TYPE
from app.dao.api_key_dao import (
save_model_api_key,
get_model_api_keys,
@@ -23,7 +22,8 @@ from app.dao.services_dao import (
dao_add_user_to_service,
dao_remove_user_from_service,
dao_fetch_stats_for_service,
dao_fetch_todays_stats_for_service
dao_fetch_todays_stats_for_service,
dao_fetch_weekly_historical_stats_for_service
)
from app.dao import notifications_dao
from app.dao.provider_statistics_dao import get_fragment_count
@@ -43,6 +43,7 @@ from app.errors import (
register_errors,
InvalidRequest
)
from app.service import statistics
service = Blueprint('service', __name__)
register_errors(service)
@@ -236,29 +237,20 @@ def get_all_notifications_for_service(service_id):
), 200
@service.route('/<uuid:service_id>/notifications/weekly', methods=['GET'])
def get_weekly_notification_stats(service_id):
service = dao_fetch_service_by_id(service_id)
stats = dao_fetch_weekly_historical_stats_for_service(service_id)
stats = statistics.format_weekly_notification_stats(stats, service.created_at)
return jsonify(data={week.date().isoformat(): statistics for week, statistics in stats.items()})
def get_detailed_service(service_id, today_only=False):
service = dao_fetch_service_by_id(service_id)
stats_fn = dao_fetch_todays_stats_for_service if today_only else dao_fetch_stats_for_service
statistics = stats_fn(service_id)
service.statistics = format_statistics(statistics)
stats = stats_fn(service_id)
service.statistics = statistics.format_statistics(stats)
data = detailed_service_schema.dump(service).data
return jsonify(data=data)
def format_statistics(statistics):
# statistics come in a named tuple with uniqueness from 'notification_type', 'status' - however missing
# statuses/notification types won't be represented and the status types need to be simplified/summed up
# so we can return emails/sms * created, sent, and failed
counts = {
template_type: {
status: 0 for status in ('requested', 'delivered', 'failed')
} for template_type in (EMAIL_TYPE, SMS_TYPE)
}
for row in statistics:
counts[row.notification_type]['requested'] += row.count
if row.status == 'delivered':
counts[row.notification_type]['delivered'] += row.count
elif row.status in ('failed', 'technical-failure', 'temporary-failure', 'permanent-failure'):
counts[row.notification_type]['failed'] += row.count
return counts

53
app/service/statistics.py Normal file
View File

@@ -0,0 +1,53 @@
import itertools
from datetime import datetime, timedelta
from app.models import EMAIL_TYPE, SMS_TYPE
def format_statistics(statistics):
# statistics come in a named tuple with uniqueness from 'notification_type', 'status' - however missing
# statuses/notification types won't be represented and the status types need to be simplified/summed up
# so we can return emails/sms * created, sent, and failed
counts = _create_zeroed_stats_dicts()
for row in statistics:
_update_statuses_from_row(counts[row.notification_type], row)
return counts
def format_weekly_notification_stats(statistics, service_created_at):
preceeding_monday = (service_created_at - timedelta(days=service_created_at.weekday()))
# turn a datetime into midnight that day http://stackoverflow.com/a/1937636
preceeding_monday_midnight = datetime.combine(preceeding_monday.date(), datetime.min.time())
week_dict = {
week: _create_zeroed_stats_dicts()
for week in _weeks_for_range(preceeding_monday_midnight, datetime.utcnow())
}
for row in statistics:
_update_statuses_from_row(week_dict[row.week_start][row.notification_type], row)
return week_dict
def _create_zeroed_stats_dicts():
return {
template_type: {
status: 0 for status in ('requested', 'delivered', 'failed')
} for template_type in (EMAIL_TYPE, SMS_TYPE)
}
def _update_statuses_from_row(update_dict, row):
update_dict['requested'] += row.count
if row.status == 'delivered':
update_dict['delivered'] += row.count
elif row.status in ('failed', 'technical-failure', 'temporary-failure', 'permanent-failure'):
update_dict['failed'] += row.count
def _weeks_for_range(start, end):
"""
Generator that yields dates from `start` to `end`, in 7 day intervals. End is inclusive.
"""
infinite_date_generator = (start + timedelta(days=i) for i in itertools.count(step=7))
return itertools.takewhile(lambda x: x <= end, infinite_date_generator)

View File

@@ -1,3 +1,3 @@
__travis_commit__ = "dev"
__time__ = "dev"
__travis_job_number__ = "dev"
__travis_commit__ = ""
__time__ = "2016-07-05:15:38:37"
__travis_job_number__ = ""

View File

@@ -1,14 +1,14 @@
---
files:
-
destination: /home/ubuntu/notifications-api
destination: /home/notify-app/notifications-api
source: /
hooks:
AfterInstall:
-
location: scripts/aws_install_dependencies.sh
runas: root
timeout: 300
timeout: 1000
-
location: scripts/aws_change_ownership.sh
runas: root

View File

@@ -3,19 +3,8 @@ from app import notify_celery, create_app
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
application.app_context().push()

View File

@@ -92,10 +92,12 @@ class Config(object):
Queue('retry', Exchange('default'), routing_key='retry'),
Queue('email-already-registered', Exchange('default'), routing_key='email-already-registered')
]
API_HOST_NAME = os.environ['API_HOST_NAME']
MMG_API_KEY = os.environ['MMG_API_KEY']
FIRETEXT_API_KEY = os.getenv("FIRETEXT_API_KEY")
LOADTESTING_NUMBER = os.getenv('LOADTESTING_NUMBER')
LOADTESTING_API_KEY = os.getenv("LOADTESTING_API_KEY")
CSV_UPLOAD_BUCKET_NAME = 'local-notifications-csv-upload'
CSV_UPLOAD_BUCKET_NAME = os.getenv("CSV_UPLOAD_BUCKET_NAME")
NOTIFICATIONS_ALERT = 5 # five mins
FROM_NUMBER = os.getenv('FROM_NUMBER')
@@ -109,27 +111,29 @@ class Config(object):
class Development(Config):
DEBUG = True
API_HOST_NAME = os.environ['API_HOST_NAME']
MMG_API_KEY = os.environ['MMG_API_KEY']
CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload'
class Preview(Config):
MMG_API_KEY = os.environ['MMG_API_KEY']
API_HOST_NAME = os.environ['API_HOST_NAME']
CSV_UPLOAD_BUCKET_NAME = 'preview-notifications-csv-upload'
CSV_UPLOAD_BUCKET_NAME = os.getenv("CSV_UPLOAD_BUCKET_NAME")
class Test(Development):
MMG_API_KEY = os.environ['MMG_API_KEY']
API_HOST_NAME = os.environ['API_HOST_NAME']
CSV_UPLOAD_BUCKET_NAME = 'test-notifications-csv-upload'
class Staging(Config):
CSV_UPLOAD_BUCKET_NAME = 'staging-notify-csv-upload'
class Live(Config):
CSV_UPLOAD_BUCKET_NAME = 'live-notifications-csv-upload'
STATSD_ENABLED = True
configs = {
'development': 'config.Development',
'test': 'config.Test',
'live': 'config_live.Live',
'staging': 'config_staging.Staging',
'preview': 'config.Preview'
'development': Development,
'test': Test,
'live': Live,
'staging': Staging,
'preview': Preview
}

View File

@@ -1,30 +0,0 @@
import os
from config import Config
class Live(Config):
ADMIN_BASE_URL = os.environ['LIVE_ADMIN_BASE_URL']
API_HOST_NAME = os.environ['LIVE_API_HOST_NAME']
ADMIN_CLIENT_SECRET = os.environ['LIVE_ADMIN_CLIENT_SECRET']
DANGEROUS_SALT = os.environ['LIVE_DANGEROUS_SALT']
NOTIFICATION_QUEUE_PREFIX = os.environ['LIVE_NOTIFICATION_QUEUE_PREFIX']
NOTIFY_JOB_QUEUE = os.environ['LIVE_NOTIFY_JOB_QUEUE']
SECRET_KEY = os.environ['LIVE_SECRET_KEY']
SQLALCHEMY_DATABASE_URI = os.environ['LIVE_SQLALCHEMY_DATABASE_URI']
VERIFY_CODE_FROM_EMAIL_ADDRESS = os.environ['LIVE_VERIFY_CODE_FROM_EMAIL_ADDRESS']
NOTIFY_EMAIL_DOMAIN = os.environ['LIVE_NOTIFY_EMAIL_DOMAIN']
FIRETEXT_API_KEY = os.getenv("LIVE_FIRETEXT_API_KEY")
MMG_API_KEY = os.environ['LIVE_MMG_API_KEY']
CSV_UPLOAD_BUCKET_NAME = 'live-notifications-csv-upload'
STATSD_ENABLED = True
STATSD_HOST = os.getenv('LIVE_STATSD_HOST')
STATSD_PORT = os.getenv('LIVE_STATSD_PORT')
STATSD_PREFIX = os.getenv('LIVE_STATSD_PREFIX')
FROM_NUMBER = os.getenv('LIVE_FROM_NUMBER')
BROKER_TRANSPORT_OPTIONS = {
'region': 'eu-west-1',
'polling_interval': 1, # 1 second
'visibility_timeout': 14410, # 60 seconds
'queue_name_prefix': os.environ['LIVE_NOTIFICATION_QUEUE_PREFIX'] + '-'
}

View File

@@ -1,26 +0,0 @@
import os
from config import Config
class Staging(Config):
ADMIN_BASE_URL = os.environ['STAGING_ADMIN_BASE_URL']
API_HOST_NAME = os.environ['STAGING_API_HOST_NAME']
ADMIN_CLIENT_SECRET = os.environ['STAGING_ADMIN_CLIENT_SECRET']
DANGEROUS_SALT = os.environ['STAGING_DANGEROUS_SALT']
NOTIFICATION_QUEUE_PREFIX = os.environ['STAGING_NOTIFICATION_QUEUE_PREFIX']
NOTIFY_JOB_QUEUE = os.environ['STAGING_NOTIFY_JOB_QUEUE']
SECRET_KEY = os.environ['STAGING_SECRET_KEY']
SQLALCHEMY_DATABASE_URI = os.environ['STAGING_SQLALCHEMY_DATABASE_URI']
VERIFY_CODE_FROM_EMAIL_ADDRESS = os.environ['STAGING_VERIFY_CODE_FROM_EMAIL_ADDRESS']
NOTIFY_EMAIL_DOMAIN = os.environ['STAGING_NOTIFY_EMAIL_DOMAIN']
FIRETEXT_API_KEY = os.getenv("STAGING_FIRETEXT_API_KEY")
MMG_API_KEY = os.environ['STAGING_MMG_API_KEY']
CSV_UPLOAD_BUCKET_NAME = 'staging-notifications-csv-upload'
FROM_NUMBER = os.getenv('STAGING_FROM_NUMBER')
BROKER_TRANSPORT_OPTIONS = {
'region': 'eu-west-1',
'polling_interval': 1, # 1 second
'visibility_timeout': 14410, # 60 seconds
'queue_name_prefix': os.environ['STAGING_NOTIFICATION_QUEUE_PREFIX'] + '-'
}

19
db.py
View File

@@ -4,19 +4,18 @@ from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
print("DOING SETUP")
print("\n" * 10)
print("SECRETS")
print("\n" * 10)
print(getAllSecrets(region="eu-west-1"))
print("\n" * 10)
print("ENV")
print("\n" * 10)
print(os.environ)
application = create_app()

View File

@@ -1,5 +1,5 @@
#!/bin/bash
export NOTIFY_API_ENVIRONMENT='config.Test'
export NOTIFY_ENVIRONMENT='test'
export ADMIN_BASE_URL='http://localhost:6012'
export ADMIN_CLIENT_USER_NAME='dev-notify-admin'
export ADMIN_CLIENT_SECRET='dev-notify-secret-key'

View File

@@ -1,5 +1,13 @@
#!/bin/bash
echo "Chown application to be owned by ubuntu"
cd /home/ubuntu/;
chown -R ubuntu:ubuntu notifications-api
if [ -e "/home/notify-app" ]
then
echo "Chown application to be owned by notify-app"
cd /home/notify-app/;
chown -R notify-app:govuk-notify-applications notifications-api
else
echo "Chown application to be owned by ubuntu"
cd /home/ubuntu/;
chown -R ubuntu:ubuntu notifications-api
fi

View File

@@ -1,4 +1,15 @@
echo "Install dependencies"
cd /home/ubuntu/notifications-api;
pip3 install -r /home/ubuntu/notifications-api/requirements.txt
python3 db.py db upgrade
if [ -e "/home/notify-app" ]
then
echo "Depenencies for notify-app"
cd /home/notify-app/notifications-api;
pip3 install -r /home/notify-app/notifications-api/requirements.txt
python3 db.py db upgrade
else
echo "Depenencies for ubuntu"
cd /home/ubuntu/notifications-api;
pip3 install -r /home/ubuntu/notifications-api/requirements.txt
python3 db.py db upgrade
fi

View File

@@ -101,7 +101,7 @@ reset_waiter_timeout() {
fi
# Base register/deregister action may take up to about 30 seconds
timeout=$((timeout + 30))
timeout=$((timeout + 60))
WAITER_ATTEMPTS=$((timeout / WAITER_INTERVAL))
}
@@ -371,7 +371,9 @@ get_elb_name_for_instance_name() {
local instance_name=$1
declare -A elb_to_instance_mapping
elb_to_instance_mapping['notify_api']='notify-api-elb'
elb_to_instance_mapping['notify_admin_api']='notify-admin-api-elb'
elb_to_instance_mapping['live_notify_api']='live-notify-api-elb'
elb_to_instance_mapping['staging_notify_api']='staging-notify-api-elb'
elb_to_instance_mapping['NotifyApi']='notify-api-elb'

View File

@@ -13,6 +13,7 @@ from app.models import (
ApiKey,
Job,
Notification,
NotificationHistory,
InvitedUser,
Permission,
ProviderStatistics,
@@ -42,6 +43,8 @@ def service_factory(notify_db, notify_db_session):
def get(self, service_name, user=None, template_type=None, email_from=None):
if not user:
user = sample_user(notify_db, notify_db_session)
if not email_from:
email_from = service_name
service = sample_service(notify_db, notify_db_session, service_name, user, email_from=email_from)
if template_type == 'email':
sample_template(
@@ -377,6 +380,31 @@ def mock_statsd_timing(mocker):
return mocker.patch('app.statsd_client.timing')
@pytest.fixture(scope='function')
def sample_notification_history(notify_db,
notify_db_session,
sample_template,
status='created',
created_at=None):
if created_at is None:
created_at = datetime.utcnow()
notification_history = NotificationHistory(
id=uuid.uuid4(),
service=sample_template.service,
template=sample_template,
template_version=sample_template.version,
status=status,
created_at=created_at,
notification_type=sample_template.template_type,
key_type=KEY_TYPE_NORMAL
)
notify_db.session.add(notification_history)
notify_db.session.commit()
return notification_history
@pytest.fixture(scope='function')
def mock_celery_send_sms_code(mocker):
return mocker.patch('app.celery.tasks.send_sms_code.apply_async')

View File

@@ -1,6 +1,8 @@
from datetime import datetime
import uuid
import pytest
import functools
import pytest
from sqlalchemy.orm.exc import FlushError, NoResultFound
from sqlalchemy.exc import IntegrityError
from freezegun import freeze_time
@@ -16,7 +18,8 @@ from app.dao.services_dao import (
dao_update_service,
delete_service_and_all_associated_db_objects,
dao_fetch_stats_for_service,
dao_fetch_todays_stats_for_service
dao_fetch_todays_stats_for_service,
dao_fetch_weekly_historical_stats_for_service
)
from app.dao.users_dao import save_model_user
from app.models import (
@@ -36,7 +39,8 @@ from app.models import (
)
from tests.app.conftest import (
sample_notification as create_notification
sample_notification as create_notification,
sample_notification_history as create_notification_history
)
@@ -407,7 +411,7 @@ def test_fetch_stats_counts_correctly(notify_db, notify_db_session, sample_templ
create_notification(notify_db, notify_db_session, template=sample_email_template, status='technical-failure')
create_notification(notify_db, notify_db_session, template=sample_template, status='created')
stats = dao_fetch_stats_for_service(sample_template.service.id)
stats = dao_fetch_stats_for_service(sample_template.service_id)
stats = sorted(stats, key=lambda x: (x.notification_type, x.status))
assert len(stats) == 3
@@ -435,9 +439,83 @@ def test_fetch_stats_for_today_only_includes_today(notify_db, notify_db_session,
with freeze_time('2001-01-02T12:00:00'):
right_now = create_notification(notify_db, None, to_field='3', status='created')
stats = dao_fetch_todays_stats_for_service(sample_template.service.id)
stats = dao_fetch_todays_stats_for_service(sample_template.service_id)
stats = {row.status: row.count for row in stats}
assert 'delivered' not in stats
assert stats['failed'] == 1
assert stats['created'] == 1
def test_fetch_weekly_historical_stats_separates_weeks(notify_db, notify_db_session, sample_template):
notification_history = functools.partial(
create_notification_history,
notify_db,
notify_db_session,
sample_template
)
week_53_last_yr = notification_history(created_at=datetime(2016, 1, 1))
week_1_last_yr = notification_history(created_at=datetime(2016, 1, 5))
last_sunday = notification_history(created_at=datetime(2016, 7, 24, 23, 59))
last_monday_morning = notification_history(created_at=datetime(2016, 7, 25, 0, 0))
last_monday_evening = notification_history(created_at=datetime(2016, 7, 25, 23, 59))
with freeze_time('Wed 27th July 2016'):
today = notification_history(created_at=datetime.now(), status='delivered')
ret = dao_fetch_weekly_historical_stats_for_service(sample_template.service_id)
assert [(row.week_start, row.status) for row in ret] == [
(datetime(2015, 12, 28), 'created'),
(datetime(2016, 1, 4), 'created'),
(datetime(2016, 7, 18), 'created'),
(datetime(2016, 7, 25), 'created'),
(datetime(2016, 7, 25), 'delivered')
]
assert ret[-2].count == 2
assert ret[-1].count == 1
def test_fetch_weekly_historical_stats_ignores_second_service(notify_db, notify_db_session, service_factory):
template_1 = service_factory.get('1').templates[0]
template_2 = service_factory.get('2').templates[0]
notification_history = functools.partial(
create_notification_history,
notify_db,
notify_db_session
)
last_sunday = notification_history(template_1, created_at=datetime(2016, 7, 24, 23, 59))
last_monday_morning = notification_history(template_2, created_at=datetime(2016, 7, 25, 0, 0))
with freeze_time('Wed 27th July 2016'):
ret = dao_fetch_weekly_historical_stats_for_service(template_1.service_id)
assert len(ret) == 1
assert ret[0].week_start == datetime(2016, 7, 18)
assert ret[0].count == 1
def test_fetch_weekly_historical_stats_separates_types(notify_db,
notify_db_session,
sample_template,
sample_email_template):
notification_history = functools.partial(
create_notification_history,
notify_db,
notify_db_session,
created_at=datetime(2016, 7, 25)
)
notification_history(sample_template)
notification_history(sample_email_template)
with freeze_time('Wed 27th July 2016'):
ret = dao_fetch_weekly_historical_stats_for_service(sample_template.service_id)
assert len(ret) == 2
assert ret[0].week_start == datetime(2016, 7, 25)
assert ret[0].count == 1
assert ret[0].notification_type == 'email'
assert ret[1].week_start == datetime(2016, 7, 25)
assert ret[1].count == 1
assert ret[1].notification_type == 'sms'

View File

@@ -1,5 +1,4 @@
import json
import collections
import uuid
import pytest
@@ -18,9 +17,6 @@ from tests.app.conftest import (
)
Row = collections.namedtuple('row', ('notification_type', 'status', 'count'))
def test_get_service_list(notify_api, service_factory):
with notify_api.test_request_context():
with notify_api.test_client() as client:
@@ -1127,37 +1123,28 @@ def test_get_detailed_service(notify_db, notify_db_session, notify_api, sample_s
assert service['statistics']['sms'] == stats
# email_counts and sms_counts are 3-tuple of requested, delivered, failed
@pytest.mark.idparametrize('stats, email_counts, sms_counts', {
'empty': ([], [0, 0, 0], [0, 0, 0]),
'always_increment_requested': ([
Row('email', 'delivered', 1),
Row('email', 'failed', 1)
], [2, 1, 1], [0, 0, 0]),
'dont_mix_email_and_sms': ([
Row('email', 'delivered', 1),
Row('sms', 'delivered', 1)
], [1, 1, 0], [1, 1, 0]),
'convert_fail_statuses_to_failed': ([
Row('email', 'failed', 1),
Row('email', 'technical-failure', 1),
Row('email', 'temporary-failure', 1),
Row('email', 'permanent-failure', 1),
], [4, 0, 4], [0, 0, 0]),
})
def test_format_statistics(stats, email_counts, sms_counts):
from app.service.rest import format_statistics
def test_get_weekly_notification_stats(notify_api, notify_db, notify_db_session):
with freeze_time('2000-01-01T12:00:00'):
noti = create_sample_notification(notify_db, notify_db_session)
with notify_api.test_request_context(), notify_api.test_client() as client, freeze_time('2000-01-02T12:00:00'):
resp = client.get(
'/service/{}/notifications/weekly'.format(noti.service_id),
headers=[create_authorization_header()]
)
ret = format_statistics(stats)
assert ret['email'] == {
status: count
for status, count
in zip(['requested', 'delivered', 'failed'], email_counts)
}
assert ret['sms'] == {
status: count
for status, count
in zip(['requested', 'delivered', 'failed'], sms_counts)
assert resp.status_code == 200
data = json.loads(resp.get_data(as_text=True))['data']
assert data == {
'1999-12-27': {
'sms': {
'requested': 1,
'delivered': 0,
'failed': 0
},
'email': {
'requested': 0,
'delivered': 0,
'failed': 0
}
}
}

View File

@@ -0,0 +1,138 @@
from datetime import datetime
import collections
import pytest
from freezegun import freeze_time
from app.service.statistics import (
format_statistics,
_weeks_for_range,
_create_zeroed_stats_dicts,
format_weekly_notification_stats
)
StatsRow = collections.namedtuple('row', ('notification_type', 'status', 'count'))
WeeklyStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'week_start', 'count'))
# email_counts and sms_counts are 3-tuple of requested, delivered, failed
@pytest.mark.idparametrize('stats, email_counts, sms_counts', {
    'empty': ([], [0, 0, 0], [0, 0, 0]),
    'always_increment_requested': ([
        StatsRow('email', 'delivered', 1),
        StatsRow('email', 'failed', 1)
    ], [2, 1, 1], [0, 0, 0]),
    'dont_mix_email_and_sms': ([
        StatsRow('email', 'delivered', 1),
        StatsRow('sms', 'delivered', 1)
    ], [1, 1, 0], [1, 1, 0]),
    'convert_fail_statuses_to_failed': ([
        StatsRow('email', 'failed', 1),
        StatsRow('email', 'technical-failure', 1),
        StatsRow('email', 'temporary-failure', 1),
        StatsRow('email', 'permanent-failure', 1),
    ], [4, 0, 4], [0, 0, 0]),
})
def test_format_statistics(stats, email_counts, sms_counts):
    """format_statistics folds per-status rows into requested/delivered/failed
    totals, keyed by notification type."""
    formatted = format_statistics(stats)
    statuses = ('requested', 'delivered', 'failed')
    assert formatted['email'] == dict(zip(statuses, email_counts))
    assert formatted['sms'] == dict(zip(statuses, sms_counts))
@pytest.mark.parametrize('start,end,dates', [
    (datetime(2016, 7, 25), datetime(2016, 7, 25), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 7, 28), [datetime(2016, 7, 25)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 1), [datetime(2016, 7, 25), datetime(2016, 8, 1)]),
    (datetime(2016, 7, 25), datetime(2016, 8, 10), [
        datetime(2016, 7, 25), datetime(2016, 8, 1), datetime(2016, 8, 8)
    ])
])
def test_weeks_for_range(start, end, dates):
    """_weeks_for_range yields one datetime per week between start and end."""
    generated = _weeks_for_range(start, end)
    assert [week for week in generated] == dates
def test_create_zeroed_stats_dicts():
    """A freshly zeroed stats dict covers both channels with all counters at 0."""
    zeroed = _create_zeroed_stats_dicts()
    assert zeroed == {
        channel: {'requested': 0, 'delivered': 0, 'failed': 0}
        for channel in ('sms', 'email')
    }
def _stats(requested, delivered, failed):
return {'requested': requested, 'delivered': delivered, 'failed': failed}
@freeze_time('2016-07-28T12:00:00')
@pytest.mark.parametrize('created_at, statistics, expected_results', [
    # with no stats and just today, return this week's stats
    (datetime(2016, 7, 28), [], {
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # with a random created time, still create the dict for midnight
    (datetime(2016, 7, 28, 12, 13, 14), [], {
        datetime(2016, 7, 25, 0, 0, 0): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # with no stats but a service
    # (created_at two weeks back: expect a zeroed bucket for every week from
    # the creation week up to and including the frozen "now" week)
    (datetime(2016, 7, 14), [], {
        datetime(2016, 7, 11): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 18): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # two stats for same week dont re-zero each other
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('email', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(1, 0, 0),
            'email': _stats(1, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(0, 0, 0),
            'email': _stats(0, 0, 0)
        }
    }),
    # two stats for same type are added together
    (datetime(2016, 7, 21), [
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'delivered', datetime(2016, 7, 18), 1),
        WeeklyStatsRow('sms', 'created', datetime(2016, 7, 25), 1),
    ], {
        datetime(2016, 7, 18): {
            'sms': _stats(2, 1, 0),
            'email': _stats(0, 0, 0)
        },
        datetime(2016, 7, 25): {
            'sms': _stats(1, 0, 0),
            'email': _stats(0, 0, 0)
        }
    })
])
def test_format_weekly_notification_stats(statistics, created_at, expected_results):
    """Weekly stats are bucketed per week-start, zero-filled from the service's
    creation week up to the current week, then incremented from the rows.

    The frozen clock above pins "now" at 2016-07-28 so the expected set of
    week buckets is deterministic regardless of when the suite runs.
    """
    assert format_weekly_notification_stats(statistics, created_at) == expected_results

11
wsgi.py
View File

@@ -4,20 +4,9 @@ from app import create_app
from credstash import getAllSecrets
# Location of the per-host file naming the deployment environment;
# default to 'live' when the file is absent (e.g. local runs).
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
    with open(default_env_file, 'r') as environment_file:
        # The file's first line is the environment name (whitespace-stripped).
        environment = environment_file.readline().strip()

# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))

from config import configs

# NOTE(review): configs[environment] presumably maps the environment name to
# the config value create_app reads via NOTIFY_API_ENVIRONMENT — confirm it
# is a string against config.configs.
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]

application = create_app()
if __name__ == "__main__":