# notifications-api/app/config.py

import json
import os
from datetime import timedelta

from celery.schedules import crontab
from kombu import Exchange, Queue
if os.environ.get('VCAP_SERVICES'):
    # on Cloud Foundry, config is a JSON blob in VCAP_SERVICES - unpack it,
    # and populate standard environment variables from it
    from app.cloudfoundry_config import extract_cloudfoundry_config

    extract_cloudfoundry_config()
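
# For reference, VCAP_SERVICES is the standard Cloud Foundry service-binding
# blob, shaped roughly like:
#   {"user-provided": [{"name": "...", "credentials": {...}}], ...}
# extract_cloudfoundry_config() is assumed here to flatten the bound
# credentials into the plain environment variables that the Config classes
# below read.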


class QueueNames(object):
PERIODIC = 'periodic-tasks'
PRIORITY = 'priority-tasks'
DATABASE = 'database-tasks'
SEND_SMS = 'send-sms-tasks'
SEND_EMAIL = 'send-email-tasks'
RESEARCH_MODE = 'research-mode-tasks'
STATISTICS = 'statistics-tasks'
JOBS = 'job-tasks'
RETRY = 'retry-tasks'
NOTIFY = 'notify-internal-tasks'
PROCESS_FTP = 'process-ftp-tasks'
CREATE_LETTERS_PDF = 'create-letters-pdf-tasks'
CALLBACKS = 'service-callbacks'
LETTERS = 'letter-tasks'
    ANTIVIRUS = 'antivirus-tasks'

    @staticmethod
def all_queues():
return [
QueueNames.PRIORITY,
QueueNames.PERIODIC,
QueueNames.DATABASE,
QueueNames.SEND_SMS,
QueueNames.SEND_EMAIL,
QueueNames.RESEARCH_MODE,
QueueNames.STATISTICS,
QueueNames.JOBS,
QueueNames.RETRY,
QueueNames.NOTIFY,
QueueNames.CREATE_LETTERS_PDF,
QueueNames.CALLBACKS,
QueueNames.LETTERS,
]
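
        # NOTE: PROCESS_FTP and ANTIVIRUS are not returned here - presumably
        # because those queues are consumed by other apps, so this API's
        # workers never listen on them.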


class TaskNames(object):
PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs'
ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs'
SCAN_FILE = 'scan-file'


class Config(object):
# URL of admin app
ADMIN_BASE_URL = os.getenv('ADMIN_BASE_URL', 'http://localhost:6012')
# URL of api app (on AWS this is the internal api endpoint)
API_HOST_NAME = os.getenv('API_HOST_NAME')
# admin app api key
ADMIN_CLIENT_SECRET = os.getenv('ADMIN_CLIENT_SECRET')
    # encryption secret/salt
SECRET_KEY = os.getenv('SECRET_KEY')
DANGEROUS_SALT = os.getenv('DANGEROUS_SALT')
    # DB connection string
SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')
# MMG API Key
MMG_API_KEY = os.getenv('MMG_API_KEY')
# Firetext API Key
FIRETEXT_API_KEY = os.getenv("FIRETEXT_API_KEY")
    # Firetext simulation key
LOADTESTING_API_KEY = os.getenv("LOADTESTING_API_KEY")
# Hosted graphite statsd prefix
STATSD_PREFIX = os.getenv('STATSD_PREFIX')
# Prefix to identify queues in SQS
NOTIFICATION_QUEUE_PREFIX = os.getenv('NOTIFICATION_QUEUE_PREFIX')
# URL of redis instance
REDIS_URL = os.getenv('REDIS_URL')
REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'

    # Template usage counts live in redis under per-day keys of the form
    # <service_id>-template-usage-<YYYY-MM-DD>, dated to line up with the
    # ft_billing table. Each increment refreshes an eight-day TTL, and reads
    # sum the last eight days of keys, so we never write to a key that has
    # already expired. The one thing we cannot distinguish is an absent key
    # meaning "first message today" versus "key was deleted" - after any such
    # data loss the counts have to be rebuilt.
EXPIRE_CACHE_TEN_MINUTES = 600
EXPIRE_CACHE_EIGHT_DAYS = 8 * 24 * 60 * 60
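
    # A sketch of the intended read/write pattern (hypothetical helper and
    # variable names, for illustration only):
    #   key = '{}-template-usage-{}'.format(service_id, date.today().isoformat())
    #   redis_store.incr(key)                       # creates the key if absent
    #   redis_store.expire(key, EXPIRE_CACHE_EIGHT_DAYS)
    # Reads then sum the values of the last eight days' keys.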
# Performance platform
PERFORMANCE_PLATFORM_ENABLED = False
PERFORMANCE_PLATFORM_URL = 'https://www.performance.service.gov.uk/data/govuk-notify/'
# Zendesk
ZENDESK_API_KEY = os.environ.get('ZENDESK_API_KEY')
# Logging
DEBUG = False
NOTIFY_LOG_PATH = os.getenv('NOTIFY_LOG_PATH')
# Cronitor
CRONITOR_ENABLED = False
CRONITOR_KEYS = json.loads(os.environ.get('CRONITOR_KEYS', '{}'))
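    # (Assumed shape, mirroring how the value is parsed above:
    #  CRONITOR_KEYS='{"run-scheduled-jobs": "<monitor-code>", ...}',
    #  mapping a scheduled task name to its Cronitor monitor code.)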
###########################
# Default config values ###
###########################
NOTIFY_ENVIRONMENT = 'development'
ADMIN_CLIENT_USER_NAME = 'notify-admin'
AWS_REGION = 'eu-west-1'
INVITATION_EXPIRATION_DAYS = 2
NOTIFY_APP_NAME = 'api'
SQLALCHEMY_RECORD_QUERIES = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_POOL_SIZE = int(os.environ.get('SQLALCHEMY_POOL_SIZE', 5))
SQLALCHEMY_POOL_TIMEOUT = 30
SQLALCHEMY_POOL_RECYCLE = 300
SQLALCHEMY_STATEMENT_TIMEOUT = 1200
PAGE_SIZE = 50
API_PAGE_SIZE = 250
TEST_MESSAGE_FILENAME = 'Test message'
ONE_OFF_MESSAGE_FILENAME = 'Report'
MAX_VERIFY_CODE_COUNT = 10
    MAX_LETTER_PDF_ZIP_FILESIZE = 500 * 1024 * 1024 # 500 MiB
MAX_LETTER_PDF_COUNT_PER_ZIP = 5000
CHECK_PROXY_HEADER = False
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
NOTIFY_USER_ID = '6af522d0-2915-4e52-83a3-3690455a5fe6'
INVITATION_EMAIL_TEMPLATE_ID = '4f46df42-f795-4cc4-83bb-65ca312f49cc'
SMS_CODE_TEMPLATE_ID = '36fb0730-6259-4da1-8a80-c8de22ad4246'
EMAIL_2FA_TEMPLATE_ID = '299726d2-dba6-42b8-8209-30e1d66ea164'
NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID = 'ece42649-22a8-4d06-b87f-d52d5d3f0a27'
PASSWORD_RESET_TEMPLATE_ID = '474e9242-823b-4f99-813d-ed392e7f1201'
ALREADY_REGISTERED_EMAIL_TEMPLATE_ID = '0880fbb1-a0c6-46f0-9a8e-36c986381ceb'
CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID = 'eb4d9930-87ab-4aef-9bce-786762687884'
SERVICE_NOW_LIVE_TEMPLATE_ID = '618185c6-3636-49cd-b7d2-6f6f5eb3bdde'
ORGANISATION_INVITATION_EMAIL_TEMPLATE_ID = '203566f0-d835-47c5-aa06-932439c86573'
BROKER_URL = 'sqs://'
    BROKER_TRANSPORT_OPTIONS = {
        'region': AWS_REGION,
        'polling_interval': 1, # 1 second
        'visibility_timeout': 310,
        'queue_name_prefix': NOTIFICATION_QUEUE_PREFIX
    }
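    # visibility_timeout is standard SQS behaviour: a fetched message is hidden
    # from other consumers for that many seconds, so 310s is presumably chosen
    # to outlast the longest expected task run so messages aren't redelivered
    # mid-processing.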
CELERY_ENABLE_UTC = True
CELERY_TIMEZONE = 'Europe/London'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_IMPORTS = (
'app.celery.tasks',
'app.celery.scheduled_tasks',
'app.celery.reporting_tasks',
'app.celery.nightly_tasks',
)
CELERYBEAT_SCHEDULE = {
# app/celery/scheduled_tasks.py
'run-scheduled-jobs': {
'task': 'run-scheduled-jobs',
'schedule': crontab(minute=1),
'options': {'queue': QueueNames.PERIODIC}
},
'delete-verify-codes': {
'task': 'delete-verify-codes',
'schedule': timedelta(minutes=63),
'options': {'queue': QueueNames.PERIODIC}
},
'delete-invitations': {
'task': 'delete-invitations',
'schedule': timedelta(minutes=66),
'options': {'queue': QueueNames.PERIODIC}
},
'switch-current-sms-provider-on-slow-delivery': {
'task': 'switch-current-sms-provider-on-slow-delivery',
'schedule': crontab(), # Every minute
'options': {'queue': QueueNames.PERIODIC}
},
'check-job-status': {
'task': 'check-job-status',
            'schedule': crontab(), # Every minute
'options': {'queue': QueueNames.PERIODIC}
},
'replay-created-notifications': {
'task': 'replay-created-notifications',
'schedule': crontab(minute='0, 15, 30, 45'),
'options': {'queue': QueueNames.PERIODIC}
},
# app/celery/nightly_tasks.py
'timeout-sending-notifications': {
'task': 'timeout-sending-notifications',
'schedule': crontab(hour=0, minute=5),
'options': {'queue': QueueNames.PERIODIC}
},
'create-nightly-billing': {
'task': 'create-nightly-billing',
'schedule': crontab(hour=0, minute=15),
'options': {'queue': QueueNames.PERIODIC}
},
'create-nightly-notification-status': {
'task': 'create-nightly-notification-status',
'schedule': crontab(hour=0, minute=30), # after 'timeout-sending-notifications'
'options': {'queue': QueueNames.PERIODIC}
},
'delete-sms-notifications': {
'task': 'delete-sms-notifications',
'schedule': crontab(hour=0, minute=45), # after 'create-nightly-notification-status'
'options': {'queue': QueueNames.PERIODIC}
},
'delete-email-notifications': {
'task': 'delete-email-notifications',
'schedule': crontab(hour=1, minute=0), # after 'create-nightly-notification-status'
'options': {'queue': QueueNames.PERIODIC}
},
'delete-letter-notifications': {
'task': 'delete-letter-notifications',
'schedule': crontab(hour=1, minute=20), # after 'create-nightly-notification-status'
'options': {'queue': QueueNames.PERIODIC}
},
'delete-inbound-sms': {
'task': 'delete-inbound-sms',
'schedule': crontab(hour=1, minute=40),
'options': {'queue': QueueNames.PERIODIC}
},
'send-daily-performance-platform-stats': {
'task': 'send-daily-performance-platform-stats',
'schedule': crontab(hour=2, minute=0),
'options': {'queue': QueueNames.PERIODIC}
},
'remove_transformed_dvla_files': {
'task': 'remove_transformed_dvla_files',
'schedule': crontab(hour=3, minute=40),
'options': {'queue': QueueNames.PERIODIC}
},
'remove_sms_email_jobs': {
'task': 'remove_sms_email_jobs',
'schedule': crontab(hour=4, minute=0),
'options': {'queue': QueueNames.PERIODIC},
},
'remove_letter_jobs': {
'task': 'remove_letter_jobs',
'schedule': crontab(hour=4, minute=20), # this has to run AFTER remove_transformed_dvla_files
# since we mark jobs as archived
'options': {'queue': QueueNames.PERIODIC},
},
'raise-alert-if-letter-notifications-still-sending': {
'task': 'raise-alert-if-letter-notifications-still-sending',
'schedule': crontab(hour=16, minute=30),
'options': {'queue': QueueNames.PERIODIC}
},
        # collate-letter-pdfs-for-day assumes it runs at an hour where BST makes
        # no difference to the truncated date, which determines the filename of
        # the PDFs to process
'collate-letter-pdfs-for-day': {
'task': 'collate-letter-pdfs-for-day',
'schedule': crontab(hour=17, minute=50),
'options': {'queue': QueueNames.PERIODIC}
},
'raise-alert-if-no-letter-ack-file': {
'task': 'raise-alert-if-no-letter-ack-file',
            'schedule': crontab(hour=23, minute=0),
'options': {'queue': QueueNames.PERIODIC}
},
}
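
    # Because CELERY_TIMEZONE is 'Europe/London', the crontab hours above are
    # interpreted in local (DST-aware) time - hence the comments flagging the
    # tasks that are sensitive to BST.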
CELERY_QUEUES = []
NOTIFICATIONS_ALERT = 5 # five mins
FROM_NUMBER = 'development'
STATSD_ENABLED = False
STATSD_HOST = "statsd.hostedgraphite.com"
STATSD_PORT = 8125
SENDING_NOTIFICATIONS_TIMEOUT_PERIOD = 259200 # 3 days
SIMULATED_EMAIL_ADDRESSES = (
'simulate-delivered@notifications.service.gov.uk',
'simulate-delivered-2@notifications.service.gov.uk',
'simulate-delivered-3@notifications.service.gov.uk',
)
SIMULATED_SMS_NUMBERS = ('+447700900000', '+447700900111', '+447700900222')
DVLA_BUCKETS = {
'job': '{}-dvla-file-per-job'.format(os.getenv('NOTIFY_ENVIRONMENT')),
'notification': '{}-dvla-letter-api-files'.format(os.getenv('NOTIFY_ENVIRONMENT'))
}
FREE_SMS_TIER_FRAGMENT_COUNT = 250000
SMS_INBOUND_WHITELIST = json.loads(os.environ.get('SMS_INBOUND_WHITELIST', '[]'))
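    # (e.g. SMS_INBOUND_WHITELIST='["203.0.113.195"]' - a JSON list of source
    # IPs allowed to deliver inbound SMS callbacks; the example value matches
    # the test fixture further down.)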
FIRETEXT_INBOUND_SMS_AUTH = json.loads(os.environ.get('FIRETEXT_INBOUND_SMS_AUTH', '[]'))
MMG_INBOUND_SMS_AUTH = json.loads(os.environ.get('MMG_INBOUND_SMS_AUTH', '[]'))
MMG_INBOUND_SMS_USERNAME = json.loads(os.environ.get('MMG_INBOUND_SMS_USERNAME', '[]'))
ROUTE_SECRET_KEY_1 = os.environ.get('ROUTE_SECRET_KEY_1', '')
ROUTE_SECRET_KEY_2 = os.environ.get('ROUTE_SECRET_KEY_2', '')
# Format is as follows:
# {"dataset_1": "token_1", ...}
PERFORMANCE_PLATFORM_ENDPOINTS = json.loads(os.environ.get('PERFORMANCE_PLATFORM_ENDPOINTS', '{}'))
TEMPLATE_PREVIEW_API_HOST = os.environ.get('TEMPLATE_PREVIEW_API_HOST', 'http://localhost:6013')
TEMPLATE_PREVIEW_API_KEY = os.environ.get('TEMPLATE_PREVIEW_API_KEY', 'my-secret-key')
DOCUMENT_DOWNLOAD_API_HOST = os.environ.get('DOCUMENT_DOWNLOAD_API_HOST', 'http://localhost:7000')
DOCUMENT_DOWNLOAD_API_KEY = os.environ.get('DOCUMENT_DOWNLOAD_API_KEY', 'auth-token')
MMG_URL = "https://api.mmg.co.uk/json/api.php"


######################
# Config overrides ###
######################


class Development(Config):
DEBUG = True
SQLALCHEMY_ECHO = False
CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'development-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'notify.tools-ftp'
LETTERS_PDF_BUCKET_NAME = 'development-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'development-letters-scan'
INVALID_PDF_BUCKET_NAME = 'development-letters-invalid-pdf'
ADMIN_CLIENT_SECRET = 'dev-notify-secret-key'
SECRET_KEY = 'dev-notify-secret-key'
DANGEROUS_SALT = 'dev-notify-salt'
MMG_INBOUND_SMS_AUTH = ['testkey']
MMG_INBOUND_SMS_USERNAME = ['username']
NOTIFY_ENVIRONMENT = 'development'
NOTIFY_LOG_PATH = 'application.log'
NOTIFICATION_QUEUE_PREFIX = 'development'
NOTIFY_EMAIL_DOMAIN = "notify.tools"
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/notification_api'
REDIS_URL = 'redis://localhost:6379/0'
STATSD_ENABLED = False
STATSD_HOST = "localhost"
STATSD_PORT = 1000
STATSD_PREFIX = "stats-prefix"
for queue in QueueNames.all_queues():
Config.CELERY_QUEUES.append(
Queue(queue, Exchange('default'), routing_key=queue)
)
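    # NB: this loop runs at class-definition time and appends to the shared
    # Config.CELERY_QUEUES list; Test (below) inherits from Development and
    # repeats it, so each queue appears to get declared twice at import time.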
API_HOST_NAME = "http://localhost:6011"
API_RATE_LIMIT_ENABLED = True


class Test(Development):
NOTIFY_EMAIL_DOMAIN = 'test.notify.com'
FROM_NUMBER = 'testing'
NOTIFY_ENVIRONMENT = 'test'
TESTING = True
CSV_UPLOAD_BUCKET_NAME = 'test-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'test-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'test.notify.com-ftp'
LETTERS_PDF_BUCKET_NAME = 'test-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'test-letters-scan'
INVALID_PDF_BUCKET_NAME = 'test-letters-invalid-pdf'
    # this is overridden in Jenkins and on Cloud Foundry
SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://localhost/test_notification_api')
BROKER_URL = 'you-forgot-to-mock-celery-in-your-tests://'
for queue in QueueNames.all_queues():
Config.CELERY_QUEUES.append(
Queue(queue, Exchange('default'), routing_key=queue)
)
API_RATE_LIMIT_ENABLED = True
    API_HOST_NAME = "http://localhost:6011"
SMS_INBOUND_WHITELIST = ['203.0.113.195']
FIRETEXT_INBOUND_SMS_AUTH = ['testkey']
TEMPLATE_PREVIEW_API_HOST = 'http://localhost:9999'


class Preview(Config):
NOTIFY_EMAIL_DOMAIN = 'notify.works'
NOTIFY_ENVIRONMENT = 'preview'
CSV_UPLOAD_BUCKET_NAME = 'preview-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'preview-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'notify.works-ftp'
LETTERS_PDF_BUCKET_NAME = 'preview-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'preview-letters-scan'
INVALID_PDF_BUCKET_NAME = 'preview-letters-invalid-pdf'
FROM_NUMBER = 'preview'
API_RATE_LIMIT_ENABLED = True
CHECK_PROXY_HEADER = False


class Staging(Config):
NOTIFY_EMAIL_DOMAIN = 'staging-notify.works'
NOTIFY_ENVIRONMENT = 'staging'
CSV_UPLOAD_BUCKET_NAME = 'staging-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'staging-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'staging-notify.works-ftp'
LETTERS_PDF_BUCKET_NAME = 'staging-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'staging-letters-scan'
INVALID_PDF_BUCKET_NAME = 'staging-letters-invalid-pdf'
STATSD_ENABLED = True
FROM_NUMBER = 'stage'
API_RATE_LIMIT_ENABLED = True
CHECK_PROXY_HEADER = True
REDIS_ENABLED = True


class Live(Config):
NOTIFY_EMAIL_DOMAIN = 'notifications.service.gov.uk'
NOTIFY_ENVIRONMENT = 'live'
CSV_UPLOAD_BUCKET_NAME = 'live-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'production-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp'
LETTERS_PDF_BUCKET_NAME = 'production-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'production-letters-scan'
INVALID_PDF_BUCKET_NAME = 'production-letters-invalid-pdf'
STATSD_ENABLED = True
FROM_NUMBER = 'GOVUK'
PERFORMANCE_PLATFORM_ENABLED = True
API_RATE_LIMIT_ENABLED = True
CHECK_PROXY_HEADER = True
CRONITOR_ENABLED = True


class CloudFoundryConfig(Config):
pass


# CloudFoundry sandbox
class Sandbox(CloudFoundryConfig):
NOTIFY_EMAIL_DOMAIN = 'notify.works'
NOTIFY_ENVIRONMENT = 'sandbox'
CSV_UPLOAD_BUCKET_NAME = 'cf-sandbox-notifications-csv-upload'
TEST_LETTERS_BUCKET_NAME = 'cf-sandbox-test-letters'
DVLA_RESPONSE_BUCKET_NAME = 'notify.works-ftp'
LETTERS_PDF_BUCKET_NAME = 'cf-sandbox-letters-pdf'
LETTERS_SCAN_BUCKET_NAME = 'cf-sandbox-letters-scan'
INVALID_PDF_BUCKET_NAME = 'cf-sandbox-letters-invalid-pdf'
FROM_NUMBER = 'sandbox'
REDIS_ENABLED = False


configs = {
'development': Development,
'test': Test,
'live': Live,
'production': Live,
'staging': Staging,
'preview': Preview,
'sandbox': Sandbox
}
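
# A minimal sketch of how this mapping is presumably consumed at start-up
# (the real call site lives in the app factory, not shown here):
#
#     from flask import Flask
#     from app.config import configs
#
#     application = Flask('app')
#     application.config.from_object(configs[os.environ['NOTIFY_ENVIRONMENT']])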