2017-07-11 15:41:44 +01:00
|
|
|
import json
|
2021-03-10 13:55:06 +00:00
|
|
|
import os
|
|
|
|
|
from datetime import timedelta
|
2017-06-12 15:55:42 +01:00
|
|
|
|
2017-07-19 13:50:29 +01:00
|
|
|
from celery.schedules import crontab
|
|
|
|
|
from kombu import Exchange, Queue
|
|
|
|
|
|
2016-12-08 12:12:45 +00:00
|
|
|
if os.environ.get('VCAP_SERVICES'):
    # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate
    # standard environment variables from it.
    # NOTE: this must run before the Config class below is evaluated, since Config
    # reads those environment variables at import time. The import sits inside the
    # `if` so the cloudfoundry helper module is only loaded when running on PaaS.
    from app.cloudfoundry_config import extract_cloudfoundry_config

    extract_cloudfoundry_config()
|
|
|
|
|
|
2016-09-07 09:35:31 +01:00
|
|
|
|
2017-07-19 13:50:29 +01:00
|
|
|
class QueueNames(object):
    """String names of the celery/SQS queues used across the notify apps."""

    PERIODIC = 'periodic-tasks'
    PRIORITY = 'priority-tasks'
    DATABASE = 'database-tasks'
    SEND_SMS = 'send-sms-tasks'
    SEND_EMAIL = 'send-email-tasks'
    RESEARCH_MODE = 'research-mode-tasks'
    REPORTING = 'reporting-tasks'
    JOBS = 'job-tasks'
    RETRY = 'retry-tasks'
    NOTIFY = 'notify-internal-tasks'
    PROCESS_FTP = 'process-ftp-tasks'
    CREATE_LETTERS_PDF = 'create-letters-pdf-tasks'
    CALLBACKS = 'service-callbacks'
    CALLBACKS_RETRY = 'service-callbacks-retry'
    LETTERS = 'letter-tasks'
    SMS_CALLBACKS = 'sms-callbacks'
    ANTIVIRUS = 'antivirus-tasks'
    SANITISE_LETTERS = 'sanitise-letter-tasks'
    SAVE_API_EMAIL = 'save-api-email-tasks'
    SAVE_API_SMS = 'save-api-sms-tasks'
    BROADCASTS = 'broadcast-tasks'
    GOVUK_ALERTS = 'govuk-alerts'

    @staticmethod
    def all_queues():
        """Return the list of queues this app's celery workers consume from.

        Deliberately not every constant on this class: some queues (e.g.
        PROCESS_FTP, ANTIVIRUS, GOVUK_ALERTS) are absent — presumably they
        are consumed by other apps; confirm before adding one here.
        """
        consumed = (
            QueueNames.PRIORITY,
            QueueNames.PERIODIC,
            QueueNames.DATABASE,
            QueueNames.SEND_SMS,
            QueueNames.SEND_EMAIL,
            QueueNames.RESEARCH_MODE,
            QueueNames.REPORTING,
            QueueNames.JOBS,
            QueueNames.RETRY,
            QueueNames.NOTIFY,
            QueueNames.CREATE_LETTERS_PDF,
            QueueNames.CALLBACKS,
            QueueNames.CALLBACKS_RETRY,
            QueueNames.LETTERS,
            QueueNames.SMS_CALLBACKS,
            QueueNames.SAVE_API_EMAIL,
            QueueNames.SAVE_API_SMS,
            QueueNames.BROADCASTS,
        )
        return list(consumed)
|
|
|
|
|
|
|
|
|
|
|
2020-11-16 12:47:38 +00:00
|
|
|
class BroadcastProvider:
    """Identifiers for the mobile network operators that receive broadcasts."""

    EE = 'ee'
    VODAFONE = 'vodafone'
    THREE = 'three'
    O2 = 'o2'

    # every known provider — keep in sync with the constants above
    PROVIDERS = [EE, VODAFONE, THREE, O2]
|
|
|
|
|
|
|
|
|
|
|
2017-08-22 15:49:56 +01:00
|
|
|
class TaskNames(object):
    """Registered names of celery tasks referenced by this app by name."""

    PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs'
    ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs'
    SCAN_FILE = 'scan-file'
    SANITISE_LETTER = 'sanitise-and-upload-letter'
    CREATE_PDF_FOR_TEMPLATED_LETTER = 'create-pdf-for-templated-letter'
    PUBLISH_GOVUK_ALERTS = 'publish-govuk-alerts'
    RECREATE_PDF_FOR_PRECOMPILED_LETTER = 'recreate-pdf-for-precompiled-letter'
|
2017-08-22 15:49:56 +01:00
|
|
|
|
|
|
|
|
|
2016-12-08 12:12:45 +00:00
|
|
|
class Config(object):
    """Base configuration shared by every environment.

    The environment-specific classes below (Development, Test, Preview,
    Staging, Live) subclass this and override only what differs. Most
    secrets come from environment variables; several are JSON blobs that
    are decoded at import time.
    """

    # URL of admin app
    ADMIN_BASE_URL = os.getenv('ADMIN_BASE_URL', 'http://localhost:6012')

    # URL of api app (on AWS this is the internal api endpoint)
    API_HOST_NAME = os.getenv('API_HOST_NAME')

    # secrets that internal apps, such as the admin app or document download, must use to authenticate with the API
    ADMIN_CLIENT_ID = 'notify-admin'
    GOVUK_ALERTS_CLIENT_ID = 'govuk-alerts'

    # JSON mapping of internal client id -> list of acceptable API keys
    INTERNAL_CLIENT_API_KEYS = json.loads(
        os.environ.get('INTERNAL_CLIENT_API_KEYS', '{}')
    )

    # encryption secret/salt
    SECRET_KEY = os.getenv('SECRET_KEY')
    DANGEROUS_SALT = os.getenv('DANGEROUS_SALT')

    # DB connection string
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')

    # MMG API Key
    MMG_API_KEY = os.getenv('MMG_API_KEY')

    # Firetext API Key
    FIRETEXT_API_KEY = os.getenv("FIRETEXT_API_KEY")
    FIRETEXT_INTERNATIONAL_API_KEY = os.getenv("FIRETEXT_INTERNATIONAL_API_KEY", "placeholder")

    # Prefix to identify queues in SQS
    NOTIFICATION_QUEUE_PREFIX = os.getenv('NOTIFICATION_QUEUE_PREFIX')

    # URL of redis instance
    REDIS_URL = os.getenv('REDIS_URL')
    REDIS_ENABLED = os.getenv('REDIS_ENABLED') == '1'
    # cache TTLs, in seconds
    EXPIRE_CACHE_TEN_MINUTES = 600
    EXPIRE_CACHE_EIGHT_DAYS = 8 * 24 * 60 * 60

    # Zendesk
    ZENDESK_API_KEY = os.environ.get('ZENDESK_API_KEY')

    # Logging
    DEBUG = False
    NOTIFY_LOG_PATH = os.getenv('NOTIFY_LOG_PATH')

    # Cronitor
    CRONITOR_ENABLED = False
    CRONITOR_KEYS = json.loads(os.environ.get('CRONITOR_KEYS', '{}'))

    # Antivirus
    ANTIVIRUS_ENABLED = True

    ###########################
    # Default config values ###
    ###########################

    NOTIFY_ENVIRONMENT = 'development'
    AWS_REGION = 'eu-west-1'
    INVITATION_EXPIRATION_DAYS = 2
    NOTIFY_APP_NAME = 'api'
    SQLALCHEMY_RECORD_QUERIES = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_POOL_SIZE = int(os.environ.get('SQLALCHEMY_POOL_SIZE', 5))
    SQLALCHEMY_POOL_TIMEOUT = 30
    SQLALCHEMY_POOL_RECYCLE = 300
    SQLALCHEMY_STATEMENT_TIMEOUT = 1200
    PAGE_SIZE = 50
    API_PAGE_SIZE = 250
    TEST_MESSAGE_FILENAME = 'Test message'
    ONE_OFF_MESSAGE_FILENAME = 'Report'
    MAX_VERIFY_CODE_COUNT = 5
    MAX_FAILED_LOGIN_COUNT = 10

    # be careful increasing this size without being sure that we won't see slowness in pysftp
    MAX_LETTER_PDF_ZIP_FILESIZE = 40 * 1024 * 1024  # 40mb
    MAX_LETTER_PDF_COUNT_PER_ZIP = 500

    CHECK_PROXY_HEADER = False

    # these should always add up to 100%
    SMS_PROVIDER_RESTING_POINTS = {
        'mmg': 50,
        'firetext': 50
    }

    NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
    NOTIFY_USER_ID = '6af522d0-2915-4e52-83a3-3690455a5fe6'
    INVITATION_EMAIL_TEMPLATE_ID = '4f46df42-f795-4cc4-83bb-65ca312f49cc'
    BROADCAST_INVITATION_EMAIL_TEMPLATE_ID = '46152f7c-6901-41d5-8590-a5624d0d4359'
    SMS_CODE_TEMPLATE_ID = '36fb0730-6259-4da1-8a80-c8de22ad4246'
    EMAIL_2FA_TEMPLATE_ID = '299726d2-dba6-42b8-8209-30e1d66ea164'
    NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID = 'ece42649-22a8-4d06-b87f-d52d5d3f0a27'
    PASSWORD_RESET_TEMPLATE_ID = '474e9242-823b-4f99-813d-ed392e7f1201'
    ALREADY_REGISTERED_EMAIL_TEMPLATE_ID = '0880fbb1-a0c6-46f0-9a8e-36c986381ceb'
    CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID = 'eb4d9930-87ab-4aef-9bce-786762687884'
    SERVICE_NOW_LIVE_TEMPLATE_ID = '618185c6-3636-49cd-b7d2-6f6f5eb3bdde'
    ORGANISATION_INVITATION_EMAIL_TEMPLATE_ID = '203566f0-d835-47c5-aa06-932439c86573'
    TEAM_MEMBER_EDIT_EMAIL_TEMPLATE_ID = 'c73f1d71-4049-46d5-a647-d013bdeca3f0'
    TEAM_MEMBER_EDIT_MOBILE_TEMPLATE_ID = '8a31520f-4751-4789-8ea1-fe54496725eb'
    REPLY_TO_EMAIL_ADDRESS_VERIFICATION_TEMPLATE_ID = 'a42f1d17-9404-46d5-a647-d013bdfca3e1'
    MOU_SIGNER_RECEIPT_TEMPLATE_ID = '4fd2e43c-309b-4e50-8fb8-1955852d9d71'
    MOU_SIGNED_ON_BEHALF_SIGNER_RECEIPT_TEMPLATE_ID = 'c20206d5-bf03-4002-9a90-37d5032d9e84'
    MOU_SIGNED_ON_BEHALF_ON_BEHALF_RECEIPT_TEMPLATE_ID = '522b6657-5ca5-4368-a294-6b527703bd0b'
    NOTIFY_INTERNATIONAL_SMS_SENDER = '07984404008'
    LETTERS_VOLUME_EMAIL_TEMPLATE_ID = '11fad854-fd38-4a7c-bd17-805fb13dfc12'
    # we only need real email in Live environment (production)
    DVLA_EMAIL_ADDRESSES = json.loads(os.environ.get('DVLA_EMAIL_ADDRESSES', '[]'))

    CELERY = {
        'broker_url': 'sqs://',
        'broker_transport_options': {
            'region': AWS_REGION,
            'visibility_timeout': 310,
            'queue_name_prefix': NOTIFICATION_QUEUE_PREFIX,
        },
        'timezone': 'Europe/London',
        'imports': [
            'app.celery.tasks',
            'app.celery.scheduled_tasks',
            'app.celery.reporting_tasks',
            'app.celery.nightly_tasks',
        ],
        # this is overridden by the -Q command, but locally, we should read from all queues
        'task_queues': [
            Queue(queue, Exchange('default'), routing_key=queue) for queue in QueueNames.all_queues()
        ],
        'beat_schedule': {
            # app/celery/scheduled_tasks.py
            'run-scheduled-jobs': {
                'task': 'run-scheduled-jobs',
                'schedule': crontab(minute='0,15,30,45'),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'delete-verify-codes': {
                'task': 'delete-verify-codes',
                'schedule': timedelta(minutes=63),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'delete-invitations': {
                'task': 'delete-invitations',
                'schedule': timedelta(minutes=66),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'switch-current-sms-provider-on-slow-delivery': {
                'task': 'switch-current-sms-provider-on-slow-delivery',
                'schedule': crontab(),  # Every minute
                'options': {'queue': QueueNames.PERIODIC}
            },
            'check-job-status': {
                'task': 'check-job-status',
                'schedule': crontab(),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'tend-providers-back-to-middle': {
                'task': 'tend-providers-back-to-middle',
                'schedule': crontab(minute='*/5'),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'check-for-missing-rows-in-completed-jobs': {
                'task': 'check-for-missing-rows-in-completed-jobs',
                'schedule': crontab(minute='*/10'),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'replay-created-notifications': {
                'task': 'replay-created-notifications',
                # spec normalised from '0, 15, 30, 45' for consistency with 'run-scheduled-jobs'
                'schedule': crontab(minute='0,15,30,45'),
                'options': {'queue': QueueNames.PERIODIC}
            },
            # app/celery/nightly_tasks.py
            'timeout-sending-notifications': {
                'task': 'timeout-sending-notifications',
                'schedule': crontab(hour=0, minute=5),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'create-nightly-billing': {
                'task': 'create-nightly-billing',
                'schedule': crontab(hour=0, minute=15),
                'options': {'queue': QueueNames.REPORTING}
            },
            'create-nightly-notification-status': {
                'task': 'create-nightly-notification-status',
                'schedule': crontab(hour=0, minute=30),  # after 'timeout-sending-notifications'
                'options': {'queue': QueueNames.REPORTING}
            },
            'delete-notifications-older-than-retention': {
                'task': 'delete-notifications-older-than-retention',
                'schedule': crontab(hour=3, minute=0),  # after 'create-nightly-notification-status'
                # runs on the reporting worker: like the reporting tasks it is a
                # long-running nightly job and benefits from that app's tuning
                'options': {'queue': QueueNames.REPORTING}
            },
            'delete-inbound-sms': {
                'task': 'delete-inbound-sms',
                'schedule': crontab(hour=1, minute=40),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'save-daily-notification-processing-time': {
                'task': 'save-daily-notification-processing-time',
                'schedule': crontab(hour=2, minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'remove_sms_email_jobs': {
                'task': 'remove_sms_email_jobs',
                'schedule': crontab(hour=4, minute=0),
                'options': {'queue': QueueNames.PERIODIC},
            },
            'remove_letter_jobs': {
                'task': 'remove_letter_jobs',
                'schedule': crontab(hour=4, minute=20),
                # since we mark jobs as archived
                'options': {'queue': QueueNames.PERIODIC},
            },
            'check-if-letters-still-in-created': {
                'task': 'check-if-letters-still-in-created',
                'schedule': crontab(day_of_week='mon-fri', hour=7, minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'check-if-letters-still-pending-virus-check': {
                'task': 'check-if-letters-still-pending-virus-check',
                'schedule': crontab(day_of_week='mon-fri', hour='9,15', minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'check-for-services-with-high-failure-rates-or-sending-to-tv-numbers': {
                'task': 'check-for-services-with-high-failure-rates-or-sending-to-tv-numbers',
                'schedule': crontab(day_of_week='mon-fri', hour=10, minute=30),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'raise-alert-if-letter-notifications-still-sending': {
                'task': 'raise-alert-if-letter-notifications-still-sending',
                'schedule': crontab(hour=17, minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
            # The collate-letter-pdf does assume it is called in an hour that BST does not make a
            # difference to the truncate date which translates to the filename to process
            'collate-letter-pdfs-to-be-sent': {
                'task': 'collate-letter-pdfs-to-be-sent',
                'schedule': crontab(hour=17, minute=50),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'raise-alert-if-no-letter-ack-file': {
                'task': 'raise-alert-if-no-letter-ack-file',
                'schedule': crontab(hour=23, minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'trigger-link-tests': {
                'task': 'trigger-link-tests',
                'schedule': timedelta(minutes=15),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'auto-expire-broadcast-messages': {
                'task': 'auto-expire-broadcast-messages',
                'schedule': timedelta(minutes=5),
                'options': {'queue': QueueNames.PERIODIC}
            },
            'remove-yesterdays-planned-tests-on-govuk-alerts': {
                'task': 'remove-yesterdays-planned-tests-on-govuk-alerts',
                'schedule': crontab(hour=0, minute=0),
                'options': {'queue': QueueNames.PERIODIC}
            },
        }
    }

    # we can set celeryd_prefetch_multiplier to be 1 for celery apps which handle only long running tasks
    if os.getenv('CELERYD_PREFETCH_MULTIPLIER'):
        CELERY['worker_prefetch_multiplier'] = os.getenv('CELERYD_PREFETCH_MULTIPLIER')

    FROM_NUMBER = 'development'

    STATSD_HOST = os.getenv('STATSD_HOST')
    STATSD_PORT = 8125
    STATSD_ENABLED = bool(STATSD_HOST)

    SENDING_NOTIFICATIONS_TIMEOUT_PERIOD = 259200  # 3 days

    SIMULATED_EMAIL_ADDRESSES = (
        'simulate-delivered@notifications.service.gov.uk',
        'simulate-delivered-2@notifications.service.gov.uk',
        'simulate-delivered-3@notifications.service.gov.uk',
    )

    SIMULATED_SMS_NUMBERS = ('+447700900000', '+447700900111', '+447700900222')

    FREE_SMS_TIER_FRAGMENT_COUNT = 250000

    SMS_INBOUND_WHITELIST = json.loads(os.environ.get('SMS_INBOUND_WHITELIST', '[]'))
    FIRETEXT_INBOUND_SMS_AUTH = json.loads(os.environ.get('FIRETEXT_INBOUND_SMS_AUTH', '[]'))
    MMG_INBOUND_SMS_AUTH = json.loads(os.environ.get('MMG_INBOUND_SMS_AUTH', '[]'))
    MMG_INBOUND_SMS_USERNAME = json.loads(os.environ.get('MMG_INBOUND_SMS_USERNAME', '[]'))
    ROUTE_SECRET_KEY_1 = os.environ.get('ROUTE_SECRET_KEY_1', '')
    ROUTE_SECRET_KEY_2 = os.environ.get('ROUTE_SECRET_KEY_2', '')

    HIGH_VOLUME_SERVICE = json.loads(os.environ.get('HIGH_VOLUME_SERVICE', '[]'))

    TEMPLATE_PREVIEW_API_HOST = os.environ.get('TEMPLATE_PREVIEW_API_HOST', 'http://localhost:6013')
    TEMPLATE_PREVIEW_API_KEY = os.environ.get('TEMPLATE_PREVIEW_API_KEY', 'my-secret-key')

    DOCUMENT_DOWNLOAD_API_HOST = os.environ.get('DOCUMENT_DOWNLOAD_API_HOST', 'http://localhost:7000')
    DOCUMENT_DOWNLOAD_API_KEY = os.environ.get('DOCUMENT_DOWNLOAD_API_KEY', 'auth-token')

    # these environment vars aren't defined in the manifest so to set them on paas use `cf set-env`
    MMG_URL = os.environ.get("MMG_URL", "https://api.mmg.co.uk/jsonv2a/api.php")
    FIRETEXT_URL = os.environ.get("FIRETEXT_URL", "https://www.firetext.co.uk/api/sendsms/json")
    SES_STUB_URL = os.environ.get("SES_STUB_URL")

    # NOTE(review): a second, identical `AWS_REGION = 'eu-west-1'` assignment that
    # used to sit here was removed — the value is declared once above.

    CBC_PROXY_ENABLED = True
    CBC_PROXY_AWS_ACCESS_KEY_ID = os.environ.get('CBC_PROXY_AWS_ACCESS_KEY_ID', '')
    CBC_PROXY_AWS_SECRET_ACCESS_KEY = os.environ.get('CBC_PROXY_AWS_SECRET_ACCESS_KEY', '')

    ENABLED_CBCS = {BroadcastProvider.EE, BroadcastProvider.THREE, BroadcastProvider.O2, BroadcastProvider.VODAFONE}

    # as defined in api db migration 0331_add_broadcast_org.py
    BROADCAST_ORGANISATION_ID = '38e4bf69-93b0-445d-acee-53ea53fe02df'
2017-07-03 14:14:17 +01:00
|
|
|
|
2016-09-07 09:35:31 +01:00
|
|
|
######################
|
|
|
|
|
# Config overrides ###
|
|
|
|
|
######################
|
|
|
|
|
|
2015-12-10 10:56:59 +00:00
|
|
|
class Development(Config):
    """Overrides for running the apps locally."""

    DEBUG = True
    SQLALCHEMY_ECHO = False

    # development copies of the S3 buckets
    CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'development-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'development-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'notify.tools-ftp'
    LETTERS_PDF_BUCKET_NAME = 'development-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'development-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'development-letters-invalid-pdf'
    TRANSIENT_UPLOADED_LETTERS = 'development-transient-uploaded-letters'
    LETTER_SANITISE_BUCKET_NAME = 'development-letters-sanitise'

    # hard-coded, well-known secrets — acceptable for local development only
    INTERNAL_CLIENT_API_KEYS = {
        Config.ADMIN_CLIENT_ID: ['dev-notify-secret-key'],
        Config.GOVUK_ALERTS_CLIENT_ID: ['govuk-alerts-secret-key']
    }

    SECRET_KEY = 'dev-notify-secret-key'
    DANGEROUS_SALT = 'dev-notify-salt'

    MMG_INBOUND_SMS_AUTH = ['testkey']
    MMG_INBOUND_SMS_USERNAME = ['username']

    NOTIFY_ENVIRONMENT = 'development'
    NOTIFY_LOG_PATH = 'application.log'
    NOTIFY_EMAIL_DOMAIN = "notify.tools"

    # defaults point at a local postgres/redis; set the env vars to override
    # (e.g. when running celery inside docker with a different db user/port)
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://localhost/notification_api')
    REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0')

    ANTIVIRUS_ENABLED = os.getenv('ANTIVIRUS_ENABLED') == '1'

    API_HOST_NAME = "http://localhost:6011"
    API_RATE_LIMIT_ENABLED = True
    DVLA_EMAIL_ADDRESSES = ['success@simulator.amazonses.com']

    CBC_PROXY_ENABLED = False
|
2016-08-31 11:40:31 +01:00
|
|
|
|
2018-02-21 18:42:24 +00:00
|
|
|
class Test(Development):
    """Overrides used when running the test suite."""

    NOTIFY_EMAIL_DOMAIN = 'test.notify.com'
    FROM_NUMBER = 'testing'
    NOTIFY_ENVIRONMENT = 'test'
    TESTING = True

    HIGH_VOLUME_SERVICE = [
        '941b6f9a-50d7-4742-8d50-f365ca74bf27',
        '63f95b86-2d19-4497-b8b2-ccf25457df4e',
        '7e5950cb-9954-41f5-8376-962b8c8555cf',
        '10d1b9c9-0072-4fa9-ae1c-595e333841da',
    ]

    # test copies of the S3 buckets
    CSV_UPLOAD_BUCKET_NAME = 'test-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'test-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'test-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'test.notify.com-ftp'
    LETTERS_PDF_BUCKET_NAME = 'test-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'test-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'test-letters-invalid-pdf'
    TRANSIENT_UPLOADED_LETTERS = 'test-transient-uploaded-letters'
    LETTER_SANITISE_BUCKET_NAME = 'test-letters-sanitise'

    # this is overridden in jenkins and on cloudfoundry
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://localhost/test_notification_api')

    # broker url is deliberately unusable so that unmocked celery calls fail loudly
    CELERY = {
        **Config.CELERY,
        'broker_url': 'you-forgot-to-mock-celery-in-your-tests://'
    }

    ANTIVIRUS_ENABLED = True

    API_RATE_LIMIT_ENABLED = True
    API_HOST_NAME = "http://localhost:6011"

    SMS_INBOUND_WHITELIST = ['203.0.113.195']
    FIRETEXT_INBOUND_SMS_AUTH = ['testkey']
    TEMPLATE_PREVIEW_API_HOST = 'http://localhost:9999'

    MMG_URL = 'https://example.com/mmg'
    FIRETEXT_URL = 'https://example.com/firetext'

    CBC_PROXY_ENABLED = True
    DVLA_EMAIL_ADDRESSES = ['success@simulator.amazonses.com', 'success+2@simulator.amazonses.com']
|
2016-05-13 17:20:29 +01:00
|
|
|
|
2016-04-07 13:44:04 +01:00
|
|
|
class Preview(Config):
    """Overrides for the preview (notify.works) environment."""

    NOTIFY_EMAIL_DOMAIN = 'notify.works'
    NOTIFY_ENVIRONMENT = 'preview'
    CSV_UPLOAD_BUCKET_NAME = 'preview-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'preview-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'preview-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'notify.works-ftp'
    LETTERS_PDF_BUCKET_NAME = 'preview-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'preview-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'preview-letters-invalid-pdf'
    TRANSIENT_UPLOADED_LETTERS = 'preview-transient-uploaded-letters'
    LETTER_SANITISE_BUCKET_NAME = 'preview-letters-sanitise'
    FROM_NUMBER = 'preview'
    API_RATE_LIMIT_ENABLED = True
    CHECK_PROXY_HEADER = False
|
|
|
|
|
|
2016-07-04 16:54:03 +01:00
|
|
|
class Staging(Config):
    """Config overrides for the staging environment."""

    NOTIFY_ENVIRONMENT = 'staging'
    NOTIFY_EMAIL_DOMAIN = 'staging-notify.works'
    FROM_NUMBER = 'stage'

    # S3 bucket names for this environment.
    CSV_UPLOAD_BUCKET_NAME = 'staging-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'staging-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'staging-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'staging-notify.works-ftp'
    LETTERS_PDF_BUCKET_NAME = 'staging-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'staging-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'staging-letters-invalid-pdf'
    TRANSIENT_UPLOADED_LETTERS = 'staging-transient-uploaded-letters'
    LETTER_SANITISE_BUCKET_NAME = 'staging-letters-sanitise'

    API_RATE_LIMIT_ENABLED = True
    CHECK_PROXY_HEADER = True
|
|
class Live(Config):
    """Config overrides for the live (production) environment."""

    NOTIFY_ENVIRONMENT = 'live'
    NOTIFY_EMAIL_DOMAIN = 'notifications.service.gov.uk'
    FROM_NUMBER = 'GOVUK'

    # S3 bucket names for this environment.
    CSV_UPLOAD_BUCKET_NAME = 'live-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'production-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'production-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'notifications.service.gov.uk-ftp'
    LETTERS_PDF_BUCKET_NAME = 'production-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'production-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'production-letters-invalid-pdf'
    TRANSIENT_UPLOADED_LETTERS = 'production-transient-uploaded-letters'
    LETTER_SANITISE_BUCKET_NAME = 'production-letters-sanitise'

    API_RATE_LIMIT_ENABLED = True
    CHECK_PROXY_HEADER = True
    # No SES stub in production — real email sending is used.
    SES_STUB_URL = None
    CRONITOR_ENABLED = True
|
class CloudFoundryConfig(Config):
    # Intentionally empty: CloudFoundry deployments take the base Config
    # values, which are populated from the VCAP_SERVICES environment blob at
    # import time (see the extract_cloudfoundry_config() call at module top).
    pass
|
# CloudFoundry sandbox
class Sandbox(CloudFoundryConfig):
    """Config overrides for the CloudFoundry sandbox environment."""

    NOTIFY_ENVIRONMENT = 'sandbox'
    NOTIFY_EMAIL_DOMAIN = 'notify.works'
    FROM_NUMBER = 'sandbox'

    # S3 bucket names for this environment.
    CSV_UPLOAD_BUCKET_NAME = 'cf-sandbox-notifications-csv-upload'
    CONTACT_LIST_BUCKET_NAME = 'cf-sandbox-contact-list'
    TEST_LETTERS_BUCKET_NAME = 'cf-sandbox-test-letters'
    DVLA_RESPONSE_BUCKET_NAME = 'notify.works-ftp'
    # This was previously assigned twice with the same value; the redundant
    # duplicate assignment has been removed.
    LETTERS_PDF_BUCKET_NAME = 'cf-sandbox-letters-pdf'
    LETTERS_SCAN_BUCKET_NAME = 'cf-sandbox-letters-scan'
    INVALID_PDF_BUCKET_NAME = 'cf-sandbox-letters-invalid-pdf'
|
|
# Registry mapping a NOTIFY_ENVIRONMENT name to its config class, used to
# select the active configuration at startup. 'live' and 'production' are
# deliberate aliases for the same class.
configs = {
    'development': Development,
    'test': Test,
    'live': Live,
    'production': Live,
    'staging': Staging,
    'preview': Preview,
    'sandbox': Sandbox
}