don't capture logs directly from stdout

previously in run_app_paas.sh, we captured stdout from the app and
piped that into the log file. However, this caused a number of
problems, mainly:

* exceptions with stack traces often weren't formatted properly,
  and kibana could not parse them
* celery logs were duplicated - we'd collect both the json logs and
  the human-readable stdout logs.

instead, with the updated utils library, we can log json directly
to the appropriate directory.
This commit is contained in:
Leo Hemsted
2017-07-31 13:28:34 +01:00
parent 14dd18aefc
commit e7b13e727a
6 changed files with 8 additions and 32 deletions

View File

@@ -18,7 +18,7 @@ def set_config_env_vars(vcap_services):
vcap_application = json.loads(os.environ['VCAP_APPLICATION'])
os.environ['NOTIFY_ENVIRONMENT'] = vcap_application['space_name']
os.environ['LOGGING_STDOUT_JSON'] = '1'
os.environ['NOTIFY_LOG_PATH'] = '/home/vcap/logs/app.log'
# Notify common config
for s in vcap_services['user-provided']:

View File

@@ -95,7 +95,6 @@ class Config(object):
# Logging
DEBUG = False
LOGGING_STDOUT_JSON = os.getenv('LOGGING_STDOUT_JSON') == '1'
###########################
# Default config values ###
@@ -106,7 +105,7 @@ class Config(object):
AWS_REGION = 'eu-west-1'
INVITATION_EXPIRATION_DAYS = 2
NOTIFY_APP_NAME = 'api'
NOTIFY_LOG_PATH = '/var/log/notify/application.log'
NOTIFY_LOG_PATH = None
SQLALCHEMY_COMMIT_ON_TEARDOWN = False
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
@@ -277,6 +276,7 @@ class Config(object):
######################
class Development(Config):
NOTIFY_LOG_PATH = 'application.log'
SQLALCHEMY_ECHO = False
NOTIFY_EMAIL_DOMAIN = 'notify.tools'
CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload'