From 676e3ec39aabeb6667864eea06186ff29ff069c5 Mon Sep 17 00:00:00 2001
From: Alexey Bezhan
Date: Fri, 29 Jun 2018 11:49:02 +0100
Subject: [PATCH] Stream delivery worker logs to stdout when running on PaaS

Our application servers and celery workers write logs both to a file
that is shipped to CloudWatch and to stdout, which is picked up by
CloudFoundry and sent to Logit Logstash.

This works with gunicorn and single-worker celery deployments, however
celery multi daemonizes worker processes, which detaches them from
stdout, so there's no log output in `cf logs` or Logit.

To fix this, we start a separate tail process to duplicate logs written
to a file to stdout, which should be picked up by CloudFoundry.
---
 scripts/run_multi_worker_app_paas.sh | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/scripts/run_multi_worker_app_paas.sh b/scripts/run_multi_worker_app_paas.sh
index d50358c6b..3965ee577 100755
--- a/scripts/run_multi_worker_app_paas.sh
+++ b/scripts/run_multi_worker_app_paas.sh
@@ -97,6 +97,12 @@ function start_aws_logs_agent {
   echo "AWS logs agent pid: ${AWSLOGS_AGENT_PID}"
 }
 
+function start_logs_tail {
+  exec tail -n0 -f "${LOGS_DIR}/app.log.json" &
+  LOGS_TAIL_PID=$!
+  echo "tail pid: ${LOGS_TAIL_PID}"
+}
+
 function run {
   while true; do
     get_celery_pids
@@ -104,6 +110,7 @@
       kill -0 ${APP_PID} 2&>/dev/null || return 1
     done
     kill -0 ${AWSLOGS_AGENT_PID} 2&>/dev/null || start_aws_logs_agent
+    kill -0 ${LOGS_TAIL_PID} 2>/dev/null || start_logs_tail
     sleep 1
   done
 }
@@ -120,5 +127,6 @@
 configure_aws_logs
 start_application "$@"
 start_aws_logs_agent
+start_logs_tail
 
 run