Stream delivery worker logs to stdout when running on PaaS

Our application servers and celery workers write logs to two places: a
file that is shipped to CloudWatch, and stdout, which is picked up by
CloudFoundry and sent to Logit Logstash.

This works for gunicorn and single-worker celery deployments. However,
celery multi daemonizes its worker processes, which detaches them from
stdout, so their logs never appear in `cf logs` or Logit.
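
For context, the workers are started with something roughly like the
invocation below (app module, worker names and log paths are
illustrative assumptions, not the exact command we run):

# celery multi forks and detaches each worker, so output goes to the
# --logfile target rather than to the parent process's stdout.
# %n is expanded by celery to the worker's node name.
celery multi start worker1 worker2 \
  -A app.celery \
  --logfile="${LOGS_DIR}/%n.log" \
  --pidfile="/tmp/%n.pid"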

To fix this, we start a separate tail process that mirrors lines
written to the log file back to stdout, where CloudFoundry picks them
up. `tail -n0 -f` follows the file from its current end, so a restarted
tail does not replay old lines.
Author: Alexey Bezhan
Date:   2018-06-29 11:49:02 +01:00
Commit: 676e3ec39a (parent: 2ad703b89b)

@@ -97,6 +97,12 @@ function start_aws_logs_agent {
   echo "AWS logs agent pid: ${AWSLOGS_AGENT_PID}"
 }
 
+function start_logs_tail {
+  exec tail -n0 -f ${LOGS_DIR}/app.log.json &
+  LOGS_TAIL_PID=$!
+  echo "tail pid: ${LOGS_TAIL_PID}"
+}
+
 function run {
   while true; do
     get_celery_pids
@@ -104,6 +110,7 @@ function run {
       kill -0 ${APP_PID} 2>/dev/null || return 1
     done
     kill -0 ${AWSLOGS_AGENT_PID} 2>/dev/null || start_aws_logs_agent
+    kill -0 ${LOGS_TAIL_PID} 2>/dev/null || start_logs_tail
     sleep 1
   done
 }
@@ -120,5 +127,6 @@ configure_aws_logs
 
 start_application "$@"
 start_aws_logs_agent
+start_logs_tail
 
 run
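
After deploying, the tailed lines should show up in the app's CF log
stream alongside the other stdout output. A quick way to check (the
app name here is illustrative):

cf logs notify-delivery-worker --recent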