From 025b51c801d0c7aad2d84de9ac9b54da401106d6 Mon Sep 17 00:00:00 2001
From: Rebecca Law
Date: Mon, 21 Dec 2020 14:53:14 +0000
Subject: [PATCH 1/2] If the request_id exists in the Flask global context,
 add it to the kwargs for the task.

The request_id is set if the task is created from an HTTP request. If that
task then calls through to another task, this will set the request_id from
the global context. We should then be able to follow the creation of a
notification all the way from the original HTTP request to the sending task.
---
 app/celery/celery.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/app/celery/celery.py b/app/celery/celery.py
index 76e4f464e..0316a61b8 100644
--- a/app/celery/celery.py
+++ b/app/celery/celery.py
@@ -58,6 +58,8 @@ def make_task(app):
 
             if has_request_context() and hasattr(request, 'request_id'):
                 kwargs['request_id'] = request.request_id
+            elif g.request_id:
+                kwargs['request_id'] = g.request_id
 
             with SQS_APPLY_ASYNC_DURATION_SECONDS.labels(self.name).time():
                 return super().apply_async(args, kwargs, task_id, producer, link, link_error, **options)

From a1b31a6c2029aac2b07ddc9fde8c23c44b951d04 Mon Sep 17 00:00:00 2001
From: Rebecca Law
Date: Tue, 22 Dec 2020 15:47:35 +0000
Subject: [PATCH 2/2] Check for app_context and request_id in g to prevent
 AttributeErrors.

We could also add a request_id for tasks that are not spawned by an HTTP
request, for example scheduled or nightly tasks. That would let us match up
all the tasks spawned by a single task: create-night-billing, for example,
spawns 4 tasks, and those would all have the same id. I am not sure whether
that is helpful, and it might be confusing for logs that did not start from a
request to carry a request_id, so I have left it out.
---
 app/celery/celery.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/app/celery/celery.py b/app/celery/celery.py
index 0316a61b8..37c240307 100644
--- a/app/celery/celery.py
+++ b/app/celery/celery.py
@@ -1,10 +1,9 @@
 import time
-
 from gds_metrics.metrics import Histogram
 from celery import Celery, Task
 from celery.signals import worker_process_shutdown
 from flask import g, request
-from flask.ctx import has_request_context
+from flask.ctx import has_request_context, has_app_context
 
 
 @worker_process_shutdown.connect
@@ -55,10 +54,9 @@ def make_task(app):
         def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                         link=None, link_error=None, **options):
             kwargs = kwargs or {}
-
             if has_request_context() and hasattr(request, 'request_id'):
                 kwargs['request_id'] = request.request_id
-            elif g.request_id:
+            elif has_app_context() and 'request_id' in g:
                 kwargs['request_id'] = g.request_id
 
             with SQS_APPLY_ASYNC_DURATION_SECONDS.labels(self.name).time():
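
Reviewer note, not part of either patch: the snippet below is a minimal,
self-contained sketch of the guard logic the series ends up with, written to
show why both has_app_context() and the 'request_id' in g membership test are
needed (touching flask.g outside an app context raises a RuntimeError, and a
bare g.request_id raises an AttributeError when nothing has set it). Only the
two branch conditions mirror the diff above; the helper name
request_id_for_task_kwargs, the top-level flask imports (equivalent to the
flask.ctx ones in the patch), and the comments about middleware and task
wrappers setting the attributes are assumptions for illustration only.

from flask import Flask, g, request, has_app_context, has_request_context


def request_id_for_task_kwargs():
    """Return the request_id to attach to an outgoing task, or None."""
    if has_request_context() and hasattr(request, 'request_id'):
        # Task queued while handling an HTTP request; assumes some middleware
        # has stashed the id on the request object.
        return request.request_id
    if has_app_context() and 'request_id' in g:
        # Task queued from inside another task that stored the id on g. The
        # membership test avoids the AttributeError a bare g.request_id would
        # raise when nothing has set it.
        return g.request_id
    return None


if __name__ == '__main__':
    app = Flask(__name__)

    print(request_id_for_task_kwargs())      # None: no app or request context

    with app.app_context():
        print(request_id_for_task_kwargs())  # None: app context, but g is empty
        g.request_id = 'abc-123'             # what a task wrapper might do
        print(request_id_for_task_kwargs())  # abc-123: picked up from g

    with app.test_request_context('/'):
        request.request_id = 'req-789'       # what request-id middleware might do
        print(request_id_for_task_kwargs())  # req-789: taken from the request

Under those assumptions, running it prints None, None, abc-123, req-789. The
membership test on g keeps the "no id" case silent instead of relying on
exception handling, which is why patch 2 swaps it in for the bare attribute
access added in patch 1.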