fix eventlet

This commit is contained in:
Kenneth Kehl
2025-07-15 08:15:08 -07:00
parent be443172b0
commit f080c94093
5 changed files with 9 additions and 8 deletions

View File

@@ -70,8 +70,8 @@ run-celery: ## Run celery, TODO remove purge for staging/prod
 	-A run_celery.notify_celery worker \
 	--pidfile="/tmp/celery.pid" \
 	--loglevel=INFO \
-	--pool=threads
-	--concurrency=10
+	--pool=eventlet
+	--concurrency=100
 .PHONY: dead-code

View File

@@ -5,6 +5,7 @@ import time
 from io import StringIO
 import botocore
+import eventlet
 from boto3 import Session
 from flask import current_app
@@ -249,7 +250,7 @@ def get_s3_files():
     for object_key in object_keys:
         read_s3_file(bucket_name, object_key, s3res)
         count = count + 1
-        time.sleep(0.2)
+        eventlet.sleep(0.2)
 except Exception:
     current_app.logger.exception(
         f"Trouble reading {object_key} which is # {count} during cache regeneration"
@@ -410,7 +411,7 @@ def get_job_from_s3(service_id, job_id):
                 )
                 retries += 1
                 sleep_time = backoff_factor * (2**retries)  # Exponential backoff
-                time.sleep(sleep_time)
+                eventlet.sleep(sleep_time)
                 continue
             else:
                 # Typically this is "NoSuchKey"

View File

@@ -1,6 +1,6 @@
 import json
-import time
+import eventlet
 from celery.signals import task_postrun
 from flask import current_app
 from requests import HTTPError, RequestException, request
@@ -84,7 +84,7 @@ def process_job(job_id, sender_id=None):
         process_row(row, template, job, service, sender_id=sender_id)
         count = count + 1
         if count % 3 == 0:
-            time.sleep(1)
+            eventlet.sleep(1)
     # End point/Exit point for message send flow.
     job_complete(job, start=start)

View File

@@ -151,7 +151,7 @@ class AwsCloudwatchClient(Client):
         # result = temp_client.get_query_results(queryId=query_id)
         # if result['status'] == 'Complete':
         #     break
-        # time.sleep(1)
+        # eventlet.sleep(1)
         # delivery_receipts = []
         # for log in result['results']:

View File

@@ -26,7 +26,7 @@ applications:
 - type: worker
   instances: ((worker_instances))
   memory: ((worker_memory))
-  command: newrelic-admin run-program celery -A run_celery.notify_celery worker --loglevel=INFO --pool=threads --concurrency=10 --prefetch-multiplier=2
+  command: newrelic-admin run-program celery -A run_celery.notify_celery worker --loglevel=INFO --pool=eventlet --concurrency=100 --prefetch-multiplier=2
 - type: scheduler
   instances: 1
   memory: ((scheduler_memory))