split returned letters task into batches with a max count of returned letters
If we have too many returned letters, we'll exceed SQS's maximum task size of 256KB. Cap each task at 5,000 references - this is probably a bit conservative, but it follows the initial values we used when implementing this for the collate-letters task[^1]. Also follow the existing pattern of compressing the SQS payload, just to reduce it a little more.

[^1]: https://github.com/alphagov/notifications-api/pull/1536
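As a rough, illustrative sanity check on those numbers (the reference format below is made up; real letter references will differ), 5,000 short reference strings serialize to well under the 256KB SQS cap, and zlib shrinks the payload further:

```python
# Illustrative only: rough payload size for one batch of 5,000 made-up references.
import json
import zlib

references = ["QWERTYUIOP{:04d}".format(i) for i in range(5000)]
payload = json.dumps(references).encode("utf-8")

print(len(payload))                 # ~90KB of JSON for this made-up format
print(len(zlib.compress(payload)))  # a small fraction of that after zlib compression
```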
@@ -9,11 +9,19 @@ from app.v2.errors import register_errors
 letter_job = Blueprint("letter-job", __name__)
 register_errors(letter_job)
 
+# too many references will make SQS error (as the task can only be 256kb)
+MAX_REFERENCES_PER_TASK = 5000
+
 
 @letter_job.route('/letters/returned', methods=['POST'])
 def create_process_returned_letters_job():
-    references = validate(request.get_json(), letter_references)
+    references = validate(request.get_json(), letter_references)['references']
 
-    process_returned_letters_list.apply_async([references['references']], queue=QueueNames.DATABASE)
+    for start_index in range(0, len(references), MAX_REFERENCES_PER_TASK):
+        process_returned_letters_list.apply_async(
+            args=(references[start_index:start_index + MAX_REFERENCES_PER_TASK], ),
+            queue=QueueNames.DATABASE,
+            compression='zlib'
+        )
 
-    return jsonify(references=references['references']), 200
+    return jsonify(references=references), 200
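As a quick worked example of the slicing in that loop (using a made-up total of 12,000 references), each batch holds at most MAX_REFERENCES_PER_TASK items and the final batch carries the remainder:

```python
# Illustrative only: how the start_index loop batches 12,000 references.
MAX_REFERENCES_PER_TASK = 5000
references = list(range(12000))  # stand-in for real letter reference strings

batches = [
    references[start_index:start_index + MAX_REFERENCES_PER_TASK]
    for start_index in range(0, len(references), MAX_REFERENCES_PER_TASK)
]
print([len(batch) for batch in batches])  # [5000, 5000, 2000]
```

In the endpoint above, each such batch is then handed to process_returned_letters_list.apply_async(...) on the database queue with compression='zlib'.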