mirror of
https://github.com/GSA/notifications-api.git
synced 2026-01-30 14:31:57 -05:00
try batch inserts
This commit is contained in:
@@ -1,10 +1,11 @@
|
|||||||
|
import json
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
|
|
||||||
from flask import current_app
|
from flask import current_app
|
||||||
from sqlalchemy import between
|
from sqlalchemy import between
|
||||||
from sqlalchemy.exc import SQLAlchemyError
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
|
||||||
from app import notify_celery, zendesk_client
|
from app import notify_celery, redis_store, zendesk_client
|
||||||
from app.celery.tasks import (
|
from app.celery.tasks import (
|
||||||
get_recipient_csv_and_template_and_sender_id,
|
get_recipient_csv_and_template_and_sender_id,
|
||||||
process_incomplete_jobs,
|
process_incomplete_jobs,
|
||||||
@@ -24,6 +25,7 @@ from app.dao.jobs_dao import (
|
|||||||
find_missing_row_for_job,
|
find_missing_row_for_job,
|
||||||
)
|
)
|
||||||
from app.dao.notifications_dao import (
|
from app.dao.notifications_dao import (
|
||||||
|
dao_batch_insert_notifications,
|
||||||
dao_close_out_delivery_receipts,
|
dao_close_out_delivery_receipts,
|
||||||
dao_update_delivery_receipts,
|
dao_update_delivery_receipts,
|
||||||
notifications_not_yet_sent,
|
notifications_not_yet_sent,
|
||||||
@@ -286,3 +288,17 @@ def process_delivery_receipts(self):
|
|||||||
)
|
)
|
||||||
def cleanup_delivery_receipts(self):
|
def cleanup_delivery_receipts(self):
|
||||||
dao_close_out_delivery_receipts()
|
dao_close_out_delivery_receipts()
|
||||||
|
|
||||||
|
|
||||||
|
@notify_celery.task(bind=True, name="batch-insert-notifications")
def batch_insert_notifications(self):
    """Drain queued notification messages from redis and insert them in one batch.

    Pops every pending JSON message off the "notification_queue" redis list,
    deserializes each into a dict, and hands the whole batch to
    dao_batch_insert_notifications().  On insert failure every message is
    pushed back onto the queue so nothing is lost; the error is logged.
    """
    # NOTE(review): the producer side appears to rpush onto "message_queue"
    # (see persist_notification) while this consumer pops "notification_queue"
    # — confirm the two key names agree.
    batch = []
    while True:
        # lpop returns None once the list is empty; the original code popped
        # exactly once (so the "batch" was never larger than one message) and
        # would crash on json.loads(None) when the queue was empty.
        notification = redis_store.lpop("notification_queue")
        if notification is None:
            break
        batch.append(json.loads(notification))

    if not batch:
        # Nothing queued this tick; avoid an empty bulk insert.
        return

    try:
        dao_batch_insert_notifications(batch)
    except Exception as e:
        # Re-queue everything so a transient db failure doesn't drop messages.
        for msg in batch:
            redis_store.rpush("notification_queue", json.dumps(msg))
        current_app.logger.exception(f"Notification batch insert failed {e}")
|
||||||
|
|||||||
@@ -208,6 +208,11 @@ class Config(object):
|
|||||||
"schedule": timedelta(minutes=82),
|
"schedule": timedelta(minutes=82),
|
||||||
"options": {"queue": QueueNames.PERIODIC},
|
"options": {"queue": QueueNames.PERIODIC},
|
||||||
},
|
},
|
||||||
|
"batch-insert-notifications": {
|
||||||
|
"task": "batch-insert-notifications",
|
||||||
|
"schedule": 10.0,
|
||||||
|
"options": {"queue": QueueNames.PERIODIC},
|
||||||
|
},
|
||||||
"expire-or-delete-invitations": {
|
"expire-or-delete-invitations": {
|
||||||
"task": "expire-or-delete-invitations",
|
"task": "expire-or-delete-invitations",
|
||||||
"schedule": timedelta(minutes=66),
|
"schedule": timedelta(minutes=66),
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import json
|
|||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from time import time
|
from time import time
|
||||||
|
|
||||||
|
import sqlalchemy
|
||||||
from flask import current_app
|
from flask import current_app
|
||||||
from sqlalchemy import (
|
from sqlalchemy import (
|
||||||
TIMESTAMP,
|
TIMESTAMP,
|
||||||
@@ -799,3 +800,12 @@ def dao_close_out_delivery_receipts():
|
|||||||
current_app.logger.info(
|
current_app.logger.info(
|
||||||
f"Marked {result.rowcount} notifications as technical failures"
|
f"Marked {result.rowcount} notifications as technical failures"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def dao_batch_insert_notifications(batch):
    """Bulk-insert a batch of notifications in a single flush/commit.

    Args:
        batch: iterable of dicts, each holding Notification constructor kwargs
            (as produced by json.loads on a queued message).

    Returns:
        int: the number of rows inserted, or 0 if the insert failed
        (the failure is logged, not re-raised — callers treat this as
        best-effort).
    """
    try:
        db.session.bulk_save_objects(Notification(**msg) for msg in batch)
        db.session.commit()
        return len(batch)
    except sqlalchemy.exc.SQLAlchemyError as e:
        # Roll back so the session isn't left in a failed/dirty state for the
        # next caller — the original omitted this, which would poison any
        # subsequent use of the scoped session.
        db.session.rollback()
        current_app.logger.exception(f"Error during batch insert {e}")
        # Explicit 0 instead of an implicit None keeps the return type
        # consistent with the success path.
        return 0
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ from app import redis_store
|
|||||||
from app.celery import provider_tasks
|
from app.celery import provider_tasks
|
||||||
from app.config import QueueNames
|
from app.config import QueueNames
|
||||||
from app.dao.notifications_dao import (
|
from app.dao.notifications_dao import (
|
||||||
dao_create_notification,
|
|
||||||
dao_delete_notifications_by_id,
|
dao_delete_notifications_by_id,
|
||||||
dao_notification_exists,
|
dao_notification_exists,
|
||||||
get_notification_by_id,
|
get_notification_by_id,
|
||||||
@@ -139,8 +138,9 @@ def persist_notification(
|
|||||||
|
|
||||||
# if simulated create a Notification model to return but do not persist the Notification to the dB
|
# if simulated create a Notification model to return but do not persist the Notification to the dB
|
||||||
if not simulated:
|
if not simulated:
|
||||||
current_app.logger.info("Firing dao_create_notification")
|
# current_app.logger.info("Firing dao_create_notification")
|
||||||
dao_create_notification(notification)
|
# dao_create_notification(notification)
|
||||||
|
redis_store.rpush("message_queue", notification)
|
||||||
if key_type != KeyType.TEST and current_app.config["REDIS_ENABLED"]:
|
if key_type != KeyType.TEST and current_app.config["REDIS_ENABLED"]:
|
||||||
current_app.logger.info(
|
current_app.logger.info(
|
||||||
"Redis enabled, querying cache key for service id: {}".format(
|
"Redis enabled, querying cache key for service id: {}".format(
|
||||||
@@ -172,7 +172,7 @@ def send_notification_to_queue_detached(
|
|||||||
deliver_task = provider_tasks.deliver_email
|
deliver_task = provider_tasks.deliver_email
|
||||||
|
|
||||||
try:
|
try:
|
||||||
deliver_task.apply_async([str(notification_id)], queue=queue)
|
deliver_task.apply_async([str(notification_id)], queue=queue, countdown=30)
|
||||||
except Exception:
|
except Exception:
|
||||||
dao_delete_notifications_by_id(notification_id)
|
dao_delete_notifications_by_id(notification_id)
|
||||||
raise
|
raise
|
||||||
|
|||||||
Reference in New Issue
Block a user