Merge branch 'main' of https://github.com/GSA/notifications-api into notify-260
@@ -5,6 +5,7 @@ from sqlalchemy.exc import SQLAlchemyError
 
 from app import notify_celery
 from app.aws import s3
+from app.aws.s3 import remove_csv_object
 from app.celery.process_ses_receipts_tasks import check_and_queue_callback_task
 from app.config import QueueNames
 from app.cronitor import cronitor
@@ -13,6 +14,7 @@ from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention
 from app.dao.jobs_dao import (
     dao_archive_job,
     dao_get_jobs_older_than_data_retention,
+    dao_get_unfinished_jobs,
 )
 from app.dao.notifications_dao import (
     dao_get_notifications_processing_time_stats,
@@ -41,6 +43,19 @@ def _remove_csv_files(job_types):
         current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
 
 
+@notify_celery.task(name="cleanup-unfinished-jobs")
+def cleanup_unfinished_jobs():
+    now = datetime.utcnow()
+    jobs = dao_get_unfinished_jobs()
+    for job in jobs:
+        # The query already filters for jobs whose processing_finished time is null,
+        # so any job still unfinished past the window below is treated as stuck
+        acceptable_finish_time = job.processing_started + timedelta(minutes=5)
+        if now > acceptable_finish_time:
+            remove_csv_object(job.original_file_name)
+            dao_archive_job(job)
+
+
 @notify_celery.task(name="delete-notifications-older-than-retention")
 def delete_notifications_older_than_retention():
     delete_email_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING)
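For context, a task registered as @notify_celery.task(name="cleanup-unfinished-jobs") is normally triggered by name from the Celery beat schedule rather than called directly. The snippet below is a minimal sketch of such an entry, not taken from this repository's config: the settings key, cadence, and queue name are all assumptions.

from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    "cleanup-unfinished-jobs": {
        # must match the name passed to @notify_celery.task above
        "task": "cleanup-unfinished-jobs",
        # illustrative cadence only; the real schedule is not part of this diff
        "schedule": crontab(minute=0),
        # queue name is a placeholder
        "options": {"queue": "periodic-tasks"},
    },
}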
@@ -158,6 +173,7 @@ def delete_inbound_sms():
 @notify_celery.task(name='save-daily-notification-processing-time')
 @cronitor("save-daily-notification-processing-time")
 def save_daily_notification_processing_time(local_date=None):
 
     # local_date is a string in the format of "YYYY-MM-DD"
     if local_date is None:
         # if a date is not provided, we run against yesterday's data
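The comments above describe the date handling: local_date arrives as a "YYYY-MM-DD" string, and when it is omitted the task runs against yesterday's data. A minimal sketch of that resolution logic, assuming UTC and standard-library parsing (the repo's actual helper may differ):

from datetime import datetime, timedelta

def resolve_process_day(local_date=None):
    # Default to yesterday when no date string is supplied.
    if local_date is None:
        return (datetime.utcnow() - timedelta(days=1)).date()
    # Otherwise parse the "YYYY-MM-DD" string.
    return datetime.strptime(local_date, "%Y-%m-%d").date()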
@@ -11,7 +11,10 @@ from app.clients.email.aws_ses import AwsSesClientThrottlingSendRateException
 from app.clients.sms import SmsClientResponseException
 from app.config import QueueNames
 from app.dao import notifications_dao
-from app.dao.notifications_dao import update_notification_status_by_id
+from app.dao.notifications_dao import (
+    insert_notification_history_delete_notifications_by_id,
+    update_notification_status_by_id,
+)
 from app.delivery import send_to_providers
 from app.exceptions import NotificationTechnicalFailureException
 from app.models import (
@@ -40,6 +43,10 @@ def check_sms_delivery_receipt(self, message_id, notification_id, sent_at):
     update_notification_status_by_id(notification_id, status, provider_response=provider_response)
     current_app.logger.info(f"Updated notification {notification_id} with response '{provider_response}'")
 
+    if status == NOTIFICATION_SENT:
+        insert_notification_history_delete_notifications_by_id(notification_id)
+        current_app.logger.info(f"Archived notification {notification_id} that was successfully sent")
+
 
 @notify_celery.task(bind=True, name="deliver_sms", max_retries=48, default_retry_delay=300)
 def deliver_sms(self, notification_id):
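A quick sanity check on the retry policy visible in the deliver_sms context lines: max_retries=48 with default_retry_delay=300 seconds means a failing send keeps being retried for about four hours before the task gives up.

# Back-of-the-envelope retry window, using the values from the decorator above.
max_retries = 48
default_retry_delay = 300  # seconds
print(max_retries * default_retry_delay / 3600)  # 4.0 hours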
@@ -5,6 +5,7 @@ from requests import HTTPError, request
 
 from app.celery.process_ses_receipts_tasks import process_ses_results
 from app.config import QueueNames
+from app.dao.notifications_dao import get_notification_by_id
 from app.models import SMS_TYPE
 
 temp_fail = "2028675303"
@@ -16,8 +17,8 @@ perm_fail_email = "perm-fail@simulator.notify"
 temp_fail_email = "temp-fail@simulator.notify"
 
 
-def send_sms_response(provider, reference, to):
-    body = sns_callback(reference, to)
+def send_sms_response(provider, reference):
+    body = sns_callback(reference)
     headers = {"Content-type": "application/json"}
 
     make_request(SMS_TYPE, provider, body, headers)
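The body built by sns_callback is posted by make_request with a JSON content-type header. A minimal sketch of what such a helper generally does, assuming the requests API shown in the import context; the URL handling and function name here are illustrative, not the repo's make_request:

from requests import request

def post_json_callback(url, data, headers):
    # POST the prepared JSON body to the callback endpoint and hand back the parsed response.
    response = request("POST", url, data=data, headers=headers, timeout=30)
    response.raise_for_status()
    return response.json()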
@@ -59,25 +60,16 @@ def make_request(notification_type, provider, data, headers):
     return response.json()
 
 
-def sns_callback(notification_id, to):
-    raise Exception("Need to update for SNS callback format along with test_send_to_providers")
-
-    # example from mmg_callback
-    # if to.strip().endswith(temp_fail):
-    #     # status: 4 - expired (temp failure)
-    #     status = "4"
-    # elif to.strip().endswith(perm_fail):
-    #     # status: 5 - rejected (perm failure)
-    #     status = "5"
-    # else:
-    #     # status: 3 - delivered
-    #     status = "3"
-
-    # return json.dumps({"reference": "mmg_reference",
-    #                    "CID": str(notification_id),
-    #                    "MSISDN": to,
-    #                    "status": status,
-    #                    "deliverytime": "2016-04-05 16:01:07"})
+def sns_callback(notification_id):
+    notification = get_notification_by_id(notification_id)
+
+    # This will only work if all notifications, including successful ones, are in the notifications table
+    # If we decide to delete successful notifications, we will have to get this from notifications history
+    return json.dumps({
+        "CID": str(notification_id),
+        "status": notification.status,
+        # "deliverytime": notification.completed_at
+    })
 
 
 def ses_notification_callback(reference):
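With the `to` parameter gone, the simulated delivery status now comes from the stored Notification row rather than from pattern-matching the recipient number. Decoded, the body returned by the new sns_callback has this shape (the values below are illustrative, not taken from this diff):

import json

body = json.dumps({
    "CID": "00000000-0000-0000-0000-000000000000",  # notification id
    "status": "delivered",                          # copied from the Notification row
})
payload = json.loads(body)
assert payload["CID"] and payload["status"]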