mirror of
https://github.com/GSA/notifications-api.git
synced 2026-04-11 12:49:36 -04:00
@@ -8,7 +8,6 @@ from app.aws import s3
|
||||
from app.aws.s3 import remove_csv_object
|
||||
from app.celery.process_ses_receipts_tasks import check_and_queue_callback_task
|
||||
from app.config import QueueNames
|
||||
from app.cronitor import cronitor
|
||||
from app.dao.fact_processing_time_dao import insert_update_processing_time
|
||||
from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention
|
||||
from app.dao.jobs_dao import (
|
||||
@@ -31,7 +30,6 @@ from app.utils import get_midnight_in_utc, utc_now
|
||||
|
||||
|
||||
@notify_celery.task(name="remove-sms-email-jobs")
|
||||
@cronitor("remove-sms-email-jobs")
|
||||
def remove_sms_email_csv_files():
|
||||
_remove_csv_files([NotificationType.EMAIL, NotificationType.SMS])
|
||||
|
||||
@@ -75,14 +73,10 @@ def delete_notifications_older_than_retention():
|
||||
)
|
||||
|
||||
|
||||
@notify_celery.task(name="delete-sms-notifications")
|
||||
@cronitor("delete-sms-notifications")
|
||||
def delete_sms_notifications_older_than_retention():
|
||||
_delete_notifications_older_than_retention_by_type(NotificationType.SMS)
|
||||
|
||||
|
||||
@notify_celery.task(name="delete-email-notifications")
|
||||
@cronitor("delete-email-notifications")
|
||||
def delete_email_notifications_older_than_retention():
|
||||
_delete_notifications_older_than_retention_by_type(NotificationType.EMAIL)
|
||||
|
||||
@@ -162,7 +156,6 @@ def delete_notifications_for_service_and_type(
|
||||
|
||||
|
||||
@notify_celery.task(name="timeout-sending-notifications")
|
||||
@cronitor("timeout-sending-notifications")
|
||||
def timeout_notifications():
|
||||
notifications = ["dummy value so len() > 0"]
|
||||
|
||||
@@ -184,7 +177,6 @@ def timeout_notifications():
|
||||
|
||||
|
||||
@notify_celery.task(name="delete-inbound-sms")
|
||||
@cronitor("delete-inbound-sms")
|
||||
def delete_inbound_sms():
|
||||
try:
|
||||
start = utc_now()
|
||||
@@ -200,7 +192,6 @@ def delete_inbound_sms():
|
||||
|
||||
|
||||
@notify_celery.task(name="save-daily-notification-processing-time")
|
||||
@cronitor("save-daily-notification-processing-time")
|
||||
def save_daily_notification_processing_time(local_date=None):
|
||||
# local_date is a string in the format of "YYYY-MM-DD"
|
||||
if local_date is None:
|
||||
|
||||
@@ -4,7 +4,6 @@ from flask import current_app
|
||||
|
||||
from app import notify_celery
|
||||
from app.config import QueueNames
|
||||
from app.cronitor import cronitor
|
||||
from app.dao.fact_billing_dao import fetch_billing_data_for_day, update_fact_billing
|
||||
from app.dao.fact_notification_status_dao import update_fact_notification_status
|
||||
from app.dao.notifications_dao import get_service_ids_with_notifications_on_date
|
||||
@@ -13,7 +12,6 @@ from app.utils import utc_now
|
||||
|
||||
|
||||
@notify_celery.task(name="create-nightly-billing")
|
||||
@cronitor("create-nightly-billing")
|
||||
def create_nightly_billing(day_start=None):
|
||||
# day_start is a datetime.date() object. e.g.
|
||||
# up to 4 days of data counting back from day_start is consolidated
|
||||
@@ -58,7 +56,6 @@ def create_nightly_billing_for_day(process_day):
|
||||
|
||||
|
||||
@notify_celery.task(name="create-nightly-notification-status")
|
||||
@cronitor("create-nightly-notification-status")
|
||||
def create_nightly_notification_status():
|
||||
"""
|
||||
Aggregate notification statuses into rows in ft_notification_status.
|
||||
|
||||
@@ -124,10 +124,6 @@ class Config(object):
|
||||
# Logging
|
||||
DEBUG = False
|
||||
|
||||
# Monitoring
|
||||
CRONITOR_ENABLED = False
|
||||
CRONITOR_KEYS = json.loads(getenv("CRONITOR_KEYS", "{}"))
|
||||
|
||||
# Antivirus
|
||||
ANTIVIRUS_ENABLED = getenv("ANTIVIRUS_ENABLED", "1") == "1"
|
||||
|
||||
@@ -411,7 +407,6 @@ class Production(Config):
|
||||
)
|
||||
|
||||
FROM_NUMBER = "Notify.gov"
|
||||
CRONITOR_ENABLED = True
|
||||
|
||||
|
||||
class Staging(Production):
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
from functools import wraps
|
||||
|
||||
import requests
|
||||
from flask import current_app
|
||||
|
||||
|
||||
def cronitor(task_name):
    """Decorator factory that reports a task's start and outcome to Cronitor.

    The wrapped callable sends a "run" ping before executing and then,
    in a finally block, either a "complete" ping (normal return) or a
    "fail" ping (exception propagated).  Pinging is a no-op when
    CRONITOR_ENABLED is false, and is best-effort: any HTTP failure is
    logged as a warning and never interferes with the task itself.
    """

    def decorator(func):
        def ping_cronitor(command):
            # Silently skip all network activity when monitoring is off.
            if not current_app.config["CRONITOR_ENABLED"]:
                return

            # it's useful to have a log that a periodic task has started in case it
            # gets stuck without generating any other logs - we know it got this far
            current_app.logger.info(f"Pinging Cronitor for Celery task {task_name}")

            slug = current_app.config["CRONITOR_KEYS"].get(task_name)
            if not slug:
                current_app.logger.error(
                    f"Cronitor enabled but task_name {task_name} not found in environment"
                )
                return

            if command not in {"run", "complete", "fail"}:
                raise ValueError(f"command {command} not a valid cronitor command")

            try:
                response = requests.get(
                    f"https://cronitor.link/{slug}/{command}",
                    # cronitor limits msg to 1000 characters
                    params={
                        "host": current_app.config["API_HOST_NAME"],
                    },
                    timeout=30,
                )
                response.raise_for_status()
            except requests.RequestException as e:
                # Best-effort monitoring: a Cronitor outage must not fail the task.
                current_app.logger.warning(
                    f"Cronitor API failed for task {task_name} due to {e!r}"
                )

        @wraps(func)
        def inner_decorator(*args, **kwargs):
            ping_cronitor("run")
            status = "fail"
            try:
                ret = func(*args, **kwargs)
                status = "complete"
                return ret
            finally:
                # Always report an outcome, even when func raised.
                ping_cronitor(status)

        return inner_decorator

    return decorator
|
||||
@@ -1,100 +0,0 @@
|
||||
from urllib import parse
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from app.cronitor import cronitor
|
||||
from tests.conftest import set_config_values
|
||||
|
||||
|
||||
def _cronitor_url(key, command):
|
||||
return parse.urlunparse(
|
||||
parse.ParseResult(
|
||||
scheme="https",
|
||||
netloc="cronitor.link",
|
||||
path="{}/{}".format(key, command),
|
||||
params="",
|
||||
query=parse.urlencode({"host": "http://localhost:6011"}),
|
||||
fragment="",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
RUN_LINK = _cronitor_url("secret", "run")
|
||||
FAIL_LINK = _cronitor_url("secret", "fail")
|
||||
COMPLETE_LINK = _cronitor_url("secret", "complete")
|
||||
|
||||
|
||||
@cronitor("hello")
|
||||
def successful_task():
|
||||
return 1
|
||||
|
||||
|
||||
@cronitor("hello")
|
||||
def crashing_task():
|
||||
raise ValueError
|
||||
|
||||
|
||||
def test_cronitor_sends_run_and_complete(notify_api, rmock):
    """A successful task pings Cronitor 'run' before and 'complete' after."""
    rmock.get(RUN_LINK, status_code=200)
    rmock.get(COMPLETE_LINK, status_code=200)

    with set_config_values(
        notify_api, {"CRONITOR_ENABLED": True, "CRONITOR_KEYS": {"hello": "secret"}}
    ):
        assert successful_task() == 1

    # exactly two pings, in order: run first, then complete
    assert rmock.call_count == 2
    assert rmock.request_history[0].url == RUN_LINK
    assert rmock.request_history[1].url == COMPLETE_LINK
|
||||
|
||||
|
||||
def test_cronitor_sends_run_and_fail_if_exception(notify_api, rmock):
    """A task that raises still pings 'run' first, then 'fail'; the exception propagates."""
    rmock.get(RUN_LINK, status_code=200)
    rmock.get(FAIL_LINK, status_code=200)

    with set_config_values(
        notify_api, {"CRONITOR_ENABLED": True, "CRONITOR_KEYS": {"hello": "secret"}}
    ):
        with pytest.raises(ValueError):
            crashing_task()

    # run ping first, then fail ping - never a complete ping
    assert rmock.call_count == 2
    assert rmock.request_history[0].url == RUN_LINK
    assert rmock.request_history[1].url == FAIL_LINK
|
||||
|
||||
|
||||
def test_cronitor_does_nothing_if_cronitor_not_enabled(notify_api, rmock):
    """With CRONITOR_ENABLED off, no HTTP pings are made and the task still runs."""
    with set_config_values(
        notify_api, {"CRONITOR_ENABLED": False, "CRONITOR_KEYS": {"hello": "secret"}}
    ):
        assert successful_task() == 1

    assert rmock.called is False
|
||||
|
||||
|
||||
def test_cronitor_does_nothing_if_name_not_recognised(notify_api, rmock, mocker):
    """An unknown task_name logs an error and skips pinging, without breaking the task."""
    mock_logger = mocker.patch("app.cronitor.current_app.logger")

    # CRONITOR_KEYS has no entry for "hello", the name successful_task registered with
    with set_config_values(
        notify_api, {"CRONITOR_ENABLED": True, "CRONITOR_KEYS": {"not-hello": "other"}}
    ):
        assert successful_task() == 1

    mock_logger.error.assert_called_with(
        "Cronitor enabled but task_name hello not found in environment"
    )

    assert rmock.called is False
|
||||
|
||||
|
||||
def test_cronitor_doesnt_crash_if_request_fails(notify_api, rmock):
    """Cronitor HTTP failures (timeouts, 5xx) are swallowed; the task still succeeds."""
    rmock.get(RUN_LINK, exc=requests.exceptions.ConnectTimeout)
    rmock.get(COMPLETE_LINK, status_code=500)

    with set_config_values(
        notify_api, {"CRONITOR_ENABLED": True, "CRONITOR_KEYS": {"hello": "secret"}}
    ):
        assert successful_task() == 1

    # both pings were attempted even though both failed
    assert rmock.call_count == 2
|
||||
Reference in New Issue
Block a user