2018-04-25 14:24:47 +01:00
|
|
|
from datetime import datetime, timedelta
|
2018-04-24 17:37:04 +01:00
|
|
|
|
2018-05-10 15:35:58 +01:00
|
|
|
from flask import current_app
|
2019-04-12 10:26:46 +01:00
|
|
|
from notifications_utils.timezones import convert_utc_to_bst
|
2018-04-24 17:37:04 +01:00
|
|
|
|
2018-03-20 13:53:31 +00:00
|
|
|
from app import notify_celery
|
2019-08-14 17:38:09 +01:00
|
|
|
from app.config import QueueNames
|
2019-01-16 14:11:03 +00:00
|
|
|
from app.cronitor import cronitor
|
2018-04-24 17:37:04 +01:00
|
|
|
from app.dao.fact_billing_dao import (
|
2018-04-25 14:24:47 +01:00
|
|
|
fetch_billing_data_for_day,
|
2021-03-10 13:55:06 +00:00
|
|
|
update_fact_billing,
|
2018-04-24 17:37:04 +01:00
|
|
|
)
|
2021-03-10 13:55:06 +00:00
|
|
|
from app.dao.fact_notification_status_dao import (
|
|
|
|
|
fetch_notification_status_for_day,
|
|
|
|
|
update_fact_notification_status,
|
2019-12-05 12:11:30 +00:00
|
|
|
)
|
2021-03-10 13:55:06 +00:00
|
|
|
from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE
|
2018-03-14 14:47:30 +00:00
|
|
|
|
|
|
|
|
|
2018-03-20 13:53:31 +00:00
|
|
|
@notify_celery.task(name="create-nightly-billing")
@cronitor("create-nightly-billing")
def create_nightly_billing(day_start=None):
    """Fan out one create-nightly-billing-for-day task per day.

    Covers ``day_start`` plus the three days before it (four days total), so
    that late-arriving billing data is re-consolidated on subsequent runs.

    :param day_start: the most recent day to process. ``None`` (the normal
        nightly case) means yesterday in BST; when supplied by a caller it is
        a "YYYY-MM-DD" string.
    """
    if day_start is None:
        last_day = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=1)
    else:
        # When invoked manually the task receives the day as a "YYYY-MM-DD" string.
        last_day = datetime.strptime(day_start, "%Y-%m-%d").date()

    for days_back in range(4):
        process_day = (last_day - timedelta(days=days_back)).isoformat()

        create_nightly_billing_for_day.apply_async(
            kwargs={'process_day': process_day},
            queue=QueueNames.REPORTING
        )
        current_app.logger.info(
            f"create-nightly-billing task: create-nightly-billing-for-day task created for {process_day}"
        )
|
2019-08-15 16:57:31 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@notify_celery.task(name="create-nightly-billing-for-day")
def create_nightly_billing_for_day(process_day):
    """Consolidate one day's billing data into the billing fact table.

    :param process_day: "YYYY-MM-DD" string naming the day to aggregate.
    """
    process_day = datetime.strptime(process_day, "%Y-%m-%d").date()

    current_app.logger.info(f'create-nightly-billing-for-day task for {process_day}: started')

    # Time the fetch so slow reporting queries show up in the logs.
    start = datetime.utcnow()
    transit_data = fetch_billing_data_for_day(process_day=process_day)
    end = datetime.utcnow()
    current_app.logger.info(
        f'create-nightly-billing-for-day task for {process_day}: data fetched in {(end - start).seconds} seconds'
    )

    # Upsert each aggregated row for the day.
    for row in transit_data:
        update_fact_billing(row, process_day)

    current_app.logger.info(
        f"create-nightly-billing-for-day task for {process_day}: "
        f"task complete. {len(transit_data)} rows updated"
    )
|
2018-06-20 16:45:20 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def _create_nightly_notification_status_for_day(process_day, notification_type):
    # Queue the per-day aggregation task and log that we did so. Shared by
    # the email/SMS and letter loops below, which previously duplicated this
    # dispatch-and-log code. Log output is unchanged because LETTER_TYPE is
    # the string 'letter' — NOTE(review): confirm against app.models.
    create_nightly_notification_status_for_day.apply_async(
        kwargs={'process_day': process_day.isoformat(), 'notification_type': notification_type},
        queue=QueueNames.REPORTING
    )
    current_app.logger.info(
        f"create-nightly-notification-status task: create-nightly-notification-status-for-day task created "
        f"for type {notification_type} for {process_day}"
    )


@notify_celery.task(name="create-nightly-notification-status")
@cronitor("create-nightly-notification-status")
def create_nightly_notification_status():
    """
    Aggregate notification statuses into rows in ft_notification_status.

    In order to minimise effort, this task assumes that:

    - Email + SMS statuses don't change after 3 days. This is currently true
      because all outstanding email / SMS are "timed out" after 3 days, and
      we reject delivery receipts after this point.

    - Letter statuses don't change after 9 days. There's no "timeout" for
      letters but this is the longest we've had to cope with in the past - due
      to major issues with our print provider.

    Because the time range of the task exceeds the minimum possible retention
    period (3 days), we need to choose which table to query for each service.

    The aggregation happens for 1 extra day in case:

    - This task or the "timeout" task fails to run.

    - Data is (somehow) still in transit to the history table, which would
      mean the aggregated results are temporarily incorrect.
    """
    yesterday = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=1)

    # email and sms: final after 3 days, plus 1 day's margin (see docstring)
    for i in range(4):
        process_day = yesterday - timedelta(days=i)
        for notification_type in [SMS_TYPE, EMAIL_TYPE]:
            _create_nightly_notification_status_for_day(process_day, notification_type)

    # letters: final after 9 days, plus 1 day's margin (see docstring)
    for i in range(10):
        process_day = yesterday - timedelta(days=i)
        _create_nightly_notification_status_for_day(process_day, LETTER_TYPE)
|
2018-06-20 16:45:20 +01:00
|
|
|
|
|
|
|
|
|
2019-08-15 16:57:31 +01:00
|
|
|
@notify_celery.task(name="create-nightly-notification-status-for-day")
def create_nightly_notification_status_for_day(process_day, notification_type):
    """Aggregate one day's statuses for one notification type into ft_notification_status.

    :param process_day: "YYYY-MM-DD" string naming the day to aggregate.
    :param notification_type: one of the notification type constants
        (e.g. SMS_TYPE / EMAIL_TYPE / LETTER_TYPE).
    """
    process_day = datetime.strptime(process_day, "%Y-%m-%d").date()

    current_app.logger.info(
        f'create-nightly-notification-status-for-day task for {process_day} type {notification_type}: started'
    )

    # Time the fetch so slow reporting queries show up in the logs.
    start = datetime.utcnow()
    transit_data = fetch_notification_status_for_day(process_day=process_day, notification_type=notification_type)
    end = datetime.utcnow()
    current_app.logger.info(
        f'create-nightly-notification-status-for-day task for {process_day} type {notification_type}: '
        f'data fetched in {(end - start).seconds} seconds'
    )

    update_fact_notification_status(transit_data, process_day, notification_type)

    current_app.logger.info(
        f'create-nightly-notification-status-for-day task for {process_day} type {notification_type}: '
        f'task complete - {len(transit_data)} rows updated'
    )
|