Mirror of https://github.com/GSA/notifications-api.git
Remove letters-related code (#175)
This deletes a big ol' chunk of code related to letters. It's not everything (a few things that might be tied to SMS/email are still around), but it is the heart of the letters functionality. SMS and email functionality should be untouched by this.

Areas affected:
- Things obviously about letters
- PDF tasks, used for precompiling letters
- Virus scanning, used for those PDFs
- FTP, used to send letters to the printer
- Postage-related fields and queries
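One concrete simplification shown in the diff below: insert_notification_history_delete_notifications no longer has to choose between two temp-table SELECTs, because the letters-specific variant (which additionally skipped letters still in 'pending-virus-check', 'created', or 'sending' status) is deleted, and the archived column list drops postage. A minimal before/after sketch using the names from the diff; the query strings and the input_params setup are elided here:

    # Before (sketch): letters needed their own temp-table SELECT.
    select_to_use = (
        select_into_temp_table_for_letters
        if notification_type == 'letter'
        else select_into_temp_table
    )
    db.session.execute(select_to_use, input_params)

    # After (sketch): with letters gone, the one generic SELECT is always used.
    db.session.execute(select_into_temp_table, input_params)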
@@ -1,6 +1,5 @@
from datetime import datetime, timedelta

from botocore.exceptions import ClientError
from flask import current_app
from notifications_utils.international_billing_rates import (
    INTERNATIONAL_BILLING_RATES,
@@ -10,10 +9,6 @@ from notifications_utils.recipients import (
    try_validate_and_format_phone_number,
    validate_and_format_email_address,
)
from notifications_utils.timezones import (
    convert_local_timezone_to_utc,
    convert_utc_to_local_timezone,
)
from sqlalchemy import asc, desc, func, or_, union
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import NoResultFound
@@ -23,19 +18,15 @@ from werkzeug.datastructures import MultiDict

from app import create_uuid, db
from app.dao.dao_utils import autocommit
from app.letters.utils import LetterPDFNotFound, find_letter_pdf_in_s3
from app.models import (
    EMAIL_TYPE,
    KEY_TYPE_NORMAL,
    KEY_TYPE_TEST,
    LETTER_TYPE,
    NOTIFICATION_CREATED,
    NOTIFICATION_PENDING,
    NOTIFICATION_PENDING_VIRUS_CHECK,
    NOTIFICATION_PERMANENT_FAILURE,
    NOTIFICATION_SENDING,
    NOTIFICATION_SENT,
    NOTIFICATION_STATUS_TYPES_COMPLETED,
    NOTIFICATION_TEMPORARY_FAILURE,
    SMS_TYPE,
    FactNotificationStatus
@@ -140,7 +131,7 @@ def update_notification_status_by_id(notification_id, status, sent_by=None):

@autocommit
def update_notification_status_by_reference(reference, status):
    # this is used to update letters and emails
    # this is used to update emails
    notification = Notification.query.filter(Notification.reference == reference).first()

    if not notification:
@@ -304,7 +295,7 @@ def insert_notification_history_delete_notifications(
        SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id,
            key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units,
            client_reference, international, phone_prefix, rate_multiplier, notification_status,
            created_by_id, postage, document_download_count
            created_by_id, document_download_count
        FROM notifications
        WHERE service_id = :service_id
        AND notification_type = :notification_type
@@ -312,20 +303,6 @@ def insert_notification_history_delete_notifications(
        AND key_type in ('normal', 'team')
        limit :qry_limit
    """
    select_into_temp_table_for_letters = """
        CREATE TEMP TABLE NOTIFICATION_ARCHIVE ON COMMIT DROP AS
        SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id,
            key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units,
            client_reference, international, phone_prefix, rate_multiplier, notification_status,
            created_by_id, postage, document_download_count
        FROM notifications
        WHERE service_id = :service_id
        AND notification_type = :notification_type
        AND created_at < :timestamp_to_delete_backwards_from
        AND notification_status NOT IN ('pending-virus-check', 'created', 'sending')
        AND key_type in ('normal', 'team')
        limit :qry_limit
    """
    # Insert into NotificationHistory if the row already exists do nothing.
    insert_query = """
        insert into notification_history
@@ -344,8 +321,7 @@ def insert_notification_history_delete_notifications(
        "qry_limit": qry_limit
    }

    select_to_use = select_into_temp_table_for_letters if notification_type == 'letter' else select_into_temp_table
    db.session.execute(select_to_use, input_params)
    db.session.execute(select_into_temp_table, input_params)

    result = db.session.execute("select count(*) from NOTIFICATION_ARCHIVE").fetchone()[0]
@@ -363,10 +339,6 @@ def move_notifications_to_notification_history(
    qry_limit=50000
):
    deleted = 0
    if notification_type == LETTER_TYPE:
        _delete_letters_from_s3(
            notification_type, service_id, timestamp_to_delete_backwards_from, qry_limit
        )
    delete_count_per_call = 1
    while delete_count_per_call > 0:
        delete_count_per_call = insert_notification_history_delete_notifications(
@@ -389,32 +361,6 @@ def move_notifications_to_notification_history(
    return deleted


def _delete_letters_from_s3(
    notification_type, service_id, date_to_delete_from, query_limit
):
    letters_to_delete_from_s3 = db.session.query(
        Notification
    ).filter(
        Notification.notification_type == notification_type,
        Notification.created_at < date_to_delete_from,
        Notification.service_id == service_id,
        # although letters in non completed statuses do have PDFs in s3, they do not exist in the
        # production-letters-pdf bucket as they never made it that far so we do not try and delete
        # them from it
        Notification.status.in_(NOTIFICATION_STATUS_TYPES_COMPLETED)
    ).limit(query_limit).all()
    for letter in letters_to_delete_from_s3:
        try:
            letter_pdf = find_letter_pdf_in_s3(letter)
            letter_pdf.delete()
        except ClientError:
            current_app.logger.exception(
                "Error deleting S3 object for letter: {}".format(letter.id))
        except LetterPDFNotFound:
            current_app.logger.warning(
                "No S3 object to delete for letter: {}".format(letter.id))


@autocommit
def dao_delete_notifications_by_id(notification_id):
    db.session.query(Notification).filter(
@@ -493,10 +439,8 @@ def dao_get_notifications_by_recipient_or_reference(
        except InvalidEmailError:
            normalised = search_term.lower()

    elif notification_type in {LETTER_TYPE, None}:
        # For letters, we store the address without spaces, so we need
        # to removes spaces from the search term to match. We also do
        # this when a notification type isn’t provided (this will
    elif notification_type is None:
        # This happens when a notification type isn’t provided (this will
        # happen if a user doesn’t have permission to see the dashboard)
        # because email addresses and phone numbers will never be stored
        # with spaces either.
@@ -504,7 +448,7 @@ def dao_get_notifications_by_recipient_or_reference(

    else:
        raise TypeError(
            f'Notification type must be {EMAIL_TYPE}, {SMS_TYPE}, {LETTER_TYPE} or None'
            f'Notification type must be {EMAIL_TYPE}, {SMS_TYPE}, or None'
        )

    normalised = escape_special_characters(normalised)
@@ -559,8 +503,7 @@ def dao_get_notifications_processing_time_stats(start_date, end_date):
        created_at > 'START DATE' AND
        created_at < 'END DATE' AND
        api_key_id IS NOT NULL AND
        key_type != 'test' AND
        notification_type != 'letter';
        key_type != 'test';
    """
    under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10)
    sum_column = functions.coalesce(functions.sum(
@@ -580,7 +523,6 @@ def dao_get_notifications_processing_time_stats(start_date, end_date):
        Notification.created_at < end_date,
        Notification.api_key_id.isnot(None),
        Notification.key_type != KEY_TYPE_TEST,
        Notification.notification_type != LETTER_TYPE
    ).one()
@@ -605,97 +547,6 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_type):
    return notifications


def dao_get_letters_to_be_printed(print_run_deadline, postage, query_limit=10000):
    """
    Return all letters created before the print run deadline that have not yet been sent. This yields in batches of 10k
    to prevent the query taking too long and eating up too much memory. As each 10k batch is yielded, the
    get_key_and_size_of_letters_to_be_sent_to_print function will go and fetch the s3 data, and these start sending off
    tasks to the notify-ftp app to send them.

    CAUTION! Modify this query with caution. Modifying filters etc is fine, but if we join onto another table, then
    there may be undefined behaviour. Essentially we need each ORM object returned for each row to be unique,
    and we should avoid modifying state of returned objects.

    For more reading:
    https://docs.sqlalchemy.org/en/13/orm/query.html?highlight=yield_per#sqlalchemy.orm.query.Query.yield_per
    https://www.mail-archive.com/sqlalchemy@googlegroups.com/msg12443.html
    """
    notifications = Notification.query.filter(
        Notification.created_at < convert_local_timezone_to_utc(print_run_deadline),
        Notification.notification_type == LETTER_TYPE,
        Notification.status == NOTIFICATION_CREATED,
        Notification.key_type == KEY_TYPE_NORMAL,
        Notification.postage == postage,
        Notification.billable_units > 0
    ).order_by(
        Notification.service_id,
        Notification.created_at
    ).yield_per(query_limit)
    return notifications


def dao_get_letters_and_sheets_volume_by_postage(print_run_deadline):
    notifications = db.session.query(
        func.count(Notification.id).label('letters_count'),
        func.sum(Notification.billable_units).label('sheets_count'),
        Notification.postage
    ).filter(
        Notification.created_at < convert_local_timezone_to_utc(print_run_deadline),
        Notification.notification_type == LETTER_TYPE,
        Notification.status == NOTIFICATION_CREATED,
        Notification.key_type == KEY_TYPE_NORMAL,
        Notification.billable_units > 0
    ).group_by(
        Notification.postage
    ).order_by(
        Notification.postage
    ).all()
    return notifications


def dao_old_letters_with_created_status():
    yesterday_bst = convert_utc_to_local_timezone(datetime.utcnow()) - timedelta(days=1)
    last_processing_deadline = yesterday_bst.replace(hour=17, minute=30, second=0, microsecond=0)

    notifications = Notification.query.filter(
        Notification.created_at < convert_local_timezone_to_utc(last_processing_deadline),
        Notification.notification_type == LETTER_TYPE,
        Notification.status == NOTIFICATION_CREATED
    ).order_by(
        Notification.created_at
    ).all()
    return notifications


def letters_missing_from_sending_bucket(seconds_to_subtract):
    older_than_date = datetime.utcnow() - timedelta(seconds=seconds_to_subtract)
    # We expect letters to have a `created` status, updated_at timestamp and billable units greater than zero.
    notifications = Notification.query.filter(
        Notification.billable_units == 0,
        Notification.updated_at == None,  # noqa
        Notification.status == NOTIFICATION_CREATED,
        Notification.created_at <= older_than_date,
        Notification.notification_type == LETTER_TYPE,
        Notification.key_type == KEY_TYPE_NORMAL
    ).order_by(
        Notification.created_at
    ).all()

    return notifications


def dao_precompiled_letters_still_pending_virus_check():
    ninety_minutes_ago = datetime.utcnow() - timedelta(seconds=5400)

    notifications = Notification.query.filter(
        Notification.created_at < ninety_minutes_ago,
        Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK
    ).order_by(
        Notification.created_at
    ).all()
    return notifications


def _duplicate_update_warning(notification, status):
    current_app.logger.info(
        (
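For callers of this module, the visible behaviour change is that letter lookups are no longer accepted: with the usual constant values ('email', 'sms', 'letter'), passing 'letter' as the notification type to dao_get_notifications_by_recipient_or_reference now falls through to the TypeError branch shown above. A hedged illustration only; the positional service_id and search_term arguments are assumed, and only the notification_type handling is confirmed by this diff:

    from app.dao.notifications_dao import dao_get_notifications_by_recipient_or_reference

    # Assumed call shape, for illustration of the new error only.
    dao_get_notifications_by_recipient_or_reference(
        service_id,                    # assumed parameter
        '07700900123',                 # assumed search term
        notification_type='letter',
    )
    # Expected after this change:
    # TypeError: Notification type must be email, sms, or None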