From 33645c7747b37b33747f46c45efc3aa3563214b0 Mon Sep 17 00:00:00 2001 From: Ben Thorner Date: Thu, 19 May 2022 11:28:44 +0100 Subject: [PATCH] Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. 
I've also verified this manually, just to be on the safe side. - No data. It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 
width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort (cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 
rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX 
ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a separate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx --- app/dao/fact_billing_dao.py | 10 +-- app/dao/fact_notification_status_dao.py | 10 +-- app/models.py | 33 ++++++++ app/utils.py | 26 +------ .../0373_add_notifications_all_time.py | 78 +++++++++++++++++++ tests/app/celery/test_reporting_tasks.py | 59 +++++++++++--- tests/app/test_utils.py | 47 ----------- tests/conftest.py | 1 + 8 files changed, 169 insertions(+), 95 deletions(-) create mode 100644 migrations/versions/0373_add_notifications_all_time.py diff --git a/app/dao/fact_billing_dao.py b/app/dao/fact_billing_dao.py index 79ff6589e..45afe25df 100644 --- a/app/dao/fact_billing_dao.py +++ b/app/dao/fact_billing_dao.py @@ -25,12 +25,13 @@ from app.models import ( AnnualBilling, FactBilling, LetterRate, + NotificationAllTimeView, NotificationHistory, Organisation, Rate, Service, ) -from app.utils import get_london_midnight_in_utc, get_notification_table_to_use +from app.utils import get_london_midnight_in_utc def fetch_sms_free_allowance_remainder_until_date(end_date): @@ -454,10 +455,7 @@ def fetch_billing_data_for_day(process_day, service_id=None, check_permissions=F for service in services: for notification_type in (SMS_TYPE, EMAIL_TYPE, LETTER_TYPE): if (not check_permissions) or 
service.has_permission(notification_type): - table = get_notification_table_to_use(service, notification_type, process_day, - has_delete_task_run=False) results = _query_for_billing_data( - table=table, notification_type=notification_type, start_date=start_date, end_date=end_date, @@ -468,7 +466,9 @@ def fetch_billing_data_for_day(process_day, service_id=None, check_permissions=F return transit_data -def _query_for_billing_data(table, notification_type, start_date, end_date, service): +def _query_for_billing_data(notification_type, start_date, end_date, service): + table = NotificationAllTimeView + def _email_query(): return db.session.query( table.template_id, diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index c37dae011..bb8a398e0 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -23,13 +23,13 @@ from app.models import ( NOTIFICATION_TEMPORARY_FAILURE, FactNotificationStatus, Notification, + NotificationAllTimeView, Service, Template, ) from app.utils import ( get_london_midnight_in_utc, get_london_month_from_utc_column, - get_notification_table_to_use, midnight_n_days_ago, ) @@ -47,13 +47,7 @@ def update_fact_notification_status(process_day, notification_type, service_id): ).delete() # query notifications or notification_history for the day, depending on their data retention - service = Service.query.get(service_id) - source_table = get_notification_table_to_use( - service, - notification_type, - process_day, - has_delete_task_run=False - ) + source_table = NotificationAllTimeView query = db.session.query( literal(process_day).label("process_day"), diff --git a/app/models.py b/app/models.py index 4e7d95320..cbdc1779f 100644 --- a/app/models.py +++ b/app/models.py @@ -1411,6 +1411,39 @@ class NotificationStatusTypes(db.Model): name = db.Column(db.String(), primary_key=True) +class NotificationAllTimeView(db.Model): + """ + WARNING: this view is a union of rows in 
"notifications" and + "notification_history". Any query on this view will query both + tables and therefore rely on *both* sets of indices. + """ + __tablename__ = 'notifications_all_time_view' + + id = db.Column(UUID(as_uuid=True), primary_key=True) + job_id = db.Column(UUID(as_uuid=True)) + job_row_number = db.Column(db.Integer) + service_id = db.Column(UUID(as_uuid=True)) + template_id = db.Column(UUID(as_uuid=True)) + template_version = db.Column(db.Integer) + api_key_id = db.Column(UUID(as_uuid=True)) + key_type = db.Column(db.String) + billable_units = db.Column(db.Integer) + notification_type = db.Column(notification_types) + created_at = db.Column(db.DateTime) + sent_at = db.Column(db.DateTime) + sent_by = db.Column(db.String) + updated_at = db.Column(db.DateTime) + status = db.Column('notification_status', db.Text) + reference = db.Column(db.String) + client_reference = db.Column(db.String) + international = db.Column(db.Boolean) + phone_prefix = db.Column(db.String) + rate_multiplier = db.Column(db.Numeric(asdecimal=False)) + created_by_id = db.Column(UUID(as_uuid=True)) + postage = db.Column(db.String) + document_download_count = db.Column(db.Integer) + + class Notification(db.Model): __tablename__ = 'notifications' diff --git a/app/utils.py b/app/utils.py index 86cfad6c5..abc0b0aa5 100644 --- a/app/utils.py +++ b/app/utils.py @@ -8,7 +8,7 @@ from notifications_utils.template import ( LetterPrintTemplate, SMSMessageTemplate, ) -from notifications_utils.timezones import convert_bst_to_utc, convert_utc_to_bst +from notifications_utils.timezones import convert_bst_to_utc from sqlalchemy import func DATETIME_FORMAT_NO_TIMEZONE = "%Y-%m-%d %H:%M:%S.%f" @@ -130,30 +130,6 @@ def email_address_is_nhs(email_address): )) -def get_notification_table_to_use(service, notification_type, process_day, has_delete_task_run): - """ - Work out what table will contain notification data for a service by looking up their data retention. 
- - Make sure that when you run this you think about whether the delete task has run for that day! If it's run, the - notifications from that day will have moved to NotificationHistory. The delete tasks run between 4 and 5am every - morning. - """ - from app.models import Notification, NotificationHistory - - data_retention = service.data_retention.get(notification_type) - days_of_retention = data_retention.days_of_retention if data_retention else 7 - - todays_bst_date = convert_utc_to_bst(datetime.utcnow()).date() - days_ago = todays_bst_date - process_day - - if not has_delete_task_run: - # if the task hasn't run yet, we've got an extra day of data in the notification table so can go back an extra - # day before looking at NotificationHistory - days_of_retention += 1 - - return Notification if days_ago <= timedelta(days=days_of_retention) else NotificationHistory - - def get_archived_db_column_value(column): date = datetime.utcnow().strftime("%Y-%m-%d") return f'_archived_{date}_{column}' diff --git a/migrations/versions/0373_add_notifications_all_time.py b/migrations/versions/0373_add_notifications_all_time.py new file mode 100644 index 000000000..3344603b8 --- /dev/null +++ b/migrations/versions/0373_add_notifications_all_time.py @@ -0,0 +1,78 @@ +""" + +Revision ID: 0373_add_notifications_view +Revises: 0372_remove_provider_rates +Create Date: 2022-05-18 09:39:45.260951 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0373_add_notifications_view' +down_revision = '0372_remove_provider_rates' + + +def upgrade(): + op.execute(""" + CREATE VIEW notifications_all_time_view AS + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + 
created_by_id, + postage, + document_download_count + FROM notifications + ) UNION + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + created_by_id, + postage, + document_download_count + FROM notification_history + ) + """) + + +def downgrade(): + op.execute("DROP VIEW notifications_all_time_view") diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 14904a4d2..c90754463 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -31,7 +31,6 @@ from tests.app.db import ( create_notification_history, create_rate, create_service, - create_service_data_retention, create_template, ) @@ -115,6 +114,38 @@ def test_create_nightly_notification_status_triggers_relevant_tasks( assert types == expected_types_aggregated +def test_create_nightly_billing_for_day_checks_history( + sample_service, + sample_letter_template, + mocker +): + yesterday = datetime.now() - timedelta(days=1) + mocker.patch('app.dao.fact_billing_dao.get_rate', side_effect=mocker_get_rate) + + create_notification( + created_at=yesterday, + template=sample_letter_template, + status='sending', + ) + + create_notification_history( + created_at=yesterday, + template=sample_letter_template, + status='delivered', + ) + + records = FactBilling.query.all() + assert len(records) == 0 + + create_nightly_billing_for_day(str(yesterday.date())) + records = FactBilling.query.all() + assert len(records) == 1 + + record = records[0] + assert record.notification_type == LETTER_TYPE + assert record.notifications_sent == 2 + + @pytest.mark.parametrize('second_rate, records_num, billable_units, multiplier', [(1.0, 1, 2, [1]), (2.0, 2, 1, [1, 2])]) @@ -532,14 +563,10 @@ 
def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio second_template = create_template(service=second_service, template_type='email') third_template = create_template(service=second_service, template_type='letter') - create_service_data_retention(second_service, 'email', days_of_retention=3) - process_day = date.today() - timedelta(days=5) with freeze_time(datetime.combine(process_day, time.min)): create_notification(template=first_template, status='delivered') - - # 2nd service email has 3 day data retention - data has been moved to history and doesn't exist in notifications - create_notification_history(template=second_template, status='temporary-failure') + create_notification(template=second_template, status='temporary-failure') # team API key notifications are included create_notification(template=third_template, status='sending', key_type=KEY_TYPE_TEAM) @@ -547,6 +574,9 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio # test notifications are ignored create_notification(template=third_template, status='sending', key_type=KEY_TYPE_TEST) + # historical notifications are included + create_notification_history(template=third_template, status='delivered') + # these created notifications from a different day get ignored with freeze_time(datetime.combine(date.today() - timedelta(days=4), time.min)): create_notification(template=first_template) @@ -560,10 +590,11 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio create_nightly_notification_status_for_service_and_day(str(process_day), second_service.id, 'letter') new_fact_data = FactNotificationStatus.query.order_by( - FactNotificationStatus.notification_type + FactNotificationStatus.notification_type, + FactNotificationStatus.notification_status, ).all() - assert len(new_fact_data) == 3 + assert len(new_fact_data) == 4 email_failure_row = new_fact_data[0] assert email_failure_row.bst_date == process_day @@ -575,7 +606,15 
@@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio assert email_failure_row.notification_count == 1 assert email_failure_row.key_type == KEY_TYPE_NORMAL - letter_sending_row = new_fact_data[1] + letter_delivered_row = new_fact_data[1] + assert letter_delivered_row.template_id == third_template.id + assert letter_delivered_row.service_id == second_service.id + assert letter_delivered_row.notification_type == 'letter' + assert letter_delivered_row.notification_status == 'delivered' + assert letter_delivered_row.notification_count == 1 + assert letter_delivered_row.key_type == KEY_TYPE_NORMAL + + letter_sending_row = new_fact_data[2] assert letter_sending_row.template_id == third_template.id assert letter_sending_row.service_id == second_service.id assert letter_sending_row.notification_type == 'letter' @@ -583,7 +622,7 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio assert letter_sending_row.notification_count == 1 assert letter_sending_row.key_type == KEY_TYPE_TEAM - sms_delivered_row = new_fact_data[2] + sms_delivered_row = new_fact_data[3] assert sms_delivered_row.template_id == first_template.id assert sms_delivered_row.service_id == first_service.id assert sms_delivered_row.notification_type == 'sms' diff --git a/tests/app/test_utils.py b/tests/app/test_utils.py index 7cc084eaf..8bf232b17 100644 --- a/tests/app/test_utils.py +++ b/tests/app/test_utils.py @@ -3,15 +3,12 @@ from datetime import date, datetime import pytest from freezegun import freeze_time -from app.models import Notification, NotificationHistory from app.utils import ( format_sequential_number, get_london_midnight_in_utc, get_midnight_for_day_before, - get_notification_table_to_use, midnight_n_days_ago, ) -from tests.app.db import create_service_data_retention @pytest.mark.parametrize('date, expected_date', [ @@ -57,49 +54,5 @@ def test_midnight_n_days_ago(current_time, arg, expected_datetime): assert midnight_n_days_ago(arg) == 
expected_datetime -@freeze_time('2019-01-10 00:30') -def test_get_notification_table_to_use(sample_service): - # it's currently early morning of Thurs 10th Jan. - # When the delete task runs a bit later, it'll delete data from last wednesday 2nd. - assert get_notification_table_to_use(sample_service, 'sms', date(2018, 12, 31), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 1), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 2), False) == Notification - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 3), False) == Notification - - -@freeze_time('2019-01-10 00:30') -def test_get_notification_table_to_use_knows_if_delete_task_has_run(sample_service): - # it's currently early morning of Thurs 10th Jan. - # The delete task deletes/moves data from last wednesday 2nd. - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 2), False) == Notification - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 2), True) == NotificationHistory - - -@freeze_time('2019-06-09 23:30') -def test_get_notification_table_to_use_respects_daylight_savings_time(sample_service): - # current time is 12:30am on 10th july in BST - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 6, 1), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 6, 2), False) == Notification - - -@freeze_time('2019-01-10 00:30') -def test_get_notification_table_to_use_checks_service_data_retention(sample_service): - create_service_data_retention(sample_service, 'email', days_of_retention=3) - create_service_data_retention(sample_service, 'letter', days_of_retention=9) - - # it's currently early morning of Thurs 10th Jan. 
- # three days retention means we'll delete sunday 6th's data when the delete task runs (so there's still three full - # days of monday, tuesday and wednesday left over) - assert get_notification_table_to_use(sample_service, 'email', date(2019, 1, 5), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'email', date(2019, 1, 6), False) == Notification - - assert get_notification_table_to_use(sample_service, 'letter', date(2018, 12, 30), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'letter', date(2018, 12, 31), False) == Notification - - # falls back to 7 days if not specified - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 1), False) == NotificationHistory - assert get_notification_table_to_use(sample_service, 'sms', date(2019, 1, 2), False) == Notification - - def test_format_sequential_number(): assert format_sequential_number(123) == '0000007b' diff --git a/tests/conftest.py b/tests/conftest.py index 74a6f8e7e..4eef62245 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -123,6 +123,7 @@ def notify_db_session(_notify_db, sms_providers): "job_status", "provider_details_history", "template_process_type", + "notifications_all_time_view", "notification_status_types", "organisation_types", "service_permission_types",