From be6f37069b92c5fc98b7382cf91beb02f335b4f7 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 19 Nov 2018 17:09:27 +0000 Subject: [PATCH 001/118] Change job selection dao to take flexible retention into account Also test deleting jobs with flexible data retention Also update tests for default data retention following logic change: dao_get_jobs_older_than_data_retention now counts today at the start of the day, not at a time when function runs and updated tests reflect that --- app/celery/scheduled_tasks.py | 6 +- app/dao/jobs_dao.py | 39 ++++++++--- tests/app/celery/test_scheduled_tasks.py | 84 ++++++++++++++++++++---- tests/app/dao/test_jobs_dao.py | 8 +-- 4 files changed, 108 insertions(+), 29 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 9ff9cb956..e6870f38d 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -23,7 +23,7 @@ from app.dao.invited_org_user_dao import delete_org_invitations_created_more_tha from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago from app.dao.jobs_dao import ( dao_set_scheduled_jobs_to_pending, - dao_get_jobs_older_than_limited_by + dao_get_jobs_older_than_data_retention ) from app.dao.jobs_dao import dao_update_job from app.dao.notifications_dao import ( @@ -64,7 +64,7 @@ from app.v2.errors import JobIncompleteError @notify_celery.task(name="remove_csv_files") @statsd(namespace="tasks") def remove_csv_files(job_types): - jobs = dao_get_jobs_older_than_limited_by(job_types=job_types) + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) @@ -299,7 +299,7 @@ def delete_inbound_sms_older_than_seven_days(): @notify_celery.task(name="remove_transformed_dvla_files") @statsd(namespace="tasks") def remove_transformed_dvla_files(): - jobs = 
dao_get_jobs_older_than_limited_by(job_types=[LETTER_TYPE]) + jobs = dao_get_jobs_older_than_data_retention(notification_types=[LETTER_TYPE]) for job in jobs: s3.remove_transformed_dvla_file(job.id) current_app.logger.info("Transformed dvla file for job {} has been removed from s3.".format(job.id)) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 6e2297302..a4947e958 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -18,6 +18,7 @@ from app.models import ( LETTER_TYPE, NotificationHistory, Template, + ServiceDataRetention ) from app.variables import LETTER_TEST_API_FILENAME @@ -115,15 +116,37 @@ def dao_update_job(job): db.session.commit() -def dao_get_jobs_older_than_limited_by(job_types, older_than=7, limit_days=2): - end_date = datetime.utcnow() - timedelta(days=older_than) - start_date = end_date - timedelta(days=limit_days) +def dao_get_jobs_older_than_data_retention(notification_types): + flexible_data_retention = ServiceDataRetention.query.filter( + ServiceDataRetention.notification_type.in_(notification_types) + ).all() + jobs = [] + today = datetime.utcnow().date() + for f in flexible_data_retention: + end_date = today - timedelta(days=f.days_of_retention) + start_date = end_date - timedelta(days=2) - return Job.query.join(Template).filter( - Job.created_at < end_date, - Job.created_at >= start_date, - Template.template_type.in_(job_types) - ).order_by(desc(Job.created_at)).all() + jobs.extend(Job.query.join(Template).filter( + Job.created_at < end_date, + Job.created_at >= start_date, + Template.template_type == f.notification_type, + Job.service_id == f.service_id + ).order_by(desc(Job.created_at)).all()) + + end_date = today - timedelta(days=7) + start_date = end_date - timedelta(days=2) + for notification_type in notification_types: + services_with_data_retention = [ + x.service_id for x in flexible_data_retention if x.notification_type == notification_type + ] + jobs.extend(Job.query.join(Template).filter( + Job.created_at < 
end_date, + Job.created_at >= start_date, + Template.template_type == notification_type, + Job.service_id.notin_(services_with_data_retention) + ).order_by(desc(Job.created_at)).all()) + + return jobs def dao_get_all_letter_jobs(): diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 5ce737764..af58f0e97 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -52,11 +52,21 @@ from app.models import ( JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, LETTER_TYPE, - SMS_TYPE + SMS_TYPE, + EMAIL_TYPE ) from app.utils import get_london_midnight_in_utc from app.v2.errors import JobIncompleteError from tests.app.aws.test_s3 import single_s3_object_stub +from tests.app.db import ( + create_notification, + create_service, + create_template, + create_job, + create_service_callback_api, + create_service_data_retention +) + from tests.app.conftest import ( sample_job as create_sample_job, sample_notification_history as create_notification_history, @@ -64,9 +74,6 @@ from tests.app.conftest import ( create_custom_template, datetime_in_past ) -from tests.app.db import ( - create_notification, create_service, create_template, create_job, create_service_callback_api -) from tests.conftest import set_config_values @@ -273,10 +280,11 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ def test_will_remove_csv_files_for_jobs_older_than_seven_days( notify_db, notify_db_session, mocker, sample_template ): - mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ + mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) eight_days_ago = seven_days_ago - timedelta(days=1) @@ -284,7 +292,7 @@ def 
test_will_remove_csv_files_for_jobs_older_than_seven_days( just_under_nine_days = nine_days_ago + timedelta(seconds=1) nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) + job3_to_delete = create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) @@ -294,10 +302,57 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( assert s3.remove_job_from_s3.call_args_list == [ call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id) + call(job2_to_delete.service_id, job2_to_delete.id), + call(job3_to_delete.service_id, job3_to_delete.id) ] +@freeze_time('2016-10-18T10:00:00') +def test_will_remove_csv_files_for_jobs_older_than_retention_period( + notify_db, notify_db_session, mocker +): + """ + Jobs older than retention period are deleted, but only two day's worth (two-day window) + """ + mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + service_1 = create_service(service_name='service 1') + service_2 = create_service(service_name='service 2') + create_service_data_retention(service_id=service_1.id, notification_type=SMS_TYPE, days_of_retention=3) + create_service_data_retention(service_id=service_2.id, notification_type=EMAIL_TYPE, days_of_retention=30) + sms_template_service_1 = create_template(service=service_1) + email_template_service_1 = create_template(service=service_1, template_type='email') + + sms_template_service_2 = create_template(service=service_2) + email_template_service_2 = create_template(service=service_2, template_type='email') + + four_days_ago = datetime.utcnow() - timedelta(days=4) + 
eight_days_ago = datetime.utcnow() - timedelta(days=8) + thirty_one_days_ago = datetime.utcnow() - timedelta(days=31) + + _create_job = partial( + create_sample_job, + notify_db, + notify_db_session, + ) + + job1_to_delete = _create_job(service=service_1, template=sms_template_service_1, created_at=four_days_ago) + job2_to_delete = _create_job(service=service_1, template=email_template_service_1, created_at=eight_days_ago) + _create_job(service=service_1, template=email_template_service_1, created_at=four_days_ago) + + _create_job(service=service_2, template=email_template_service_2, created_at=eight_days_ago) + job3_to_delete = _create_job(service=service_2, template=email_template_service_2, created_at=thirty_one_days_ago) + job4_to_delete = _create_job(service=service_2, template=sms_template_service_2, created_at=eight_days_ago) + + remove_csv_files(job_types=[SMS_TYPE, EMAIL_TYPE]) + + s3.remove_job_from_s3.assert_has_calls([ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id), + call(job3_to_delete.service_id, job3_to_delete.id), + call(job4_to_delete.service_id, job4_to_delete.id) + ], any_order=True) + + def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker): send_mock = mocker.patch( 'app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa @@ -545,17 +600,18 @@ def test_remove_dvla_transformed_files_removes_expected_files(mocker, sample_ser just_over_seven_days = seven_days_ago - timedelta(seconds=1) eight_days_ago = seven_days_ago - timedelta(days=1) nine_days_ago = eight_days_ago - timedelta(days=1) + ten_days_ago = nine_days_ago - timedelta(days=1) just_under_nine_days = nine_days_ago + timedelta(seconds=1) just_over_nine_days = nine_days_ago - timedelta(seconds=1) + just_over_ten_days = ten_days_ago - timedelta(seconds=1) - job(created_at=seven_days_ago) job(created_at=just_under_seven_days) - job_to_delete_1 = 
job(created_at=just_over_seven_days) - job_to_delete_2 = job(created_at=eight_days_ago) - job_to_delete_3 = job(created_at=nine_days_ago) - job_to_delete_4 = job(created_at=just_under_nine_days) - job(created_at=just_over_nine_days) - + job(created_at=just_over_seven_days) + job_to_delete_1 = job(created_at=eight_days_ago) + job_to_delete_2 = job(created_at=nine_days_ago) + job_to_delete_3 = job(created_at=just_under_nine_days) + job_to_delete_4 = job(created_at=just_over_nine_days) + job(created_at=just_over_ten_days) remove_transformed_dvla_files() s3.remove_transformed_dvla_file.assert_has_calls([ diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index a6bea2d48..bd8752ead 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -13,7 +13,7 @@ from app.dao.jobs_dao import ( dao_set_scheduled_jobs_to_pending, dao_get_future_scheduled_job_by_id_and_service_id, dao_get_notification_outcomes_for_job, - dao_get_jobs_older_than_limited_by + dao_get_jobs_older_than_data_retention, ) from app.models import ( Job, @@ -296,7 +296,7 @@ def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_tem job(created_at=nine_days_ago) job(created_at=nine_days_one_second_ago) - jobs = dao_get_jobs_older_than_limited_by(job_types=[sample_template.template_type]) + jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type]) assert len(jobs) == 1 assert jobs[0].id == job_to_delete.id @@ -359,8 +359,8 @@ def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_sessio job(template=sms_template) job(template=email_template) - jobs = dao_get_jobs_older_than_limited_by( - job_types=[EMAIL_TYPE, SMS_TYPE] + jobs = dao_get_jobs_older_than_data_retention( + notification_types=[EMAIL_TYPE, SMS_TYPE] ) assert len(jobs) == 2 From e5fd0271922b5e830a7ed7acfc9faa3aeb9416db Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 21 Nov 2018 14:21:37 +0000 Subject: [PATCH 
002/118] Move nightly tasks before introduction of archived flag on jobs --- app/celery/scheduled_tasks.py | 1 + app/config.py | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index e6870f38d..6ddaea633 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -67,6 +67,7 @@ def remove_csv_files(job_types): jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) + # job.archived = true; commit; current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) diff --git a/app/config.py b/app/config.py index 1ba7d6edc..c38bb611f 100644 --- a/app/config.py +++ b/app/config.py @@ -236,6 +236,11 @@ class Config(object): 'schedule': crontab(hour=2, minute=0), 'options': {'queue': QueueNames.PERIODIC} }, + 'remove_transformed_dvla_files': { + 'task': 'remove_transformed_dvla_files', + 'schedule': crontab(hour=3, minute=40), + 'options': {'queue': QueueNames.PERIODIC} + }, 'remove_sms_email_jobs': { 'task': 'remove_csv_files', 'schedule': crontab(hour=4, minute=0), @@ -244,15 +249,11 @@ class Config(object): }, 'remove_letter_jobs': { 'task': 'remove_csv_files', - 'schedule': crontab(hour=4, minute=20), + 'schedule': crontab(hour=4, minute=20), # this has to run AFTER remove_transformed_dvla_files + # since we mark jobs as archived 'options': {'queue': QueueNames.PERIODIC}, 'kwargs': {'job_types': [LETTER_TYPE]} }, - 'remove_transformed_dvla_files': { - 'task': 'remove_transformed_dvla_files', - 'schedule': crontab(hour=4, minute=40), - 'options': {'queue': QueueNames.PERIODIC} - }, 'raise-alert-if-letter-notifications-still-sending': { 'task': 'raise-alert-if-letter-notifications-still-sending', 'schedule': crontab(hour=16, minute=30), From 641cb6ec36e3105b67478c938f2cf179c0e0000e Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 22 Nov 2018 15:51:10 +0000 
Subject: [PATCH 003/118] Add archived column to jobs table with default value of false --- app/models.py | 1 + .../versions/0245_archived_flag_jobs.py | 26 +++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 migrations/versions/0245_archived_flag_jobs.py diff --git a/app/models.py b/app/models.py index e2d86679a..7accfc7c5 100644 --- a/app/models.py +++ b/app/models.py @@ -1067,6 +1067,7 @@ class Job(db.Model): job_status = db.Column( db.String(255), db.ForeignKey('job_status.name'), index=True, nullable=False, default='pending' ) + archived = db.Column(db.Boolean, nullable=False, default=False) VERIFY_CODE_TYPES = [EMAIL_TYPE, SMS_TYPE] diff --git a/migrations/versions/0245_archived_flag_jobs.py b/migrations/versions/0245_archived_flag_jobs.py new file mode 100644 index 000000000..467eb2151 --- /dev/null +++ b/migrations/versions/0245_archived_flag_jobs.py @@ -0,0 +1,26 @@ +""" + +Revision ID: 0245_archived_flag_jobs +Revises: 0244_another_letter_org +Create Date: 2018-11-22 16:32:01.105803 + +""" +from alembic import op +import sqlalchemy as sa + + +revision = '0245_archived_flag_jobs' +down_revision = '0244_another_letter_org' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false())) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('jobs', 'archived') + # ### end Alembic commands ### From 50811c3b8e548a2a1491173b441d0b645a9f3823 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 22 Nov 2018 16:47:07 +0000 Subject: [PATCH 004/118] Archive job after corresponding file deleted from s3 --- app/celery/scheduled_tasks.py | 5 +++-- app/dao/jobs_dao.py | 5 +++++ tests/app/celery/test_scheduled_tasks.py | 4 +++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 6ddaea633..ed8e068e8 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -23,7 +23,8 @@ from app.dao.invited_org_user_dao import delete_org_invitations_created_more_tha from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago from app.dao.jobs_dao import ( dao_set_scheduled_jobs_to_pending, - dao_get_jobs_older_than_data_retention + dao_get_jobs_older_than_data_retention, + dao_archive_job ) from app.dao.jobs_dao import dao_update_job from app.dao.notifications_dao import ( @@ -67,7 +68,7 @@ def remove_csv_files(job_types): jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) - # job.archived = true; commit; + dao_archive_job(job) current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index a4947e958..c4a3fc585 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -66,6 +66,11 @@ def dao_get_jobs_by_service_id(service_id, limit_days=None, page=1, page_size=50 def dao_get_job_by_id(job_id): return Job.query.filter_by(id=job_id).one() +def dao_archive_job(job): + job.archived = True + db.session.add(job) + db.session.commit() + def dao_set_scheduled_jobs_to_pending(): """ diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index af58f0e97..fadc68136 100644 --- 
a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -295,7 +295,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( job3_to_delete = create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) - create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) + dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) remove_csv_files(job_types=[sample_template.template_type]) @@ -305,6 +305,8 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( call(job2_to_delete.service_id, job2_to_delete.id), call(job3_to_delete.service_id, job3_to_delete.id) ] + assert job1_to_delete.archived == True + assert dont_delete_me_1.archived == False @freeze_time('2016-10-18T10:00:00') From fd06924f3a6d242967cf528e769cb46cf3bb12f9 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 22 Nov 2018 17:49:04 +0000 Subject: [PATCH 005/118] Build a command to archive old jobs --- app/commands.py | 30 ++++++++++++++++++++++++ app/dao/jobs_dao.py | 1 + tests/app/celery/test_scheduled_tasks.py | 4 ++-- 3 files changed, 33 insertions(+), 2 deletions(-) diff --git a/app/commands.py b/app/commands.py index 918a25242..41f58c4dd 100644 --- a/app/commands.py +++ b/app/commands.py @@ -654,3 +654,33 @@ def populate_notification_postage(start_date): total_updated += result.rowcount current_app.logger.info('Total inserted/updated records = {}'.format(total_updated)) + + +@notify_command(name='archive-jobs-created-between-dates') +@click.option('-s', '--start_date', required=True, help="start date inclusive", type=click_dt(format='%Y-%m-%d')) +@click.option('-e', '--end_date', required=True, 
help="end date inclusive", type=click_dt(format='%Y-%m-%d')) +@statsd(namespace="tasks") +def update_jobs_archived_flag(start_date, end_date): + current_app.logger.info('Archiving jobs created between {} to {}'.format(start_date, end_date)) + + process_date = start_date + total_updated = 0 + + while process_date < end_date: + start_time = datetime.utcnow() + sql = """update + jobs set archived = true + where + created_at >= (date :start + time '00:00:00') at time zone 'Europe/London' + at time zone 'UTC' + and created_at < (date :end + time '00:00:00') at time zone 'Europe/London' at time zone 'UTC'""" + + result = db.session.execute(sql, {"start": process_date, "end": process_date + timedelta(days=1)}) + db.session.commit() + current_app.logger.info('jobs: --- Completed took {}ms. Archived {} jobs for {}'.format( + datetime.now() - start_time, result.rowcount, process_date)) + + process_date += timedelta(days=1) + + total_updated += result.rowcount + current_app.logger.info('Total archived jobs = {}'.format(total_updated)) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index c4a3fc585..6e3d2f25b 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -66,6 +66,7 @@ def dao_get_jobs_by_service_id(service_id, limit_days=None, page=1, page_size=50 def dao_get_job_by_id(job_id): return Job.query.filter_by(id=job_id).one() + def dao_archive_job(job): job.archived = True db.session.add(job) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index fadc68136..02007195f 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -305,8 +305,8 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( call(job2_to_delete.service_id, job2_to_delete.id), call(job3_to_delete.service_id, job3_to_delete.id) ] - assert job1_to_delete.archived == True - assert dont_delete_me_1.archived == False + assert job1_to_delete.archived is True + assert dont_delete_me_1.archived is 
False @freeze_time('2016-10-18T10:00:00') From f941b8b146bd75b056ea5db53877bc7b2a0170b5 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 26 Nov 2018 16:30:23 +0000 Subject: [PATCH 006/118] Use archived flag to see if job needs deleting from s3 bucket --- app/dao/jobs_dao.py | 6 ++---- tests/app/celery/test_scheduled_tasks.py | 3 +-- tests/app/conftest.py | 6 ++++-- tests/app/dao/test_jobs_dao.py | 4 ++-- tests/app/db.py | 6 ++++-- 5 files changed, 13 insertions(+), 12 deletions(-) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 6e3d2f25b..5eaf71457 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -130,24 +130,22 @@ def dao_get_jobs_older_than_data_retention(notification_types): today = datetime.utcnow().date() for f in flexible_data_retention: end_date = today - timedelta(days=f.days_of_retention) - start_date = end_date - timedelta(days=2) jobs.extend(Job.query.join(Template).filter( Job.created_at < end_date, - Job.created_at >= start_date, + Job.archived == False, # noqa Template.template_type == f.notification_type, Job.service_id == f.service_id ).order_by(desc(Job.created_at)).all()) end_date = today - timedelta(days=7) - start_date = end_date - timedelta(days=2) for notification_type in notification_types: services_with_data_retention = [ x.service_id for x in flexible_data_retention if x.notification_type == notification_type ] jobs.extend(Job.query.join(Template).filter( Job.created_at < end_date, - Job.created_at >= start_date, + Job.archived == False, # noqa Template.template_type == notification_type, Job.service_id.notin_(services_with_data_retention) ).order_by(desc(Job.created_at)).all()) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 02007195f..f62a98a60 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -292,7 +292,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( just_under_nine_days = 
nine_days_ago + timedelta(seconds=1) nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - job3_to_delete = create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) + create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago, archived=True) job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) @@ -303,7 +303,6 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( assert s3.remove_job_from_s3.call_args_list == [ call(job1_to_delete.service_id, job1_to_delete.id), call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id) ] assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 04d77a60e..ef7317acb 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -371,7 +371,8 @@ def sample_job( job_status='pending', scheduled_for=None, processing_started=None, - original_file_name='some.csv' + original_file_name='some.csv', + archived=False ): if service is None: service = sample_service(notify_db, notify_db_session) @@ -390,7 +391,8 @@ def sample_job( 'created_by': service.created_by, 'job_status': job_status, 'scheduled_for': scheduled_for, - 'processing_started': processing_started + 'processing_started': processing_started, + 'archived': archived } job = Job(**data) dao_create_job(job) diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index bd8752ead..7d1d88346 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -293,8 +293,8 @@ def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_tem job(created_at=seven_days_ago) 
job(created_at=within_seven_days) job_to_delete = job(created_at=eight_days_ago) - job(created_at=nine_days_ago) - job(created_at=nine_days_one_second_ago) + job(created_at=nine_days_ago, archived=True) + job(created_at=nine_days_one_second_ago, archived=True) jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type]) diff --git a/tests/app/db.py b/tests/app/db.py index 3990a24cf..36e29953e 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -261,7 +261,8 @@ def create_job( job_status='pending', scheduled_for=None, processing_started=None, - original_file_name='some.csv' + original_file_name='some.csv', + archived=False ): data = { 'id': uuid.uuid4(), @@ -275,7 +276,8 @@ def create_job( 'created_by': template.created_by, 'job_status': job_status, 'scheduled_for': scheduled_for, - 'processing_started': processing_started + 'processing_started': processing_started, + 'archived': archived } job = Job(**data) dao_create_job(job) From 452924faafcfb4dcb1eb960ea30ab000f1f93962 Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Wed, 28 Nov 2018 14:01:59 +0000 Subject: [PATCH 007/118] Update jobs archived flag before setting the default value Running an update before setting the column default value reduces the time the table is locked (since most rows don't have a NULL value anymore), but the migration takes slightly longer to run overall. --- migrations/versions/0245_archived_flag_jobs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/migrations/versions/0245_archived_flag_jobs.py b/migrations/versions/0245_archived_flag_jobs.py index 467eb2151..cfcbb8f1f 100644 --- a/migrations/versions/0245_archived_flag_jobs.py +++ b/migrations/versions/0245_archived_flag_jobs.py @@ -15,7 +15,9 @@ down_revision = '0244_another_letter_org' def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false())) + op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True)) + op.execute('update jobs set archived = false') + op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false()) # ### end Alembic commands ### From 614a2dae2cfbb3d056e4963ec69941aeb374b723 Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Fri, 30 Nov 2018 15:41:58 +0000 Subject: [PATCH 008/118] Enable pessimistic DB connection disconnect handling By default, SQLAlchemy will start a transaction with an existing connection without checking that the connection is still valid. Enabling "pre-ping" makes the ORM send a `SELECT 1` when acquiring a connection, which should help avoid some errors caused by connections breaking during a DB failover. The added statement has a constant overhead for all transactions, so we should only keep it enabled when we're planning to switch or upgrade the database server. 
https://docs.sqlalchemy.org/en/latest/core/pooling.html#disconnect-handling-pessimistic --- app/__init__.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/app/__init__.py b/app/__init__.py index b4a470ac0..eea36a485 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -4,7 +4,7 @@ import string import uuid from flask import _request_ctx_stack, request, g, jsonify -from flask_sqlalchemy import SQLAlchemy +from flask_sqlalchemy import SQLAlchemy as _SQLAlchemy from flask_marshmallow import Marshmallow from flask_migrate import Migrate from time import monotonic @@ -27,6 +27,13 @@ from app.encryption import Encryption DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" + +class SQLAlchemy(_SQLAlchemy): + def apply_pool_defaults(self, app, options): + _SQLAlchemy.apply_pool_defaults(self, app, options) + options["pool_pre_ping"] = True + + db = SQLAlchemy() migrate = Migrate() ma = Marshmallow() From b17fd21bb88c407d8244325206ec0032c0cb7d6c Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Fri, 30 Nov 2018 16:20:08 +0000 Subject: [PATCH 009/118] Revert "Enable pessimistic DB connection disconnect handling" Once the DB upgrade is complete we no longer want the added overhead of "pre-ping" connection check. 
--- app/__init__.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/app/__init__.py b/app/__init__.py index eea36a485..b4a470ac0 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -4,7 +4,7 @@ import string import uuid from flask import _request_ctx_stack, request, g, jsonify -from flask_sqlalchemy import SQLAlchemy as _SQLAlchemy +from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow from flask_migrate import Migrate from time import monotonic @@ -27,13 +27,6 @@ from app.encryption import Encryption DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" - -class SQLAlchemy(_SQLAlchemy): - def apply_pool_defaults(self, app, options): - _SQLAlchemy.apply_pool_defaults(self, app, options) - options["pool_pre_ping"] = True - - db = SQLAlchemy() migrate = Migrate() ma = Marshmallow() From f5ea77ffa06e355101974a0eb9f24afbed325d4a Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Fri, 30 Nov 2018 16:35:00 +0000 Subject: [PATCH 010/118] Add reference to one off letters MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Letters should always have a reference, because that’s what DVLA use to tell us when they’ve sent a letter. If a letter has a reference of `None` then DVLA say they’ve sent a letter with a reference of `'None'`. This means we can never reconcile the letter, which means it stays in `created`, which means it never gets billed. We don’t think this has affected any real letters yet, just ones that we’ve sent as tests. 
--- app/service/send_notification.py | 10 +- .../service/test_send_one_off_notification.py | 97 ++++++++++++++++++- 2 files changed, 103 insertions(+), 4 deletions(-) diff --git a/app/service/send_notification.py b/app/service/send_notification.py index b80baf770..62e61283c 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -1,5 +1,6 @@ from sqlalchemy.orm.exc import NoResultFound +from app import create_random_identifier from app.config import QueueNames from app.dao.notifications_dao import _update_notification_status from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id @@ -37,6 +38,12 @@ def validate_created_by(service, created_by_id): raise BadRequestError(message=message) +def create_one_off_reference(template_type): + if template_type != LETTER_TYPE: + return None + return create_random_identifier() + + def send_one_off_notification(service_id, post_data): service = dao_fetch_service_by_id(service_id) template = dao_get_template_by_id_and_service_id( @@ -77,7 +84,8 @@ def send_one_off_notification(service_id, post_data): api_key_id=None, key_type=KEY_TYPE_NORMAL, created_by_id=post_data['created_by'], - reply_to_text=reply_to + reply_to_text=reply_to, + reference=create_one_off_reference(template.template_type), ) queue_name = QueueNames.PRIORITY if template.process_type == PRIORITY else None diff --git a/tests/app/service/test_send_one_off_notification.py b/tests/app/service/test_send_one_off_notification.py index f8ffdf28a..0c20611dc 100644 --- a/tests/app/service/test_send_one_off_notification.py +++ b/tests/app/service/test_send_one_off_notification.py @@ -10,7 +10,9 @@ from app.config import QueueNames from app.dao.service_whitelist_dao import dao_add_and_commit_whitelisted_contacts from app.service.send_notification import send_one_off_notification from app.models import ( + EMAIL_TYPE, KEY_TYPE_NORMAL, + LETTER_TYPE, MOBILE_TYPE, PRIORITY, SMS_TYPE, @@ -64,13 +66,17 @@ def 
test_send_one_off_notification_calls_celery_correctly(persist_mock, celery_m ) -def test_send_one_off_notification_calls_persist_correctly( +def test_send_one_off_notification_calls_persist_correctly_for_sms( persist_mock, celery_mock, notify_db_session ): service = create_service() - template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon") + template = create_template( + service=service, + template_type=SMS_TYPE, + content="Hello (( Name))\nYour thing is due soon", + ) post_data = { 'template_id': str(template.id), @@ -91,7 +97,92 @@ def test_send_one_off_notification_calls_persist_correctly( api_key_id=None, key_type=KEY_TYPE_NORMAL, created_by_id=str(service.created_by_id), - reply_to_text='testing' + reply_to_text='testing', + reference=None, + ) + + +def test_send_one_off_notification_calls_persist_correctly_for_email( + persist_mock, + celery_mock, + notify_db_session +): + service = create_service() + template = create_template( + service=service, + template_type=EMAIL_TYPE, + subject="Test subject", + content="Hello (( Name))\nYour thing is due soon", + ) + + post_data = { + 'template_id': str(template.id), + 'to': 'test@example.com', + 'personalisation': {'name': 'foo'}, + 'created_by': str(service.created_by_id) + } + + send_one_off_notification(service.id, post_data) + + persist_mock.assert_called_once_with( + template_id=template.id, + template_version=template.version, + recipient=post_data['to'], + service=template.service, + personalisation={'name': 'foo'}, + notification_type=EMAIL_TYPE, + api_key_id=None, + key_type=KEY_TYPE_NORMAL, + created_by_id=str(service.created_by_id), + reply_to_text=None, + reference=None, + ) + + +def test_send_one_off_notification_calls_persist_correctly_for_letter( + mocker, + persist_mock, + celery_mock, + notify_db_session +): + mocker.patch( + 'app.service.send_notification.create_random_identifier', + return_value='this-is-random-in-real-life', + ) + service = create_service() + 
template = create_template( + service=service, + template_type=LETTER_TYPE, + subject="Test subject", + content="Hello (( Name))\nYour thing is due soon", + ) + + post_data = { + 'template_id': str(template.id), + 'to': 'First Last', + 'personalisation': { + 'name': 'foo', + 'address line 1': 'First Last', + 'address line 2': '1 Example Street', + 'postcode': 'SW1A 1AA', + }, + 'created_by': str(service.created_by_id) + } + + send_one_off_notification(service.id, post_data) + + persist_mock.assert_called_once_with( + template_id=template.id, + template_version=template.version, + recipient=post_data['to'], + service=template.service, + personalisation=post_data['personalisation'], + notification_type=LETTER_TYPE, + api_key_id=None, + key_type=KEY_TYPE_NORMAL, + created_by_id=str(service.created_by_id), + reply_to_text=None, + reference='this-is-random-in-real-life', ) From 3cfeadcae873b6e45c73d3b637dff256d190ad12 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Mon, 3 Dec 2018 11:33:59 +0000 Subject: [PATCH 011/118] Refactor if statement to be positive --- app/service/send_notification.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/service/send_notification.py b/app/service/send_notification.py index 62e61283c..a00d151a4 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -39,9 +39,9 @@ def validate_created_by(service, created_by_id): def create_one_off_reference(template_type): - if template_type != LETTER_TYPE: - return None - return create_random_identifier() + if template_type == LETTER_TYPE: + return create_random_identifier() + return None def send_one_off_notification(service_id, post_data): From 902e1b403aa5ee4a8b1e285e4d3ac6b2f4438748 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 13 Nov 2018 14:20:24 +0000 Subject: [PATCH 012/118] Update update_notification_status_by_id DAO function Replaced `.with_for_lockmode()`, which is now deprecated, with `.with_for_update() - 
https://docs.sqlalchemy.org/en/latest/orm/query.html#sqlalchemy.orm.query.Query.with_lockmode The function should update any statuses that are not 'final', so added `pending-virus-check` to the list of statuses that the function can update. --- app/dao/notifications_dao.py | 6 ++++-- tests/app/dao/notification_dao/test_notification_dao.py | 8 ++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 564ab6acf..a80cdbefe 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -39,6 +39,7 @@ from app.models import ( NOTIFICATION_DELIVERED, NOTIFICATION_SENDING, NOTIFICATION_PENDING, + NOTIFICATION_PENDING_VIRUS_CHECK, NOTIFICATION_TECHNICAL_FAILURE, NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_PERMANENT_FAILURE, @@ -145,13 +146,14 @@ def _update_notification_status(notification, status): @statsd(namespace="dao") @transactional def update_notification_status_by_id(notification_id, status, sent_by=None): - notification = Notification.query.with_lockmode("update").filter( + notification = Notification.query.with_for_update().filter( Notification.id == notification_id, or_( Notification.status == NOTIFICATION_CREATED, Notification.status == NOTIFICATION_SENDING, Notification.status == NOTIFICATION_PENDING, - Notification.status == NOTIFICATION_SENT + Notification.status == NOTIFICATION_SENT, + Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK )).first() if not notification: diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index ad3a9aa41..50c100192 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -141,6 +141,14 @@ def test_should_update_status_by_id_if_created(notify_db, notify_db_session): assert updated.status == 'failed' +def test_should_update_status_by_id_if_pending_virus_check(notify_db, 
notify_db_session): + notification = sample_notification(notify_db, notify_db_session, status='pending-virus-check') + assert Notification.query.get(notification.id).status == 'pending-virus-check' + updated = update_notification_status_by_id(notification.id, 'cancelled') + assert Notification.query.get(notification.id).status == 'cancelled' + assert updated.status == 'cancelled' + + def test_should_update_status_by_id_and_set_sent_by(notify_db, notify_db_session): notification = sample_notification(notify_db, notify_db_session, status='sending') From 365c462e930eedf2d94b985608bc5d9a930634a1 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 15 Nov 2018 10:55:29 +0000 Subject: [PATCH 013/118] Update get_notification_by_id to take an optional service_id It can be useful to get a notification by id while checking that the notification belongs to a given service. This changes the get_notification_by_id DAO function to optionally also filter by service_id so that we can check this. --- app/dao/notifications_dao.py | 14 +++++++---- .../notification_dao/test_notification_dao.py | 23 ++++++++++++++++++- 2 files changed, 31 insertions(+), 6 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index a80cdbefe..0ff1ffa67 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -227,11 +227,15 @@ def get_notification_with_personalisation(service_id, notification_id, key_type) @statsd(namespace="dao") -def get_notification_by_id(notification_id, _raise=False): - if _raise: - return Notification.query.filter_by(id=notification_id).one() - else: - return Notification.query.filter_by(id=notification_id).first() +def get_notification_by_id(notification_id, service_id=None, _raise=False): + filters = [Notification.id == notification_id] + + if service_id: + filters.append(Notification.service_id == service_id) + + query = Notification.query.filter(*filters) + + return query.one() if _raise else query.first() def 
get_notifications(filter_dict=None): diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index 50c100192..c40b315d3 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -5,6 +5,8 @@ from functools import partial import pytest from freezegun import freeze_time from sqlalchemy.exc import SQLAlchemyError, IntegrityError +from sqlalchemy.orm.exc import NoResultFound + from app.dao.notifications_dao import ( dao_create_notification, @@ -520,7 +522,7 @@ def test_save_notification_with_no_job(sample_template, mmg_provider): assert notification_from_db.status == 'created' -def test_get_notification_by_id(notify_db, notify_db_session, sample_template): +def test_get_notification_with_personalisation_by_id(notify_db, notify_db_session, sample_template): notification = sample_notification(notify_db=notify_db, notify_db_session=notify_db_session, template=sample_template, scheduled_for='2017-05-05 14:15', @@ -534,6 +536,25 @@ def test_get_notification_by_id(notify_db, notify_db_session, sample_template): assert notification_from_db.scheduled_notification.scheduled_for == datetime(2017, 5, 5, 14, 15) +def test_get_notification_by_id_when_notification_exists(sample_notification): + notification_from_db = get_notification_by_id(sample_notification.id) + + assert sample_notification == notification_from_db + + +def test_get_notification_by_id_when_notification_does_not_exist(notify_db_session, fake_uuid): + notification_from_db = get_notification_by_id(fake_uuid) + + assert notification_from_db is None + + +def test_get_notification_by_id_when_notification_exists_for_different_service(sample_notification): + another_service = create_service(service_name='Another service') + + with pytest.raises(NoResultFound): + get_notification_by_id(sample_notification.id, another_service.id, _raise=True) + + def 
test_get_notifications_by_reference(sample_template): client_reference = 'some-client-ref' assert len(Notification.query.all()) == 0 From c766febe9468706b821ddb86b0f5df92b4339716 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 3 Dec 2018 13:59:25 +0000 Subject: [PATCH 014/118] Update /platform-stats to return the data from ft_notification_status, that way the request should not time out for a long date range. Next steps is to update the query for platform admin stats for all services. --- app/dao/fact_notification_status_dao.py | 46 ++++++++++++ app/platform_stats/rest.py | 4 +- .../dao/test_fact_notification_status_dao.py | 72 ++++++++++++++++++- tests/app/platform_stats/test_rest.py | 4 +- 4 files changed, 120 insertions(+), 6 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index f80f175d8..486aa1213 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -138,3 +138,49 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ all_stats_table.c.notification_type, all_stats_table.c.status, ).all() + + +def fetch_notification_status_totals_for_all_services(start_date, end_date): + stats = db.session.query( + FactNotificationStatus.notification_type.label('notification_type'), + FactNotificationStatus.notification_status.label('status'), + FactNotificationStatus.key_type, + func.sum(FactNotificationStatus.notification_count).label('count') + ).filter( + FactNotificationStatus.bst_date >= start_date, + FactNotificationStatus.bst_date <= end_date + ).group_by( + FactNotificationStatus.notification_type, + FactNotificationStatus.notification_status, + FactNotificationStatus.key_type, + ).order_by( + FactNotificationStatus.notification_type + ) + today = get_london_midnight_in_utc(datetime.utcnow()) + if start_date <= today.date() <= end_date: + stats_for_today = db.session.query( + 
Notification.notification_type.cast(db.Text).label('notification_type'), + Notification.status, + Notification.key_type, + func.count().label('count') + ).filter( + Notification.created_at >= today + ).group_by( + Notification.notification_type.cast(db.Text), + Notification.status, + Notification.key_type, + ) + all_stats_table = stats.union_all(stats_for_today).subquery() + query = db.session.query( + all_stats_table.c.notification_type, + all_stats_table.c.status, + func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), + ).group_by( + all_stats_table.c.notification_type, + all_stats_table.c.status, + ).order_by( + all_stats_table.c.notification_type + ) + else: + query = stats + return query.all() diff --git a/app/platform_stats/rest.py b/app/platform_stats/rest.py index 54e94cc47..efe936f84 100644 --- a/app/platform_stats/rest.py +++ b/app/platform_stats/rest.py @@ -2,7 +2,7 @@ from datetime import datetime from flask import Blueprint, jsonify, request -from app.dao.notifications_dao import fetch_aggregate_stats_by_date_range_for_all_services +from app.dao.fact_notification_status_dao import fetch_notification_status_totals_for_all_services from app.errors import register_errors from app.platform_stats.platform_stats_schema import platform_stats_request from app.service.statistics import format_admin_stats @@ -23,7 +23,7 @@ def get_platform_stats(): start_date = datetime.strptime(request.args.get('start_date', today), '%Y-%m-%d').date() end_date = datetime.strptime(request.args.get('end_date', today), '%Y-%m-%d').date() - data = fetch_aggregate_stats_by_date_range_for_all_services(start_date=start_date, end_date=end_date) + data = fetch_notification_status_totals_for_all_services(start_date=start_date, end_date=end_date) stats = format_admin_stats(data) return jsonify(stats) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 5d0296b15..9dcb975eb 100644 --- 
a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -1,14 +1,17 @@ from datetime import timedelta, datetime, date from uuid import UUID +import pytest + from app.dao.fact_notification_status_dao import ( update_fact_notification_status, fetch_notification_status_for_day, fetch_notification_status_for_service_by_month, fetch_notification_status_for_service_for_day, - fetch_notification_status_for_service_for_today_and_7_previous_days + fetch_notification_status_for_service_for_today_and_7_previous_days, + fetch_notification_status_totals_for_all_services ) -from app.models import FactNotificationStatus, KEY_TYPE_TEST, KEY_TYPE_TEAM, EMAIL_TYPE, SMS_TYPE +from app.models import FactNotificationStatus, KEY_TYPE_TEST, KEY_TYPE_TEAM, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE from freezegun import freeze_time from tests.app.db import create_notification, create_service, create_template, create_ft_notification_status @@ -220,3 +223,68 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days(not assert results[3].notification_type == 'sms' assert results[3].status == 'delivered' assert results[3].count == 19 + + +@pytest.mark.parametrize( + "start_date, end_date, expected_email, expected_letters, expected_sms, expected_created_sms", + [ + (29, 30, 3, 10, 10, 1), # not including today + (29, 31, 4, 10, 11, 2), # today included + (26, 31, 4, 15, 11, 2), + ] + +) +@freeze_time('2018-10-31 14:00') +def test_fetch_notification_status_totals_for_all_services( + notify_db_session, + start_date, + end_date, + expected_email, + expected_letters, + expected_sms, + expected_created_sms +): + set_up_data() + + results = sorted( + fetch_notification_status_totals_for_all_services( + start_date=date(2018, 10, start_date), end_date=date(2018, 10, end_date)), + key=lambda x: (x.notification_type, x.status) + ) + + assert len(results) == 4 + + assert results[0].notification_type == 'email' + assert 
results[0].status == 'delivered' + assert results[0].count == expected_email + + assert results[1].notification_type == 'letter' + assert results[1].status == 'delivered' + assert results[1].count == expected_letters + + assert results[2].notification_type == 'sms' + assert results[2].status == 'created' + assert results[2].count == expected_created_sms + + assert results[3].notification_type == 'sms' + assert results[3].status == 'delivered' + assert results[3].count == expected_sms + + +def set_up_data(): + service_2 = create_service(service_name='service_2') + create_template(service=service_2, template_type=LETTER_TYPE) + service_1 = create_service(service_name='service_1') + sms_template = create_template(service=service_1, template_type=SMS_TYPE) + email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + create_ft_notification_status(date(2018, 10, 24), 'sms', service_1, count=8) + create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=10) + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, notification_status='created') + create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) + create_ft_notification_status(date(2018, 10, 29), 'letter', service_2, count=10) + + create_notification(service_1.templates[0], created_at=datetime(2018, 10, 30, 12, 0, 0), status='delivered') + create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) + create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') + create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 0, 0), status='delivered') diff --git a/tests/app/platform_stats/test_rest.py b/tests/app/platform_stats/test_rest.py index 5afa7fa43..601923062 100644 --- a/tests/app/platform_stats/test_rest.py +++ b/tests/app/platform_stats/test_rest.py @@ -6,7 +6,7 @@ from freezegun import 
freeze_time @freeze_time('2018-06-01') def test_get_platform_stats_uses_todays_date_if_no_start_or_end_date_is_provided(admin_request, mocker): today = datetime.now().date() - dao_mock = mocker.patch('app.platform_stats.rest.fetch_aggregate_stats_by_date_range_for_all_services') + dao_mock = mocker.patch('app.platform_stats.rest.fetch_notification_status_totals_for_all_services') mocker.patch('app.service.rest.statistics.format_statistics') admin_request.get('platform_stats.get_platform_stats') @@ -17,7 +17,7 @@ def test_get_platform_stats_uses_todays_date_if_no_start_or_end_date_is_provided def test_get_platform_stats_can_filter_by_date(admin_request, mocker): start_date = date(2017, 1, 1) end_date = date(2018, 1, 1) - dao_mock = mocker.patch('app.platform_stats.rest.fetch_aggregate_stats_by_date_range_for_all_services') + dao_mock = mocker.patch('app.platform_stats.rest.fetch_notification_status_totals_for_all_services') mocker.patch('app.service.rest.statistics.format_statistics') admin_request.get('platform_stats.get_platform_stats', start_date=start_date, end_date=end_date) From 1d67b55b16692d070c08ed3946da5dab851d644a Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 22 Nov 2018 11:53:32 +0000 Subject: [PATCH 015/118] Add endpoint for cancelling letters --- app/letters/utils.py | 13 +++ app/service/rest.py | 32 ++++++- tests/app/letters/test_letter_utils.py | 25 ++++++ tests/app/service/test_rest.py | 111 +++++++++++++++++++++++++ 4 files changed, 179 insertions(+), 2 deletions(-) diff --git a/app/letters/utils.py b/app/letters/utils.py index 0bf489435..c7e558244 100644 --- a/app/letters/utils.py +++ b/app/letters/utils.py @@ -162,3 +162,16 @@ def _move_s3_object(source_bucket, source_filename, target_bucket, target_filena current_app.logger.info("Moved letter PDF: {}/{} to {}/{}".format( source_bucket, source_filename, target_bucket, target_filename)) + + +def letter_print_day(created_at): + bst_print_datetime = convert_utc_to_bst(created_at) + 
timedelta(hours=6, minutes=30) + bst_print_date = bst_print_datetime.date() + + current_bst_date = convert_utc_to_bst(datetime.utcnow()).date() + + if bst_print_date >= current_bst_date: + return 'today' + else: + print_date = bst_print_datetime.strftime('%d %B').lstrip('0') + return 'on {}'.format(print_date) diff --git a/app/service/rest.py b/app/service/rest.py index ce0b37048..f1c4f947e 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -7,6 +7,8 @@ from flask import ( current_app, Blueprint ) +from notifications_utils.letter_timings import letter_can_be_cancelled +from notifications_utils.timezones import convert_utc_to_bst from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -80,7 +82,8 @@ from app.errors import ( InvalidRequest, register_errors ) -from app.models import Service, EmailBranding +from app.letters.utils import letter_print_day +from app.models import LETTER_TYPE, NOTIFICATION_CANCELLED, Service, EmailBranding from app.schema_validation import validate from app.service import statistics from app.service.service_data_retention_schema import ( @@ -103,7 +106,7 @@ from app.schemas import ( notifications_filter_schema, detailed_service_schema ) -from app.utils import pagination_links, convert_utc_to_bst +from app.utils import pagination_links service_blueprint = Blueprint('service', __name__) @@ -384,6 +387,31 @@ def get_notification_for_service(service_id, notification_id): ), 200 +@service_blueprint.route('//notifications//cancel', methods=['POST']) +def cancel_notification_for_service(service_id, notification_id): + notification = notifications_dao.get_notification_by_id(notification_id, service_id) + + if not notification: + raise InvalidRequest('Notification not found', status_code=404) + elif notification.notification_type != LETTER_TYPE: + raise InvalidRequest('Notification cannot be cancelled - only letters can be cancelled', status_code=400) + elif not 
letter_can_be_cancelled(notification.status, notification.created_at): + print_day = letter_print_day(notification.created_at) + + raise InvalidRequest( + "It’s too late to cancel this letter. Printing started {} at 5.30pm".format(print_day), + status_code=400) + + updated_notification = notifications_dao.update_notification_status_by_id( + notification_id, + NOTIFICATION_CANCELLED, + ) + + return jsonify( + notification_with_template_schema.dump(updated_notification).data + ), 200 + + def search_for_notification_by_to_field(service_id, search_term, statuses, notification_type): results = notifications_dao.dao_get_notifications_by_to_field( service_id=service_id, diff --git a/tests/app/letters/test_letter_utils.py b/tests/app/letters/test_letter_utils.py index 32f3f9df9..7b989a582 100644 --- a/tests/app/letters/test_letter_utils.py +++ b/tests/app/letters/test_letter_utils.py @@ -10,6 +10,7 @@ from app.letters.utils import ( get_bucket_name_and_prefix_for_notification, get_letter_pdf_filename, get_letter_pdf, + letter_print_day, upload_letter_pdf, ScanErrorType, move_failed_pdf, get_folder_name ) @@ -274,3 +275,27 @@ def test_get_folder_name_in_british_summer_time(notify_api, freeze_date, expecte def test_get_folder_name_returns_empty_string_for_test_letter(): assert '' == get_folder_name(datetime.utcnow(), is_test_or_scan_letter=True) + + +@freeze_time('2017-07-07 20:00:00') +def test_letter_print_day_returns_today_if_letter_was_printed_after_1730_yesterday(): + created_at = datetime(2017, 7, 6, 17, 30) + assert letter_print_day(created_at) == 'today' + + +@freeze_time('2017-07-07 16:30:00') +def test_letter_print_day_returns_today_if_letter_was_printed_today(): + created_at = datetime(2017, 7, 7, 12, 0) + assert letter_print_day(created_at) == 'today' + + +@pytest.mark.parametrize('created_at, formatted_date', [ + (datetime(2017, 7, 5, 16, 30), 'on 6 July'), + (datetime(2017, 7, 6, 16, 29), 'on 6 July'), + (datetime(2016, 8, 8, 10, 00), 'on 8 August'), + 
(datetime(2016, 12, 12, 17, 29), 'on 12 December'), + (datetime(2016, 12, 12, 17, 30), 'on 13 December'), +]) +@freeze_time('2017-07-07 16:30:00') +def test_letter_print_day_returns_formatted_date_if_letter_printed_before_1730_yesterday(created_at, formatted_date): + assert letter_print_day(created_at) == formatted_date diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 13b0cb422..a1a40b3a8 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -2790,3 +2790,114 @@ def test_get_organisation_for_service_id_return_empty_dict_if_service_not_in_org service_id=fake_uuid ) assert response == {} + + +def test_cancel_notification_for_service_raises_invalid_request_when_notification_is_not_found( + admin_request, + sample_service, + fake_uuid, +): + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_service.id, + notification_id=fake_uuid, + _expected_status=404 + ) + assert response['message'] == 'Notification not found' + assert response['result'] == 'error' + + +def test_cancel_notification_for_service_raises_invalid_request_when_notification_is_not_a_letter( + admin_request, + sample_notification, +): + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_notification.service_id, + notification_id=sample_notification.id, + _expected_status=400 + ) + assert response['message'] == 'Notification cannot be cancelled - only letters can be cancelled' + assert response['result'] == 'error' + + +@pytest.mark.parametrize('notification_status', [ + 'cancelled', + 'sending', + 'sent', + 'delivered', + 'pending', + 'failed', + 'technical-failure', + 'temporary-failure', + 'permanent-failure', + 'validation-failed', + 'virus-scan-failed', + 'returned-letter', +]) +@freeze_time('2018-07-07 12:00:00') +def test_cancel_notification_for_service_raises_invalid_request_when_letter_is_in_wrong_state_to_be_cancelled( + admin_request, + 
sample_letter_notification, + notification_status, +): + sample_letter_notification.status = notification_status + + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_letter_notification.service_id, + notification_id=sample_letter_notification.id, + _expected_status=400 + ) + assert response['message'] == 'It’s too late to cancel this letter. Printing started today at 5.30pm' + assert response['result'] == 'error' + + +@pytest.mark.parametrize('notification_status', ['created', 'pending-virus-check']) +@freeze_time('2018-07-07 16:00:00') +def test_cancel_notification_for_service_updates_letter_if_letter_is_in_cancellable_state( + admin_request, + sample_letter_notification, + notification_status, +): + sample_letter_notification.status = notification_status + sample_letter_notification.created_at = datetime.now() + + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_letter_notification.service_id, + notification_id=sample_letter_notification.id, + ) + assert response['status'] == 'cancelled' + + +@freeze_time('2017-12-12 17:30:00') +def test_cancel_notification_for_service_raises_error_if_its_too_late_to_cancel( + admin_request, + sample_letter_notification, +): + sample_letter_notification.created_at = datetime(2017, 12, 11, 17, 0) + + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_letter_notification.service_id, + notification_id=sample_letter_notification.id, + _expected_status=400 + ) + assert response['message'] == 'It’s too late to cancel this letter. 
Printing started on 11 December at 5.30pm' + assert response['result'] == 'error' + + +@freeze_time('2018-7-7 16:00:00') +def test_cancel_notification_for_service_updates_letter_if_still_time_to_cancel( + admin_request, + sample_letter_notification, +): + sample_letter_notification.created_at = datetime(2018, 7, 7, 10, 0) + + response = admin_request.post( + 'service.cancel_notification_for_service', + service_id=sample_letter_notification.service_id, + notification_id=sample_letter_notification.id, + ) + assert response['status'] == 'cancelled' From 90d9135fcfbfe2a974d6aaedd4f2cdd114f4edb3 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 22 Nov 2018 15:17:17 +0000 Subject: [PATCH 016/118] Return cancelled letters for dashboard Added cancelled letters to the number of failed letters in the statistics that get used for the dashboard. At some point, we want to stop including cancelled letters in the stats, but for now this keeps things consistent with our current letter failure state, permanent-failure. 
--- app/models.py | 1 + app/service/statistics.py | 2 +- tests/app/service/test_statistics.py | 3 ++- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/app/models.py b/app/models.py index 7accfc7c5..93a9bcd0c 100644 --- a/app/models.py +++ b/app/models.py @@ -1135,6 +1135,7 @@ NOTIFICATION_STATUS_TYPES_COMPLETED = [ NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_RETURNED_LETTER, + NOTIFICATION_CANCELLED, ] NOTIFICATION_STATUS_SUCCESS = [ diff --git a/app/service/statistics.py b/app/service/statistics.py index 3d22b20d6..e1ed57aeb 100644 --- a/app/service/statistics.py +++ b/app/service/statistics.py @@ -86,7 +86,7 @@ def _update_statuses_from_row(update_dict, row): update_dict['delivered'] += row.count elif row.status in ( 'failed', 'technical-failure', 'temporary-failure', - 'permanent-failure', 'validation-failed', 'virus-scan-failed'): + 'permanent-failure', 'validation-failed', 'virus-scan-failed', 'cancelled'): update_dict['failed'] += row.count diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 715553359..6ba357d3b 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -38,7 +38,8 @@ NewStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'key StatsRow('letter', 'validation-failed', 1), StatsRow('letter', 'virus-scan-failed', 1), StatsRow('letter', 'permanent-failure', 1), - ], [4, 0, 4], [0, 0, 0], [3, 0, 3]), + StatsRow('letter', 'cancelled', 1), + ], [4, 0, 4], [0, 0, 0], [4, 0, 4]), 'convert_sent_to_delivered': ([ StatsRow('sms', 'sending', 1), StatsRow('sms', 'delivered', 1), From 20fbb96bc24d7f43afe793b3b26d60505a1a1271 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 4 Dec 2018 12:02:43 +0000 Subject: [PATCH 017/118] Added key_type to resultset --- app/dao/fact_notification_status_dao.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/app/dao/fact_notification_status_dao.py 
b/app/dao/fact_notification_status_dao.py index 486aa1213..1390201e8 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -144,7 +144,7 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): stats = db.session.query( FactNotificationStatus.notification_type.label('notification_type'), FactNotificationStatus.notification_status.label('status'), - FactNotificationStatus.key_type, + FactNotificationStatus.key_type.label('key_type'), func.sum(FactNotificationStatus.notification_count).label('count') ).filter( FactNotificationStatus.bst_date >= start_date, @@ -174,10 +174,12 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): query = db.session.query( all_stats_table.c.notification_type, all_stats_table.c.status, + all_stats_table.c.key_type, func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), ).group_by( all_stats_table.c.notification_type, all_stats_table.c.status, + all_stats_table.c.key_type, ).order_by( all_stats_table.c.notification_type ) From 1d4e4eae94647ffebd770f45deff23a08721b984 Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Tue, 4 Dec 2018 11:48:28 +0000 Subject: [PATCH 018/118] Disable unused SQLAlchemy configuration flags We don't seem to use recorded queries or modification tracking anywhere in the app, and both features potentially increase memory usage. This removes deprecated SQLALCHEMY_COMMIT_ON_TEARDOWN options. It's been removed from the docs and the default matches the value we set anyway. 
--- app/config.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/config.py b/app/config.py index c38bb611f..d93d7c497 100644 --- a/app/config.py +++ b/app/config.py @@ -117,9 +117,8 @@ class Config(object): AWS_REGION = 'eu-west-1' INVITATION_EXPIRATION_DAYS = 2 NOTIFY_APP_NAME = 'api' - SQLALCHEMY_COMMIT_ON_TEARDOWN = False - SQLALCHEMY_RECORD_QUERIES = True - SQLALCHEMY_TRACK_MODIFICATIONS = True + SQLALCHEMY_RECORD_QUERIES = False + SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_POOL_SIZE = int(os.environ.get('SQLALCHEMY_POOL_SIZE', 5)) SQLALCHEMY_POOL_TIMEOUT = 30 SQLALCHEMY_POOL_RECYCLE = 300 From 76ea46bc70c2e4d1baf64c6579ade42b3235e1ae Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 4 Dec 2018 13:55:56 +0000 Subject: [PATCH 019/118] Added test for platform stats using query rather than mock. --- tests/app/platform_stats/test_rest.py | 37 +++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/app/platform_stats/test_rest.py b/tests/app/platform_stats/test_rest.py index 601923062..91474171e 100644 --- a/tests/app/platform_stats/test_rest.py +++ b/tests/app/platform_stats/test_rest.py @@ -2,6 +2,9 @@ from datetime import date, datetime from freezegun import freeze_time +from app.models import SMS_TYPE, EMAIL_TYPE +from tests.app.db import create_service, create_template, create_ft_notification_status, create_notification + @freeze_time('2018-06-01') def test_get_platform_stats_uses_todays_date_if_no_start_or_end_date_is_provided(admin_request, mocker): @@ -35,3 +38,37 @@ def test_get_platform_stats_validates_the_date(admin_request): assert response['errors'][0]['message'] == 'start_date time data {} does not match format %Y-%m-%d'.format( start_date) + + +@freeze_time('2018-10-31 14:00') +def test_get_platform_stats_with_real_query(admin_request, notify_db_session): + service_1 = create_service(service_name='service_1') + sms_template = create_template(service=service_1, template_type=SMS_TYPE) + 
email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=10) + create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) + + create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0), key_type='test') + create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') + create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 0, 0), status='delivered') + + response = admin_request.get( + 'platform_stats.get_platform_stats', start_date=date(2018, 10, 29), + ) + assert response == { + 'email': { + 'failures': { + 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, + 'total': 4, 'test-key': 0 + }, + 'letter': { + 'failures': { + 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, + 'total': 0, 'test-key': 0 + }, + 'sms': { + 'failures': { + 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, + 'total': 11, 'test-key': 1 + } + } From 39ca5b952587aeb2d864e6fcae17a4ea07e2ed7a Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 4 Dec 2018 17:39:43 +0000 Subject: [PATCH 020/118] New query for finding if provider is slow The delivery for provider is slow if more than threshold (currently we pass in threshold 10%) either took x (for now 4) minutes to deliver, or are still sending after that time. We look at all notifications for current provider which are delivered or sending, and are not under test key, for the last 10 minutes. We are using created_at to establish if notifications are from last 10 minutes because we have an index on it, so the query is faster. 
Also write tests for new is_delivery_slow_for_provider query --- app/dao/notifications_dao.py | 35 ++-- .../notification_dao/test_notification_dao.py | 179 +++++++----------- 2 files changed, 93 insertions(+), 121 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 564ab6acf..6692d674d 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -437,22 +437,35 @@ def get_total_sent_notifications_in_date_range(start_date, end_date, notificatio def is_delivery_slow_for_provider( - sent_at, + created_at, provider, threshold, delivery_time, - service_id, - template_id ): - count = db.session.query(Notification).filter( - Notification.service_id == service_id, - Notification.template_id == template_id, - Notification.sent_at >= sent_at, - Notification.status == NOTIFICATION_DELIVERED, + count = db.session.query( + case( + [( + Notification.status == NOTIFICATION_DELIVERED, + (Notification.updated_at - Notification.sent_at) >= delivery_time + )], + else_=(datetime.utcnow() - Notification.sent_at) >= delivery_time + ).label("slow"), func.count() + + ).filter( + Notification.created_at >= created_at, + Notification.sent_at.isnot(None), + Notification.status.in_([NOTIFICATION_DELIVERED, NOTIFICATION_SENDING]), Notification.sent_by == provider, - (Notification.updated_at - Notification.sent_at) >= delivery_time, - ).count() - return count >= threshold + Notification.key_type != KEY_TYPE_TEST + ).group_by("slow").all() + + print(count) + counts = {c[0]: c[1] for c in count} + total_notifications = sum(counts.values()) + if total_notifications: + return counts.get(True, 0) / total_notifications >= threshold + else: + return False @statsd(namespace="dao") diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index ad3a9aa41..774670279 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ 
b/tests/app/dao/notification_dao/test_notification_dao.py @@ -43,6 +43,8 @@ from app.models import ( ScheduledNotification, NOTIFICATION_STATUS_TYPES, NOTIFICATION_STATUS_TYPES_FAILED, + NOTIFICATION_TEMPORARY_FAILURE, + NOTIFICATION_SENDING, NOTIFICATION_SENT, NOTIFICATION_DELIVERED, KEY_TYPE_NORMAL, @@ -1177,131 +1179,88 @@ def test_get_total_sent_notifications_for_email_excludes_sms_counts( assert total_count == 2 -@freeze_time("2016-01-10 12:00:00.000000") -def test_slow_provider_delivery_returns_for_sent_notifications( - sample_template -): - now = datetime.utcnow() - one_minute_from_now = now + timedelta(minutes=1) - five_minutes_from_now = now + timedelta(minutes=5) +def test_is_delivery_slow_for_provider_not_slow_when_no_notifications(notify_db_session): + assert not is_delivery_slow_for_provider(datetime.utcnow(), "firetext", 0.1, timedelta(minutes=4)) - notification_five_minutes_to_deliver = partial( +@pytest.mark.parametrize( + "normal_sending,slow_sending,normal_delivered,slow_delivered,threshold,expected_result", + [ + (0, 0, 0, 0, 0.1, False), + (1, 0, 0, 0, 0.1, False), + (1, 1, 0, 0, 0.1, True), + (0, 0, 1, 1, 0.1, True), + (1, 1, 1, 1, 0.5, True), + (1, 1, 1, 1, 0.6, False), + (45, 5, 45, 5, 0.1, True), + ] +) +@freeze_time("2018-12-04 12:00:00.000000") +def test_delivery_is_delivery_slow_for_provider( + notify_db_session, + sample_template, + normal_sending, + slow_sending, + normal_delivered, + slow_delivered, + threshold, + expected_result +): + normal_notification = partial( create_notification, template=sample_template, - status='delivered', sent_by='mmg', - updated_at=five_minutes_from_now + sent_at=datetime.now(), + updated_at=datetime.now() ) - notification_five_minutes_to_deliver(sent_at=now) - notification_five_minutes_to_deliver(sent_at=one_minute_from_now) - notification_five_minutes_to_deliver(sent_at=one_minute_from_now) - - slow_delivery = is_delivery_slow_for_provider( - sent_at=one_minute_from_now, - provider='mmg', - threshold=2, 
- delivery_time=timedelta(minutes=3), - service_id=sample_template.service.id, - template_id=sample_template.id - ) - - assert slow_delivery - - -@freeze_time("2016-01-10 12:00:00.000000") -def test_slow_provider_delivery_observes_threshold( - sample_template -): - now = datetime.utcnow() - five_minutes_from_now = now + timedelta(minutes=5) - - notification_five_minutes_to_deliver = partial( + slow_notification = partial( create_notification, template=sample_template, - status='delivered', - sent_at=now, sent_by='mmg', - updated_at=five_minutes_from_now + sent_at=datetime.now() - timedelta(minutes=5), + updated_at=datetime.now() ) - notification_five_minutes_to_deliver() - notification_five_minutes_to_deliver() - - slow_delivery = is_delivery_slow_for_provider( - sent_at=now, - provider='mmg', - threshold=3, - delivery_time=timedelta(minutes=5), - service_id=sample_template.service.id, - template_id=sample_template.id - ) - - assert not slow_delivery + for _ in range(normal_sending): + normal_notification(status='sending') + for _ in range(slow_sending): + slow_notification(status='sending') + for _ in range(normal_delivered): + normal_notification(status='delivered') + for _ in range(slow_delivered): + slow_notification(status='delivered') -@freeze_time("2016-01-10 12:00:00.000000") -def test_slow_provider_delivery_returns_for_delivered_notifications_only( - sample_template + assert is_delivery_slow_for_provider(datetime.utcnow(), "mmg", threshold, timedelta(minutes=4)) is expected_result + +@pytest.mark.parametrize("options,expected_result", [ + ({"status": NOTIFICATION_TEMPORARY_FAILURE, "sent_by": "mmg"}, False), + ({"status": NOTIFICATION_DELIVERED, "sent_by": "firetext"}, False), + ({"status": NOTIFICATION_DELIVERED, "sent_by": "mmg"}, True), + ({"status": NOTIFICATION_DELIVERED, "sent_by": "mmg", "sent_at": None}, False), + ({"status": NOTIFICATION_DELIVERED, "sent_by": "mmg", "key_type": KEY_TYPE_TEST}, False), + ({"status": NOTIFICATION_SENDING, "sent_by": 
"firetext"}, False), + ({"status": NOTIFICATION_SENDING, "sent_by": "mmg"}, True), + ({"status": NOTIFICATION_SENDING, "sent_by": "mmg", "sent_at": None}, False), + ({"status": NOTIFICATION_SENDING, "sent_by": "mmg", "key_type": KEY_TYPE_TEST}, False), +]) +@freeze_time("2018-12-04 12:00:00.000000") +def test_delivery_is_delivery_slow_for_provider_filters_out_notifications_it_should_not_count( + notify_db_session, + sample_template, + options, + expected_result ): - now = datetime.utcnow() - five_minutes_from_now = now + timedelta(minutes=5) - - notification_five_minutes_to_deliver = partial( - create_notification, - template=sample_template, - sent_at=now, - sent_by='firetext', - created_at=now, - updated_at=five_minutes_from_now + create_notification_with = { + "template": sample_template, + "sent_at": datetime.now() - timedelta(minutes=5), + "updated_at": datetime.now(), + } + create_notification_with.update(options) + create_notification( + **create_notification_with ) - - notification_five_minutes_to_deliver(status='sending') - notification_five_minutes_to_deliver(status='delivered') - notification_five_minutes_to_deliver(status='delivered') - - slow_delivery = is_delivery_slow_for_provider( - sent_at=now, - provider='firetext', - threshold=2, - delivery_time=timedelta(minutes=5), - service_id=sample_template.service.id, - template_id=sample_template.id - ) - - assert slow_delivery - - -@freeze_time("2016-01-10 12:00:00.000000") -def test_slow_provider_delivery_does_not_return_for_standard_delivery_time( - sample_template -): - now = datetime.utcnow() - five_minutes_from_now = now + timedelta(minutes=5) - - notification = partial( - create_notification, - template=sample_template, - created_at=now, - sent_at=now, - sent_by='mmg', - status='delivered' - ) - - notification(updated_at=five_minutes_from_now - timedelta(seconds=1)) - notification(updated_at=five_minutes_from_now - timedelta(seconds=1)) - notification(updated_at=five_minutes_from_now) - - 
slow_delivery = is_delivery_slow_for_provider( - sent_at=now, - provider='mmg', - threshold=2, - delivery_time=timedelta(minutes=5), - service_id=sample_template.service.id, - template_id=sample_template.id - ) - - assert not slow_delivery + assert is_delivery_slow_for_provider(datetime.utcnow(), "mmg", 0.1, timedelta(minutes=4)) is expected_result def test_dao_get_notifications_by_to_field(sample_template): From 418060fbdb4188e9f16c43ca747d7bb7542f664f Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 5 Dec 2018 14:40:07 +0000 Subject: [PATCH 021/118] Update switch provider on slow delivery task to change max once every 10 minutes --- app/celery/scheduled_tasks.py | 35 +++-- app/dao/notifications_dao.py | 1 - tests/app/celery/test_scheduled_tasks.py | 132 +++--------------- .../notification_dao/test_notification_dao.py | 7 +- 4 files changed, 36 insertions(+), 139 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index ed8e068e8..148749994 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -256,28 +256,25 @@ def switch_current_sms_provider_on_slow_delivery(): Switch providers if there are at least two slow delivery notifications (more than four minutes) in the last ten minutes. Search from the time we last switched to the current provider.
""" - functional_test_provider_service_id = current_app.config.get('FUNCTIONAL_TEST_PROVIDER_SERVICE_ID') - functional_test_provider_template_id = current_app.config.get('FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID') + current_provider = get_current_provider('sms') + if current_provider.updated_at > datetime.utcnow() - timedelta(minutes=10): + current_app.logger.info("Slow delivery provider switched less than 10 minutes ago.") + return + slow_delivery_notifications = is_delivery_slow_for_provider( + provider=current_provider.identifier, + threshold=0.1, + created_at=datetime.utcnow() - timedelta(minutes=10), + delivery_time=timedelta(minutes=4), + ) - if functional_test_provider_service_id and functional_test_provider_template_id: - current_provider = get_current_provider('sms') - slow_delivery_notifications = is_delivery_slow_for_provider( - provider=current_provider.identifier, - threshold=2, - sent_at=max(datetime.utcnow() - timedelta(minutes=10), current_provider.updated_at), - delivery_time=timedelta(minutes=4), - service_id=functional_test_provider_service_id, - template_id=functional_test_provider_template_id + if slow_delivery_notifications: + current_app.logger.warning( + 'Slow delivery notifications detected for provider {}'.format( + current_provider.identifier + ) ) - if slow_delivery_notifications: - current_app.logger.warning( - 'Slow delivery notifications detected for provider {}'.format( - current_provider.identifier - ) - ) - - dao_toggle_sms_provider(current_provider.identifier) + dao_toggle_sms_provider(current_provider.identifier) @notify_celery.task(name="delete-inbound-sms") diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 6692d674d..750053fa6 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -459,7 +459,6 @@ def is_delivery_slow_for_provider( Notification.key_type != KEY_TYPE_TEST ).group_by("slow").all() - print(count) counts = {c[0]: c[1] for c in count} total_notifications = 
sum(counts.values()) if total_notifications: diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index f62a98a60..979c2df5f 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -47,7 +47,6 @@ from app.dao.provider_details_dao import ( from app.exceptions import NotificationTechnicalFailureException from app.models import ( NotificationHistory, - Service, StatsTemplateUsageByMonth, JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, @@ -71,33 +70,21 @@ from tests.app.conftest import ( sample_job as create_sample_job, sample_notification_history as create_notification_history, sample_template as create_sample_template, - create_custom_template, datetime_in_past ) from tests.conftest import set_config_values -def _create_slow_delivery_notification(provider='mmg'): +def _create_slow_delivery_notification(template, provider='mmg'): now = datetime.utcnow() five_minutes_from_now = now + timedelta(minutes=5) - service = Service.query.get(current_app.config['FUNCTIONAL_TEST_PROVIDER_SERVICE_ID']) - if not service: - service = create_service( - service_id=current_app.config.get('FUNCTIONAL_TEST_PROVIDER_SERVICE_ID') - ) - - template = create_custom_template( - service=service, - user=service.users[0], - template_config_name='FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID', - template_type='sms' - ) create_notification( template=template, status='delivered', sent_by=provider, - updated_at=five_minutes_from_now + updated_at=five_minutes_from_now, + sent_at=now, ) @@ -129,8 +116,9 @@ def test_should_have_decorated_tasks_functions(): @pytest.fixture(scope='function') def prepare_current_provider(restore_provider_details): initial_provider = get_current_provider('sms') - initial_provider.updated_at = datetime.utcnow() - timedelta(minutes=30) dao_update_provider_details(initial_provider) + initial_provider.updated_at = datetime.utcnow() - timedelta(minutes=30) + db.session.commit() def 
test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): @@ -413,25 +401,6 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc ]) -def test_switch_current_sms_provider_on_slow_delivery_does_not_run_if_config_unset( - notify_api, - mocker -): - get_notifications_mock = mocker.patch( - 'app.celery.scheduled_tasks.is_delivery_slow_for_provider' - ) - toggle_sms_mock = mocker.patch('app.celery.scheduled_tasks.dao_toggle_sms_provider') - - with set_config_values(notify_api, { - 'FUNCTIONAL_TEST_PROVIDER_SERVICE_ID': None, - 'FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID': None - }): - switch_current_sms_provider_on_slow_delivery() - - assert get_notifications_mock.called is False - assert toggle_sms_mock.called is False - - def test_switch_providers_on_slow_delivery_runs_if_config_set( notify_api, mocker, @@ -451,96 +420,31 @@ def test_switch_providers_on_slow_delivery_runs_if_config_set( assert get_notifications_mock.called is True -def test_switch_providers_triggers_on_slow_notification_delivery( - notify_api, - mocker, - prepare_current_provider, - sample_user -): - mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user) - starting_provider = get_current_provider('sms') - - with set_config_values(notify_api, { - 'FUNCTIONAL_TEST_PROVIDER_SERVICE_ID': '7954469d-8c6d-43dc-b8f7-86be2d69f5f3', - 'FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID': '331a63e6-f1aa-4588-ad3f-96c268788ae7' - }): - _create_slow_delivery_notification(starting_provider.identifier) - _create_slow_delivery_notification(starting_provider.identifier) - switch_current_sms_provider_on_slow_delivery() - - new_provider = get_current_provider('sms') - assert new_provider.identifier != starting_provider.identifier - assert new_provider.priority < starting_provider.priority - - -def test_switch_providers_on_slow_delivery_does_not_switch_if_already_switched( - notify_api, - mocker, - prepare_current_provider, - 
sample_user -): - mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user) - starting_provider = get_current_provider('sms') - - with set_config_values(notify_api, { - 'FUNCTIONAL_TEST_PROVIDER_SERVICE_ID': '7954469d-8c6d-43dc-b8f7-86be2d69f5f3', - 'FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID': '331a63e6-f1aa-4588-ad3f-96c268788ae7' - }): - _create_slow_delivery_notification() - _create_slow_delivery_notification() - - switch_current_sms_provider_on_slow_delivery() - switch_current_sms_provider_on_slow_delivery() - - new_provider = get_current_provider('sms') - assert new_provider.identifier != starting_provider.identifier - assert new_provider.priority < starting_provider.priority - - -def test_switch_providers_on_slow_delivery_does_not_switch_based_on_older_notifications( +def test_switch_providers_on_slow_delivery_switches_once_then_does_not_switch_if_already_switched( notify_api, mocker, prepare_current_provider, sample_user, - + sample_template ): - """ - Assume we have three slow delivery notifications for the current provider x. This triggers - a switch to provider y. If we experience some slow delivery notifications on this provider, - we switch back to provider x. - - Provider x had three slow deliveries initially, but we do not want to trigger another switch - based on these as they are old. We only want to look for slow notifications after the point at - which we switched back to provider x. 
- """ mocker.patch('app.provider_details.switch_providers.get_user_by_id', return_value=sample_user) starting_provider = get_current_provider('sms') - with set_config_values(notify_api, { - 'FUNCTIONAL_TEST_PROVIDER_SERVICE_ID': '7954469d-8c6d-43dc-b8f7-86be2d69f5f3', - 'FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID': '331a63e6-f1aa-4588-ad3f-96c268788ae7' - }): - # Provider x -> y - _create_slow_delivery_notification(starting_provider.identifier) - _create_slow_delivery_notification(starting_provider.identifier) - _create_slow_delivery_notification(starting_provider.identifier) - switch_current_sms_provider_on_slow_delivery() + _create_slow_delivery_notification(sample_template) + _create_slow_delivery_notification(sample_template) - current_provider = get_current_provider('sms') - assert current_provider.identifier != starting_provider.identifier + switch_current_sms_provider_on_slow_delivery() - # Provider y -> x - _create_slow_delivery_notification(current_provider.identifier) - _create_slow_delivery_notification(current_provider.identifier) - switch_current_sms_provider_on_slow_delivery() + new_provider = get_current_provider('sms') + _create_slow_delivery_notification(sample_template, new_provider.identifier) + _create_slow_delivery_notification(sample_template, new_provider.identifier) + switch_current_sms_provider_on_slow_delivery() - new_provider = get_current_provider('sms') - assert new_provider.identifier != current_provider.identifier + final_provider = get_current_provider('sms') - # Expect to stay on provider x - switch_current_sms_provider_on_slow_delivery() - current_provider = get_current_provider('sms') - assert starting_provider.identifier == current_provider.identifier + assert new_provider.identifier != starting_provider.identifier + assert new_provider.priority < starting_provider.priority + assert final_provider.identifier == new_provider.identifier @freeze_time("2017-05-01 14:00:00") diff --git 
a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index 774670279..f7dfd981f 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -1179,9 +1179,6 @@ def test_get_total_sent_notifications_for_email_excludes_sms_counts( assert total_count == 2 -def test_is_delivery_slow_for_provider_not_slow_when_no_notifications(notify_db_session): - assert not is_delivery_slow_for_provider(datetime.utcnow(), "firetext", 0.1, timedelta(minutes=4)) - @pytest.mark.parametrize( "normal_sending,slow_sending,normal_delivered,slow_delivered,threshold,expected_result", [ @@ -1195,7 +1192,7 @@ def test_is_delivery_slow_for_provider_not_slow_when_no_notifications(notify_db_ ] ) @freeze_time("2018-12-04 12:00:00.000000") -def test_delivery_is_delivery_slow_for_provider( +def test_is_delivery_slow_for_provider( notify_db_session, sample_template, normal_sending, @@ -1230,9 +1227,9 @@ def test_delivery_is_delivery_slow_for_provider( for _ in range(slow_delivered): slow_notification(status='delivered') - assert is_delivery_slow_for_provider(datetime.utcnow(), "mmg", threshold, timedelta(minutes=4)) is expected_result + @pytest.mark.parametrize("options,expected_result", [ ({"status": NOTIFICATION_TEMPORARY_FAILURE, "sent_by": "mmg"}, False), ({"status": NOTIFICATION_DELIVERED, "sent_by": "firetext"}, False), From 6938600ab89c5c058a77e4411fd067daebd64685 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 5 Dec 2018 14:46:21 +0000 Subject: [PATCH 022/118] Switch providers on slow delivery only produces logs --- app/celery/scheduled_tasks.py | 4 ++-- tests/app/celery/test_scheduled_tasks.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 148749994..1c8d561c7 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -37,7 +37,7 @@ from 
app.dao.notifications_dao import ( ) from app.dao.provider_details_dao import ( get_current_provider, - dao_toggle_sms_provider + # dao_toggle_sms_provider ) from app.dao.service_callback_api_dao import get_service_delivery_status_callback_api_for_service from app.dao.services_dao import ( @@ -274,7 +274,7 @@ def switch_current_sms_provider_on_slow_delivery(): ) ) - dao_toggle_sms_provider(current_provider.identifier) + # dao_toggle_sms_provider(current_provider.identifier) @notify_celery.task(name="delete-inbound-sms") diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 979c2df5f..4b7e9f2ac 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -420,6 +420,7 @@ def test_switch_providers_on_slow_delivery_runs_if_config_set( assert get_notifications_mock.called is True +@pytest.mark.skip(reason="Not switching it on yet") def test_switch_providers_on_slow_delivery_switches_once_then_does_not_switch_if_already_switched( notify_api, mocker, From a265871a4212ac1ca5622af753600438a2ac06b6 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 6 Dec 2018 11:53:54 +0000 Subject: [PATCH 023/118] Serialize Notification now also returns sender email address sent_by_email_address field was added because sometimes two people at one institution have the same name and then email address, which is unique, is more useful. 
--- app/models.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/app/models.py b/app/models.py index 93a9bcd0c..2e48bcdb9 100644 --- a/app/models.py +++ b/app/models.py @@ -1413,6 +1413,12 @@ class Notification(db.Model): else: return None + def get_created_by_email_address(self): + if self.created_by: + return self.created_by.email_address + else: + return None + def serialize_for_csv(self): created_at_in_bst = convert_utc_to_bst(self.created_at) serialized = { @@ -1424,6 +1430,7 @@ class Notification(db.Model): "status": self.formatted_status, "created_at": time.strftime('%A %d %B %Y at %H:%M', created_at_in_bst.timetuple()), "created_by_name": self.get_created_by_name(), + "created_by_email_address": self.get_created_by_email_address(), } return serialized @@ -1454,6 +1461,7 @@ class Notification(db.Model): "subject": self.subject, "created_at": self.created_at.strftime(DATETIME_FORMAT), "created_by_name": self.get_created_by_name(), + "created_by_email_address": self.get_created_by_email_address(), "sent_at": self.sent_at.strftime(DATETIME_FORMAT) if self.sent_at else None, "completed_at": self.completed_at(), "scheduled_for": ( From 9ab6542678e7f01d49c5b604fd4a7232552ad862 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 6 Dec 2018 15:57:22 +0000 Subject: [PATCH 024/118] Change created_at format for Notification serialize_for_csv Change date formatting on serialize_for_csv so it is more machine-readable while still remaining human-readable --- app/models.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/app/models.py b/app/models.py index 2e48bcdb9..bc1d331f0 100644 --- a/app/models.py +++ b/app/models.py @@ -1,5 +1,4 @@ import itertools -import time import uuid import datetime from flask import url_for, current_app @@ -1428,7 +1427,7 @@ class Notification(db.Model): "template_type": self.template.template_type, "job_name": self.job.original_file_name if self.job else '', "status": self.formatted_status, - "created_at": 
time.strftime('%A %d %B %Y at %H:%M', created_at_in_bst.timetuple()), + "created_at": created_at_in_bst.strftime("%Y-%m-%d %H:%M:%S"), "created_by_name": self.get_created_by_name(), "created_by_email_address": self.get_created_by_email_address(), } @@ -1461,7 +1460,6 @@ class Notification(db.Model): "subject": self.subject, "created_at": self.created_at.strftime(DATETIME_FORMAT), "created_by_name": self.get_created_by_name(), - "created_by_email_address": self.get_created_by_email_address(), "sent_at": self.sent_at.strftime(DATETIME_FORMAT) if self.sent_at else None, "completed_at": self.completed_at(), "scheduled_for": ( From 9a76d6706e2d4e26f1f56546687726815d6ac794 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 6 Dec 2018 16:13:42 +0000 Subject: [PATCH 025/118] Update tests to match the new csv data --- tests/app/job/test_rest.py | 2 +- tests/app/test_model.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index dcf0cf5f8..2dfe16854 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -844,5 +844,5 @@ def test_get_all_notifications_for_job_returns_csv_format( assert len(resp['notifications']) == 1 notification = resp['notifications'][0] assert set(notification.keys()) == \ - set(['created_at', 'created_by_name', 'template_type', + set(['created_at', 'created_by_name', 'created_by_email_address', 'template_type', 'template_name', 'job_name', 'status', 'row_number', 'recipient']) diff --git a/tests/app/test_model.py b/tests/app/test_model.py index d7e51f7cc..afea955d9 100644 --- a/tests/app/test_model.py +++ b/tests/app/test_model.py @@ -146,7 +146,7 @@ def test_notification_for_csv_returns_bst_correctly(sample_template): notification = create_notification(sample_template) serialized = notification.serialize_for_csv() - assert serialized['created_at'] == 'Monday 27 March 2017 at 00:01' + assert serialized['created_at'] == '2017-03-27 00:01:53' def 
test_notification_personalisation_getter_returns_empty_dict_from_None(): From 474acc5bba5cf24177976d8883d848a370ce0031 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 10 Dec 2018 12:59:16 +0000 Subject: [PATCH 026/118] new chillmaid approved error messages these get shown on the front-end so make sure their content is good --- app/template_folder/rest.py | 7 ++----- tests/app/template_folder/test_template_folder_rest.py | 6 ++---- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/app/template_folder/rest.py b/app/template_folder/rest.py index f2f79c521..010bc1a66 100644 --- a/app/template_folder/rest.py +++ b/app/template_folder/rest.py @@ -141,11 +141,8 @@ def move_to_template_folder(service_id, target_template_folder_id=None): def _validate_folder_move(target_template_folder, target_template_folder_id, template_folder, template_folder_id): if str(target_template_folder_id) == str(template_folder_id): - msg = 'Could not move folder to itself' + msg = 'You cannot move a folder to itself' raise InvalidRequest(msg, status_code=400) if target_template_folder and template_folder.is_parent_of(target_template_folder): - msg = 'Could not move to folder: {} is an ancestor of target folder {}'.format( - template_folder_id, - target_template_folder_id - ) + msg = 'You cannot move a folder to one of its subfolders' raise InvalidRequest(msg, status_code=400) diff --git a/tests/app/template_folder/test_template_folder_rest.py b/tests/app/template_folder/test_template_folder_rest.py index bc9058aca..ff861f0de 100644 --- a/tests/app/template_folder/test_template_folder_rest.py +++ b/tests/app/template_folder/test_template_folder_rest.py @@ -325,9 +325,7 @@ def test_move_to_folder_rejects_if_it_would_cause_folder_loop(admin_request, sam }, _expected_status=400 ) - assert response['message'] == 'Could not move to folder: {} is an ancestor of target folder {}'.format( - f1.id, target_folder.id - ) + assert response['message'] == 'You cannot move a folder to one of 
its subfolders' def test_move_to_folder_itself_is_rejected(admin_request, sample_service): @@ -343,7 +341,7 @@ def test_move_to_folder_itself_is_rejected(admin_request, sample_service): }, _expected_status=400 ) - assert response['message'] == 'Could not move folder to itself' + assert response['message'] == 'You cannot move a folder to itself' def test_move_to_folder_skips_archived_templates(admin_request, sample_service): From 5b90fd6fb07301724da64f6f612302ceae0c1f9f Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 10 Dec 2018 16:27:59 +0000 Subject: [PATCH 027/118] Removed unused method --- app/dao/notifications_dao.py | 44 +++------------- .../notification_dao/test_notification_dao.py | 51 +------------------ 2 files changed, 8 insertions(+), 87 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 564ab6acf..f7b8168a9 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -7,26 +7,25 @@ from datetime import ( from boto.exception import BotoClientError from flask import current_app - +from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES from notifications_utils.recipients import ( validate_and_format_email_address, InvalidEmailError, try_validate_and_format_phone_number ) from notifications_utils.statsd_decorators import statsd -from werkzeug.datastructures import MultiDict +from notifications_utils.timezones import convert_utc_to_bst from sqlalchemy import (desc, func, or_, asc) from sqlalchemy.orm import joinedload -from sqlalchemy.sql.expression import case from sqlalchemy.sql import functions -from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES -from notifications_utils.timezones import convert_utc_to_bst +from sqlalchemy.sql.expression import case +from werkzeug.datastructures import MultiDict from app import db, create_uuid from app.aws.s3 import remove_s3_object, get_s3_bucket_objects -from app.letters.utils import 
LETTERS_PDF_FILE_LOCATION_STRUCTURE -from app.utils import midnight_n_days_ago, escape_special_characters +from app.dao.dao_utils import transactional from app.errors import InvalidRequest +from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE from app.models import ( Notification, NotificationHistory, @@ -47,9 +46,8 @@ from app.models import ( EMAIL_TYPE, ServiceDataRetention ) - -from app.dao.dao_utils import transactional from app.utils import get_london_midnight_in_utc +from app.utils import midnight_n_days_ago, escape_special_characters @statsd(namespace="dao") @@ -623,31 +621,3 @@ def guess_notification_type(search_term): return EMAIL_TYPE else: return SMS_TYPE - - -@statsd(namespace='dao') -def fetch_aggregate_stats_by_date_range_for_all_services(start_date, end_date): - start_date = get_london_midnight_in_utc(start_date) - end_date = get_london_midnight_in_utc(end_date + timedelta(days=1)) - table = NotificationHistory - - if start_date >= datetime.utcnow() - timedelta(days=7): - table = Notification - - query = db.session.query( - table.notification_type, - table.status, - table.key_type, - func.count(table.id).label('count') - ).filter( - table.created_at >= start_date, - table.created_at < end_date - ).group_by( - table.notification_type, - table.key_type, - table.status - ).order_by( - table.notification_type, - ) - - return query.all() diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index ad3a9aa41..68cce13ba 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timedelta, date +from datetime import datetime, timedelta from functools import partial import pytest @@ -33,7 +33,6 @@ from app.dao.notifications_dao import ( dao_get_notifications_by_references, dao_get_notification_history_by_reference, notifications_not_yet_sent, - 
fetch_aggregate_stats_by_date_range_for_all_services, ) from app.dao.services_dao import dao_update_service from app.models import ( @@ -1802,51 +1801,3 @@ def test_notifications_not_yet_sent_return_no_rows(sample_service, notification_ results = notifications_not_yet_sent(older_than, notification_type) assert len(results) == 0 - - -def test_fetch_aggregate_stats_by_date_range_for_all_services_returns_empty_list_when_no_stats(notify_db_session): - start_date = date(2018, 1, 1) - end_date = date(2018, 1, 5) - - result = fetch_aggregate_stats_by_date_range_for_all_services(start_date, end_date) - assert result == [] - - -@freeze_time('2018-01-08') -def test_fetch_aggregate_stats_by_date_range_for_all_services_groups_stats( - sample_template, - sample_email_template, - sample_letter_template, -): - today = datetime.now().date() - - for i in range(3): - create_notification(template=sample_email_template, status='permanent-failure', - created_at=today) - - create_notification(template=sample_email_template, status='sent', created_at=today) - create_notification(template=sample_template, status='sent', created_at=today) - create_notification(template=sample_template, status='sent', created_at=today, - key_type=KEY_TYPE_TEAM) - create_notification(template=sample_letter_template, status='virus-scan-failed', - created_at=today) - - result = fetch_aggregate_stats_by_date_range_for_all_services(today, today) - - assert len(result) == 5 - assert ('email', 'permanent-failure', 'normal', 3) in result - assert ('email', 'sent', 'normal', 1) in result - assert ('sms', 'sent', 'normal', 1) in result - assert ('sms', 'sent', 'team', 1) in result - assert ('letter', 'virus-scan-failed', 'normal', 1) in result - - -def test_fetch_aggregate_stats_by_date_range_for_all_services_uses_bst_date(sample_template): - query_day = datetime(2018, 6, 5).date() - create_notification(sample_template, status='sent', created_at=datetime(2018, 6, 4, 23, 59)) - create_notification(sample_template, 
status='created', created_at=datetime(2018, 6, 5, 23, 00)) - - result = fetch_aggregate_stats_by_date_range_for_all_services(query_day, query_day) - - assert len(result) == 1 - assert result[0].status == 'sent' From 5ed7564066a12f4cb7c7d267cb908b390593c4f8 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 10 Dec 2018 16:11:31 +0000 Subject: [PATCH 028/118] Remove unused config variables We don't use FUNCTIONAL_TEST_PROVIDER_SERVICE_ID or UNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID anymore so we can safely delete them from config and tests. --- app/config.py | 5 ----- tests/app/celery/test_scheduled_tasks.py | 20 -------------------- 2 files changed, 25 deletions(-) diff --git a/app/config.py b/app/config.py index c38bb611f..f27222b67 100644 --- a/app/config.py +++ b/app/config.py @@ -291,9 +291,6 @@ class Config(object): SIMULATED_SMS_NUMBERS = ('+447700900000', '+447700900111', '+447700900222') - FUNCTIONAL_TEST_PROVIDER_SERVICE_ID = None - FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID = None - DVLA_BUCKETS = { 'job': '{}-dvla-file-per-job'.format(os.getenv('NOTIFY_ENVIRONMENT')), 'notification': '{}-dvla-letter-api-files'.format(os.getenv('NOTIFY_ENVIRONMENT')) @@ -439,8 +436,6 @@ class Live(Config): INVALID_PDF_BUCKET_NAME = 'production-letters-invalid-pdf' STATSD_ENABLED = True FROM_NUMBER = 'GOVUK' - FUNCTIONAL_TEST_PROVIDER_SERVICE_ID = '6c1d81bb-dae2-4ee9-80b0-89a4aae9f649' - FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID = 'ba9e1789-a804-40b8-871f-cc60d4c1286f' PERFORMANCE_PLATFORM_ENABLED = True API_RATE_LIMIT_ENABLED = True CHECK_PROXY_HEADER = True diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 4b7e9f2ac..a0c621529 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -72,7 +72,6 @@ from tests.app.conftest import ( sample_template as create_sample_template, datetime_in_past ) -from tests.conftest import set_config_values def 
_create_slow_delivery_notification(template, provider='mmg'): @@ -401,25 +400,6 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc ]) -def test_switch_providers_on_slow_delivery_runs_if_config_set( - notify_api, - mocker, - prepare_current_provider -): - get_notifications_mock = mocker.patch( - 'app.celery.scheduled_tasks.is_delivery_slow_for_provider', - return_value=[] - ) - - with set_config_values(notify_api, { - 'FUNCTIONAL_TEST_PROVIDER_SERVICE_ID': '7954469d-8c6d-43dc-b8f7-86be2d69f5f3', - 'FUNCTIONAL_TEST_PROVIDER_SMS_TEMPLATE_ID': '331a63e6-f1aa-4588-ad3f-96c268788ae7' - }): - switch_current_sms_provider_on_slow_delivery() - - assert get_notifications_mock.called is True - - @pytest.mark.skip(reason="Not switching it on yet") def test_switch_providers_on_slow_delivery_switches_once_then_does_not_switch_if_already_switched( notify_api, From dfc12cc3540e000cc8539e9d1279b9e991f110d3 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 11 Dec 2018 14:57:10 +0000 Subject: [PATCH 029/118] Remove the join to TemplateHistory. We are adding an index to Notifications to optimize the get_notifications_for_service. We need to build the index concurrently which can not be run inside a transaction block so the index will need to be run on the db directly. 
CREATE INDEX CONCURRENTLY ix_notifications_service_created_at ON notifications (service_id, created_at); DROP INDEX CONCURRENTLY ix_notifications_service_created_at --- app/dao/notifications_dao.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f5b733033..654cdf691 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -32,7 +32,6 @@ from app.models import ( NotificationHistory, ScheduledNotification, Template, - TemplateHistory, KEY_TYPE_TEST, LETTER_TYPE, NOTIFICATION_CREATED, @@ -312,7 +311,7 @@ def _filter_query(query, filter_dict=None): # filter by template template_types = multidict.getlist('template_type') if template_types: - query = query.join(TemplateHistory).filter(TemplateHistory.template_type.in_(template_types)) + query = query.filter(Notification.notification_type.in_(template_types)) return query From abe01c0bc057c3764a450d3b368bec507e1ebcf5 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 11 Dec 2018 15:14:08 +0000 Subject: [PATCH 030/118] Revert "Switch providers on slow delivery only produces logs" This reverts commit 6938600ab89c5c058a77e4411fd067daebd64685. 
--- app/celery/scheduled_tasks.py | 4 ++-- tests/app/celery/test_scheduled_tasks.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 1c8d561c7..148749994 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -37,7 +37,7 @@ from app.dao.notifications_dao import ( ) from app.dao.provider_details_dao import ( get_current_provider, - # dao_toggle_sms_provider + dao_toggle_sms_provider ) from app.dao.service_callback_api_dao import get_service_delivery_status_callback_api_for_service from app.dao.services_dao import ( @@ -274,7 +274,7 @@ def switch_current_sms_provider_on_slow_delivery(): ) ) - # dao_toggle_sms_provider(current_provider.identifier) + dao_toggle_sms_provider(current_provider.identifier) @notify_celery.task(name="delete-inbound-sms") diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index a0c621529..b2fee242a 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -400,7 +400,6 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc ]) -@pytest.mark.skip(reason="Not switching it on yet") def test_switch_providers_on_slow_delivery_switches_once_then_does_not_switch_if_already_switched( notify_api, mocker, From af185adf4c96f50dd7b11a547680593db7610938 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 11 Dec 2018 15:28:38 +0000 Subject: [PATCH 031/118] Log the ratio of slow notifications --- app/celery/scheduled_tasks.py | 2 +- app/dao/notifications_dao.py | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 148749994..aff2ba452 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -258,7 +258,7 @@ def switch_current_sms_provider_on_slow_delivery(): """ current_provider = get_current_provider('sms') if 
current_provider.updated_at > datetime.utcnow() - timedelta(minutes=10): - current_app.logger.info("Slow delivery provider switched less than 10 minutes ago.") + current_app.logger.info("Slow delivery notifications provider switched less than 10 minutes ago.") return slow_delivery_notifications = is_delivery_slow_for_provider( provider=current_provider.identifier, diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f5b733033..93592dfc2 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -467,8 +467,13 @@ def is_delivery_slow_for_provider( counts = {c[0]: c[1] for c in count} total_notifications = sum(counts.values()) + slow_notifications = counts.get(True, 0) + if total_notifications: - return counts.get(True, 0) / total_notifications >= threshold + current_app.logger.info("Slow delivery notifications count: {} out of {}. Ratio {}".format( + slow_notifications, total_notifications, slow_notifications / total_notifications + )) + return slow_notifications / total_notifications >= threshold else: return False From 21a67556b83b7905134439d55afe33c35e4b3422 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Wed, 12 Dec 2018 12:14:49 +0000 Subject: [PATCH 032/118] Add an index on notifications for (service_id, created_at) to improve the performance of the notification queries. We've already performed this update on production since you need to create the index concurrently, which is not allowed from the alembic script. For that reason we are checking if the index exists. 
--- .../versions/0246_notifications_index.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 migrations/versions/0246_notifications_index.py diff --git a/migrations/versions/0246_notifications_index.py b/migrations/versions/0246_notifications_index.py new file mode 100644 index 000000000..37d8fd772 --- /dev/null +++ b/migrations/versions/0246_notifications_index.py @@ -0,0 +1,26 @@ +""" + +Revision ID: 0246_notifications_index +Revises: 0245_archived_flag_jobs +Create Date: 2018-12-12 12:00:09.770775 + +""" +from alembic import op + +revision = '0246_notifications_index' +down_revision = '0245_archived_flag_jobs' + + +def upgrade(): + conn = op.get_bind() + conn.execute( + "CREATE INDEX IF NOT EXISTS ix_notifications_service_created_at ON notifications (service_id, created_at)" + ) + + +def downgrade(): + conn = op.get_bind() + conn.execute( + "DROP INDEX IF EXISTS ix_notifications_service_created_at" + ) + From bf62d3ad5f7fb9e9e47a91a93dc656b1227de20f Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 12 Dec 2018 12:57:04 +0000 Subject: [PATCH 033/118] infer template/service from job for notification/ft_noti_status in db.py --- tests/app/conftest.py | 2 ++ tests/app/db.py | 8 +++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index ef7317acb..15f76846f 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -1101,7 +1101,9 @@ def restore_provider_details(notify_db, notify_db_session): @pytest.fixture def admin_request(client): + class AdminRequest: + app = client.application @staticmethod def get(endpoint, _expected_status=200, **endpoint_kwargs): diff --git a/tests/app/db.py b/tests/app/db.py index 36e29953e..94d6ac287 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -164,7 +164,7 @@ def create_template( def create_notification( - template, + template=None, job=None, job_row_number=None, to_field=None, @@ -190,6 +190,10 @@ def create_notification( 
created_by_id=None, postage=None ): + assert job or template + if job: + template = job.template + if created_at is None: created_at = datetime.utcnow() @@ -557,6 +561,8 @@ def create_ft_notification_status( notification_status='delivered', count=1 ): + if job: + template = job.template if template: service = template.service notification_type = template.template_type From 63b3a3849f4b3b603a6d73bf762b4e86094b1372 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 11 Dec 2018 17:40:11 +0000 Subject: [PATCH 034/118] move job dao tests to use db.py instead of conftest directly --- tests/app/dao/test_jobs_dao.py | 201 ++++++++------------------------- 1 file changed, 44 insertions(+), 157 deletions(-) diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 7d1d88346..535808d3b 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -19,106 +19,14 @@ from app.models import ( Job, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE ) -from tests.app.conftest import sample_job as create_job -from tests.app.conftest import sample_notification as create_notification -from tests.app.conftest import sample_service as create_service -from tests.app.conftest import sample_template as create_template -from tests.app.db import ( - create_user -) +from tests.app.db import create_job, create_service, create_template def test_should_have_decorated_notifications_dao_functions(): assert dao_get_notification_outcomes_for_job.__wrapped__.__name__ == 'dao_get_notification_outcomes_for_job' # noqa -def test_should_get_all_statuses_for_notifications_associated_with_job( - notify_db, - notify_db_session, - sample_service, - sample_job -): - notification = partial(create_notification, notify_db, notify_db_session, service=sample_service, job=sample_job) - notification(status='created') - notification(status='sending') - notification(status='delivered') - notification(status='pending') - notification(status='failed') - 
notification(status='technical-failure') - notification(status='temporary-failure') - notification(status='permanent-failure') - notification(status='sent') - - results = dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id) - assert set([(row.count, row.status) for row in results]) == set([ - (1, 'created'), - (1, 'sending'), - (1, 'delivered'), - (1, 'pending'), - (1, 'failed'), - (1, 'technical-failure'), - (1, 'temporary-failure'), - (1, 'permanent-failure'), - (1, 'sent') - ]) - - -def test_should_count_of_statuses_for_notifications_associated_with_job( - notify_db, - notify_db_session, - sample_service, - sample_job -): - notification = partial(create_notification, notify_db, notify_db_session, service=sample_service, job=sample_job) - notification(status='created') - notification(status='created') - - notification(status='sending') - notification(status='sending') - notification(status='sending') - notification(status='sending') - notification(status='delivered') - notification(status='delivered') - - results = dao_get_notification_outcomes_for_job(sample_service.id, sample_job.id) - assert set([(row.count, row.status) for row in results]) == set([ - (2, 'created'), - (4, 'sending'), - (2, 'delivered') - ]) - - -def test_should_return_zero_length_array_if_no_notifications_for_job(sample_service, sample_job): - assert len(dao_get_notification_outcomes_for_job(sample_job.id, sample_service.id)) == 0 - - -def test_should_return_notifications_only_for_this_job(notify_db, notify_db_session, sample_service): - job_1 = create_job(notify_db, notify_db_session, service=sample_service) - job_2 = create_job(notify_db, notify_db_session, service=sample_service) - - create_notification(notify_db, notify_db_session, service=sample_service, job=job_1, status='created') - create_notification(notify_db, notify_db_session, service=sample_service, job=job_2, status='created') - - results = dao_get_notification_outcomes_for_job(sample_service.id, job_1.id) - 
assert [(row.count, row.status) for row in results] == [ - (1, 'created') - ] - - -def test_should_return_notifications_only_for_this_service(notify_db, notify_db_session): - service_1 = create_service(notify_db, notify_db_session, service_name="one", email_from="one") - service_2 = create_service(notify_db, notify_db_session, service_name="two", email_from="two") - - job_1 = create_job(notify_db, notify_db_session, service=service_1) - job_2 = create_job(notify_db, notify_db_session, service=service_2) - - create_notification(notify_db, notify_db_session, service=service_1, job=job_1, status='created') - create_notification(notify_db, notify_db_session, service=service_2, job=job_2, status='created') - - assert len(dao_get_notification_outcomes_for_job(service_1.id, job_2.id)) == 0 - - -def test_create_job(sample_template): +def test_create_sample_job(sample_template): assert Job.query.count() == 0 job_id = uuid.uuid4() @@ -147,14 +55,12 @@ def test_get_job_by_id(sample_job): assert sample_job == job_from_db -def test_get_jobs_for_service(notify_db, notify_db_session, sample_template): - one_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template) +def test_get_jobs_for_service(sample_template): + one_job = create_job(sample_template) - other_user = create_user(email="test@digital.cabinet-office.gov.uk") - other_service = create_service(notify_db, notify_db_session, user=other_user, service_name="other service", - email_from='other.service') - other_template = create_template(notify_db, notify_db_session, service=other_service) - other_job = create_job(notify_db, notify_db_session, service=other_service, template=other_template) + other_service = create_service(service_name="other service") + other_template = create_template(service=other_service) + other_job = create_job(other_template) one_job_from_db = dao_get_jobs_by_service_id(one_job.service_id).items other_job_from_db = dao_get_jobs_by_service_id(other_job.service_id).items 
@@ -168,10 +74,9 @@ def test_get_jobs_for_service(notify_db, notify_db_session, sample_template): assert one_job_from_db != other_job_from_db -def test_get_jobs_for_service_with_limit_days_param(notify_db, notify_db_session, sample_template): - one_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template) - old_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template, - created_at=datetime.now() - timedelta(days=8)) +def test_get_jobs_for_service_with_limit_days_param(sample_template): + one_job = create_job(sample_template) + old_job = create_job(sample_template, created_at=datetime.now() - timedelta(days=8)) jobs = dao_get_jobs_by_service_id(one_job.service_id).items @@ -185,34 +90,27 @@ def test_get_jobs_for_service_with_limit_days_param(notify_db, notify_db_session assert old_job not in jobs_limit_days -def test_get_jobs_for_service_with_limit_days_edge_case(notify_db, notify_db_session, sample_template): - one_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template) - job_two = create_job(notify_db, notify_db_session, sample_template.service, sample_template, - created_at=(datetime.now() - timedelta(days=7)).date()) - one_second_after_midnight = datetime.combine((datetime.now() - timedelta(days=7)).date(), - datetime.strptime("000001", "%H%M%S").time()) - just_after_midnight_job = create_job(notify_db, notify_db_session, sample_template.service, sample_template, - created_at=one_second_after_midnight) - job_eight_days_old = create_job(notify_db, notify_db_session, sample_template.service, sample_template, - created_at=datetime.now() - timedelta(days=8)) +@freeze_time('2017-06-10') +def test_get_jobs_for_service_with_limit_days_edge_case(sample_template): + one_job = create_job(sample_template) + just_after_midnight_job = create_job(sample_template, created_at=datetime(2017, 6, 2, 23, 0, 1)) + just_before_midnight_job = create_job(sample_template, 
created_at=datetime(2017, 6, 2, 22, 59, 0)) jobs_limit_days = dao_get_jobs_by_service_id(one_job.service_id, limit_days=7).items - assert len(jobs_limit_days) == 3 + assert len(jobs_limit_days) == 2 assert one_job in jobs_limit_days - assert job_two in jobs_limit_days assert just_after_midnight_job in jobs_limit_days - assert job_eight_days_old not in jobs_limit_days + assert just_before_midnight_job not in jobs_limit_days def test_get_jobs_for_service_in_processed_at_then_created_at_order(notify_db, notify_db_session, sample_template): - _create_job = partial(create_job, notify_db, notify_db_session, sample_template.service, sample_template) from_hour = partial(datetime, 2001, 1, 1) created_jobs = [ - _create_job(created_at=from_hour(2), processing_started=None), - _create_job(created_at=from_hour(1), processing_started=None), - _create_job(created_at=from_hour(1), processing_started=from_hour(4)), - _create_job(created_at=from_hour(2), processing_started=from_hour(3)), + create_job(sample_template, created_at=from_hour(2), processing_started=None), + create_job(sample_template, created_at=from_hour(1), processing_started=None), + create_job(sample_template, created_at=from_hour(1), processing_started=from_hour(4)), + create_job(sample_template, created_at=from_hour(2), processing_started=from_hour(3)), ] jobs = dao_get_jobs_by_service_id(sample_template.service.id).items @@ -235,21 +133,20 @@ def test_update_job(sample_job): assert job_from_db.job_status == 'in progress' -def test_set_scheduled_jobs_to_pending_gets_all_jobs_in_scheduled_state_before_now(notify_db, notify_db_session): +def test_set_scheduled_jobs_to_pending_gets_all_jobs_in_scheduled_state_before_now(sample_template): one_minute_ago = datetime.utcnow() - timedelta(minutes=1) one_hour_ago = datetime.utcnow() - timedelta(minutes=60) - job_new = create_job(notify_db, notify_db_session, scheduled_for=one_minute_ago, job_status='scheduled') - job_old = create_job(notify_db, notify_db_session, 
scheduled_for=one_hour_ago, job_status='scheduled') + job_new = create_job(sample_template, scheduled_for=one_minute_ago, job_status='scheduled') + job_old = create_job(sample_template, scheduled_for=one_hour_ago, job_status='scheduled') jobs = dao_set_scheduled_jobs_to_pending() assert len(jobs) == 2 assert jobs[0].id == job_old.id assert jobs[1].id == job_new.id -def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_not_scheduled(notify_db, notify_db_session): +def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_not_scheduled(sample_template, sample_job): one_minute_ago = datetime.utcnow() - timedelta(minutes=1) - create_job(notify_db, notify_db_session) - job_scheduled = create_job(notify_db, notify_db_session, scheduled_for=one_minute_ago, job_status='scheduled') + job_scheduled = create_job(sample_template, scheduled_for=one_minute_ago, job_status='scheduled') jobs = dao_set_scheduled_jobs_to_pending() assert len(jobs) == 1 assert jobs[0].id == job_scheduled.id @@ -260,11 +157,11 @@ def test_set_scheduled_jobs_to_pending_gets_ignores_jobs_scheduled_in_the_future assert len(jobs) == 0 -def test_set_scheduled_jobs_to_pending_updates_rows(notify_db, notify_db_session): +def test_set_scheduled_jobs_to_pending_updates_rows(sample_template): one_minute_ago = datetime.utcnow() - timedelta(minutes=1) one_hour_ago = datetime.utcnow() - timedelta(minutes=60) - create_job(notify_db, notify_db_session, scheduled_for=one_minute_ago, job_status='scheduled') - create_job(notify_db, notify_db_session, scheduled_for=one_hour_ago, job_status='scheduled') + create_job(sample_template, scheduled_for=one_minute_ago, job_status='scheduled') + create_job(sample_template, scheduled_for=one_hour_ago, job_status='scheduled') jobs = dao_set_scheduled_jobs_to_pending() assert len(jobs) == 2 assert jobs[0].job_status == 'pending' @@ -277,7 +174,7 @@ def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job): @freeze_time('2016-10-31 10:00:00') -def 
test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_template): +def test_should_get_jobs_seven_days_old(sample_template): """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ @@ -289,12 +186,11 @@ def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_tem nine_days_ago = eight_days_ago - timedelta(days=2) nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - job = partial(create_job, notify_db, notify_db_session) - job(created_at=seven_days_ago) - job(created_at=within_seven_days) - job_to_delete = job(created_at=eight_days_ago) - job(created_at=nine_days_ago, archived=True) - job(created_at=nine_days_one_second_ago, archived=True) + create_job(sample_template, created_at=seven_days_ago) + create_job(sample_template, created_at=within_seven_days) + job_to_delete = create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=nine_days_ago, archived=True) + create_job(sample_template, created_at=nine_days_one_second_ago, archived=True) jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type]) @@ -306,7 +202,7 @@ def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_ with freeze_time('2015-01-01T00:00:00') as the_time: for _ in range(10): the_time.tick(timedelta(hours=1)) - create_job(notify_db, notify_db_session, sample_service, sample_template) + create_job(sample_template) res = dao_get_jobs_by_service_id(sample_service.id, page=1, page_size=2) @@ -328,19 +224,11 @@ def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_ 'Report', ]) def test_get_jobs_for_service_doesnt_return_test_messages( - notify_db, - notify_db_session, sample_template, sample_job, file_name, ): - create_job( - notify_db, - notify_db_session, - sample_template.service, - sample_template, - original_file_name=file_name, - ) + create_job(sample_template, 
original_file_name=file_name,) jobs = dao_get_jobs_by_service_id(sample_job.service_id).items @@ -348,16 +236,15 @@ def test_get_jobs_for_service_doesnt_return_test_messages( @freeze_time('2016-10-31 10:00:00') -def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_session): +def test_should_get_jobs_seven_days_old_filters_type(sample_service): eight_days_ago = datetime.utcnow() - timedelta(days=8) - letter_template = create_template(notify_db, notify_db_session, template_type=LETTER_TYPE) - sms_template = create_template(notify_db, notify_db_session, template_type=SMS_TYPE) - email_template = create_template(notify_db, notify_db_session, template_type=EMAIL_TYPE) + letter_template = create_template(sample_service, template_type=LETTER_TYPE) + sms_template = create_template(sample_service, template_type=SMS_TYPE) + email_template = create_template(sample_service, template_type=EMAIL_TYPE) - job = partial(create_job, notify_db, notify_db_session, created_at=eight_days_ago) - job_to_remain = job(template=letter_template) - job(template=sms_template) - job(template=email_template) + job_to_remain = create_job(letter_template, created_at=eight_days_ago) + create_job(sms_template, created_at=eight_days_ago) + create_job(email_template, created_at=eight_days_ago) jobs = dao_get_jobs_older_than_data_retention( notification_types=[EMAIL_TYPE, SMS_TYPE] From e555a7595b97f368747810857b3d34c68af820ca Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 12 Dec 2018 12:49:52 +0000 Subject: [PATCH 035/118] move job rest tests to use db.py instead of conftest directly --- tests/app/job/test_rest.py | 392 +++++++++++-------------------------- 1 file changed, 118 insertions(+), 274 deletions(-) diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 2dfe16854..e1797233a 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -1,21 +1,18 @@ import json import uuid from datetime import datetime, timedelta -from functools 
import partial from freezegun import freeze_time import pytest import pytz + import app.celery.tasks +from app.dao.templates_dao import dao_update_template +from app.models import JOB_STATUS_TYPES, JOB_STATUS_PENDING from tests import create_authorization_header from tests.conftest import set_config -from tests.app.conftest import ( - sample_job as create_job, - sample_notification as create_notification -) -from app.dao.templates_dao import dao_update_template -from app.models import NOTIFICATION_STATUS_TYPES, JOB_STATUS_TYPES, JOB_STATUS_PENDING +from tests.app.db import create_job, create_notification def test_get_job_with_invalid_service_id_returns404(client, sample_service): @@ -444,54 +441,26 @@ def test_create_job_returns_400_if_archived_template(client, sample_template, mo assert 'Template has been deleted' in resp_json['message']['template'] -def _setup_jobs(notify_db, notify_db_session, template, number_of_jobs=5): +def _setup_jobs(template, number_of_jobs=5): for i in range(number_of_jobs): - create_job( - notify_db, - notify_db_session, - service=template.service, - template=template) + create_job(template=template) -def test_get_all_notifications_for_job_in_order_of_job_number( - client, notify_db, notify_db_session, sample_service -): - main_job = create_job(notify_db, notify_db_session, service=sample_service) - another_job = create_job(notify_db, notify_db_session, service=sample_service) +def test_get_all_notifications_for_job_in_order_of_job_number(admin_request, sample_template): + main_job = create_job(sample_template) + another_job = create_job(sample_template) - notification_1 = create_notification( - notify_db, - notify_db_session, - job=main_job, - to_field="1", - created_at=datetime.utcnow(), - job_row_number=1 + notification_1 = create_notification(job=main_job, to_field="1", job_row_number=1) + notification_2 = create_notification(job=main_job, to_field="2", job_row_number=2) + notification_3 = create_notification(job=main_job, 
to_field="3", job_row_number=3) + create_notification(job=another_job) + + resp = admin_request.get( + 'job.get_all_notifications_for_service_job', + service_id=main_job.service_id, + job_id=main_job.id ) - notification_2 = create_notification( - notify_db, - notify_db_session, - job=main_job, - to_field="2", - created_at=datetime.utcnow(), - job_row_number=2 - ) - notification_3 = create_notification( - notify_db, - notify_db_session, - job=main_job, - to_field="3", - created_at=datetime.utcnow(), - job_row_number=3 - ) - create_notification(notify_db, notify_db_session, job=another_job) - auth_header = create_authorization_header() - - response = client.get( - path='/service/{}/job/{}/notifications'.format(sample_service.id, main_job.id), - headers=[auth_header]) - - resp = json.loads(response.get_data(as_text=True)) assert len(resp['notifications']) == 3 assert resp['notifications'][0]['to'] == notification_1.to assert resp['notifications'][0]['job_row_number'] == notification_1.job_row_number @@ -499,133 +468,76 @@ def test_get_all_notifications_for_job_in_order_of_job_number( assert resp['notifications'][1]['job_row_number'] == notification_2.job_row_number assert resp['notifications'][2]['to'] == notification_3.to assert resp['notifications'][2]['job_row_number'] == notification_3.job_row_number - assert response.status_code == 200 @pytest.mark.parametrize( "expected_notification_count, status_args", [ - (1, '?status={}'.format(NOTIFICATION_STATUS_TYPES[0])), - (0, '?status={}'.format(NOTIFICATION_STATUS_TYPES[1])), - (1, '?status={}&status={}&status={}'.format(*NOTIFICATION_STATUS_TYPES[0:3])), - (0, '?status={}&status={}&status={}'.format(*NOTIFICATION_STATUS_TYPES[3:6])), + (1, ['created']), + (0, ['sending']), + (1, ['created', 'sending']), + (0, ['sending', 'delivered']), ] ) def test_get_all_notifications_for_job_filtered_by_status( - client, - notify_db, - notify_db_session, - sample_service, + admin_request, + sample_job, expected_notification_count, 
status_args ): - job = create_job(notify_db, notify_db_session, service=sample_service) + create_notification(job=sample_job, to_field="1", status='created') - create_notification( - notify_db, - notify_db_session, - job=job, - to_field="1", - created_at=datetime.utcnow(), - status=NOTIFICATION_STATUS_TYPES[0], - job_row_number=1 + resp = admin_request.get( + 'job.get_all_notifications_for_service_job', + service_id=sample_job.service_id, + job_id=sample_job.id, + status=status_args ) - - response = client.get( - path='/service/{}/job/{}/notifications{}'.format(sample_service.id, job.id, status_args), - headers=[create_authorization_header()] - ) - resp = json.loads(response.get_data(as_text=True)) assert len(resp['notifications']) == expected_notification_count - assert response.status_code == 200 def test_get_all_notifications_for_job_returns_correct_format( - client, + admin_request, sample_notification_with_job ): service_id = sample_notification_with_job.service_id job_id = sample_notification_with_job.job_id - response = client.get( - path='/service/{}/job/{}/notifications'.format(service_id, job_id), - headers=[create_authorization_header()] - ) - assert response.status_code == 200 - resp = json.loads(response.get_data(as_text=True)) + + resp = admin_request.get('job.get_all_notifications_for_service_job', service_id=service_id, job_id=job_id) + assert len(resp['notifications']) == 1 assert resp['notifications'][0]['id'] == str(sample_notification_with_job.id) assert resp['notifications'][0]['status'] == sample_notification_with_job.status -def test_get_job_by_id(notify_api, sample_job): +def test_get_job_by_id(admin_request, sample_job): job_id = str(sample_job.id) service_id = sample_job.service.id - with notify_api.test_request_context(): - with notify_api.test_client() as client: - path = '/service/{}/job/{}'.format(service_id, job_id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert 
response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json['data']['id'] == job_id - assert resp_json['data']['statistics'] == [] - assert resp_json['data']['created_by']['name'] == 'Test User' + resp_json = admin_request.get('job.get_job_by_service_and_job_id', service_id=service_id, job_id=job_id) -def test_get_job_by_id_should_return_statistics(client, notify_db, notify_db_session, notify_api, sample_job): - job_id = str(sample_job.id) - service_id = sample_job.service.id - partial_notification = partial( - create_notification, notify_db, notify_db_session, service=sample_job.service, job=sample_job - ) - partial_notification(status='created') - partial_notification(status='sending') - partial_notification(status='delivered') - partial_notification(status='pending') - partial_notification(status='failed') - partial_notification(status='technical-failure') # noqa - partial_notification(status='temporary-failure') # noqa - partial_notification(status='permanent-failure') # noqa - - path = '/service/{}/job/{}'.format(service_id, job_id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) assert resp_json['data']['id'] == job_id - assert {'status': 'created', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'sending', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'delivered', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'pending', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'failed', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'technical-failure', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'temporary-failure', 'count': 1} in resp_json['data']['statistics'] - assert {'status': 'permanent-failure', 'count': 1} in resp_json['data']['statistics'] + assert 
resp_json['data']['statistics'] == [] assert resp_json['data']['created_by']['name'] == 'Test User' -def test_get_job_by_id_should_return_summed_statistics(client, notify_db, notify_db_session, notify_api, sample_job): +def test_get_job_by_id_should_return_summed_statistics(admin_request, sample_job): job_id = str(sample_job.id) service_id = sample_job.service.id - partial_notification = partial( - create_notification, notify_db, notify_db_session, service=sample_job.service, job=sample_job - ) - partial_notification(status='created') - partial_notification(status='created') - partial_notification(status='created') - partial_notification(status='sending') - partial_notification(status='failed') - partial_notification(status='failed') - partial_notification(status='failed') - partial_notification(status='technical-failure') - partial_notification(status='temporary-failure') - partial_notification(status='temporary-failure') - path = '/service/{}/job/{}'.format(service_id, job_id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) + create_notification(job=sample_job, status='created') + create_notification(job=sample_job, status='created') + create_notification(job=sample_job, status='created') + create_notification(job=sample_job, status='sending') + create_notification(job=sample_job, status='failed') + create_notification(job=sample_job, status='failed') + create_notification(job=sample_job, status='failed') + create_notification(job=sample_job, status='technical-failure') + create_notification(job=sample_job, status='temporary-failure') + create_notification(job=sample_job, status='temporary-failure') + + resp_json = admin_request.get('job.get_job_by_service_and_job_id', service_id=service_id, job_id=job_id) + assert resp_json['data']['id'] == job_id assert {'status': 'created', 'count': 3} in 
resp_json['data']['statistics'] assert {'status': 'sending', 'count': 1} in resp_json['data']['statistics'] @@ -635,32 +547,23 @@ def test_get_job_by_id_should_return_summed_statistics(client, notify_db, notify assert resp_json['data']['created_by']['name'] == 'Test User' -def test_get_jobs(client, notify_db, notify_db_session, sample_template): - _setup_jobs(notify_db, notify_db_session, sample_template) +def test_get_jobs(admin_request, sample_template): + _setup_jobs(sample_template) service_id = sample_template.service.id - path = '/service/{}/job'.format(service_id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) + resp_json = admin_request.get('job.get_jobs_by_service', service_id=service_id) assert len(resp_json['data']) == 5 -def test_get_jobs_with_limit_days(admin_request, notify_db, notify_db_session, sample_template): +def test_get_jobs_with_limit_days(admin_request, sample_template): for time in [ 'Sunday 1st July 2018 22:59', 'Sunday 2nd July 2018 23:00', # beginning of monday morning 'Monday 3rd July 2018 12:00' ]: with freeze_time(time): - create_job( - notify_db, - notify_db_session, - service=sample_template.service, - template=sample_template, - ) + create_job(template=sample_template) with freeze_time('Monday 9th July 2018 12:00'): resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id, limit_days=7) @@ -668,51 +571,35 @@ def test_get_jobs_with_limit_days(admin_request, notify_db, notify_db_session, s assert len(resp_json['data']) == 2 -def test_get_jobs_should_return_statistics(client, notify_db, notify_db_session, notify_api, sample_service): +def test_get_jobs_should_return_statistics(admin_request, sample_template): now = datetime.utcnow() earlier = datetime.utcnow() - timedelta(days=1) - job_1 = create_job(notify_db, notify_db_session, service=sample_service, 
created_at=earlier) - job_2 = create_job(notify_db, notify_db_session, service=sample_service, created_at=now) - partial_notification = partial(create_notification, notify_db, notify_db_session, service=sample_service) - partial_notification(job=job_1, status='created') - partial_notification(job=job_1, status='created') - partial_notification(job=job_1, status='created') - partial_notification(job=job_2, status='sending') - partial_notification(job=job_2, status='sending') - partial_notification(job=job_2, status='sending') + job_1 = create_job(sample_template, created_at=earlier) + job_2 = create_job(sample_template, created_at=now) + create_notification(job=job_1, status='created') + create_notification(job=job_1, status='created') + create_notification(job=job_1, status='created') + create_notification(job=job_2, status='sending') + create_notification(job=job_2, status='sending') + create_notification(job=job_2, status='sending') - with notify_api.test_request_context(): - with notify_api.test_client() as client: - path = '/service/{}/job'.format(sample_service.id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) - assert len(resp_json['data']) == 2 - assert resp_json['data'][0]['id'] == str(job_2.id) - assert {'status': 'sending', 'count': 3} in resp_json['data'][0]['statistics'] - assert resp_json['data'][1]['id'] == str(job_1.id) - assert {'status': 'created', 'count': 3} in resp_json['data'][1]['statistics'] + resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id) + + assert len(resp_json['data']) == 2 + assert resp_json['data'][0]['id'] == str(job_2.id) + assert {'status': 'sending', 'count': 3} in resp_json['data'][0]['statistics'] + assert resp_json['data'][1]['id'] == str(job_1.id) + assert {'status': 'created', 'count': 3} in resp_json['data'][1]['statistics'] -def 
test_get_jobs_should_return_no_stats_if_no_rows_in_notifications( - client, - notify_db, - notify_db_session, - notify_api, - sample_service, -): - +def test_get_jobs_should_return_no_stats_if_no_rows_in_notifications(admin_request, sample_template): now = datetime.utcnow() earlier = datetime.utcnow() - timedelta(days=1) - job_1 = create_job(notify_db, notify_db_session, service=sample_service, created_at=earlier) - job_2 = create_job(notify_db, notify_db_session, service=sample_service, created_at=now) + job_1 = create_job(sample_template, created_at=earlier) + job_2 = create_job(sample_template, created_at=now) + + resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id) - path = '/service/{}/job'.format(sample_service.id) - auth_header = create_authorization_header() - response = client.get(path, headers=[auth_header]) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) assert len(resp_json['data']) == 2 assert resp_json['data'][0]['id'] == str(job_2.id) assert resp_json['data'][0]['statistics'] == [] @@ -720,23 +607,12 @@ def test_get_jobs_should_return_no_stats_if_no_rows_in_notifications( assert resp_json['data'][1]['statistics'] == [] -def test_get_jobs_should_paginate( - notify_db, - notify_db_session, - client, - sample_template -): - create_10_jobs(notify_db, notify_db_session, sample_template.service, sample_template) +def test_get_jobs_should_paginate(admin_request, sample_template): + create_10_jobs(sample_template) - path = '/service/{}/job'.format(sample_template.service_id) - auth_header = create_authorization_header() + with set_config(admin_request.app, 'PAGE_SIZE', 2): + resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id) - with set_config(client.application, 'PAGE_SIZE', 2): - response = client.get(path, headers=[auth_header]) - - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) 
- assert len(resp_json['data']) == 2 assert resp_json['data'][0]['created_at'] == '2015-01-01T10:00:00+00:00' assert resp_json['data'][1]['created_at'] == '2015-01-01T09:00:00+00:00' assert resp_json['page_size'] == 2 @@ -745,23 +621,12 @@ def test_get_jobs_should_paginate( assert set(resp_json['links'].keys()) == {'next', 'last'} -def test_get_jobs_accepts_page_parameter( - notify_db, - notify_db_session, - client, - sample_template -): - create_10_jobs(notify_db, notify_db_session, sample_template.service, sample_template) +def test_get_jobs_accepts_page_parameter(admin_request, sample_template): + create_10_jobs(sample_template) - path = '/service/{}/job'.format(sample_template.service_id) - auth_header = create_authorization_header() + with set_config(admin_request.app, 'PAGE_SIZE', 2): + resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id, page=2) - with set_config(client.application, 'PAGE_SIZE', 2): - response = client.get(path, headers=[auth_header], query_string={'page': 2}) - - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) - assert len(resp_json['data']) == 2 assert resp_json['data'][0]['created_at'] == '2015-01-01T08:00:00+00:00' assert resp_json['data'][1]['created_at'] == '2015-01-01T07:00:00+00:00' assert resp_json['page_size'] == 2 @@ -778,71 +643,50 @@ def test_get_jobs_accepts_page_parameter( # bad statuses are accepted, just return no data ('foo', []) ]) -def test_get_jobs_can_filter_on_statuses( - notify_db, - notify_db_session, - client, - sample_service, - statuses_filter, - expected_statuses -): - create_job(notify_db, notify_db_session, job_status='pending') - create_job(notify_db, notify_db_session, job_status='in progress') - create_job(notify_db, notify_db_session, job_status='finished') - create_job(notify_db, notify_db_session, job_status='sending limits exceeded') - create_job(notify_db, notify_db_session, job_status='scheduled') - 
create_job(notify_db, notify_db_session, job_status='cancelled') - create_job(notify_db, notify_db_session, job_status='ready to send') - create_job(notify_db, notify_db_session, job_status='sent to dvla') - create_job(notify_db, notify_db_session, job_status='error') +def test_get_jobs_can_filter_on_statuses(admin_request, sample_template, statuses_filter, expected_statuses): + create_job(sample_template, job_status='pending') + create_job(sample_template, job_status='in progress') + create_job(sample_template, job_status='finished') + create_job(sample_template, job_status='sending limits exceeded') + create_job(sample_template, job_status='scheduled') + create_job(sample_template, job_status='cancelled') + create_job(sample_template, job_status='ready to send') + create_job(sample_template, job_status='sent to dvla') + create_job(sample_template, job_status='error') - path = '/service/{}/job'.format(sample_service.id) - response = client.get( - path, - headers=[create_authorization_header()], - query_string={'statuses': statuses_filter} + resp_json = admin_request.get( + 'job.get_jobs_by_service', + service_id=sample_template.service_id, + statuses=statuses_filter ) - assert response.status_code == 200 - resp_json = json.loads(response.get_data(as_text=True)) - from pprint import pprint - pprint(resp_json) assert {x['job_status'] for x in resp_json['data']} == set(expected_statuses) -def create_10_jobs(db, session, service, template): +def create_10_jobs(template): with freeze_time('2015-01-01T00:00:00') as the_time: for _ in range(10): the_time.tick(timedelta(hours=1)) - create_job(db, session, service, template) + create_job(template) -def test_get_all_notifications_for_job_returns_csv_format( - client, - notify_db, - notify_db_session, -): - job = create_job(notify_db, notify_db_session) - notification = create_notification( - notify_db, - notify_db_session, - job=job, - job_row_number=1, - created_at=datetime.utcnow(), +def 
test_get_all_notifications_for_job_returns_csv_format(admin_request, sample_notification_with_job): + resp = admin_request.get( + 'job.get_all_notifications_for_service_job', + service_id=sample_notification_with_job.service_id, + job_id=sample_notification_with_job.job_id, + format_for_csv=True ) - path = '/service/{}/job/{}/notifications'.format(notification.service.id, job.id) - - response = client.get( - path=path, - headers=[create_authorization_header()], - query_string={'format_for_csv': True} - ) - assert response.status_code == 200 - - resp = json.loads(response.get_data(as_text=True)) assert len(resp['notifications']) == 1 - notification = resp['notifications'][0] - assert set(notification.keys()) == \ - set(['created_at', 'created_by_name', 'created_by_email_address', 'template_type', - 'template_name', 'job_name', 'status', 'row_number', 'recipient']) + assert set(resp['notifications'][0].keys()) == { + 'created_at', + 'created_by_name', + 'created_by_email_address', + 'template_type', + 'template_name', + 'job_name', + 'status', + 'row_number', + 'recipient' + } From b80beab76cc9362bf540956e00cd52ffae249325 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 12 Dec 2018 12:57:33 +0000 Subject: [PATCH 036/118] use ft_notification_status and notifications for job statistics we previously always read from NotificationHistory to get the notification status stats for a job. Now, if the job is more than three days old read from ft_notification_status table, otherwise read from the notifications table (to keep live updates). 
--- app/dao/fact_notification_status_dao.py | 11 +++++ app/dao/jobs_dao.py | 19 +++----- app/job/rest.py | 14 ++++-- .../dao/test_fact_notification_status_dao.py | 20 ++++++++- tests/app/dao/test_jobs_dao.py | 43 ++++++++++++++++++- tests/app/job/test_rest.py | 33 +++++++++++++- 6 files changed, 120 insertions(+), 20 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 1390201e8..8925c81f0 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -186,3 +186,14 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): else: query = stats return query.all() + + +def fetch_notification_statuses_for_job(job_id): + return db.session.query( + FactNotificationStatus.notification_status.label('status'), + func.sum(FactNotificationStatus.notification_count).label('count'), + ).filter( + FactNotificationStatus.job_id == job_id, + ).group_by( + FactNotificationStatus.notification_status + ).all() diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 5eaf71457..e22c1c709 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -16,7 +16,7 @@ from app.models import ( JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, LETTER_TYPE, - NotificationHistory, + Notification, Template, ServiceDataRetention ) @@ -25,19 +25,14 @@ from app.variables import LETTER_TEST_API_FILENAME @statsd(namespace="dao") def dao_get_notification_outcomes_for_job(service_id, job_id): - query = db.session.query( - func.count(NotificationHistory.status).label('count'), - NotificationHistory.status - ) - - return query.filter( - NotificationHistory.service_id == service_id + return db.session.query( + func.count(Notification.status).label('count'), + Notification.status ).filter( - NotificationHistory.job_id == job_id + Notification.service_id == service_id, + Notification.job_id == job_id ).group_by( - NotificationHistory.status - ).order_by( - asc(NotificationHistory.status) + 
Notification.status ).all() diff --git a/app/job/rest.py b/app/job/rest.py index 6e64e045c..61665ec45 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -1,3 +1,4 @@ +import dateutil from flask import ( Blueprint, jsonify, @@ -13,6 +14,7 @@ from app.dao.jobs_dao import ( dao_get_jobs_by_service_id, dao_get_future_scheduled_job_by_id_and_service_id, dao_get_notification_outcomes_for_job) +from app.dao.fact_notification_status_dao import fetch_notification_statuses_for_job from app.dao.services_dao import dao_fetch_service_by_id from app.dao.templates_dao import dao_get_template_by_id from app.dao.notifications_dao import get_notifications_for_job @@ -24,7 +26,7 @@ from app.schemas import ( ) from app.celery.tasks import process_job from app.models import JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, JOB_STATUS_CANCELLED, LETTER_TYPE -from app.utils import pagination_links +from app.utils import pagination_links, midnight_n_days_ago from app.config import QueueNames from app.errors import ( register_errors, @@ -171,8 +173,14 @@ def get_paginated_jobs(service_id, limit_days, statuses, page): ) data = job_schema.dump(pagination.items, many=True).data for job_data in data: - statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id']) - job_data['statistics'] = [{'status': statistic[1], 'count': statistic[0]} for statistic in statistics] + created_at = dateutil.parser.parse(job_data['created_at']).replace(tzinfo=None) + if created_at < midnight_n_days_ago(3): + # ft_notification_status table + statistics = fetch_notification_statuses_for_job(job_data['id']) + else: + # notifications table + statistics = dao_get_notification_outcomes_for_job(service_id, job_data['id']) + job_data['statistics'] = [{'status': statistic.status, 'count': statistic.count} for statistic in statistics] return { 'data': data, diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 9dcb975eb..db90c6d61 100644 --- 
a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -9,11 +9,12 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_status_for_service_by_month, fetch_notification_status_for_service_for_day, fetch_notification_status_for_service_for_today_and_7_previous_days, - fetch_notification_status_totals_for_all_services + fetch_notification_status_totals_for_all_services, + fetch_notification_statuses_for_job, ) from app.models import FactNotificationStatus, KEY_TYPE_TEST, KEY_TYPE_TEAM, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE from freezegun import freeze_time -from tests.app.db import create_notification, create_service, create_template, create_ft_notification_status +from tests.app.db import create_notification, create_service, create_template, create_ft_notification_status, create_job def test_update_fact_notification_status(notify_db_session): @@ -288,3 +289,18 @@ def set_up_data(): create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 0, 0), status='delivered') + + +def test_fetch_notification_statuses_for_job(sample_template): + j1 = create_job(sample_template) + j2 = create_job(sample_template) + + create_ft_notification_status(date(2018, 10, 1), job=j1, notification_status='created', count=1) + create_ft_notification_status(date(2018, 10, 1), job=j1, notification_status='delivered', count=2) + create_ft_notification_status(date(2018, 10, 2), job=j1, notification_status='created', count=4) + create_ft_notification_status(date(2018, 10, 1), job=j2, notification_status='created', count=8) + + assert {x.status: x.count for x in fetch_notification_statuses_for_job(j1.id)} == { + 'created': 5, + 'delivered': 2 + } diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 
535808d3b..524d17f8e 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -19,13 +19,54 @@ from app.models import ( Job, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE ) -from tests.app.db import create_job, create_service, create_template +from tests.app.db import create_job, create_service, create_template, create_notification def test_should_have_decorated_notifications_dao_functions(): assert dao_get_notification_outcomes_for_job.__wrapped__.__name__ == 'dao_get_notification_outcomes_for_job' # noqa +def test_should_count_of_statuses_for_notifications_associated_with_job(sample_template, sample_job): + create_notification(sample_template, job=sample_job, status='created') + create_notification(sample_template, job=sample_job, status='created') + create_notification(sample_template, job=sample_job, status='created') + create_notification(sample_template, job=sample_job, status='sending') + create_notification(sample_template, job=sample_job, status='delivered') + + results = dao_get_notification_outcomes_for_job(sample_template.service_id, sample_job.id) + assert {row.status: row.count for row in results} == { + 'created': 3, + 'sending': 1, + 'delivered': 1, + } + + +def test_should_return_zero_length_array_if_no_notifications_for_job(sample_service, sample_job): + assert len(dao_get_notification_outcomes_for_job(sample_job.id, sample_service.id)) == 0 + + +def test_should_return_notifications_only_for_this_job(sample_template): + job_1 = create_job(sample_template) + job_2 = create_job(sample_template) + + create_notification(sample_template, job=job_1, status='created') + create_notification(sample_template, job=job_2, status='sent') + + results = dao_get_notification_outcomes_for_job(sample_template.service_id, job_1.id) + assert {row.status: row.count for row in results} == {'created': 1} + + +def test_should_return_notifications_only_for_this_service(sample_notification_with_job): + other_service = create_service(service_name='one') + 
other_template = create_template(service=other_service) + other_job = create_job(other_template) + + create_notification(other_template, job=other_job) + + assert len(dao_get_notification_outcomes_for_job(sample_notification_with_job.service_id, other_job.id)) == 0 + assert len(dao_get_notification_outcomes_for_job(other_service.id, sample_notification_with_job.id)) == 0 + + def test_create_sample_job(sample_template): assert Job.query.count() == 0 diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index e1797233a..9adb6e8ac 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -1,6 +1,6 @@ import json import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, date from freezegun import freeze_time import pytest @@ -12,7 +12,7 @@ from app.models import JOB_STATUS_TYPES, JOB_STATUS_PENDING from tests import create_authorization_header from tests.conftest import set_config -from tests.app.db import create_job, create_notification +from tests.app.db import create_ft_notification_status, create_job, create_notification def test_get_job_with_invalid_service_id_returns404(client, sample_service): @@ -690,3 +690,32 @@ def test_get_all_notifications_for_job_returns_csv_format(admin_request, sample_ 'row_number', 'recipient' } + + +@freeze_time('2017-06-10 12:00') +def test_get_jobs_should_retrieve_from_ft_notification_status_for_old_jobs(admin_request, sample_template): + # it's the 10th today, so 3 days should include all of 7th, 8th, 9th, and some of 10th. 
+ just_three_days_ago = datetime(2017, 6, 6, 22, 59, 59) + not_quite_three_days_ago = just_three_days_ago + timedelta(seconds=1) + + job_1 = create_job(sample_template, created_at=just_three_days_ago) + job_2 = create_job(sample_template, created_at=not_quite_three_days_ago) + + # some notifications created more than three days ago, some created after the midnight cutoff + create_ft_notification_status(date(2017, 6, 6), job=job_1, notification_status='delivered', count=2) + create_ft_notification_status(date(2017, 6, 7), job=job_1, notification_status='delivered', count=4) + # job2's new enough + create_notification(job=job_2, status='created', created_at=not_quite_three_days_ago) + + # this isn't picked up because the job is too new + create_ft_notification_status(date(2017, 6, 7), job=job_2, notification_status='delivered', count=8) + + # this isn't picked up because we're using the ft status table for job_1 as it's old + create_notification(job=job_1, status='created', created_at=not_quite_three_days_ago) + + resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id) + + assert resp_json['data'][0]['id'] == str(job_2.id) + assert resp_json['data'][0]['statistics'] == [{'status': 'created', 'count': 1}] + assert resp_json['data'][1]['id'] == str(job_1.id) + assert resp_json['data'][1]['statistics'] == [{'status': 'delivered', 'count': 6}] From 2f4f381fa4085725be56a5683605584fd0812b8c Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 13 Dec 2018 11:09:47 +0000 Subject: [PATCH 037/118] Include live/trial mode in list of services We need this so we can check in the admin whether users have any live services. 
--- app/user/rest.py | 6 ++++-- tests/app/user/test_rest.py | 18 ++++++++++++------ 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/app/user/rest.py b/app/user/rest.py index 0bca01a4c..65097b6d4 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -449,7 +449,8 @@ def get_orgs_and_services(user): 'services': [ { 'id': service.id, - 'name': service.name + 'name': service.name, + 'restricted': service.restricted, } for service in org.services if service.active and service in user.services @@ -460,7 +461,8 @@ def get_orgs_and_services(user): 'services_without_organisations': [ { 'id': service.id, - 'name': service.name + 'name': service.name, + 'restricted': service.restricted, } for service in user.services if ( service.active and diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 1db989706..32222581a 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -631,11 +631,13 @@ def test_get_orgs_and_services_nests_services(admin_request, sample_user): 'services': [ { 'name': service1.name, - 'id': str(service1.id) + 'id': str(service1.id), + 'restricted': False, }, { 'name': service2.name, - 'id': str(service2.id) + 'id': str(service2.id), + 'restricted': False, } ] }, @@ -648,7 +650,8 @@ def test_get_orgs_and_services_nests_services(admin_request, sample_user): 'services_without_organisations': [ { 'name': service3.name, - 'id': str(service3.id) + 'id': str(service3.id), + 'restricted': False, } ] } @@ -683,7 +686,8 @@ def test_get_orgs_and_services_only_returns_active(admin_request, sample_user): 'services': [ { 'name': service1.name, - 'id': str(service1.id) + 'id': str(service1.id), + 'restricted': False, } ] } @@ -691,7 +695,8 @@ def test_get_orgs_and_services_only_returns_active(admin_request, sample_user): 'services_without_organisations': [ { 'name': service4.name, - 'id': str(service4.id) + 'id': str(service4.id), + 'restricted': False, } ] } @@ -727,7 +732,8 @@ def 
test_get_orgs_and_services_only_shows_users_orgs_and_services(admin_request, 'services_without_organisations': [ { 'name': service1.name, - 'id': str(service1.id) + 'id': str(service1.id), + 'restricted': False, } ] } From 16311a8288eefe00ccb5bfce736ab7e211a0a905 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Fri, 14 Dec 2018 10:58:07 +0000 Subject: [PATCH 038/118] Add letter logo for Neath Port Talbot Council --- .../versions/0247_another_letter_org.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 migrations/versions/0247_another_letter_org.py diff --git a/migrations/versions/0247_another_letter_org.py b/migrations/versions/0247_another_letter_org.py new file mode 100644 index 000000000..be2a988c0 --- /dev/null +++ b/migrations/versions/0247_another_letter_org.py @@ -0,0 +1,35 @@ +"""empty message + +Revision ID: 0247_another_letter_org +Revises: 0246_notifications_index + +""" + +# revision identifiers, used by Alembic. +revision = '0247_another_letter_org' +down_revision = '0246_notifications_index' + +from alembic import op + + +NEW_ORGANISATIONS = [ + ('520', 'Neath Port Talbot Council', 'npt'), +] + + +def upgrade(): + for numeric_id, name, filename in NEW_ORGANISATIONS: + op.execute(""" + INSERT + INTO dvla_organisation + VALUES ('{}', '{}', '{}') + """.format(numeric_id, name, filename)) + + +def downgrade(): + for numeric_id, _, _ in NEW_ORGANISATIONS: + op.execute(""" + DELETE + FROM dvla_organisation + WHERE id = '{}' + """.format(numeric_id)) From 52a7dcf86cb5806146c71be62c2e159f4eccebd7 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Fri, 14 Dec 2018 12:45:58 +0000 Subject: [PATCH 039/118] Add choose_postage service permission and add postage to Template --- app/models.py | 12 +++++ .../versions/0248_enable_choose_postage.py | 54 +++++++++++++++++++ tests/app/v2/template/test_get_template.py | 1 + 3 files changed, 67 insertions(+) create mode 100644 migrations/versions/0248_enable_choose_postage.py diff --git 
a/app/models.py b/app/models.py index bc1d331f0..555a27ddd 100644 --- a/app/models.py +++ b/app/models.py @@ -257,6 +257,7 @@ LETTERS_AS_PDF = 'letters_as_pdf' PRECOMPILED_LETTER = 'precompiled_letter' UPLOAD_DOCUMENT = 'upload_document' EDIT_FOLDERS = 'edit_folders' +CHOOSE_POSTAGE = 'choose_postage' SERVICE_PERMISSION_TYPES = [ EMAIL_TYPE, @@ -270,6 +271,7 @@ SERVICE_PERMISSION_TYPES = [ PRECOMPILED_LETTER, UPLOAD_DOCUMENT, EDIT_FOLDERS, + CHOOSE_POSTAGE ] @@ -762,6 +764,15 @@ class TemplateBase(db.Model): archived = db.Column(db.Boolean, nullable=False, default=False) hidden = db.Column(db.Boolean, nullable=False, default=False) subject = db.Column(db.Text) + postage = db.Column(db.String, nullable=True) + CheckConstraint(""" + CASE WHEN template_type = 'letter' THEN + postage in ('first', 'second') OR + postage is null + ELSE + postage is null + END + """) @declared_attr def service_id(cls): @@ -861,6 +872,7 @@ class TemplateBase(db.Model): } for key in self._as_utils_template().placeholders }, + "postage": self.postage, } return serialized diff --git a/migrations/versions/0248_enable_choose_postage.py b/migrations/versions/0248_enable_choose_postage.py new file mode 100644 index 000000000..b72d6749f --- /dev/null +++ b/migrations/versions/0248_enable_choose_postage.py @@ -0,0 +1,54 @@ +""" + +Revision ID: 0248_enable_choose_postage +Revises: 0247_another_letter_org +Create Date: 2018-12-14 12:09:31.375634 + +""" +from alembic import op +import sqlalchemy as sa + + +revision = '0248_enable_choose_postage' +down_revision = '0247_another_letter_org' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.execute("INSERT INTO service_permission_types VALUES ('choose_postage')") + op.add_column('templates', sa.Column('postage', sa.String(), nullable=True)) + op.add_column('templates_history', sa.Column('postage', sa.String(), nullable=True)) + op.execute(""" + ALTER TABLE templates ADD CONSTRAINT "chk_templates_postage_null" + CHECK ( + CASE WHEN template_type = 'letter' THEN + postage in ('first', 'second') OR + postage is null + ELSE + postage is null + END + ) + """) + op.execute(""" + ALTER TABLE templates_history ADD CONSTRAINT "chk_templates_history_postage_null" + CHECK ( + CASE WHEN template_type = 'letter' THEN + postage in ('first', 'second') OR + postage is null + ELSE + postage is null + END + ) + """) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('chk_templates_history_postage_null', 'templates_history', type_='check') + op.drop_constraint('chk_templates_postage_null', 'templates', type_='check') + op.drop_column('templates_history', 'postage') + op.drop_column('templates', 'postage') + op.execute("DELETE FROM service_permissions WHERE permission = 'choose_postage'") + op.execute("DELETE FROM service_permission_types WHERE name = 'choose_postage'") + # ### end Alembic commands ### diff --git a/tests/app/v2/template/test_get_template.py b/tests/app/v2/template/test_get_template.py index 3be2ce889..900621db7 100644 --- a/tests/app/v2/template/test_get_template.py +++ b/tests/app/v2/template/test_get_template.py @@ -41,6 +41,7 @@ def test_get_template_by_id_returns_200(client, sample_service, tmp_type, expect "subject": expected_subject, 'name': expected_name, 'personalisation': {}, + 'postage': None, } assert json_response == expected_response From 86c3d96a830ec1a042f965a4e18787e59f45491d Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 17 Dec 2018 10:37:19 +0000 Subject: [PATCH 040/118] Test postage constraints on TemplateBase model --- 
tests/app/dao/test_templates_dao.py | 35 +++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index b3d27a6d6..d86cec7a0 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -1,6 +1,7 @@ from datetime import datetime from freezegun import freeze_time +from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm.exc import NoResultFound import pytest @@ -523,3 +524,37 @@ def test_get_multiple_template_details_returns_templates_for_list_of_ids(sample_ assert res[0].template_type assert res[0].name assert not res[0].is_precompiled_letter + + +@pytest.mark.parametrize("template_type,postage", [('letter', 'third'), ('sms', 'second')]) +def test_template_postage_constraint_on_create(sample_service, sample_user, template_type, postage): + data = { + 'name': 'Sample Template', + 'template_type': template_type, + 'content': "Template content", + 'service': sample_service, + 'created_by': sample_user, + 'postage': postage + } + template = Template(**data) + with pytest.raises(expected_exception=SQLAlchemyError): + dao_create_template(template) + + +def test_template_postage_constraint_on_update(sample_service, sample_user): + data = { + 'name': 'Sample Template', + 'template_type': "letter", + 'content': "Template content", + 'service': sample_service, + 'created_by': sample_user, + 'postage': 'second' + } + template = Template(**data) + dao_create_template(template) + created = dao_get_all_templates_for_service(sample_service.id)[0] + assert created.name == 'Sample Template' + + created.postage = 'third' + with pytest.raises(expected_exception=SQLAlchemyError): + dao_update_template(created) From 251aecab1b52bacb92d4f30dabfa098b7ec0cb3d Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 17 Dec 2018 15:45:24 +0000 Subject: [PATCH 041/118] base job start of processing_started rather than created_at otherwise scheduled jobs will be 
viewed as old, and we'll pull stats from the statistics tables, even if they might have not even started yet --- app/job/rest.py | 8 ++++++-- tests/app/job/test_rest.py | 22 ++++++++++++++-------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/app/job/rest.py b/app/job/rest.py index 61665ec45..20f37efed 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -173,8 +173,12 @@ def get_paginated_jobs(service_id, limit_days, statuses, page): ) data = job_schema.dump(pagination.items, many=True).data for job_data in data: - created_at = dateutil.parser.parse(job_data['created_at']).replace(tzinfo=None) - if created_at < midnight_n_days_ago(3): + start = job_data['processing_started'] + start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None + + if start is None: + statistics = [] + elif start.replace(tzinfo=None) < midnight_n_days_ago(3): # ft_notification_status table statistics = fetch_notification_statuses_for_job(job_data['id']) else: diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 9adb6e8ac..03d4a289e 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -574,8 +574,8 @@ def test_get_jobs_with_limit_days(admin_request, sample_template): def test_get_jobs_should_return_statistics(admin_request, sample_template): now = datetime.utcnow() earlier = datetime.utcnow() - timedelta(days=1) - job_1 = create_job(sample_template, created_at=earlier) - job_2 = create_job(sample_template, created_at=now) + job_1 = create_job(sample_template, processing_started=earlier) + job_2 = create_job(sample_template, processing_started=now) create_notification(job=job_1, status='created') create_notification(job=job_1, status='created') create_notification(job=job_1, status='created') @@ -698,8 +698,10 @@ def test_get_jobs_should_retrieve_from_ft_notification_status_for_old_jobs(admin just_three_days_ago = datetime(2017, 6, 6, 22, 59, 59) not_quite_three_days_ago = just_three_days_ago + 
timedelta(seconds=1) - job_1 = create_job(sample_template, created_at=just_three_days_ago) - job_2 = create_job(sample_template, created_at=not_quite_three_days_ago) + job_1 = create_job(sample_template, created_at=just_three_days_ago, processing_started=just_three_days_ago) + job_2 = create_job(sample_template, created_at=just_three_days_ago, processing_started=not_quite_three_days_ago) + # is old but hasn't started yet (probably a scheduled job). We don't have any stats for this job yet. + job_3 = create_job(sample_template, created_at=just_three_days_ago, processing_started=None) # some notifications created more than three days ago, some created after the midnight cutoff create_ft_notification_status(date(2017, 6, 6), job=job_1, notification_status='delivered', count=2) @@ -709,13 +711,17 @@ def test_get_jobs_should_retrieve_from_ft_notification_status_for_old_jobs(admin # this isn't picked up because the job is too new create_ft_notification_status(date(2017, 6, 7), job=job_2, notification_status='delivered', count=8) + # this isn't picked up - while the job is old, it started in last 3 days so we look at notification table instead + create_ft_notification_status(date(2017, 6, 7), job=job_3, notification_status='delivered', count=16) # this isn't picked up because we're using the ft status table for job_1 as it's old create_notification(job=job_1, status='created', created_at=not_quite_three_days_ago) resp_json = admin_request.get('job.get_jobs_by_service', service_id=sample_template.service_id) - assert resp_json['data'][0]['id'] == str(job_2.id) - assert resp_json['data'][0]['statistics'] == [{'status': 'created', 'count': 1}] - assert resp_json['data'][1]['id'] == str(job_1.id) - assert resp_json['data'][1]['statistics'] == [{'status': 'delivered', 'count': 6}] + assert resp_json['data'][0]['id'] == str(job_3.id) + assert resp_json['data'][0]['statistics'] == [] + assert resp_json['data'][1]['id'] == str(job_2.id) + assert resp_json['data'][1]['statistics'] 
== [{'status': 'created', 'count': 1}] + assert resp_json['data'][2]['id'] == str(job_1.id) + assert resp_json['data'][2]['statistics'] == [{'status': 'delivered', 'count': 6}] From e9fb60f05c6d3e6d933ad88cdbb6b532f2e11ae9 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 13 Dec 2018 12:01:50 +0000 Subject: [PATCH 042/118] Send extra headers to Template Preview /precompiled/sanitise endpoint We want to send two new headers, ServiceId and NotificationId to the template preview /precompiled/sanitise endpoint. This is to allow us to log errors from this endpoint in template preview with all the information needed, instead of needing to pass the information back to notifications-api and to log it there. --- app/celery/letters_pdf_tasks.py | 4 +++- tests/app/celery/test_letters_pdf_tasks.py | 21 +++++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index a40f2bd91..485f258bf 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -261,7 +261,9 @@ def _sanitise_precompiled_pdf(self, notification, precompiled_pdf): current_app.config['TEMPLATE_PREVIEW_API_HOST'] ), data=precompiled_pdf, - headers={'Authorization': 'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY'])} + headers={'Authorization': 'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY']), + 'Service-ID': str(notification.service_id), + 'Notification-ID': str(notification.id)} ) resp.raise_for_status() return resp.content diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py index 4af7edca2..13815ce05 100644 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ b/tests/app/celery/test_letters_pdf_tasks.py @@ -508,6 +508,27 @@ def test_sanitise_precompiled_pdf_returns_none_on_validation_error(rmock, sample assert res is None +def 
test_sanitise_precompiled_pdf_passes_the_service_id_and_notification_id_to_template_preview( + mocker, + sample_letter_notification, +): + tp_mock = mocker.patch('app.celery.letters_pdf_tasks.requests_post') + sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK + mock_celery = Mock(**{'retry.side_effect': Retry}) + _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf') + + service_id = str(sample_letter_notification.service_id) + notification_id = str(sample_letter_notification.id) + + tp_mock.assert_called_once_with( + 'http://localhost:9999/precompiled/sanitise', + data=b'old_pdf', + headers={'Authorization': 'Token my-secret-key', + 'Service-ID': service_id, + 'Notification-ID': notification_id} + ) + + def test_sanitise_precompiled_pdf_retries_on_http_error(rmock, sample_letter_notification): sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK rmock.post('http://localhost:9999/precompiled/sanitise', content=b'new_pdf', status_code=500) From a4f288072195f77b5d934d5b2aa0d47f8230975d Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 20 Dec 2018 10:57:14 +0000 Subject: [PATCH 043/118] Fix log messages when emails and letters don't get deleted --- app/celery/scheduled_tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index aff2ba452..7c811ca1d 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -147,7 +147,7 @@ def delete_email_notifications_older_than_seven_days(): ) ) except SQLAlchemyError: - current_app.logger.exception("Failed to delete sms notifications") + current_app.logger.exception("Failed to delete email notifications") raise @@ -166,7 +166,7 @@ def delete_letter_notifications_older_than_seven_days(): ) ) except SQLAlchemyError: - current_app.logger.exception("Failed to delete sms notifications") + current_app.logger.exception("Failed to delete letter notifications") raise 
From 7d026ad385b1dd73f4ce53c1de9639a761442fef Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Thu, 20 Dec 2018 12:31:00 +0000 Subject: [PATCH 044/118] wip --- app/dao/notifications_dao.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 7544dfdea..f47a85139 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -315,8 +315,9 @@ def _filter_query(query, filter_dict=None): @statsd(namespace="dao") -@transactional def delete_notifications_created_more_than_a_week_ago_by_type(notification_type): + current_app.logger.info('Deleting {} notifications for services with flexible data retention'.format(notification_type)) + flexible_data_retention = ServiceDataRetention.query.filter( ServiceDataRetention.notification_type == notification_type ).all() @@ -329,6 +330,9 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type) if notification_type == LETTER_TYPE: _delete_letters_from_s3(query) deleted += query.delete(synchronize_session='fetch') + db.session.commit() + + current_app.logger.info('Deleting {} notifications for services without flexible data retention'.format(notification_type)) seven_days_ago = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=7) services_with_data_retention = [x.service_id for x in flexible_data_retention] @@ -339,6 +343,10 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type) if notification_type == LETTER_TYPE: _delete_letters_from_s3(query=query) deleted += query.delete(synchronize_session='fetch') + db.session.commit() + + current_app.logger.info('Finished deleting {} notifications'.format(notification_type)) + return deleted From 89923eab715e0af8878912b0f50c1dc02134450c Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 20 Dec 2018 16:09:38 +0000 Subject: [PATCH 045/118] Updated the query to improve the performance. 
The scheduled job for deleting emails has failed silently for many nights now. --- app/dao/notifications_dao.py | 42 +++++++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f47a85139..1229d3940 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -44,7 +44,8 @@ from app.models import ( NOTIFICATION_SENT, SMS_TYPE, EMAIL_TYPE, - ServiceDataRetention + ServiceDataRetention, + Service ) from app.utils import get_london_midnight_in_utc from app.utils import midnight_n_days_ago, escape_special_characters @@ -315,8 +316,9 @@ def _filter_query(query, filter_dict=None): @statsd(namespace="dao") -def delete_notifications_created_more_than_a_week_ago_by_type(notification_type): - current_app.logger.info('Deleting {} notifications for services with flexible data retention'.format(notification_type)) +def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, qry_limit=10000): + current_app.logger.info( + 'Deleting {} notifications for services with flexible data retention'.format(notification_type)) flexible_data_retention = ServiceDataRetention.query.filter( ServiceDataRetention.notification_type == notification_type @@ -332,18 +334,34 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type) deleted += query.delete(synchronize_session='fetch') db.session.commit() - current_app.logger.info('Deleting {} notifications for services without flexible data retention'.format(notification_type)) + current_app.logger.info( + 'Deleting {} notifications for services without flexible data retention'.format(notification_type)) seven_days_ago = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=7) services_with_data_retention = [x.service_id for x in flexible_data_retention] - query = db.session.query(Notification).filter(func.date(Notification.created_at) < seven_days_ago, - 
Notification.notification_type == notification_type, - Notification.service_id.notin_( - services_with_data_retention)) - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(query=query) - deleted += query.delete(synchronize_session='fetch') - db.session.commit() + service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all() + + for service_id in service_ids_to_purge: + subquery = db.session.query( + Notification + ).filter( + Notification.notification_type == notification_type, + func.date(Notification.created_at) < seven_days_ago, + Notification.service_id == service_id + ).limit(qry_limit) + + if notification_type == LETTER_TYPE: + _delete_letters_from_s3(query=subquery) + + number_deleted = db.session.query(Notification).filter( + Notification.id.in_([x.id for x in subquery.all()])).delete(synchronize_session='fetch') + deleted += number_deleted + db.session.commit() + while number_deleted > 0: + number_deleted = db.session.query(Notification).filter( + Notification.id.in_([x.id for x in subquery.all()])).delete(synchronize_session='fetch') + deleted += number_deleted + db.session.commit() current_app.logger.info('Finished deleting {} notifications'.format(notification_type)) From d8a0a3f5ab8510e4cc5858201189f28317fa1d38 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 20 Dec 2018 17:02:49 +0000 Subject: [PATCH 046/118] Added a test to make sure the loop exits. 
--- .../test_notification_dao_delete_notifications.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 1479921bd..8c39c4ccb 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -186,6 +186,20 @@ def test_delete_notifications_does_try_to_delete_from_s3_when_letter_has_not_bee mock_get_s3.assert_not_called() +def test_delete_notifications_calls_subquery( + notify_db_session, mocker +): + service = create_service() + sms_template = create_template(service=service) + create_notification(template=sms_template, created_at=datetime.now() - timedelta(days=8)) + create_notification(template=sms_template, created_at=datetime.now() - timedelta(days=8)) + create_notification(template=sms_template, created_at=datetime.now() - timedelta(days=8)) + + assert Notification.query.count() == 3 + delete_notifications_created_more_than_a_week_ago_by_type('sms', qry_limit=1) + assert Notification.query.count() == 0 + + def _create_templates(sample_service): email_template = create_template(service=sample_service, template_type='email') sms_template = create_template(service=sample_service) From 67cffc22b64edb8e729975d4ef69738f689ab024 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 20 Dec 2018 17:03:46 +0000 Subject: [PATCH 047/118] Oops it missed the limit --- .../test_notification_dao_delete_notifications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 8c39c4ccb..4b3d8c9e8 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ 
b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -182,7 +182,7 @@ def test_delete_notifications_does_try_to_delete_from_s3_when_letter_has_not_bee create_notification(template=letter_template, status='sending', reference='LETTER_REF') - delete_notifications_created_more_than_a_week_ago_by_type('email') + delete_notifications_created_more_than_a_week_ago_by_type('email', qry_limit=1) mock_get_s3.assert_not_called() From 62a807616163d946ff49a106adf0393f86e614f1 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 21 Dec 2018 13:57:35 +0000 Subject: [PATCH 048/118] Commit the deletes every 10,000 rows. --- app/celery/scheduled_tasks.py | 7 +++--- app/dao/notifications_dao.py | 40 +++++++++++++++++++++-------------- 2 files changed, 28 insertions(+), 19 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 7c811ca1d..cde969034 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -492,9 +492,10 @@ def replay_created_notifications(): notification_type ) - current_app.logger.info("Sending {} {} notifications " - "to the delivery queue because the notification " - "status was created.".format(len(notifications_to_resend), notification_type)) + if len(notifications_to_resend) > 0: + current_app.logger.info("Sending {} {} notifications " + "to the delivery queue because the notification " + "status was created.".format(len(notifications_to_resend), notification_type)) for n in notifications_to_resend: send_notification_to_queue(notification=n, research_mode=n.service.research_mode) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 1229d3940..781ff9141 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -325,19 +325,22 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, ).all() deleted = 0 for f in flexible_data_retention: - days_of_retention = 
convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=f.days_of_retention) + days_of_retention = get_london_midnight_in_utc( + convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=f.days_of_retention) query = db.session.query(Notification).filter( - func.date(Notification.created_at) < days_of_retention, - Notification.notification_type == f.notification_type, Notification.service_id == f.service_id) + Notification.created_at < days_of_retention, + Notification.notification_type == f.notification_type, + Notification.service_id == f.service_id + ) if notification_type == LETTER_TYPE: _delete_letters_from_s3(query) - deleted += query.delete(synchronize_session='fetch') - db.session.commit() + + deleted += _delete_notifications(deleted, query) current_app.logger.info( 'Deleting {} notifications for services without flexible data retention'.format(notification_type)) - seven_days_ago = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=7) + seven_days_ago = get_london_midnight_in_utc(convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=7) services_with_data_retention = [x.service_id for x in flexible_data_retention] service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all() @@ -346,28 +349,33 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, Notification ).filter( Notification.notification_type == notification_type, - func.date(Notification.created_at) < seven_days_ago, + Notification.created_at < seven_days_ago, Notification.service_id == service_id ).limit(qry_limit) if notification_type == LETTER_TYPE: _delete_letters_from_s3(query=subquery) - number_deleted = db.session.query(Notification).filter( - Notification.id.in_([x.id for x in subquery.all()])).delete(synchronize_session='fetch') - deleted += number_deleted - db.session.commit() - while number_deleted > 0: - number_deleted = db.session.query(Notification).filter( - 
Notification.id.in_([x.id for x in subquery.all()])).delete(synchronize_session='fetch') - deleted += number_deleted - db.session.commit() + deleted += _delete_notifications(deleted, subquery) current_app.logger.info('Finished deleting {} notifications'.format(notification_type)) return deleted +def _delete_notifications(deleted, query): + number_deleted = db.session.query(Notification).filter( + Notification.id.in_([x.id for x in query.all()])).delete(synchronize_session='fetch') + deleted += number_deleted + db.session.commit() + while number_deleted > 0: + number_deleted = db.session.query(Notification).filter( + Notification.id.in_([x.id for x in query.all()])).delete(synchronize_session='fetch') + deleted += number_deleted + db.session.commit() + return deleted + + def _delete_letters_from_s3(query): letters_to_delete_from_s3 = query.all() for letter in letters_to_delete_from_s3: From 8e832a1178cdeb919e8210ee8807b4a329810df6 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 21 Dec 2018 14:09:29 +0000 Subject: [PATCH 049/118] Adding a log message --- app/dao/notifications_dao.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 781ff9141..9eae49f68 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -334,7 +334,8 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, ) if notification_type == LETTER_TYPE: _delete_letters_from_s3(query) - + current_app.logger.info( + "Deleting {} notifications for service id: {}".format(notification_type, f.service_id)) deleted += _delete_notifications(deleted, query) current_app.logger.info( From 19f7678b0560fcf1d00ab32f0c5b543bc728abb4 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 17 Dec 2018 10:03:54 +0000 Subject: [PATCH 050/118] Don't allow to set postage per template if no service permission --- app/template/rest.py | 14 +++++++- app/template/template_schemas.py | 3 +- 
tests/app/dao/test_templates_dao.py | 2 +- tests/app/db.py | 2 ++ tests/app/template/test_rest.py | 56 +++++++++++++++++++++++++++++ 5 files changed, 74 insertions(+), 3 deletions(-) diff --git a/app/template/rest.py b/app/template/rest.py index 8cbb4125f..9dc32c94f 100644 --- a/app/template/rest.py +++ b/app/template/rest.py @@ -31,7 +31,7 @@ from app.errors import ( InvalidRequest ) from app.letters.utils import get_letter_pdf -from app.models import SMS_TYPE, Template +from app.models import SMS_TYPE, Template, CHOOSE_POSTAGE from app.notifications.validators import service_has_permission, check_reply_to from app.schema_validation import validate from app.schemas import (template_schema, template_history_schema) @@ -78,6 +78,12 @@ def create_template(service_id): errors = {'template_type': [message]} raise InvalidRequest(errors, 403) + if new_template.postage: + if not service_has_permission(CHOOSE_POSTAGE, fetched_service.permissions): + message = "Setting postage on templates is not enabled for this service." + errors = {'template_postage': [message]} + raise InvalidRequest(errors, 403) + new_template.service = fetched_service over_limit = _content_count_greater_than_limit(new_template.content, new_template.template_type) @@ -110,6 +116,12 @@ def update_template(service_id, template_id): if data.get('redact_personalisation') is True: return redact_template(fetched_template, data) + if data.get('postage'): + if not service_has_permission(CHOOSE_POSTAGE, fetched_template.service.permissions): + message = "Setting postage on templates is not enabled for this service." 
+ errors = {'template_postage': [message]} + raise InvalidRequest(errors, 403) + if "reply_to" in data: check_reply_to(service_id, data.get("reply_to"), fetched_template.template_type) updated = dao_update_template_reply_to(template_id=template_id, reply_to=data.get("reply_to")) diff --git a/app/template/template_schemas.py b/app/template/template_schemas.py index f63ad4575..9f38262a5 100644 --- a/app/template/template_schemas.py +++ b/app/template/template_schemas.py @@ -17,7 +17,8 @@ post_create_template_schema = { "content": {"type": "string"}, "subject": {"type": "string"}, "created_by": uuid, - "parent_folder_id": uuid + "parent_folder_id": uuid, + "postage": {"type": "string"}, }, "if": { "properties": { diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index d86cec7a0..cbe6c6b72 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -153,7 +153,7 @@ def test_dao_update_template_reply_to_none_to_some(sample_service, sample_user): assert template_history.updated_at == updated.updated_at -def test_dao_update_tempalte_reply_to_some_to_some(sample_service, sample_user): +def test_dao_update_template_reply_to_some_to_some(sample_service, sample_user): letter_contact = create_letter_contact(sample_service, 'Edinburgh, ED1 1AA') letter_contact_2 = create_letter_contact(sample_service, 'London, N1 1DE') diff --git a/tests/app/db.py b/tests/app/db.py index 94d6ac287..a2db88ae6 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -140,6 +140,7 @@ def create_template( hidden=False, archived=False, folder=None, + postage=None, ): data = { 'name': template_name or '{} Template Name'.format(template_type), @@ -150,6 +151,7 @@ def create_template( 'reply_to': reply_to, 'hidden': hidden, 'folder': folder, + 'postage': postage, } if template_type != SMS_TYPE: data['subject'] = subject diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index 7cf46aa80..91c569a00 100644 --- 
a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -17,6 +17,7 @@ from app.models import ( EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, + CHOOSE_POSTAGE, Template, TemplateHistory ) @@ -210,6 +211,34 @@ def test_should_raise_error_on_create_if_no_permission( assert json_resp['message'] == expected_error +def test_should_raise_error_on_create_if_no_choose_postage_permission(client, sample_user): + service = create_service(service_permissions=[LETTER_TYPE]) + data = { + 'name': 'my template', + 'template_type': LETTER_TYPE, + 'content': 'template content', + 'service': str(service.id), + 'created_by': str(sample_user.id), + 'subject': "Some letter", + 'postage': 'first', + } + + data = json.dumps(data) + auth_header = create_authorization_header() + + response = client.post( + '/service/{}/template'.format(service.id), + headers=[('Content-Type', 'application/json'), auth_header], + data=data + ) + json_resp = json.loads(response.get_data(as_text=True)) + assert response.status_code == 403 + assert json_resp['result'] == 'error' + assert json_resp['message'] == { + "template_postage": ["Setting postage on templates is not enabled for this service."] + } + + @pytest.mark.parametrize('template_factory, expected_error', [ (sample_template_without_sms_permission, {'template_type': ['Updating text message templates is not allowed']}), (sample_template_without_email_permission, {'template_type': ['Updating email templates is not allowed']}), @@ -239,6 +268,33 @@ def test_should_be_error_on_update_if_no_permission( assert json_resp['message'] == expected_error +def test_should_be_error_on_update_if_no_choose_postage_permission(client, sample_user): + service = create_service(service_name='some_service', service_permissions=[LETTER_TYPE]) + template = create_template(service, template_type=LETTER_TYPE) + data = { + 'content': 'new template content', + 'created_by': str(sample_user.id), + 'postage': 'first' + } + + data = json.dumps(data) + auth_header = 
create_authorization_header() + + update_response = client.post( + '/service/{}/template/{}'.format( + template.service_id, template.id), + headers=[('Content-Type', 'application/json'), auth_header], + data=data + ) + + json_resp = json.loads(update_response.get_data(as_text=True)) + assert update_response.status_code == 403 + assert json_resp['result'] == 'error' + assert json_resp['message'] == { + "template_postage": ["Setting postage on templates is not enabled for this service."] + } + + def test_should_error_if_created_by_missing(client, sample_user, sample_service): service_id = str(sample_service.id) data = { From e6524af89cecdab4aea3cef2a0b98cbb1245d924 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 17 Dec 2018 17:49:51 +0000 Subject: [PATCH 051/118] Choose postage when persisting a notification --- app/notifications/process_notifications.py | 11 ++++- tests/app/db.py | 2 + .../test_process_notification.py | 41 ++++++++++++++++++- tests/app/template/test_rest.py | 1 - 4 files changed, 51 insertions(+), 4 deletions(-) diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 6483ac766..8fc2f15f6 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -23,7 +23,8 @@ from app.models import ( LETTER_TYPE, NOTIFICATION_CREATED, Notification, - ScheduledNotification + ScheduledNotification, + CHOOSE_POSTAGE ) from app.dao.notifications_dao import ( dao_create_notification, @@ -31,6 +32,8 @@ from app.dao.notifications_dao import ( dao_created_scheduled_notification ) +from app.dao.templates_dao import dao_get_template_by_id + from app.v2.errors import BadRequestError from app.utils import ( cache_key_for_service_template_counter, @@ -109,7 +112,11 @@ def persist_notification( elif notification_type == EMAIL_TYPE: notification.normalised_to = format_email_address(notification.to) elif notification_type == LETTER_TYPE: - notification.postage = 
service.postage + template = dao_get_template_by_id(template_id, template_version) + if service.has_permission(CHOOSE_POSTAGE) and template.postage: + notification.postage = template.postage + else: + notification.postage = service.postage # if simulated create a Notification model to return but do not persist the Notification to the dB if not simulated: diff --git a/tests/app/db.py b/tests/app/db.py index a2db88ae6..02a2df85c 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -81,6 +81,7 @@ def create_service( prefix_sms=True, message_limit=1000, organisation_type='central', + postage='second' ): service = Service( name=service_name, @@ -90,6 +91,7 @@ def create_service( created_by=user or create_user(email='{}@digital.cabinet-office.gov.uk'.format(uuid.uuid4())), prefix_sms=prefix_sms, organisation_type=organisation_type, + postage=postage ) dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 609a863d8..192644bde 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -13,7 +13,9 @@ from app.models import ( Notification, NotificationHistory, ScheduledNotification, - Template + Template, + LETTER_TYPE, + CHOOSE_POSTAGE ) from app.notifications.process_notifications import ( create_content_for_notification, @@ -27,6 +29,8 @@ from app.utils import cache_key_for_service_template_counter from app.v2.errors import BadRequestError from tests.app.conftest import sample_api_key as create_api_key +from tests.app.db import create_service, create_template + def test_create_content_for_notification_passes(sample_email_template): template = Template.query.get(sample_email_template.id) @@ -477,6 +481,41 @@ def test_persist_email_notification_stores_normalised_email( assert persisted_notification.normalised_to == 
expected_recipient_normalised +@pytest.mark.parametrize( + "service_permissions, template_postage, expected_postage", + [ + ([LETTER_TYPE], "first", "second"), + ([LETTER_TYPE, CHOOSE_POSTAGE], "first", "first"), + ([LETTER_TYPE, CHOOSE_POSTAGE], None, "second"), + ] +) +def test_persist_letter_notification_finds_correct_postage( + mocker, + notify_db, + notify_db_session, + service_permissions, + template_postage, + expected_postage +): + service = create_service(service_permissions=service_permissions, postage="second") + api_key = create_api_key(notify_db, notify_db_session, service=service) + template = create_template(service, template_type=LETTER_TYPE, postage=template_postage) + mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template) + persist_notification( + template_id=template.id, + template_version=template.version, + recipient="Jane Doe, 10 Downing Street, London", + service=service, + personalisation=None, + notification_type=LETTER_TYPE, + api_key_id=api_key.id, + key_type=api_key.key_type, + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.postage == expected_postage + + @pytest.mark.parametrize('utc_time, day_in_key', [ ('2016-01-01 23:00:00', '2016-01-01'), ('2016-06-01 22:59:00', '2016-06-01'), diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index 91c569a00..429172011 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -17,7 +17,6 @@ from app.models import ( EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, - CHOOSE_POSTAGE, Template, TemplateHistory ) From 1b30e867072d2667aec1d9438a9fe00e37494f53 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 17 Dec 2018 17:50:39 +0000 Subject: [PATCH 052/118] Update v2 template schema to include postage --- app/v2/template/template_schemas.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/app/v2/template/template_schemas.py 
b/app/v2/template/template_schemas.py index b1b1f4820..ebd0b8342 100644 --- a/app/v2/template/template_schemas.py +++ b/app/v2/template/template_schemas.py @@ -37,6 +37,7 @@ get_template_by_id_response = { "body": {"type": "string"}, "subject": {"type": ["string", "null"]}, "name": {"type": "string"}, + "postage": {"type": "string"} }, "required": ["id", "type", "created_at", "updated_at", "version", "created_by", "body", "name"], } @@ -63,7 +64,8 @@ post_template_preview_response = { "type": {"enum": TEMPLATE_TYPES}, "version": {"type": "integer"}, "body": {"type": "string"}, - "subject": {"type": ["string", "null"]} + "subject": {"type": ["string", "null"]}, + "postage": {"type": "string"} }, "required": ["id", "type", "version", "body"] } @@ -77,5 +79,6 @@ def create_post_template_preview_response(template, template_object): "type": template.template_type, "version": template.version, "body": str(template_object), - "subject": subject + "subject": subject, + "postage": template.postage } From 4929a6ac083984517cf8461818e2e3d81d9cc2bb Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 18 Dec 2018 18:21:03 +0000 Subject: [PATCH 053/118] Include postage in checking if template changed --- app/template/rest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/template/rest.py b/app/template/rest.py index 9dc32c94f..79b1d7d8b 100644 --- a/app/template/rest.py +++ b/app/template/rest.py @@ -203,7 +203,7 @@ def get_template_versions(service_id, template_id): def _template_has_not_changed(current_data, updated_template): return all( current_data[key] == updated_template[key] - for key in ('name', 'content', 'subject', 'archived', 'process_type') + for key in ('name', 'content', 'subject', 'archived', 'process_type', 'postage') ) From 686c58acee0eeb65186be4c229caf5c1715710df Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 24 Dec 2018 11:27:29 +0000 Subject: [PATCH 054/118] Test post letter request sets notification postage correctly --- 
tests/app/conftest.py | 2 ++ .../test_post_letter_notifications.py | 24 ++++++++++++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 15f76846f..6b04e83fe 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -158,6 +158,7 @@ def sample_service( email_from=None, permissions=None, research_mode=None, + postage="second", ): if user is None: user = create_user() @@ -170,6 +171,7 @@ def sample_service( 'restricted': restricted, 'email_from': email_from, 'created_by': user, + "postage": postage, } service = Service.query.filter_by(name=service_name).first() if not service: diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index 4b42af6dc..734928088 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -20,6 +20,7 @@ from app.models import ( NOTIFICATION_DELIVERED, NOTIFICATION_PENDING_VIRUS_CHECK, SMS_TYPE, + CHOOSE_POSTAGE ) from app.schema_validation import validate from app.v2.errors import RateLimitError @@ -95,12 +96,23 @@ def test_post_letter_notification_returns_201(client, sample_letter_template, mo mock.assert_called_once_with([str(notification.id)], queue=QueueNames.CREATE_LETTERS_PDF) -@pytest.mark.parametrize('postage', ['first', 'second']) -def test_post_letter_notification_sets_postage(client, sample_letter_template, mocker, postage): - sample_letter_template.service.postage = postage +@pytest.mark.parametrize('service_permissions, service_postage, template_postage, expected_postage', [ + ([LETTER_TYPE], "second", "first", "second"), + ([LETTER_TYPE], "first", "second", "first"), + ([LETTER_TYPE], "first", None, "first"), + ([LETTER_TYPE, CHOOSE_POSTAGE], "second", "first", "first"), + ([LETTER_TYPE, CHOOSE_POSTAGE], "second", None, "second"), + ([LETTER_TYPE, CHOOSE_POSTAGE], "second", "second", 
"second"), + ([LETTER_TYPE, CHOOSE_POSTAGE], "first", "second", "second"), +]) +def test_post_letter_notification_sets_postage( + client, notify_db_session, mocker, service_permissions, service_postage, template_postage, expected_postage +): + service = create_service(service_permissions=service_permissions, postage=service_postage) + template = create_template(service, template_type="letter", postage=template_postage) mocker.patch('app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async') data = { - 'template_id': str(sample_letter_template.id), + 'template_id': str(template.id), 'personalisation': { 'address_line_1': 'Her Royal Highness Queen Elizabeth II', 'address_line_2': 'Buckingham Palace', @@ -110,11 +122,11 @@ def test_post_letter_notification_sets_postage(client, sample_letter_template, m } } - resp_json = letter_request(client, data, service_id=sample_letter_template.service_id) + resp_json = letter_request(client, data, service_id=service.id) assert validate(resp_json, post_letter_response) == resp_json notification = Notification.query.one() - assert notification.postage == postage + assert notification.postage == expected_postage @pytest.mark.parametrize('env', [ From fb1ca9b20d636f73a4a37f59fec88488614ea617 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 24 Dec 2018 12:24:17 +0000 Subject: [PATCH 055/118] Test postage setting on happy path for create and update template --- tests/app/template/test_rest.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index 429172011..64c51becc 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -18,7 +18,8 @@ from app.models import ( LETTER_TYPE, SMS_TYPE, Template, - TemplateHistory + TemplateHistory, + CHOOSE_POSTAGE ) from app.dao.templates_dao import dao_get_template_by_id, dao_redact_template @@ -43,7 +44,7 @@ from tests.conftest import 
set_config_values def test_should_create_a_new_template_for_a_service( client, sample_user, template_type, subject ): - service = create_service(service_permissions=[template_type]) + service = create_service(service_permissions=[template_type, CHOOSE_POSTAGE]) data = { 'name': 'my template', 'template_type': template_type, @@ -53,6 +54,8 @@ def test_should_create_a_new_template_for_a_service( } if subject: data.update({'subject': subject}) + if template_type == LETTER_TYPE: + data.update({'postage': 'first'}) data = json.dumps(data) auth_header = create_authorization_header() @@ -76,6 +79,11 @@ def test_should_create_a_new_template_for_a_service( else: assert not json_resp['data']['subject'] + if template_type == LETTER_TYPE: + assert json_resp['data']['postage'] == 'first' + else: + assert not json_resp['data']['postage'] + template = Template.query.get(json_resp['data']['id']) from app.schemas import template_schema assert sorted(json_resp['data']) == sorted(template_schema.dump(template).data) @@ -356,16 +364,19 @@ def test_must_have_a_subject_on_an_email_or_letter_template(client, sample_user, assert json_resp['errors'][0]["message"] == 'subject is a required property' -def test_update_should_update_a_template(client, sample_user, sample_template): +def test_update_should_update_a_template(client, sample_user): + service = create_service(service_permissions=[LETTER_TYPE, CHOOSE_POSTAGE]) + template = create_template(service, template_type="letter", postage="second") data = { - 'content': 'my template has new content ', - 'created_by': str(sample_user.id) + 'content': 'my template has new content, swell!', + 'created_by': str(sample_user.id), + 'postage': 'first' } data = json.dumps(data) auth_header = create_authorization_header() update_response = client.post( - '/service/{}/template/{}'.format(sample_template.service_id, sample_template.id), + '/service/{}/template/{}'.format(service.id, template.id), headers=[('Content-Type', 'application/json'), 
auth_header], data=data ) @@ -373,10 +384,11 @@ def test_update_should_update_a_template(client, sample_user, sample_template): assert update_response.status_code == 200 update_json_resp = json.loads(update_response.get_data(as_text=True)) assert update_json_resp['data']['content'] == ( - 'my template has new content ' + 'my template has new content, swell!' ) - assert update_json_resp['data']['name'] == sample_template.name - assert update_json_resp['data']['template_type'] == sample_template.template_type + assert update_json_resp['data']['postage'] == 'first' + assert update_json_resp['data']['name'] == template.name + assert update_json_resp['data']['template_type'] == template.template_type assert update_json_resp['data']['version'] == 2 From 941e14f71a43692f2e678cfeb100559b120ec8ad Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 27 Dec 2018 14:00:53 +0000 Subject: [PATCH 056/118] Added the limit to the query for the services with data retention. Also did a bit of refactoring. 
--- app/dao/notifications_dao.py | 59 ++++++++++++++++++++++-------------- 1 file changed, 37 insertions(+), 22 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 9eae49f68..f27190ad3 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -327,16 +327,17 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, for f in flexible_data_retention: days_of_retention = get_london_midnight_in_utc( convert_utc_to_bst(datetime.utcnow()).date()) - timedelta(days=f.days_of_retention) - query = db.session.query(Notification).filter( - Notification.created_at < days_of_retention, - Notification.notification_type == f.notification_type, - Notification.service_id == f.service_id - ) + if notification_type == LETTER_TYPE: - _delete_letters_from_s3(query) + _delete_letters_from_s3( + notification_type, f.service_id, days_of_retention, qry_limit + ) + current_app.logger.info( "Deleting {} notifications for service id: {}".format(notification_type, f.service_id)) - deleted += _delete_notifications(deleted, query) + deleted += _delete_notifications( + deleted, notification_type, days_of_retention, f.service_id, qry_limit + ) current_app.logger.info( 'Deleting {} notifications for services without flexible data retention'.format(notification_type)) @@ -346,39 +347,53 @@ def delete_notifications_created_more_than_a_week_ago_by_type(notification_type, service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all() for service_id in service_ids_to_purge: - subquery = db.session.query( - Notification - ).filter( - Notification.notification_type == notification_type, - Notification.created_at < seven_days_ago, - Notification.service_id == service_id - ).limit(qry_limit) - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(query=subquery) + _delete_letters_from_s3( + notification_type, service_id, seven_days_ago, qry_limit + ) - deleted += 
_delete_notifications(deleted, subquery) + deleted += _delete_notifications( + deleted, notification_type, seven_days_ago, service_id, qry_limit + ) current_app.logger.info('Finished deleting {} notifications'.format(notification_type)) return deleted -def _delete_notifications(deleted, query): +def _delete_notifications( + deleted, notification_type, date_to_delete_from, service_id, query_limit): + + subquery = db.session.query( + Notification.id + ).filter( + Notification.notification_type == notification_type, + Notification.service_id == service_id, + Notification.created_at < date_to_delete_from + ).limit(query_limit).subquery() + number_deleted = db.session.query(Notification).filter( - Notification.id.in_([x.id for x in query.all()])).delete(synchronize_session='fetch') + Notification.id.in_(subquery)).delete(synchronize_session='fetch') deleted += number_deleted db.session.commit() while number_deleted > 0: number_deleted = db.session.query(Notification).filter( - Notification.id.in_([x.id for x in query.all()])).delete(synchronize_session='fetch') + Notification.id.in_(subquery)).delete(synchronize_session='fetch') deleted += number_deleted db.session.commit() return deleted -def _delete_letters_from_s3(query): - letters_to_delete_from_s3 = query.all() +def _delete_letters_from_s3( + notification_type, service_id, date_to_delete_from, query_limit +): + letters_to_delete_from_s3 = db.session.query( + Notification + ).filter( + Notification.notification_type == notification_type, + Notification.created_at < date_to_delete_from, + Notification.service_id == service_id + ).limit(query_limit).all() for letter in letters_to_delete_from_s3: bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] if letter.sent_at: From 39963d978441ca44da038956f6403fc1ab5f44e5 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 31 Dec 2018 16:08:08 +0000 Subject: [PATCH 057/118] Created a query to get the notification status counts per notification type and service for all 
service for a given date range. The query follows the same pattern as the other queries, getting the statistics from the fact_notification_status table for dates older than today and union that with today. Tests required. --- app/dao/fact_notification_status_dao.py | 94 ++++++++++++++++++- .../dao/test_fact_notification_status_dao.py | 11 ++- 2 files changed, 103 insertions(+), 2 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 8925c81f0..3389225d7 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -8,7 +8,7 @@ from sqlalchemy.sql.expression import literal from sqlalchemy.types import DateTime, Integer from app import db -from app.models import Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST +from app.models import Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST, Service from app.utils import get_london_midnight_in_utc, midnight_n_days_ago @@ -197,3 +197,95 @@ def fetch_notification_statuses_for_job(job_id): ).group_by( FactNotificationStatus.notification_status ).all() + + +def fetch_stats_for_all_services_by_date_range(start_date, end_date,include_from_test_key=True): + stats = db.session.query( + FactNotificationStatus.service_id.label('service_id'), + Service.name.label('name'), + Service.restricted.label('restricted'), + Service.research_mode.label('research_mode'), + Service.active.label('active'), + Service.created_at.label('created_at'), + FactNotificationStatus.notification_type.label('notification_type'), + FactNotificationStatus.notification_status.label('status'), + func.sum(FactNotificationStatus.notification_count).label('count') + ).filter( + FactNotificationStatus.bst_date >= start_date, + FactNotificationStatus.bst_date <= end_date, + FactNotificationStatus.service_id == Service.id, + ).group_by( + FactNotificationStatus.service_id.label('service_id'), + Service.name, + 
Service.restricted, + Service.research_mode, + Service.active, + Service.created_at, + FactNotificationStatus.notification_type, + FactNotificationStatus.notification_status, + ).order_by( + FactNotificationStatus.service_id, + FactNotificationStatus.notification_type + ) + if not include_from_test_key: + stats = stats.filter(FactNotificationStatus.key_type != KEY_TYPE_TEST) + + today = get_london_midnight_in_utc(datetime.utcnow()) + if start_date <= today.date() <= end_date: + subquery = db.session.query( + Notification.notification_type.cast(db.Text).label('notification_type'), + Notification.status.label('status'), + Notification.service_id.label('service_id'), + func.count(Notification.id).label('count') + ).filter( + Notification.created_at >= today + ).group_by( + Notification.notification_type, + Notification.status, + Notification.service_id + ) + if not include_from_test_key: + subquery = subquery.filter(FactNotificationStatus.key_type != KEY_TYPE_TEST) + subquery = subquery.subquery() + + stats_for_today = db.session.query( + Service.id.label('service_id'), + Service.name.label('name'), + Service.restricted.label('restricted'), + Service.research_mode.label('research_mode'), + Service.active.label('active'), + Service.created_at.label('created_at'), + subquery.c.notification_type.label('notification_type'), + subquery.c.status.label('status'), + subquery.c.count.label('count') + ).outerjoin( + subquery, + subquery.c.service_id == Service.id + ).order_by(Service.id) + + all_stats_table = stats.union_all(stats_for_today).subquery() + query = db.session.query( + all_stats_table.c.service_id, + all_stats_table.c.name, + all_stats_table.c.restricted, + all_stats_table.c.research_mode, + all_stats_table.c.active, + all_stats_table.c.created_at, + all_stats_table.c.notification_type, + all_stats_table.c.status, + func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), + ).group_by( + all_stats_table.c.service_id, + all_stats_table.c.name, + 
all_stats_table.c.restricted, + all_stats_table.c.research_mode, + all_stats_table.c.active, + all_stats_table.c.created_at, + all_stats_table.c.notification_type, + all_stats_table.c.status, + ).order_by( + all_stats_table.c.service_id + ) + else: + query = stats + return query.all() diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index db90c6d61..15a5b7605 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -11,7 +11,7 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_status_for_service_for_today_and_7_previous_days, fetch_notification_status_totals_for_all_services, fetch_notification_statuses_for_job, -) + fetch_stats_for_all_services_by_date_range) from app.models import FactNotificationStatus, KEY_TYPE_TEST, KEY_TYPE_TEAM, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE from freezegun import freeze_time from tests.app.db import create_notification, create_service, create_template, create_ft_notification_status, create_job @@ -304,3 +304,12 @@ def test_fetch_notification_statuses_for_job(sample_template): 'created': 5, 'delivered': 2 } + + +@freeze_time('2018-10-31 14:00') +def test_fetch_stats_for_all_services_by_date_range(notify_db_session): + set_up_data() + results = fetch_stats_for_all_services_by_date_range( start_date=date(2018, 10, 29), + end_date=date(2018, 10, 31)) + print(results) + assert len(results) == 2 From 80454579eeffbac5a1c621f0811c467fbd23bf90 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 2 Jan 2019 14:26:48 +0000 Subject: [PATCH 058/118] fix multi worker exit script previously the script would: try and SIGTERM each celery process every second for the 9 second timeout, and then SIGKILL every second after, with no upper bound. This commit changes this to: * SIGTERM each process once. 
* Wait nine seconds (checking if the pid files are still present each second) * SIGKILL any remaining processes once. * exit --- scripts/run_multi_worker_app_paas.sh | 42 +++++++++++++--------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/scripts/run_multi_worker_app_paas.sh b/scripts/run_multi_worker_app_paas.sh index 3965ee577..5ddc933ec 100755 --- a/scripts/run_multi_worker_app_paas.sh +++ b/scripts/run_multi_worker_app_paas.sh @@ -39,13 +39,15 @@ log_stream_name = {hostname} EOF } -# For every PID, check if it's still running -# if it is, send the sigterm +# For every PID, check if it's still running. if it is, send the sigterm. then wait 9 seconds before sending sigkill function on_exit { + echo "multi worker app exiting" wait_time=0 - while true; do - # refresh pids to account for the case that - # some workers may have terminated but others not + + send_signal_to_celery_processes TERM + + # check if the apps are still running every second + while [[ "$wait_time" -le "$TERMINATE_TIMEOUT" ]]; do get_celery_pids # look here for explanation regarding this syntax: @@ -53,28 +55,14 @@ function on_exit { PROCESS_COUNT="${#APP_PIDS[@]}" if [[ "${PROCESS_COUNT}" -eq "0" ]]; then echo "No more .pid files found, exiting" - break + return 0 fi - echo "Terminating celery processes with pids "${APP_PIDS} - for APP_PID in ${APP_PIDS}; do - # if TERMINATE_TIMEOUT is reached, send SIGKILL - if [[ "$wait_time" -ge "$TERMINATE_TIMEOUT" ]]; then - echo "Timeout reached, killing process with pid ${APP_PID}" - kill -9 ${APP_PID} || true - continue - else - echo "Timeout not reached yet, checking " ${APP_PID} - # else, if process is still running send SIGTERM - if [[ $(kill -0 ${APP_PID} 2&>/dev/null) ]]; then - echo "Terminating celery process with pid ${APP_PID}" - kill ${APP_PID} || true - fi - fi - done let wait_time=wait_time+1 sleep 1 done + + send_signal_to_celery_processes KILL } function get_celery_pids { @@ -85,6 +73,16 @@ function get_celery_pids 
{ fi } +function send_signal_to_celery_processes { + # refresh pids to account for the case that some workers may have terminated but others not + get_celery_pids + # send signal to all remaining apps + for APP_PID in ${APP_PIDS}; do + echo "Sending signal ${1} to process with pid ${APP_PID}" + kill -s ${1} ${APP_PID} || true + done +} + function start_application { eval "$@" get_celery_pids From 98e501c4c4e09c67e1a6fe6c7261385dfd3e9b0b Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 2 Jan 2019 15:45:06 +0000 Subject: [PATCH 059/118] Bump utils to 30.7.2 --- requirements-app.txt | 2 +- requirements.txt | 24 ++++++++++++------------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements-app.txt b/requirements-app.txt index b0fa41bf3..0941e7f4b 100644 --- a/requirements-app.txt +++ b/requirements-app.txt @@ -29,6 +29,6 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.1#egg=notifications-utils==30.7.1 +git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/requirements.txt b/requirements.txt index 1df4c529f..09bfcfe90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,22 +31,22 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.1#egg=notifications-utils==30.7.1 +git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 ## The following requirements were added by pip freeze: -alembic==1.0.3 +alembic==1.0.5 amqp==1.4.9 anyjson==0.3.3 attrs==18.2.0 -awscli==1.16.62 -bcrypt==3.1.4 
+awscli==1.16.81 +bcrypt==3.1.5 billiard==3.3.0.23 bleach==2.1.3 boto3==1.6.16 -botocore==1.12.52 -certifi==2018.10.15 +botocore==1.12.71 +certifi==2018.11.29 chardet==3.0.4 Click==7.0 colorama==0.3.9 @@ -55,7 +55,7 @@ Flask-Redis==0.3.0 future==0.17.1 greenlet==0.4.15 html5lib==1.0.1 -idna==2.7 +idna==2.8 Jinja2==2.10 jmespath==0.9.3 kombu==3.0.37 @@ -65,20 +65,20 @@ mistune==0.8.3 monotonic==1.5 orderedset==2.0.1 phonenumbers==8.9.4 -pyasn1==0.4.4 +pyasn1==0.4.5 pycparser==2.19 PyPDF2==1.26.0 -pyrsistent==0.14.7 +pyrsistent==0.14.8 python-dateutil==2.7.5 python-editor==1.0.3 python-json-logger==0.1.8 pytz==2018.7 PyYAML==3.12 -redis==2.10.6 -requests==2.20.1 +redis==3.0.1 +requests==2.21.0 rsa==3.4.2 s3transfer==0.1.13 -six==1.11.0 +six==1.12.0 smartypants==2.0.1 statsd==3.2.2 urllib3==1.24.1 From c12594949f3c7f9fa8dbbe66a557f22c244a3af2 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Fri, 28 Dec 2018 16:15:12 +0000 Subject: [PATCH 060/118] Refactor service_factory --- tests/app/conftest.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 6b04e83fe..38b483f0e 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -76,20 +76,26 @@ def service_factory(notify_db, notify_db_session): user = create_user() if not email_from: email_from = service_name - service = sample_service(notify_db, notify_db_session, service_name, user, email_from=email_from) + service = Service.query.filter_by(name=service_name).first() + if not service: + service = create_service( + email_from=email_from, + service_name=service_name, + service_permissions=None, + user=user, + ) if template_type == 'email': - sample_template( - notify_db, - notify_db_session, + create_template( + service, + template_name="Template Name", template_type=template_type, - subject_line=service.email_from, - service=service + subject=service.email_from, ) else: - sample_template( - notify_db, - 
notify_db_session, - service=service + create_template( + service, + template_name="Template Name", + template_type='sms', ) return service From 923703120b3dc5a21992717443cc35053d6b51ba Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 13:01:24 +0000 Subject: [PATCH 061/118] Check if test service exists before it gets created --- tests/app/conftest.py | 15 +++++++-------- tests/app/db.py | 28 +++++++++++++++------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 38b483f0e..e77deb4a1 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -76,14 +76,13 @@ def service_factory(notify_db, notify_db_session): user = create_user() if not email_from: email_from = service_name - service = Service.query.filter_by(name=service_name).first() - if not service: - service = create_service( - email_from=email_from, - service_name=service_name, - service_permissions=None, - user=user, - ) + + service = create_service( + email_from=email_from, + service_name=service_name, + service_permissions=None, + user=user, + ) if template_type == 'email': create_template( service, diff --git a/tests/app/db.py b/tests/app/db.py index 02a2df85c..3783026e6 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -83,21 +83,23 @@ def create_service( organisation_type='central', postage='second' ): - service = Service( - name=service_name, - message_limit=message_limit, - restricted=restricted, - email_from=email_from if email_from else service_name.lower().replace(' ', '.'), - created_by=user or create_user(email='{}@digital.cabinet-office.gov.uk'.format(uuid.uuid4())), - prefix_sms=prefix_sms, - organisation_type=organisation_type, - postage=postage - ) + service = Service.query.filter_by(name=service_name).first() + if not service: + service = Service( + name=service_name, + message_limit=message_limit, + restricted=restricted, + email_from=email_from if email_from else 
service_name.lower().replace(' ', '.'), + created_by=user or create_user(email='{}@digital.cabinet-office.gov.uk'.format(uuid.uuid4())), + prefix_sms=prefix_sms, + organisation_type=organisation_type, + postage=postage + ) - dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) + dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) - service.active = active - service.research_mode = research_mode + service.active = active + service.research_mode = research_mode return service From 3306b9fc97e44b91caad8086e831e85f047bbb4f Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 14:18:52 +0000 Subject: [PATCH 062/118] use conditional in create_service to establish user --- tests/app/db.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/app/db.py b/tests/app/db.py index 3783026e6..e0ca9bddd 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -90,12 +90,11 @@ def create_service( message_limit=message_limit, restricted=restricted, email_from=email_from if email_from else service_name.lower().replace(' ', '.'), - created_by=user or create_user(email='{}@digital.cabinet-office.gov.uk'.format(uuid.uuid4())), + created_by=user if user else create_user(email='{}@digital.cabinet-office.gov.uk'.format(uuid.uuid4())), prefix_sms=prefix_sms, organisation_type=organisation_type, postage=postage ) - dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) service.active = active From 95115e7ae6ae3eefcb49196e015e728ee3e02409 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 14:34:02 +0000 Subject: [PATCH 063/118] Use create_service instead of sample_service when creating service permission for tests --- tests/app/conftest.py | 2 +- tests/app/db.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index e77deb4a1..ff817501e 
100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -802,7 +802,7 @@ def sample_user_service_permission( if user is None: user = create_user() if service is None: - service = sample_service(notify_db, notify_db_session, user=user) + service = create_service(user=user) data = { 'user': user, 'service': service, diff --git a/tests/app/db.py b/tests/app/db.py index e0ca9bddd..ee17c48cd 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -17,7 +17,7 @@ from app.dao.service_data_retention_dao import insert_service_data_retention from app.dao.service_inbound_api_dao import save_service_inbound_api from app.dao.service_permissions_dao import dao_add_service_permission from app.dao.service_sms_sender_dao import update_existing_sms_sender_with_inbound_number, dao_update_service_sms_sender -from app.dao.services_dao import dao_create_service +from app.dao.services_dao import dao_create_service, dao_add_user_to_service from app.dao.templates_dao import dao_create_template, dao_update_template from app.dao.users_dao import save_model_user from app.models import ( @@ -99,6 +99,9 @@ def create_service( service.active = active service.research_mode = research_mode + else: + if user not in service.users: + dao_add_user_to_service(service, user) return service From e8ce669b723e8c3a11ef322739b2278dc2fe4895 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 15:04:01 +0000 Subject: [PATCH 064/118] Use create_service instead of sample_service when creating sample_email_template for tests --- tests/app/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index ff817501e..bca4f3776 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -295,7 +295,7 @@ def sample_email_template( if user is None: user = create_user() if service is None: - service = sample_service(notify_db, notify_db_session, permissions=permissions) + service = create_service(user=user, 
service_permissions=permissions) data = { 'name': template_name, 'template_type': template_type, From 0bcf13d85cf1f5a22cc08810e231de4df803d6d6 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 15:16:00 +0000 Subject: [PATCH 065/118] sample_api_key uses create_service instead of sample_service --- tests/app/conftest.py | 2 +- tests/app/db.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index bca4f3776..cadced70d 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -350,7 +350,7 @@ def sample_api_key(notify_db, key_type=KEY_TYPE_NORMAL, name=None): if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() data = {'service': service, 'name': name or uuid.uuid4(), 'created_by': service.created_by, 'key_type': key_type} api_key = ApiKey(**data) save_model_api_key(api_key) diff --git a/tests/app/db.py b/tests/app/db.py index ee17c48cd..d6d4b9553 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -100,7 +100,7 @@ def create_service( service.active = active service.research_mode = research_mode else: - if user not in service.users: + if user and user not in service.users: dao_add_user_to_service(service, user) return service From a3310c2da6d7f7bb8cfcc6e2bf439b6bebb1cefd Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 15:20:30 +0000 Subject: [PATCH 066/118] sample_job uses create_service instead of sample_service --- tests/app/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index cadced70d..48fe278d2 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -382,7 +382,7 @@ def sample_job( archived=False ): if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if template is None: template = sample_template(notify_db, notify_db_session, service=service) From 
d367daaf6edc684637dfb088015baba4e42cbd12 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 15:36:25 +0000 Subject: [PATCH 067/118] Some more conftest fixtures use create_service instead of sample_service --- tests/app/conftest.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 48fe278d2..8ecb0ae20 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -442,7 +442,7 @@ def sample_email_job(notify_db, service=None, template=None): if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if template is None: template = sample_email_template( notify_db, @@ -550,7 +550,7 @@ def sample_notification( if created_at is None: created_at = datetime.utcnow() if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if template is None: template = sample_template(notify_db, notify_db_session, service=service) @@ -639,7 +639,7 @@ def sample_notification_with_api_key(notify_db, notify_db_session): @pytest.fixture(scope='function') def sample_email_notification(notify_db, notify_db_session): created_at = datetime.utcnow() - service = sample_service(notify_db, notify_db_session) + service = create_service() template = sample_email_template(notify_db, notify_db_session, service=service) job = sample_job(notify_db, notify_db_session, service=service, template=template) @@ -741,7 +741,7 @@ def sample_invited_user(notify_db, to_email_address=None): if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if to_email_address is None: to_email_address = 'invited_user@digital.gov.uk' @@ -781,7 +781,7 @@ def sample_permission(notify_db, 'permission': permission } if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if service: data['service'] = service p_model = 
Permission.query.filter_by( From 154257027f8c69dd7e6df5c2fe8b272190d1259e Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 15:53:56 +0000 Subject: [PATCH 068/118] Nothing in conftest uses sample_service now :) --- tests/app/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 8ecb0ae20..abcb0f64a 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -1036,7 +1036,7 @@ def notify_service(notify_db, notify_db_session): @pytest.fixture(scope='function') def sample_service_whitelist(notify_db, notify_db_session, service=None, email_address=None, mobile_number=None): if service is None: - service = sample_service(notify_db, notify_db_session) + service = create_service() if email_address: whitelisted_user = ServiceWhitelist.from_string(service.id, EMAIL_TYPE, email_address) From cde30de100bcf86b8558f3c4ea0ac8330f002783 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 16:14:09 +0000 Subject: [PATCH 069/118] Use create_template instead of sample_template in sample_notification --- tests/app/conftest.py | 2 +- .../notification_dao/test_notification_dao_template_usage.py | 2 +- tests/app/notifications/test_rest.py | 2 +- tests/app/template_statistics/test_rest.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index abcb0f64a..306eb2637 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -552,7 +552,7 @@ def sample_notification( if service is None: service = create_service() if template is None: - template = sample_template(notify_db, notify_db_session, service=service) + template = create_template(service=service) if job is None and api_key is None: # we didn't specify in test - lets create it diff --git a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py index 
76800694f..88aaa783d 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py +++ b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py @@ -22,7 +22,7 @@ from tests.app.db import ( def test_last_template_usage_should_get_right_data(sample_notification): results = dao_get_last_template_usage(sample_notification.template_id, 'sms', sample_notification.service_id) - assert results.template.name == 'Template Name' + assert results.template.name == 'sms Template Name' assert results.template.template_type == 'sms' assert results.created_at == sample_notification.created_at assert results.template_id == sample_notification.template_id diff --git a/tests/app/notifications/test_rest.py b/tests/app/notifications/test_rest.py index 0c9051d90..a035500cb 100644 --- a/tests/app/notifications/test_rest.py +++ b/tests/app/notifications/test_rest.py @@ -153,7 +153,7 @@ def test_get_all_notifications(client, sample_notification): assert notifications['notifications'][0]['to'] == '+447700900855' assert notifications['notifications'][0]['service'] == str(sample_notification.service_id) - assert notifications['notifications'][0]['body'] == "This is a template:\nwith a newline" + assert notifications['notifications'][0]['body'] == 'Dear Sir/Madam, Hello. Yours Truly, The Government.' 
def test_normal_api_key_returns_notifications_created_from_jobs_and_from_api( diff --git a/tests/app/template_statistics/test_rest.py b/tests/app/template_statistics/test_rest.py index b9559e134..1a37cc6e7 100644 --- a/tests/app/template_statistics/test_rest.py +++ b/tests/app/template_statistics/test_rest.py @@ -59,7 +59,7 @@ def test_get_template_statistics_for_service_by_day_returns_template_info(admin_ assert json_resp['data'][0]['count'] == 1 assert json_resp['data'][0]['template_id'] == str(sample_notification.template_id) - assert json_resp['data'][0]['template_name'] == 'Template Name' + assert json_resp['data'][0]['template_name'] == 'sms Template Name' assert json_resp['data'][0]['template_type'] == 'sms' assert json_resp['data'][0]['is_precompiled_letter'] is False From f79c0b03e7ca00c02f8a4d9f417e673883f8cc98 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 31 Dec 2018 16:19:41 +0000 Subject: [PATCH 070/118] Sample job uses create_template instead of sample template --- tests/app/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 306eb2637..407372d64 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -384,8 +384,7 @@ def sample_job( if service is None: service = create_service() if template is None: - template = sample_template(notify_db, notify_db_session, - service=service) + template = create_template(service=service) data = { 'id': uuid.uuid4(), 'service_id': service.id, From eea324a19d0e1b10a44700977ddadaa8a6724d85 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 2 Jan 2019 17:15:27 +0000 Subject: [PATCH 071/118] Add flag on create_service to decide whether we should also check if service exists --- tests/app/conftest.py | 28 +++++++++++++++------------- tests/app/db.py | 8 +++++--- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 407372d64..5b6aa2234 100644 --- 
a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -82,6 +82,7 @@ def service_factory(notify_db, notify_db_session): service_name=service_name, service_permissions=None, user=user, + check_if_service_exists=True, ) if template_type == 'email': create_template( @@ -200,7 +201,8 @@ def sample_service( def _sample_service_full_permissions(notify_db_session): service = create_service( service_name="sample service full permissions", - service_permissions=set(SERVICE_PERMISSION_TYPES) + service_permissions=set(SERVICE_PERMISSION_TYPES), + check_if_service_exists=True ) create_inbound_number('12345', service_id=service.id) return service @@ -233,7 +235,7 @@ def sample_template( if service is None: service = Service.query.filter_by(name='Sample service').first() if not service: - service = create_service(service_permissions=permissions) + service = create_service(service_permissions=permissions, check_if_service_exists=True) if created_by is None: created_by = create_user() @@ -295,7 +297,7 @@ def sample_email_template( if user is None: user = create_user() if service is None: - service = create_service(user=user, service_permissions=permissions) + service = create_service(user=user, service_permissions=permissions, check_if_service_exists=True) data = { 'name': template_name, 'template_type': template_type, @@ -350,7 +352,7 @@ def sample_api_key(notify_db, key_type=KEY_TYPE_NORMAL, name=None): if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) data = {'service': service, 'name': name or uuid.uuid4(), 'created_by': service.created_by, 'key_type': key_type} api_key = ApiKey(**data) save_model_api_key(api_key) @@ -382,7 +384,7 @@ def sample_job( archived=False ): if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) if template is None: template = create_template(service=service) data = { @@ -441,7 +443,7 @@ def sample_email_job(notify_db, service=None, 
template=None): if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) if template is None: template = sample_email_template( notify_db, @@ -549,7 +551,7 @@ def sample_notification( if created_at is None: created_at = datetime.utcnow() if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) if template is None: template = create_template(service=service) @@ -638,7 +640,7 @@ def sample_notification_with_api_key(notify_db, notify_db_session): @pytest.fixture(scope='function') def sample_email_notification(notify_db, notify_db_session): created_at = datetime.utcnow() - service = create_service() + service = create_service(check_if_service_exists=True) template = sample_email_template(notify_db, notify_db_session, service=service) job = sample_job(notify_db, notify_db_session, service=service, template=template) @@ -740,7 +742,7 @@ def sample_invited_user(notify_db, to_email_address=None): if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) if to_email_address is None: to_email_address = 'invited_user@digital.gov.uk' @@ -780,7 +782,7 @@ def sample_permission(notify_db, 'permission': permission } if service is None: - service = create_service() + service = create_service(check_if_service_exists=True) if service: data['service'] = service p_model = Permission.query.filter_by( @@ -801,7 +803,7 @@ def sample_user_service_permission( if user is None: user = create_user() if service is None: - service = create_service(user=user) + service = create_service(user=user, check_if_service_exists=True) data = { 'user': user, 'service': service, @@ -1035,7 +1037,7 @@ def notify_service(notify_db, notify_db_session): @pytest.fixture(scope='function') def sample_service_whitelist(notify_db, notify_db_session, service=None, email_address=None, mobile_number=None): if service is None: - service = create_service() + service = 
create_service(check_if_service_exists=True) if email_address: whitelisted_user = ServiceWhitelist.from_string(service.id, EMAIL_TYPE, email_address) @@ -1060,7 +1062,7 @@ def sample_provider_rate(notify_db, notify_db_session, valid_from=None, rate=Non @pytest.fixture def sample_inbound_numbers(notify_db, notify_db_session, sample_service): - service = create_service(service_name='sample service 2') + service = create_service(service_name='sample service 2', check_if_service_exists=True) inbound_numbers = list() inbound_numbers.append(create_inbound_number(number='1', provider='mmg')) inbound_numbers.append(create_inbound_number(number='2', provider='mmg', active=False, service_id=service.id)) diff --git a/tests/app/db.py b/tests/app/db.py index d6d4b9553..5c4889057 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -81,10 +81,12 @@ def create_service( prefix_sms=True, message_limit=1000, organisation_type='central', - postage='second' + postage='second', + check_if_service_exists=False ): - service = Service.query.filter_by(name=service_name).first() - if not service: + if check_if_service_exists: + service = Service.query.filter_by(name=service_name).first() + if (not check_if_service_exists) or (check_if_service_exists and not service): service = Service( name=service_name, message_limit=message_limit, From 2355ee011f36f4060bff7325b1b4bf9b997685b7 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 20 Dec 2018 16:01:39 +0000 Subject: [PATCH 072/118] log more info when we receive multiple delivery callbacks for one notification Previously, we logged a warning containing the notification reference and new status. However it wasn't a great message - this new one includes the notification id, the old status, the time difference and more. This separates out logs for callbacks for notifications we don't know (error level) and duplicates (info level). 
--- app/dao/notifications_dao.py | 62 +++++++++++++------ .../notifications_ses_callback.py | 3 - app/notifications/process_client_response.py | 4 -- 3 files changed, 44 insertions(+), 25 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f27190ad3..d683cb636 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -15,7 +15,7 @@ from notifications_utils.recipients import ( ) from notifications_utils.statsd_decorators import statsd from notifications_utils.timezones import convert_utc_to_bst -from sqlalchemy import (desc, func, or_, asc) +from sqlalchemy import (desc, func, asc) from sqlalchemy.orm import joinedload from sqlalchemy.sql import functions from sqlalchemy.sql.expression import case @@ -144,17 +144,23 @@ def _update_notification_status(notification, status): @statsd(namespace="dao") @transactional def update_notification_status_by_id(notification_id, status, sent_by=None): - notification = Notification.query.with_for_update().filter( - Notification.id == notification_id, - or_( - Notification.status == NOTIFICATION_CREATED, - Notification.status == NOTIFICATION_SENDING, - Notification.status == NOTIFICATION_PENDING, - Notification.status == NOTIFICATION_SENT, - Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK - )).first() + notification = Notification.query.with_for_update().filter(Notification.id == notification_id).first() if not notification: + current_app.logger.error('notification not found for id {} (update to status {})'.format( + notification_id, + status + )) + return None + + if notification.status not in { + NOTIFICATION_CREATED, + NOTIFICATION_SENDING, + NOTIFICATION_PENDING, + NOTIFICATION_SENT, + NOTIFICATION_PENDING_VIRUS_CHECK + }: + _duplicate_update_warning(notification, status) return None if notification.international and not country_records_delivery(notification.phone_prefix): @@ -170,15 +176,19 @@ def update_notification_status_by_id(notification_id, status, 
sent_by=None): @statsd(namespace="dao") @transactional def update_notification_status_by_reference(reference, status): - notification = Notification.query.filter( - Notification.reference == reference, - or_( - Notification.status == NOTIFICATION_SENDING, - Notification.status == NOTIFICATION_PENDING, - Notification.status == NOTIFICATION_SENT - )).first() + # this is used to update letters and emails + notification = Notification.query.filter(Notification.reference == reference).first() - if not notification or notification.status == NOTIFICATION_SENT: + if not notification: + current_app.logger.error('notification not found for reference {} (update to {})'.format(reference, status)) + return None + + if notification.status not in { + NOTIFICATION_SENDING, + NOTIFICATION_PENDING, + NOTIFICATION_SENT, + }: + _duplicate_update_warning(notification, status) return None return _update_notification_status( @@ -693,3 +703,19 @@ def guess_notification_type(search_term): return EMAIL_TYPE else: return SMS_TYPE + + +def _duplicate_update_warning(notification, status): + current_app.logger.info( + ( + 'Duplicate callback received. Notification id {id} received a status update to {new_status}' + '{time_diff} after being set to {old_status}. {type} sent by {sent_by}' + ).format( + id=notification.id, + old_status=notification.status, + new_status=status, + time_diff=datetime.utcnow() - notification.sent_at, + type=notification.notification_type, + sent_by=notification.sent_by + ) + ) diff --git a/app/notifications/notifications_ses_callback.py b/app/notifications/notifications_ses_callback.py index e90cfeced..d75abcd4e 100644 --- a/app/notifications/notifications_ses_callback.py +++ b/app/notifications/notifications_ses_callback.py @@ -61,9 +61,6 @@ def process_ses_response(ses_request): notification_status ) if not notification: - warning = "SES callback failed: notification either not found or already updated " \ - "from sending. 
Status {} for notification reference {}".format(notification_status, reference) - current_app.logger.warning(warning) return if not aws_response_dict['success']: diff --git a/app/notifications/process_client_response.py b/app/notifications/process_client_response.py index d45c3cc92..3de60c587 100644 --- a/app/notifications/process_client_response.py +++ b/app/notifications/process_client_response.py @@ -81,10 +81,6 @@ def _process_for_status(notification_status, client_name, provider_reference): sent_by=client_name.lower() ) if not notification: - current_app.logger.warning("{} callback failed: notification {} either not found or already updated " - "from sending. Status {}".format(client_name, - provider_reference, - notification_status)) return statsd_client.incr('callback.{}.{}'.format(client_name.lower(), notification_status)) From 021625abb3c676e9794fd6107024ee038f9c0a57 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 28 Dec 2018 14:29:59 +0000 Subject: [PATCH 073/118] make sure log line works if notification still in created --- app/dao/notifications_dao.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index d683cb636..c78582851 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -185,8 +185,7 @@ def update_notification_status_by_reference(reference, status): if notification.status not in { NOTIFICATION_SENDING, - NOTIFICATION_PENDING, - NOTIFICATION_SENT, + NOTIFICATION_PENDING }: _duplicate_update_warning(notification, status) return None @@ -714,7 +713,7 @@ def _duplicate_update_warning(notification, status): id=notification.id, old_status=notification.status, new_status=status, - time_diff=datetime.utcnow() - notification.sent_at, + time_diff=datetime.utcnow() - (notification.updated_at or notification.created_at), type=notification.notification_type, sent_by=notification.sent_by ) From 2e53ba1e3e5d831b0a284c61af34c68686a333a6 Mon Sep 17 
00:00:00 2001 From: Leo Hemsted Date: Fri, 4 Jan 2019 16:11:21 +0000 Subject: [PATCH 074/118] bump utils brings in https://github.com/alphagov/notifications-utils/pull/563 --- requirements-app.txt | 2 +- requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements-app.txt b/requirements-app.txt index 0941e7f4b..f48c7c058 100644 --- a/requirements-app.txt +++ b/requirements-app.txt @@ -29,6 +29,6 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 +git+https://github.com/alphagov/notifications-utils.git@30.7.3#egg=notifications-utils==30.7.3 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/requirements.txt b/requirements.txt index 09bfcfe90..57ae41765 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,7 +31,7 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 +git+https://github.com/alphagov/notifications-utils.git@30.7.3#egg=notifications-utils==30.7.3 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 @@ -40,12 +40,12 @@ alembic==1.0.5 amqp==1.4.9 anyjson==0.3.3 attrs==18.2.0 -awscli==1.16.81 +awscli==1.16.83 bcrypt==3.1.5 billiard==3.3.0.23 bleach==2.1.3 boto3==1.6.16 -botocore==1.12.71 +botocore==1.12.73 certifi==2018.11.29 chardet==3.0.4 Click==7.0 From bd9a6352fdcbdf796fd74eb980d7d8660e5082c2 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 4 Jan 2019 16:45:39 +0000 Subject: [PATCH 075/118] Optimise the query for getting the platform statistics for all services. The page should render for all time after this change. 
This is one step closer to eliminating the need to read from NotificationHistory. --- app/dao/fact_notification_status_dao.py | 10 +++-- app/service/rest.py | 9 ++--- .../dao/test_fact_notification_status_dao.py | 35 +++++++++++++--- tests/app/service/test_rest.py | 40 +++++++++++-------- 4 files changed, 63 insertions(+), 31 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 3389225d7..c273ca066 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -199,7 +199,7 @@ def fetch_notification_statuses_for_job(job_id): ).all() -def fetch_stats_for_all_services_by_date_range(start_date, end_date,include_from_test_key=True): +def fetch_stats_for_all_services_by_date_range(start_date, end_date, include_from_test_key=True): stats = db.session.query( FactNotificationStatus.service_id.label('service_id'), Service.name.label('name'), @@ -230,8 +230,8 @@ def fetch_stats_for_all_services_by_date_range(start_date, end_date,include_from if not include_from_test_key: stats = stats.filter(FactNotificationStatus.key_type != KEY_TYPE_TEST) - today = get_london_midnight_in_utc(datetime.utcnow()) - if start_date <= today.date() <= end_date: + if start_date <= datetime.utcnow().date() <= end_date: + today = get_london_midnight_in_utc(datetime.utcnow()) subquery = db.session.query( Notification.notification_type.cast(db.Text).label('notification_type'), Notification.status.label('status'), @@ -284,7 +284,9 @@ def fetch_stats_for_all_services_by_date_range(start_date, end_date,include_from all_stats_table.c.notification_type, all_stats_table.c.status, ).order_by( - all_stats_table.c.service_id + all_stats_table.c.name, + all_stats_table.c.notification_type, + all_stats_table.c.status ) else: query = stats diff --git a/app/service/rest.py b/app/service/rest.py index f1c4f947e..f62a99967 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -23,8 +23,8 @@ from 
app.dao.api_key_dao import ( from app.dao.fact_notification_status_dao import ( fetch_notification_status_for_service_by_month, fetch_notification_status_for_service_for_day, - fetch_notification_status_for_service_for_today_and_7_previous_days -) + fetch_notification_status_for_service_for_today_and_7_previous_days, + fetch_stats_for_all_services_by_date_range) from app.dao.inbound_numbers_dao import dao_allocate_number_for_service from app.dao.organisation_dao import dao_get_organisation_by_service_id from app.dao.service_data_retention_dao import ( @@ -56,7 +56,6 @@ from app.dao.services_dao import ( dao_remove_user_from_service, dao_suspend_service, dao_update_service, - fetch_stats_by_date_range_for_all_services ) from app.dao.service_whitelist_dao import ( dao_fetch_service_whitelist, @@ -472,10 +471,10 @@ def get_detailed_services(start_date, end_date, only_active=False, include_from_ only_active=only_active) else: - stats = fetch_stats_by_date_range_for_all_services(start_date=start_date, + stats = fetch_stats_for_all_services_by_date_range(start_date=start_date, end_date=end_date, include_from_test_key=include_from_test_key, - only_active=only_active) + ) results = [] for service_id, rows in itertools.groupby(stats, lambda x: x.service_id): rows = list(rows) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 15a5b7605..64b6b5bc3 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -289,6 +289,7 @@ def set_up_data(): create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 0, 0), status='delivered') + return service_1, service_2 def test_fetch_notification_statuses_for_job(sample_template): @@ -308,8 +309,32 @@ def 
test_fetch_notification_statuses_for_job(sample_template): @freeze_time('2018-10-31 14:00') def test_fetch_stats_for_all_services_by_date_range(notify_db_session): - set_up_data() - results = fetch_stats_for_all_services_by_date_range( start_date=date(2018, 10, 29), - end_date=date(2018, 10, 31)) - print(results) - assert len(results) == 2 + service_1, service_2 = set_up_data() + results = fetch_stats_for_all_services_by_date_range(start_date=date(2018, 10, 29), + end_date=date(2018, 10, 31)) + assert len(results) == 5 + + assert results[0].service_id == service_1.id + assert results[0].notification_type == 'email' + assert results[0].status == 'delivered' + assert results[0].count == 4 + + assert results[1].service_id == service_1.id + assert results[1].notification_type == 'sms' + assert results[1].status == 'created' + assert results[1].count == 2 + + assert results[2].service_id == service_1.id + assert results[2].notification_type == 'sms' + assert results[2].status == 'delivered' + assert results[2].count == 11 + + assert results[3].service_id == service_2.id + assert results[3].notification_type == 'letter' + assert results[3].status == 'delivered' + assert results[3].count == 10 + + assert results[4].service_id == service_2.id + assert not results[4].notification_type + assert not results[4].status + assert not results[4].count diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index a1a40b3a8..737846aa5 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1642,31 +1642,37 @@ def test_get_detailed_services_only_includes_todays_notifications(notify_db, not } -@pytest.mark.parametrize( - 'set_time', - ['2017-03-28T12:00:00', '2017-01-28T12:00:00', '2017-01-02T12:00:00', '2017-10-31T12:00:00'] -) -def test_get_detailed_services_for_date_range(notify_db, notify_db_session, set_time): +@pytest.mark.parametrize("start_date_delta, end_date_delta", + [(2, 1), + (3, 2), + (1, 0) + ]) 
+@freeze_time('2017-03-28T12:00:00') +def test_get_detailed_services_for_date_range(sample_template, start_date_delta, end_date_delta): from app.service.rest import get_detailed_services - with freeze_time(set_time): - create_sample_notification(notify_db, notify_db_session, created_at=datetime.utcnow() - timedelta(days=3)) - create_sample_notification(notify_db, notify_db_session, created_at=datetime.utcnow() - timedelta(days=2)) - create_sample_notification(notify_db, notify_db_session, created_at=datetime.utcnow() - timedelta(days=1)) - create_sample_notification(notify_db, notify_db_session, created_at=datetime.utcnow()) + create_ft_notification_status(bst_date=(datetime.utcnow() - timedelta(days=3)).date(), + service=sample_template.service, + notification_type='sms') + create_ft_notification_status(bst_date=(datetime.utcnow() - timedelta(days=2)).date(), + service=sample_template.service, + notification_type='sms') + create_ft_notification_status(bst_date=(datetime.utcnow() - timedelta(days=1)).date(), + service=sample_template.service, + notification_type='sms') - start_date = (datetime.utcnow() - timedelta(days=2)).date() - end_date = (datetime.utcnow() - timedelta(days=1)).date() + create_notification(template=sample_template, created_at=datetime.utcnow(), status='delivered') + + start_date = (datetime.utcnow() - timedelta(days=start_date_delta)).date() + end_date = (datetime.utcnow() - timedelta(days=end_date_delta)).date() data = get_detailed_services(only_active=False, include_from_test_key=True, start_date=start_date, end_date=end_date) assert len(data) == 1 - assert data[0]['statistics'] == { - EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, - SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 2}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} - } + assert data[0]['statistics'][EMAIL_TYPE] == {'delivered': 0, 'failed': 0, 'requested': 0} + assert data[0]['statistics'][SMS_TYPE] == {'delivered': 2, 'failed': 0, 'requested': 
2} + assert data[0]['statistics'][LETTER_TYPE] == {'delivered': 0, 'failed': 0, 'requested': 0} def test_search_for_notification_by_to_field(client, sample_template, sample_email_template): From b068a850fa9e0f97ab28c5642bea7f1d5ae80b23 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 7 Jan 2019 02:12:39 +0000 Subject: [PATCH 076/118] Update pytest-cov from 2.6.0 to 2.6.1 --- requirements_for_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_for_test.txt b/requirements_for_test.txt index 8e750c8e0..50ddceb17 100644 --- a/requirements_for_test.txt +++ b/requirements_for_test.txt @@ -4,7 +4,7 @@ pytest==3.10.1 moto==1.3.7 pytest-env==0.6.2 pytest-mock==1.10.0 -pytest-cov==2.6.0 +pytest-cov==2.6.1 pytest-xdist==1.24.1 coveralls==1.5.1 freezegun==0.3.11 From 40a17a0e93f977cabb5538c972b2519439414c8a Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 7 Jan 2019 11:14:25 +0000 Subject: [PATCH 077/118] pin redis to 2.x while we sort out v3 compatibility --- requirements-app.txt | 4 +++- requirements.txt | 13 +++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/requirements-app.txt b/requirements-app.txt index f48c7c058..09c0ee350 100644 --- a/requirements-app.txt +++ b/requirements-app.txt @@ -29,6 +29,8 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.3#egg=notifications-utils==30.7.3 +git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 +# pinned until upgrade to redis-py 3 works +redis==2.10.6 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/requirements.txt b/requirements.txt index 57ae41765..f711cac44 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,7 +31,9 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by 
default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.3#egg=notifications-utils==30.7.3 +git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 +# pinned until upgrade to redis-py 3 works +redis==2.10.6 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 @@ -40,12 +42,12 @@ alembic==1.0.5 amqp==1.4.9 anyjson==0.3.3 attrs==18.2.0 -awscli==1.16.83 +awscli==1.16.84 bcrypt==3.1.5 billiard==3.3.0.23 bleach==2.1.3 boto3==1.6.16 -botocore==1.12.73 +botocore==1.12.74 certifi==2018.11.29 chardet==3.0.4 Click==7.0 @@ -68,13 +70,12 @@ phonenumbers==8.9.4 pyasn1==0.4.5 pycparser==2.19 PyPDF2==1.26.0 -pyrsistent==0.14.8 +pyrsistent==0.14.9 python-dateutil==2.7.5 python-editor==1.0.3 python-json-logger==0.1.8 -pytz==2018.7 +pytz==2018.9 PyYAML==3.12 -redis==3.0.1 requests==2.21.0 rsa==3.4.2 s3transfer==0.1.13 From 8fbe60bb904932fa346d59f1310321fb068d5639 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 7 Jan 2019 15:37:26 +0000 Subject: [PATCH 078/118] Remove unused query --- app/dao/services_dao.py | 45 ----------------- tests/app/dao/test_services_dao.py | 78 ------------------------------ 2 files changed, 123 deletions(-) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 40918f7e7..fb5cba297 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -335,51 +335,6 @@ def dao_fetch_todays_stats_for_all_services(include_from_test_key=True, only_act return query.all() -@statsd(namespace='dao') -def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True, only_active=True): - start_date = get_london_midnight_in_utc(start_date) - end_date = get_london_midnight_in_utc(end_date + timedelta(days=1)) - table = NotificationHistory - - if start_date >= datetime.utcnow() - timedelta(days=7): - table = Notification - subquery = db.session.query( - table.notification_type, - table.status, - 
table.service_id, - func.count(table.id).label('count') - ).filter( - table.created_at >= start_date, - table.created_at < end_date - ).group_by( - table.notification_type, - table.status, - table.service_id - ) - if not include_from_test_key: - subquery = subquery.filter(table.key_type != KEY_TYPE_TEST) - subquery = subquery.subquery() - - query = db.session.query( - Service.id.label('service_id'), - Service.name, - Service.restricted, - Service.research_mode, - Service.active, - Service.created_at, - subquery.c.notification_type, - subquery.c.status, - subquery.c.count - ).outerjoin( - subquery, - subquery.c.service_id == Service.id - ).order_by(Service.id) - if only_active: - query = query.filter(Service.active) - - return query.all() - - @transactional @version_class(Service) @version_class(ApiKey) diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 6ea7123e3..024b1c019 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -27,7 +27,6 @@ from app.dao.services_dao import ( dao_fetch_todays_stats_for_service, fetch_todays_total_message_count, dao_fetch_todays_stats_for_all_services, - fetch_stats_by_date_range_for_all_services, dao_suspend_service, dao_resume_service, dao_fetch_active_users_for_service, @@ -775,25 +774,6 @@ def test_dao_fetch_todays_stats_for_all_services_can_exclude_from_test_key(notif assert stats[0].count == 2 -def test_fetch_stats_by_date_range_for_all_services(notify_db_session): - template = create_template(service=create_service()) - create_notification(template=template, created_at=datetime.now() - timedelta(days=4)) - create_notification(template=template, created_at=datetime.now() - timedelta(days=3)) - result_one = create_notification(template=template, created_at=datetime.now() - timedelta(days=2)) - create_notification(template=template, created_at=datetime.now() - timedelta(days=1)) - create_notification(template=template, created_at=datetime.now()) - - 
start_date = (datetime.utcnow() - timedelta(days=2)).date() - end_date = (datetime.utcnow() - timedelta(days=1)).date() - - results = fetch_stats_by_date_range_for_all_services(start_date, end_date) - - assert len(results) == 1 - assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted, - result_one.service.research_mode, result_one.service.active, - result_one.service.created_at, 'sms', 'created', 2) - - @freeze_time('2001-01-01T23:59:00') def test_dao_suspend_service_marks_service_as_inactive_and_expires_api_keys(notify_db_session): service = create_service() @@ -807,64 +787,6 @@ def test_dao_suspend_service_marks_service_as_inactive_and_expires_api_keys(noti assert api_key.expiry_date == datetime(2001, 1, 1, 23, 59, 00) -@pytest.mark.parametrize("start_delta, end_delta, expected", - [("5", "1", "4"), # a date range less than 7 days ago returns test and normal notifications - ("9", "8", "1"), # a date range older than 9 days does not return test notifications. 
- ("8", "4", "2")]) # a date range that starts more than 7 days ago -@freeze_time('2017-10-23T00:00:00') -def test_fetch_stats_by_date_range_for_all_services_returns_test_notifications(notify_db_session, - start_delta, - end_delta, - expected): - template = create_template(service=create_service()) - result_one = create_notification(template=template, created_at=datetime.now(), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=2), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=3), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=4), key_type='normal') - create_notification(template=template, created_at=datetime.now() - timedelta(days=4), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=8), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=8), key_type='normal') - - start_date = (datetime.utcnow() - timedelta(days=int(start_delta))).date() - end_date = (datetime.utcnow() - timedelta(days=int(end_delta))).date() - - results = fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True) - - assert len(results) == 1 - assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted, - result_one.service.research_mode, result_one.service.active, result_one.service.created_at, - 'sms', 'created', int(expected)) - - -@pytest.mark.parametrize("start_delta, end_delta, expected", - [("5", "1", "4"), # a date range less than 7 days ago returns test and normal notifications - ("9", "8", "1"), # a date range older than 9 days does not return test notifications. 
- ("8", "4", "2")]) # a date range that starts more than 7 days ago -@freeze_time('2017-10-23T23:00:00') -def test_fetch_stats_by_date_range_during_bst_hour_for_all_services_returns_test_notifications( - notify_db_session, start_delta, end_delta, expected -): - template = create_template(service=create_service()) - result_one = create_notification(template=template, created_at=datetime.now(), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=2), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=3), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=4), key_type='normal') - create_notification(template=template, created_at=datetime.now() - timedelta(days=4), key_type='test') - create_notification(template=template, created_at=datetime.now() - timedelta(days=8), key_type='normal') - create_notification(template=template, created_at=datetime.now() - timedelta(days=9), key_type='normal') - create_notification(template=template, created_at=datetime.now() - timedelta(days=9), key_type='test') - - start_date = (datetime.utcnow() - timedelta(days=int(start_delta))).date() - end_date = (datetime.utcnow() - timedelta(days=int(end_delta))).date() - - results = fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True) - - assert len(results) == 1 - assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted, - result_one.service.research_mode, result_one.service.active, result_one.service.created_at, - 'sms', 'created', int(expected)) - - @freeze_time('2001-01-01T23:59:00') def test_dao_resume_service_marks_service_as_active_and_api_keys_are_still_revoked(notify_db_session): service = create_service() From 47c403f6ab17a108773c6d0229438c6b9edeb2d3 Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Mon, 7 Jan 2019 17:12:00 +0000 Subject: [PATCH 079/118] 
Don't return pagination links for API Message log requests Flask-SQLAlchemy paginate function issues a separate query to get the total count of rows for a given filter. This query (with filters used by the API integration Message log page) is slow for services with large number of notifications. Since Message log page doesn't actually allow users to paginate through the response (it only shows the last 50 messages) we can use limit instead of paginate, which requires passing in another flag from admin to the dao method. `count` flag has been added to `paginate` in March 2018, however there was no release of flask-sqlalchemy since then, so we need to pull the dev version of the package from Github. --- app/dao/notifications_dao.py | 4 +++- app/schemas.py | 1 + app/service/rest.py | 4 ++++ requirements-app.txt | 2 +- requirements.txt | 2 +- .../notification_dao/test_notification_dao.py | 10 +++++++++ tests/app/service/test_rest.py | 22 +++++++++++++++++++ 7 files changed, 42 insertions(+), 3 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index c78582851..9426a28c0 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -255,6 +255,7 @@ def get_notifications_for_service( filter_dict=None, page=1, page_size=None, + count_pages=True, limit_days=None, key_type=None, personalisation=False, @@ -300,7 +301,8 @@ def get_notifications_for_service( return query.order_by(desc(Notification.created_at)).paginate( page=page, - per_page=page_size + per_page=page_size, + count=count_pages ) diff --git a/app/schemas.py b/app/schemas.py index 460b52240..0ebc42dc4 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -596,6 +596,7 @@ class NotificationsFilterSchema(ma.Schema): format_for_csv = fields.String() to = fields.String() include_one_off = fields.Boolean(required=False) + count_pages = fields.Boolean(required=False) @pre_load def handle_multidict(self, in_data): diff --git a/app/service/rest.py b/app/service/rest.py 
index f1c4f947e..fd03e7d05 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -345,16 +345,20 @@ def get_all_notifications_for_service(service_id): include_from_test_key = data.get('include_from_test_key', False) include_one_off = data.get('include_one_off', True) + count_pages = data.get('count_pages', True) + pagination = notifications_dao.get_notifications_for_service( service_id, filter_dict=data, page=page, page_size=page_size, + count_pages=count_pages, limit_days=limit_days, include_jobs=include_jobs, include_from_test_key=include_from_test_key, include_one_off=include_one_off ) + kwargs = request.args.to_dict() kwargs['service_id'] = service_id diff --git a/requirements-app.txt b/requirements-app.txt index 09c0ee350..f4097456a 100644 --- a/requirements-app.txt +++ b/requirements-app.txt @@ -7,7 +7,7 @@ docopt==0.6.2 Flask-Bcrypt==0.7.1 flask-marshmallow==0.9.0 Flask-Migrate==2.3.0 -Flask-SQLAlchemy==2.3.2 +git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108 Flask==1.0.2 click-datetime==0.2 eventlet==0.23.0 diff --git a/requirements.txt b/requirements.txt index f711cac44..e232d5b51 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ docopt==0.6.2 Flask-Bcrypt==0.7.1 flask-marshmallow==0.9.0 Flask-Migrate==2.3.0 -Flask-SQLAlchemy==2.3.2 +git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108 Flask==1.0.2 click-datetime==0.2 eventlet==0.23.0 diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index ad8875656..197b6556a 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -909,6 +909,16 @@ def test_should_return_notifications_including_one_offs_by_default(sample_user, assert len(include_one_offs_by_default) == 2 +def 
test_should_not_count_pages_when_given_a_flag(sample_user, sample_template): + create_notification(sample_template) + notification = create_notification(sample_template) + + pagination = get_notifications_for_service(sample_template.service_id, count_pages=False, page_size=1) + assert len(pagination.items) == 1 + assert pagination.total is None + assert pagination.items[0].id == notification.id + + def test_get_notifications_created_by_api_or_csv_are_returned_correctly_excluding_test_key_notifications( notify_db, notify_db_session, diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index a1a40b3a8..c12b0e223 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -1407,6 +1407,28 @@ def test_get_only_api_created_notifications_for_service( assert resp['notifications'][0]['id'] == str(without_job.id) +def test_get_notifications_for_service_without_page_count( + admin_request, + sample_job, + sample_template, + sample_user, +): + create_notification(sample_template) + without_job = create_notification(sample_template) + + resp = admin_request.get( + 'service.get_all_notifications_for_service', + service_id=sample_template.service_id, + page_size=1, + include_jobs=False, + include_one_off=False, + count_pages=False + ) + assert len(resp['notifications']) == 1 + assert resp['total'] is None + assert resp['notifications'][0]['id'] == str(without_job.id) + + @pytest.mark.parametrize('should_prefix', [ True, False, From 7f9b64d3dfd685d8dce2db391f1f1e53f7cb8723 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 8 Jan 2019 14:15:20 +0000 Subject: [PATCH 080/118] bump utils (inc redis bump) --- requirements-app.txt | 4 +--- requirements.txt | 20 +++++++++----------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/requirements-app.txt b/requirements-app.txt index 09c0ee350..cf98ffea9 100644 --- a/requirements-app.txt +++ b/requirements-app.txt @@ -29,8 +29,6 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on 
hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 -# pinned until upgrade to redis-py 3 works -redis==2.10.6 +git+https://github.com/alphagov/notifications-utils.git@30.7.4#egg=notifications-utils==30.7.4 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/requirements.txt b/requirements.txt index f711cac44..433bb3693 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,9 +31,7 @@ awscli-cwlogs>=1.4,<1.5 # Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default itsdangerous==0.24 # pyup: <1.0.0 -git+https://github.com/alphagov/notifications-utils.git@30.7.2#egg=notifications-utils==30.7.2 -# pinned until upgrade to redis-py 3 works -redis==2.10.6 +git+https://github.com/alphagov/notifications-utils.git@30.7.4#egg=notifications-utils==30.7.4 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 @@ -42,12 +40,12 @@ alembic==1.0.5 amqp==1.4.9 anyjson==0.3.3 attrs==18.2.0 -awscli==1.16.84 +awscli==1.16.85 bcrypt==3.1.5 billiard==3.3.0.23 -bleach==2.1.3 +bleach==3.0.2 boto3==1.6.16 -botocore==1.12.74 +botocore==1.12.75 certifi==2018.11.29 chardet==3.0.4 Click==7.0 @@ -56,32 +54,32 @@ docutils==0.14 Flask-Redis==0.3.0 future==0.17.1 greenlet==0.4.15 -html5lib==1.0.1 idna==2.8 Jinja2==2.10 jmespath==0.9.3 kombu==3.0.37 Mako==1.0.7 MarkupSafe==1.1.0 -mistune==0.8.3 +mistune==0.8.4 monotonic==1.5 orderedset==2.0.1 -phonenumbers==8.9.4 +phonenumbers==8.10.2 pyasn1==0.4.5 pycparser==2.19 PyPDF2==1.26.0 pyrsistent==0.14.9 python-dateutil==2.7.5 python-editor==1.0.3 -python-json-logger==0.1.8 +python-json-logger==0.1.10 pytz==2018.9 PyYAML==3.12 +redis==3.0.1 requests==2.21.0 rsa==3.4.2 s3transfer==0.1.13 six==1.12.0 smartypants==2.0.1 -statsd==3.2.2 +statsd==3.3.0 urllib3==1.24.1 webencodings==0.5.1 Werkzeug==0.14.1 From 
e3a79e80c90657a882c142cfddd860771187a38b Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 8 Jan 2019 17:50:34 +0000 Subject: [PATCH 081/118] Cancelled notifications don't show as failures in statistics --- app/service/statistics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/service/statistics.py b/app/service/statistics.py index e1ed57aeb..3d22b20d6 100644 --- a/app/service/statistics.py +++ b/app/service/statistics.py @@ -86,7 +86,7 @@ def _update_statuses_from_row(update_dict, row): update_dict['delivered'] += row.count elif row.status in ( 'failed', 'technical-failure', 'temporary-failure', - 'permanent-failure', 'validation-failed', 'virus-scan-failed', 'cancelled'): + 'permanent-failure', 'validation-failed', 'virus-scan-failed'): update_dict['failed'] += row.count From 5d838415d3d4c7fbb12fc93d55414c8973abe783 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 9 Jan 2019 11:26:08 +0000 Subject: [PATCH 082/118] fix filter to look at right table a query for notifications was filtering on FtNotificationStatus - we aren't joining to that table in the query, so sqlalchemy added a cross join between ft_notification_status (3.7k rows) and Notifications (3.9m rows), resulting in a 1.3 trillion row materialised table. This query took 17 hours and pending. Also, remove orders from querys other than the outer one, since we're grouping anyway. 
--- app/dao/fact_notification_status_dao.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index c273ca066..bf6ec3c8e 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -153,8 +153,6 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): FactNotificationStatus.notification_type, FactNotificationStatus.notification_status, FactNotificationStatus.key_type, - ).order_by( - FactNotificationStatus.notification_type ) today = get_london_midnight_in_utc(datetime.utcnow()) if start_date <= today.date() <= end_date: @@ -184,7 +182,9 @@ def fetch_notification_status_totals_for_all_services(start_date, end_date): all_stats_table.c.notification_type ) else: - query = stats + query = stats.order_by( + FactNotificationStatus.notification_type + ) return query.all() @@ -245,7 +245,7 @@ def fetch_stats_for_all_services_by_date_range(start_date, end_date, include_fro Notification.service_id ) if not include_from_test_key: - subquery = subquery.filter(FactNotificationStatus.key_type != KEY_TYPE_TEST) + subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST) subquery = subquery.subquery() stats_for_today = db.session.query( @@ -261,7 +261,7 @@ def fetch_stats_for_all_services_by_date_range(start_date, end_date, include_fro ).outerjoin( subquery, subquery.c.service_id == Service.id - ).order_by(Service.id) + ) all_stats_table = stats.union_all(stats_for_today).subquery() query = db.session.query( From 3e21f5748144b09c6611234cb85cbdd6d2904ca8 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 9 Jan 2019 11:43:40 +0000 Subject: [PATCH 083/118] fix platform admin stats row-order bug now that we're reading from two tables (ft_notification_status and notifications) for stats, we'll get a couple of rows for each notification type. 
If a service doesn't have any rows in one of those tables, the query will return a row with nulls for the notification types and counts. Some services will have history but no stats from today, others will have data from today but no history. This commit acknowledges that any row might have nulls, not just the first row. --- app/service/rest.py | 5 +---- app/service/statistics.py | 5 ++++- tests/app/service/test_statistics.py | 4 ++++ 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/app/service/rest.py b/app/service/rest.py index f62a99967..e355ebc5b 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -478,10 +478,7 @@ def get_detailed_services(start_date, end_date, only_active=False, include_from_ results = [] for service_id, rows in itertools.groupby(stats, lambda x: x.service_id): rows = list(rows) - if rows[0].count is None: - s = statistics.create_zeroed_stats_dicts() - else: - s = statistics.format_statistics(rows) + s = statistics.format_statistics(rows) results.append({ 'id': str(rows[0].service_id), 'name': rows[0].name, diff --git a/app/service/statistics.py b/app/service/statistics.py index e1ed57aeb..d942ad427 100644 --- a/app/service/statistics.py +++ b/app/service/statistics.py @@ -13,7 +13,10 @@ def format_statistics(statistics): # so we can return emails/sms * created, sent, and failed counts = create_zeroed_stats_dicts() for row in statistics: - _update_statuses_from_row(counts[row.notification_type], row) + # any row could be null, if the service either has no notifications in the notifications table, + # or no historical data in the ft_notification_status table. 
+ if row.notification_type: + _update_statuses_from_row(counts[row.notification_type], row) return counts diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 6ba357d3b..d553b90f2 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -45,6 +45,10 @@ NewStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'key StatsRow('sms', 'delivered', 1), StatsRow('sms', 'sent', 1), ], [0, 0, 0], [3, 2, 0], [0, 0, 0]), + 'handles_none_rows': ([ + StatsRow('sms', 'sending', 1), + StatsRow(None, None, None) + ], [0, 0, 0], [1, 0, 0], [0, 0, 0]) }) def test_format_statistics(stats, email_counts, sms_counts, letter_counts): From e179e1e4a44db4078a48b3f5b8fff4e01466024c Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 9 Jan 2019 13:30:28 +0000 Subject: [PATCH 084/118] Update test --- tests/app/service/test_statistics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 6ba357d3b..b1a341c8f 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -39,7 +39,7 @@ NewStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'key StatsRow('letter', 'virus-scan-failed', 1), StatsRow('letter', 'permanent-failure', 1), StatsRow('letter', 'cancelled', 1), - ], [4, 0, 4], [0, 0, 0], [4, 0, 4]), + ], [4, 0, 4], [0, 0, 0], [4, 0, 3]), 'convert_sent_to_delivered': ([ StatsRow('sms', 'sending', 1), StatsRow('sms', 'delivered', 1), From 4a26ee18139dcef875d911f9f89141393a7cfd3e Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Wed, 9 Jan 2019 12:22:51 +0000 Subject: [PATCH 085/118] Set statement timeout on all DB connections A recent issue with a long-running query (#2288) highlighted the fact that even though the original HTTP connection might be closed (for example after gorouter timeout of 15 minutes, which returns a 504 response to the 
client), the request worker will not be stopped. This means that the worker is spending time and potentially DB resources generating a response that will never be delivered. Gunicorn's timeout setting only applies to sync workers and there doesn't seem to be an option to interrupt individual requests in gevent/eventlet deployments. Since the most likely (and potentially most dangerous) scenario for this is a long-running DB query, we can set a statement timeout on our DB connections. This will raise a sqlalchemy.exc.OperationalError (wrapping psycopg2.extensions.QueryCanceledError), interrupting the request after the given timeout has been reached. This is a Postgres client setting, so the database itself will abort the transaction when it reaches the set timeout. Since this will also apply to our celery tasks (including potentially long-running nightly tasks) we set a timeout of 20 minutes to begin with. This can potentially be split in the future to set a different value for each app, so that we could limit API requests even more. 
--- app/__init__.py | 15 ++++++++++++++- app/config.py | 1 + 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/app/__init__.py b/app/__init__.py index b4a470ac0..1f94c697e 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -4,7 +4,7 @@ import string import uuid from flask import _request_ctx_stack, request, g, jsonify -from flask_sqlalchemy import SQLAlchemy +from flask_sqlalchemy import SQLAlchemy as _SQLAlchemy from flask_marshmallow import Marshmallow from flask_migrate import Migrate from time import monotonic @@ -27,6 +27,19 @@ from app.encryption import Encryption DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" + +class SQLAlchemy(_SQLAlchemy): + """We need to subclass SQLAlchemy in order to override create_engine options""" + + def apply_driver_hacks(self, app, info, options): + super().apply_driver_hacks(app, info, options) + if 'connect_args' not in options: + options['connect_args'] = {} + options['connect_args']["options"] = "-c statement_timeout={}".format( + int(app.config['SQLALCHEMY_STATEMENT_TIMEOUT']) * 1000 + ) + + db = SQLAlchemy() migrate = Migrate() ma = Marshmallow() diff --git a/app/config.py b/app/config.py index 138d18b65..a0569534e 100644 --- a/app/config.py +++ b/app/config.py @@ -122,6 +122,7 @@ class Config(object): SQLALCHEMY_POOL_SIZE = int(os.environ.get('SQLALCHEMY_POOL_SIZE', 5)) SQLALCHEMY_POOL_TIMEOUT = 30 SQLALCHEMY_POOL_RECYCLE = 300 + SQLALCHEMY_STATEMENT_TIMEOUT = 1200 PAGE_SIZE = 50 API_PAGE_SIZE = 250 TEST_MESSAGE_FILENAME = 'Test message' From 56bae2b07758ac2079013d5acef1c46eb1525a81 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 9 Jan 2019 17:49:19 +0000 Subject: [PATCH 086/118] Allow users to set postage per precompiled letter --- .../process_letter_notifications.py | 3 ++- app/notifications/process_notifications.py | 14 ++++++++----- app/v2/notifications/notification_schemas.py | 3 ++- .../test_post_letter_notifications.py | 21 ++++++++++++++----- 4 files changed, 29 
insertions(+), 12 deletions(-) diff --git a/app/notifications/process_letter_notifications.py b/app/notifications/process_letter_notifications.py index 984ad257e..94e52bbd8 100644 --- a/app/notifications/process_letter_notifications.py +++ b/app/notifications/process_letter_notifications.py @@ -20,6 +20,7 @@ def create_letter_notification(letter_data, template, api_key, status, reply_to_ client_reference=letter_data.get('reference'), status=status, reply_to_text=reply_to_text, - billable_units=billable_units + billable_units=billable_units, + postage=letter_data.get('postage') ) return notification diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 8fc2f15f6..88cec5b0d 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -75,7 +75,8 @@ def persist_notification( created_by_id=None, status=NOTIFICATION_CREATED, reply_to_text=None, - billable_units=None + billable_units=None, + postage=None ): notification_created_at = created_at or datetime.utcnow() if not notification_id: @@ -112,11 +113,14 @@ def persist_notification( elif notification_type == EMAIL_TYPE: notification.normalised_to = format_email_address(notification.to) elif notification_type == LETTER_TYPE: - template = dao_get_template_by_id(template_id, template_version) - if service.has_permission(CHOOSE_POSTAGE) and template.postage: - notification.postage = template.postage + if postage: + notification.postage = postage else: - notification.postage = service.postage + template = dao_get_template_by_id(template_id, template_version) + if service.has_permission(CHOOSE_POSTAGE) and template.postage: + notification.postage = template.postage + else: + notification.postage = service.postage # if simulated create a Notification model to return but do not persist the Notification to the dB if not simulated: diff --git a/app/v2/notifications/notification_schemas.py 
b/app/v2/notifications/notification_schemas.py index 39c78d727..1ee7d1dc4 100644 --- a/app/v2/notifications/notification_schemas.py +++ b/app/v2/notifications/notification_schemas.py @@ -239,7 +239,8 @@ post_precompiled_letter_request = { "title": "POST v2/notifications/letter", "properties": { "reference": {"type": "string"}, - "content": {"type": "string"} + "content": {"type": "string"}, + "postage": {"type": "string"} }, "required": ["reference", "content"], "additionalProperties": False diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index 734928088..163f63384 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -469,16 +469,27 @@ def test_post_precompiled_letter_with_invalid_base64(client, notify_user, mocker assert not Notification.query.first() -@pytest.mark.parametrize('postage', ['first', 'second']) -def test_post_precompiled_letter_notification_returns_201(client, notify_user, mocker, postage): +@pytest.mark.parametrize('service_postage, notification_postage, expected_postage', [ + ('second', 'second', 'second'), + ('second', 'first', 'first'), + ('second', None, 'second'), + ('first', 'first', 'first'), + ('first', 'second', 'second'), + ('first', None, 'first'), +]) +def test_post_precompiled_letter_notification_returns_201( + client, notify_user, mocker, service_postage, notification_postage, expected_postage +): sample_service = create_service(service_permissions=['letter', 'precompiled_letter']) - sample_service.postage = postage + sample_service.postage = service_postage s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf') mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task') data = { "reference": "letter-reference", "content": "bGV0dGVyLWNvbnRlbnQ=" } + if notification_postage: + data["postage"] = notification_postage 
auth_header = create_authorization_header(service_id=sample_service.id) response = client.post( path="v2/notifications/letter", @@ -493,10 +504,10 @@ def test_post_precompiled_letter_notification_returns_201(client, notify_user, m assert notification.billable_units == 0 assert notification.status == NOTIFICATION_PENDING_VIRUS_CHECK - assert notification.postage == postage + assert notification.postage == expected_postage notification_history = NotificationHistory.query.one() - assert notification_history.postage == postage + assert notification_history.postage == expected_postage resp_json = json.loads(response.get_data(as_text=True)) assert resp_json == {'id': str(notification.id), 'reference': 'letter-reference'} From 5a1094b6fd961aeda11d51fa3f67c2dd5b559cd7 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 10 Jan 2019 16:04:06 +0000 Subject: [PATCH 087/118] Throw error if postage parameter for precompiled POST request incorrect --- app/schema_validation/__init__.py | 7 +++++++ app/v2/notifications/notification_schemas.py | 2 +- .../test_post_letter_notifications.py | 20 +++++++++++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/app/schema_validation/__init__.py b/app/schema_validation/__init__.py index 382d9229c..dfa03446f 100644 --- a/app/schema_validation/__init__.py +++ b/app/schema_validation/__init__.py @@ -29,6 +29,13 @@ def validate(json_to_validate, schema): validate_email_address(instance) return True + @format_checker.checks('postage', raises=ValidationError) + def validate_schema_postage(instance): + if isinstance(instance, str): + if instance not in ["first", "second"]: + raise ValidationError("invalid. 
It must be either first or second.") + return True + @format_checker.checks('datetime_within_next_day', raises=ValidationError) def validate_schema_date_with_hour(instance): if isinstance(instance, str): diff --git a/app/v2/notifications/notification_schemas.py b/app/v2/notifications/notification_schemas.py index 1ee7d1dc4..733eb8aef 100644 --- a/app/v2/notifications/notification_schemas.py +++ b/app/v2/notifications/notification_schemas.py @@ -240,7 +240,7 @@ post_precompiled_letter_request = { "properties": { "reference": {"type": "string"}, "content": {"type": "string"}, - "postage": {"type": "string"} + "postage": {"type": "string", "format": "postage"} }, "required": ["reference", "content"], "additionalProperties": False diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index 163f63384..db6cd4159 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -511,3 +511,23 @@ def test_post_precompiled_letter_notification_returns_201( resp_json = json.loads(response.get_data(as_text=True)) assert resp_json == {'id': str(notification.id), 'reference': 'letter-reference'} + + +def test_post_letter_notification_throws_error_for_invalid_postage(client, notify_user, mocker): + sample_service = create_service(service_permissions=['letter', 'precompiled_letter']) + data = { + "reference": "letter-reference", + "content": "bGV0dGVyLWNvbnRlbnQ=", + "postage": "space unicorn" + } + auth_header = create_authorization_header(service_id=sample_service.id) + response = client.post( + path="v2/notifications/letter", + data=json.dumps(data), + headers=[('Content-Type', 'application/json'), auth_header]) + + assert response.status_code == 400, response.get_data(as_text=True) + resp_json = json.loads(response.get_data(as_text=True)) + assert resp_json['errors'][0]['message'] == "postage invalid. 
It must be either first or second." + + assert not Notification.query.first() From 20fe055ac93891bce6825654670b508d8e2170d7 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Thu, 10 Jan 2019 16:08:15 +0000 Subject: [PATCH 088/118] remove unused usage functions --- app/dao/notification_usage_dao.py | 180 -------------- tests/app/dao/test_notification_usage_dao.py | 235 ------------------- 2 files changed, 415 deletions(-) delete mode 100644 app/dao/notification_usage_dao.py delete mode 100644 tests/app/dao/test_notification_usage_dao.py diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py deleted file mode 100644 index 7105f5b3e..000000000 --- a/app/dao/notification_usage_dao.py +++ /dev/null @@ -1,180 +0,0 @@ -from datetime import datetime, timedelta - -from notifications_utils.statsd_decorators import statsd -from sqlalchemy import Float, Integer, and_ -from sqlalchemy import func, case, cast -from sqlalchemy import literal_column - -from app import db -from app.dao.date_util import get_financial_year -from app.models import ( - NotificationHistory, - Rate, - NOTIFICATION_STATUS_TYPES_BILLABLE, - KEY_TYPE_TEST, - SMS_TYPE, - EMAIL_TYPE, - LETTER_TYPE, - LetterRate, - Service -) -from app.utils import get_london_month_from_utc_column - - -@statsd(namespace="dao") -def get_billing_data_for_month(service_id, start_date, end_date, notification_type): - results = [] - - if notification_type == EMAIL_TYPE: - return billing_data_per_month_query(0, service_id, start_date, end_date, EMAIL_TYPE) - - elif notification_type == SMS_TYPE: - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - - if not rates: - return [] - - # so the start end date in the query are the valid from the rate, not the month - # - this is going to take some thought - for r, n in zip(rates, rates[1:]): - results.extend( - billing_data_per_month_query( - r.rate, service_id, max(r.valid_from, start_date), - min(n.valid_from, end_date), SMS_TYPE - ) - ) - 
results.extend( - billing_data_per_month_query( - rates[-1].rate, service_id, max(rates[-1].valid_from, start_date), - end_date, SMS_TYPE - ) - ) - elif notification_type == LETTER_TYPE: - results.extend(billing_letter_data_per_month_query(service_id, start_date, end_date)) - - return results - - -@statsd(namespace="dao") -def get_monthly_billing_data(service_id, year): - start_date, end_date = get_financial_year(year) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - - if not rates: - return [] - - result = [] - for r, n in zip(rates, rates[1:]): - result.extend(billing_data_per_month_query(r.rate, service_id, r.valid_from, n.valid_from, SMS_TYPE)) - result.extend(billing_data_per_month_query(rates[-1].rate, service_id, rates[-1].valid_from, end_date, SMS_TYPE)) - - return [(datetime.strftime(x[0], "%B"), x[1], x[2], x[3], x[4], x[5]) for x in result] - - -def billing_data_filter(notification_type, start_date, end_date, service_id): - return [ - NotificationHistory.notification_type == notification_type, - NotificationHistory.created_at.between(start_date, end_date), - NotificationHistory.service_id == service_id, - NotificationHistory.status.in_(NOTIFICATION_STATUS_TYPES_BILLABLE), - NotificationHistory.key_type != KEY_TYPE_TEST - ] - - -def get_rates_for_daterange(start_date, end_date, notification_type): - rates = Rate.query.filter(Rate.notification_type == notification_type).order_by(Rate.valid_from).all() - - if not rates: - return [] - - results = [] - for current_rate, current_rate_expiry_date in zip(rates, rates[1:]): - if is_between(current_rate.valid_from, start_date, end_date) or \ - is_between(current_rate_expiry_date.valid_from - timedelta(microseconds=1), start_date, end_date): - results.append(current_rate) - - if is_between(rates[-1].valid_from, start_date, end_date): - results.append(rates[-1]) - - if not results: - for x in reversed(rates): - if start_date >= x.valid_from: - results.append(x) - break - - return results - - -def 
is_between(date, start_date, end_date): - return start_date <= date <= end_date - - -@statsd(namespace="dao") -def billing_data_per_month_query(rate, service_id, start_date, end_date, notification_type): - month = get_london_month_from_utc_column(NotificationHistory.created_at) - if notification_type == SMS_TYPE: - filter_subq = func.sum(NotificationHistory.billable_units).label('billing_units') - elif notification_type == EMAIL_TYPE: - filter_subq = func.count(NotificationHistory.billable_units).label('billing_units') - - results = db.session.query( - month.label('month'), - filter_subq, - rate_multiplier().label('rate_multiplier'), - NotificationHistory.international, - NotificationHistory.notification_type, - cast(rate, Float()).label('rate') - ).filter( - *billing_data_filter(notification_type, start_date, end_date, service_id) - ).group_by( - NotificationHistory.notification_type, - month, - NotificationHistory.rate_multiplier, - NotificationHistory.international - ).order_by( - month, - rate_multiplier() - ) - return results.all() - - -def rate_multiplier(): - return cast(case([ - (NotificationHistory.rate_multiplier == None, literal_column("'1'")), # noqa - (NotificationHistory.rate_multiplier != None, NotificationHistory.rate_multiplier), # noqa - ]), Integer()) - - -@statsd(namespace="dao") -def billing_letter_data_per_month_query(service_id, start_date, end_date): - month = get_london_month_from_utc_column(NotificationHistory.created_at) - crown = Service.query.get(service_id).crown - results = db.session.query( - month.label('month'), - func.count(NotificationHistory.billable_units).label('billing_units'), - rate_multiplier().label('rate_multiplier'), - NotificationHistory.international, - NotificationHistory.notification_type, - cast(LetterRate.rate, Float()).label('rate') - ).join( - LetterRate, - and_(NotificationHistory.created_at >= LetterRate.start_date, - (LetterRate.end_date == None) | # noqa - (LetterRate.end_date > 
NotificationHistory.created_at)) - ).filter( - LetterRate.sheet_count == NotificationHistory.billable_units, - LetterRate.crown == crown, - LetterRate.post_class == 'second', - NotificationHistory.created_at < end_date, - *billing_data_filter(LETTER_TYPE, start_date, end_date, service_id) - ).group_by( - NotificationHistory.notification_type, - month, - NotificationHistory.rate_multiplier, - NotificationHistory.international, - LetterRate.rate - ).order_by( - month, - rate_multiplier() - ) - return results.all() diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py deleted file mode 100644 index 4e1bb27ab..000000000 --- a/tests/app/dao/test_notification_usage_dao.py +++ /dev/null @@ -1,235 +0,0 @@ -import uuid -from datetime import datetime, timedelta - -from freezegun import freeze_time - -from app.dao.date_util import get_financial_year -from app.dao.notification_usage_dao import ( - get_rates_for_daterange, - get_billing_data_for_month, - get_monthly_billing_data, - billing_letter_data_per_month_query -) -from app.models import ( - Rate, - SMS_TYPE, -) -from tests.app.db import create_notification, create_rate, create_template, create_service - - -def test_get_rates_for_daterange(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) - set_up_rate(notify_db, datetime(2017, 3, 31, 23), 0.0158) - start_date, end_date = get_financial_year(2017) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates) == 1 - assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-03-31 23:00:00" - assert rates[0].rate == 0.0158 - - -def test_get_rates_for_daterange_multiple_result_per_year(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) - set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) - set_up_rate(notify_db, datetime(2017, 4, 1), 0.0158) - start_date, end_date = get_financial_year(2016) - rates = 
get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates) == 2 - assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-04-01 00:00:00" - assert rates[0].rate == 0.015 - assert datetime.strftime(rates[1].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-05-18 00:00:00" - assert rates[1].rate == 0.016 - - -def test_get_rates_for_daterange_returns_correct_rates(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) - set_up_rate(notify_db, datetime(2016, 9, 1), 0.016) - set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) - start_date, end_date = get_financial_year(2017) - rates_2017 = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates_2017) == 2 - assert datetime.strftime(rates_2017[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-09-01 00:00:00" - assert rates_2017[0].rate == 0.016 - assert datetime.strftime(rates_2017[1].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-06-01 00:00:00" - assert rates_2017[1].rate == 0.0175 - - -def test_get_rates_for_daterange_in_the_future(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) - set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) - start_date, end_date = get_financial_year(2018) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-06-01 00:00:00" - assert rates[0].rate == 0.0175 - - -def test_get_rates_for_daterange_returns_empty_list_if_year_is_before_earliest_rate(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.015) - set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) - start_date, end_date = get_financial_year(2015) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert rates == [] - - -def test_get_rates_for_daterange_early_rate(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2015, 6, 1), 0.014) - set_up_rate(notify_db, datetime(2016, 6, 1), 0.015) 
- set_up_rate(notify_db, datetime(2016, 9, 1), 0.016) - set_up_rate(notify_db, datetime(2017, 6, 1), 0.0175) - start_date, end_date = get_financial_year(2016) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates) == 3 - - -def test_get_rates_for_daterange_edge_case(notify_db, notify_db_session): - set_up_rate(notify_db, datetime(2016, 3, 31, 23, 00), 0.015) - set_up_rate(notify_db, datetime(2017, 3, 31, 23, 00), 0.0175) - start_date, end_date = get_financial_year(2016) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates) == 1 - assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2016-03-31 23:00:00" - assert rates[0].rate == 0.015 - - -def test_get_rates_for_daterange_where_daterange_is_one_month_that_falls_between_rate_valid_from( - notify_db, notify_db_session -): - set_up_rate(notify_db, datetime(2017, 1, 1), 0.175) - set_up_rate(notify_db, datetime(2017, 3, 31), 0.123) - start_date = datetime(2017, 2, 1, 00, 00, 00) - end_date = datetime(2017, 2, 28, 23, 59, 59, 99999) - rates = get_rates_for_daterange(start_date, end_date, SMS_TYPE) - assert len(rates) == 1 - assert datetime.strftime(rates[0].valid_from, '%Y-%m-%d %H:%M:%S') == "2017-01-01 00:00:00" - assert rates[0].rate == 0.175 - - -def test_get_monthly_billing_data(notify_db, notify_db_session, sample_template, sample_email_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - # previous year - create_notification(template=sample_template, created_at=datetime(2016, 3, 31), sent_at=datetime(2016, 3, 31), - status='sending', billable_units=1) - # current year - create_notification(template=sample_template, created_at=datetime(2016, 4, 2), sent_at=datetime(2016, 4, 2), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2016, 5, 18), sent_at=datetime(2016, 5, 18), - status='sending', billable_units=2) - create_notification(template=sample_template, 
created_at=datetime(2016, 7, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=3) - create_notification(template=sample_template, created_at=datetime(2016, 7, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=3, rate_multiplier=2) - create_notification(template=sample_template, created_at=datetime(2016, 7, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=3, rate_multiplier=2) - create_notification(template=sample_template, created_at=datetime(2016, 7, 30), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=4) - - create_notification(template=sample_email_template, created_at=datetime(2016, 8, 22), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=0) - create_notification(template=sample_email_template, created_at=datetime(2016, 8, 30), sent_at=datetime(2016, 7, 22), - status='sending', billable_units=0) - # next year - create_notification(template=sample_template, created_at=datetime(2017, 3, 31, 23, 00, 00), - sent_at=datetime(2017, 3, 31), status='sending', billable_units=6) - results = get_monthly_billing_data(sample_template.service_id, 2016) - assert len(results) == 4 - # (billable_units, rate_multiplier, international, type, rate) - assert results[0] == ('April', 1, 1, False, SMS_TYPE, 0.014) - assert results[1] == ('May', 2, 1, False, SMS_TYPE, 0.014) - assert results[2] == ('July', 7, 1, False, SMS_TYPE, 0.014) - assert results[3] == ('July', 6, 2, False, SMS_TYPE, 0.014) - - -def test_get_monthly_billing_data_with_multiple_rates(notify_db, notify_db_session, sample_template, - sample_email_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - set_up_rate(notify_db, datetime(2016, 6, 5), 0.0175) - set_up_rate(notify_db, datetime(2017, 7, 5), 0.018) - # previous year - create_notification(template=sample_template, created_at=datetime(2016, 3, 31), sent_at=datetime(2016, 3, 31), - status='sending', billable_units=1) - # current year - 
create_notification(template=sample_template, created_at=datetime(2016, 4, 2), sent_at=datetime(2016, 4, 2), - status='sending', billable_units=1) - create_notification(template=sample_template, created_at=datetime(2016, 5, 18), sent_at=datetime(2016, 5, 18), - status='sending', billable_units=2) - create_notification(template=sample_template, created_at=datetime(2016, 6, 1), sent_at=datetime(2016, 6, 1), - status='sending', billable_units=3) - create_notification(template=sample_template, created_at=datetime(2016, 6, 15), sent_at=datetime(2016, 6, 15), - status='sending', billable_units=4) - create_notification(template=sample_email_template, created_at=datetime(2016, 8, 22), - sent_at=datetime(2016, 7, 22), - status='sending', billable_units=0) - create_notification(template=sample_email_template, created_at=datetime(2016, 8, 30), - sent_at=datetime(2016, 7, 22), - status='sending', billable_units=0) - # next year - create_notification(template=sample_template, created_at=datetime(2017, 3, 31, 23, 00, 00), - sent_at=datetime(2017, 3, 31), status='sending', billable_units=6) - results = get_monthly_billing_data(sample_template.service_id, 2016) - assert len(results) == 4 - assert results[0] == ('April', 1, 1, False, SMS_TYPE, 0.014) - assert results[1] == ('May', 2, 1, False, SMS_TYPE, 0.014) - assert results[2] == ('June', 3, 1, False, SMS_TYPE, 0.014) - assert results[3] == ('June', 4, 1, False, SMS_TYPE, 0.0175) - - -def test_get_monthly_billing_data_with_no_notifications_for_daterange(notify_db, notify_db_session, sample_template): - set_up_rate(notify_db, datetime(2016, 4, 1), 0.014) - results = get_monthly_billing_data(sample_template.service_id, 2016) - assert results == [] - - -def set_up_rate(notify_db, start_date, value): - rate = Rate(id=uuid.uuid4(), valid_from=start_date, rate=value, notification_type=SMS_TYPE) - notify_db.session.add(rate) - - -@freeze_time("2016-05-01") -def test_get_billing_data_for_month_where_start_date_before_rate_returns_empty( 
- sample_template -): - create_rate(datetime(2016, 4, 1), 0.014, SMS_TYPE) - - results = get_monthly_billing_data( - service_id=sample_template.service_id, - year=2015 - ) - - assert not results - - -@freeze_time("2016-05-01") -def test_get_monthly_billing_data_where_start_date_before_rate_returns_empty( - sample_template -): - now = datetime.utcnow() - create_rate(now, 0.014, SMS_TYPE) - - results = get_billing_data_for_month( - service_id=sample_template.service_id, - start_date=now - timedelta(days=2), - end_date=now - timedelta(days=1), - notification_type=SMS_TYPE - ) - - assert not results - - -def test_billing_letter_data_per_month_query( - notify_db_session -): - service = create_service() - template = create_template(service=service, template_type='letter') - create_notification(template=template, billable_units=1, created_at=datetime(2017, 2, 1, 13, 21), - status='delivered') - create_notification(template=template, billable_units=1, created_at=datetime(2017, 2, 1, 13, 21), - status='delivered') - create_notification(template=template, billable_units=1, created_at=datetime(2017, 2, 1, 13, 21), - status='delivered') - - results = billing_letter_data_per_month_query(service_id=service.id, - start_date=datetime(2017, 2, 1), - end_date=datetime(2017, 2, 28)) - - assert len(results) == 1 - assert results[0].rate == 0.3 - assert results[0].billing_units == 3 From 507138cc94ea0757342d82c50c2710f4714cc196 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 10 Jan 2019 16:24:51 +0000 Subject: [PATCH 089/118] Create a new query for template monthly stats. 
--- app/dao/fact_notification_status_dao.py | 84 ++++++++++++++++++- app/service/rest.py | 15 +++- .../dao/test_fact_notification_status_dao.py | 29 ++++++- tests/app/service/test_statistics_rest.py | 18 +--- 4 files changed, 123 insertions(+), 23 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index bf6ec3c8e..7231dc7d8 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -4,12 +4,12 @@ from flask import current_app from notifications_utils.timezones import convert_bst_to_utc from sqlalchemy import func from sqlalchemy.dialects.postgresql import insert -from sqlalchemy.sql.expression import literal +from sqlalchemy.sql.expression import literal, extract from sqlalchemy.types import DateTime, Integer from app import db -from app.models import Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST, Service -from app.utils import get_london_midnight_in_utc, midnight_n_days_ago +from app.models import Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST, Service, Template +from app.utils import get_london_midnight_in_utc, midnight_n_days_ago, get_london_month_from_utc_column def fetch_notification_status_for_day(process_day, service_id=None): @@ -291,3 +291,81 @@ def fetch_stats_for_all_services_by_date_range(start_date, end_date, include_fro else: query = stats return query.all() + + +def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): + # services_dao.replaces dao_fetch_monthly_historical_usage_by_template_for_service + stats = db.session.query( + FactNotificationStatus.template_id.label('template_id'), + Template.name.label('name'), + Template.template_type.label('template_type'), + Template.is_precompiled_letter.label('is_precompiled_letter'), + extract('month', FactNotificationStatus.bst_date).label('month'), + extract('year', FactNotificationStatus.bst_date).label('year'), + 
func.sum(FactNotificationStatus.notification_count).label('count') + ).join( + Template, FactNotificationStatus.template_id == Template.id + ).filter( + FactNotificationStatus.service_id == service_id, + FactNotificationStatus.bst_date >= start_date, + FactNotificationStatus.bst_date <= end_date, + ).group_by( + FactNotificationStatus.template_id, + Template.name, + Template.template_type, + Template.is_precompiled_letter, + extract('month', FactNotificationStatus.bst_date).label('month'), + extract('year', FactNotificationStatus.bst_date).label('year'), + ) + + if start_date <= datetime.utcnow() <= end_date: + today = get_london_midnight_in_utc(datetime.utcnow()) + month = get_london_month_from_utc_column(Notification.created_at) + + stats_for_today = db.session.query( + Notification.template_id.label('template_id'), + Template.name.label('name'), + Template.template_type.label('template_type'), + Template.is_precompiled_letter.label('is_precompiled_letter'), + extract('month', month).label('month'), + extract('year', month).label('year'), + func.count().label('count') + ).join( + Template, Notification.template_id == Template.id, + ).filter( + Notification.created_at >= today, + Notification.service_id == service_id, + # we don't want to include test keys + Notification.key_type != KEY_TYPE_TEST + ).group_by( + Notification.template_id, + Template.hidden, + Template.name, + Template.template_type, + month + ) + + all_stats_table = stats.union_all(stats_for_today).subquery() + query = db.session.query( + all_stats_table.c.template_id, + all_stats_table.c.name, + all_stats_table.c.is_precompiled_letter, + all_stats_table.c.template_type, + all_stats_table.c.month, + all_stats_table.c.year, + func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), + ).group_by( + all_stats_table.c.template_id, + all_stats_table.c.name, + all_stats_table.c.is_precompiled_letter, + all_stats_table.c.template_type, + all_stats_table.c.month, + all_stats_table.c.year, + 
).order_by( + all_stats_table.c.year, + all_stats_table.c.month, + all_stats_table.c.name + ) + else: + query = stats + return query.all() diff --git a/app/service/rest.py b/app/service/rest.py index 48fd6ae75..e00c99aa7 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -24,7 +24,8 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_status_for_service_by_month, fetch_notification_status_for_service_for_day, fetch_notification_status_for_service_for_today_and_7_previous_days, - fetch_stats_for_all_services_by_date_range) + fetch_stats_for_all_services_by_date_range, fetch_monthly_template_usage_for_service +) from app.dao.inbound_numbers_dao import dao_allocate_number_for_service from app.dao.organisation_dao import dao_get_organisation_by_service_id from app.dao.service_data_retention_dao import ( @@ -579,10 +580,16 @@ def resume_service(service_id): @service_blueprint.route('//notifications/templates_usage/monthly', methods=['GET']) def get_monthly_template_usage(service_id): try: - data = dao_fetch_monthly_historical_usage_by_template_for_service( - service_id, - int(request.args.get('year', 'NaN')) + start_date, end_date = get_financial_year(int(request.args.get('year', 'NaN'))) + data = fetch_monthly_template_usage_for_service( + start_date=start_date, + end_date=end_date, + service_id=service_id ) + # data = dao_fetch_monthly_historical_usage_by_template_for_service( + # service_id, + # int(request.args.get('year', 'NaN')) + # ) stats = list() for i in data: diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 64b6b5bc3..0e541f7a9 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -11,7 +11,8 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_status_for_service_for_today_and_7_previous_days, fetch_notification_status_totals_for_all_services, 
fetch_notification_statuses_for_job, - fetch_stats_for_all_services_by_date_range) + fetch_stats_for_all_services_by_date_range, fetch_monthly_template_usage_for_service +) from app.models import FactNotificationStatus, KEY_TYPE_TEST, KEY_TYPE_TEAM, EMAIL_TYPE, SMS_TYPE, LETTER_TYPE from freezegun import freeze_time from tests.app.db import create_notification, create_service, create_template, create_ft_notification_status, create_job @@ -338,3 +339,29 @@ def test_fetch_stats_for_all_services_by_date_range(notify_db_session): assert not results[4].notification_type assert not results[4].status assert not results[4].count + + +def test_fetch_monthly_template_usage_for_service(sample_service): + template_one = create_template(service=sample_service, template_type='sms', template_name='one') + template_two = create_template(service=sample_service, template_type='email', template_name='one') + template_three = create_template(service=sample_service, template_type='letter', template_name='one') + + create_ft_notification_status(bst_date=date(2018, 1, 1), + service=sample_service, + template=template_one, + count=2) + create_ft_notification_status(bst_date=date(2018, 2, 1), + service=sample_service, + template=template_two, + count=3) + create_ft_notification_status(bst_date=date(2018, 3, 1), + service=sample_service, + template=template_three, + count=5) + + results = fetch_monthly_template_usage_for_service( + datetime(2017, 4, 1), datetime(2018, 3, 31), sample_service.id + ) + + print(results) + assert len(results) == 3 diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py index 5b9719637..edf9e5b9a 100644 --- a/tests/app/service/test_statistics_rest.py +++ b/tests/app/service/test_statistics_rest.py @@ -28,13 +28,7 @@ def test_get_template_usage_by_month_returns_correct_data( admin_request, sample_template ): - create_notification(sample_template, created_at=datetime(2016, 4, 1), status='created') - 
create_notification(sample_template, created_at=datetime(2017, 4, 1), status='sending') - create_notification(sample_template, created_at=datetime(2017, 4, 1), status='permanent-failure') - create_notification(sample_template, created_at=datetime(2017, 4, 1), status='temporary-failure') - - daily_stats_template_usage_by_month() - + create_ft_notification_status(bst_date=date(2017, 4, 2), template=sample_template, count=3) create_notification(sample_template, created_at=datetime.utcnow()) resp_json = admin_request.get( @@ -85,14 +79,8 @@ def test_get_template_usage_by_month_returns_two_templates(admin_request, sample template_name=PRECOMPILED_TEMPLATE_NAME, hidden=True ) - - create_notification(template_one, created_at=datetime(2017, 4, 1), status='created') - create_notification(sample_template, created_at=datetime(2017, 4, 1), status='sending') - create_notification(sample_template, created_at=datetime(2017, 4, 1), status='permanent-failure') - create_notification(sample_template, created_at=datetime(2017, 4, 1), status='temporary-failure') - - daily_stats_template_usage_by_month() - + create_ft_notification_status(bst_date=datetime(2017, 4, 1), template=template_one, count=1) + create_ft_notification_status(bst_date=datetime(2017, 4, 1), template=sample_template, count=3) create_notification(sample_template, created_at=datetime.utcnow()) resp_json = admin_request.get( From 685bff40d1dc354b2a642f290929559cfd4f751d Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 10 Jan 2019 17:31:32 +0000 Subject: [PATCH 090/118] Stop validate function from being too complex by moving subfunctions out of it --- app/schema_validation/__init__.py | 88 +++++++++++++++++-------------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/app/schema_validation/__init__.py b/app/schema_validation/__init__.py index dfa03446f..98e67a50a 100644 --- a/app/schema_validation/__init__.py +++ b/app/schema_validation/__init__.py @@ -8,48 +8,54 @@ from 
notifications_utils.recipients import (validate_phone_number, validate_emai InvalidEmailError) +format_checker = FormatChecker() + + +@format_checker.checks("validate_uuid", raises=Exception) +def validate_uuid(instance): + if isinstance(instance, str): + UUID(instance) + return True + + +@format_checker.checks('phone_number', raises=InvalidPhoneError) +def validate_schema_phone_number(instance): + if isinstance(instance, str): + validate_phone_number(instance, international=True) + return True + + +@format_checker.checks('email_address', raises=InvalidEmailError) +def validate_schema_email_address(instance): + if isinstance(instance, str): + validate_email_address(instance) + return True + + +@format_checker.checks('postage', raises=ValidationError) +def validate_schema_postage(instance): + if isinstance(instance, str): + if instance not in ["first", "second"]: + raise ValidationError("invalid. It must be either first or second.") + return True + + +@format_checker.checks('datetime_within_next_day', raises=ValidationError) +def validate_schema_date_with_hour(instance): + if isinstance(instance, str): + try: + dt = iso8601.parse_date(instance).replace(tzinfo=None) + if dt < datetime.utcnow(): + raise ValidationError("datetime can not be in the past") + if dt > datetime.utcnow() + timedelta(hours=24): + raise ValidationError("datetime can only be 24 hours in the future") + except ParseError: + raise ValidationError("datetime format is invalid. 
It must be a valid ISO8601 date time format, " + "https://en.wikipedia.org/wiki/ISO_8601") + return True + + def validate(json_to_validate, schema): - format_checker = FormatChecker() - - @format_checker.checks("validate_uuid", raises=Exception) - def validate_uuid(instance): - if isinstance(instance, str): - UUID(instance) - return True - - @format_checker.checks('phone_number', raises=InvalidPhoneError) - def validate_schema_phone_number(instance): - if isinstance(instance, str): - validate_phone_number(instance, international=True) - return True - - @format_checker.checks('email_address', raises=InvalidEmailError) - def validate_schema_email_address(instance): - if isinstance(instance, str): - validate_email_address(instance) - return True - - @format_checker.checks('postage', raises=ValidationError) - def validate_schema_postage(instance): - if isinstance(instance, str): - if instance not in ["first", "second"]: - raise ValidationError("invalid. It must be either first or second.") - return True - - @format_checker.checks('datetime_within_next_day', raises=ValidationError) - def validate_schema_date_with_hour(instance): - if isinstance(instance, str): - try: - dt = iso8601.parse_date(instance).replace(tzinfo=None) - if dt < datetime.utcnow(): - raise ValidationError("datetime can not be in the past") - if dt > datetime.utcnow() + timedelta(hours=24): - raise ValidationError("datetime can only be 24 hours in the future") - except ParseError: - raise ValidationError("datetime format is invalid. 
It must be a valid ISO8601 date time format, " - "https://en.wikipedia.org/wiki/ISO_8601") - return True - validator = Draft7Validator(schema, format_checker=format_checker) errors = list(validator.iter_errors(json_to_validate)) if errors.__len__() > 0: From bd5126481c193b7d5c5c5aad5a741175420b232a Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Thu, 10 Jan 2019 16:54:06 +0000 Subject: [PATCH 091/118] Remove cancelled from requested statuses in service statistics --- app/service/statistics.py | 3 ++- tests/app/service/test_statistics.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/app/service/statistics.py b/app/service/statistics.py index 37de941e5..5c6accaa5 100644 --- a/app/service/statistics.py +++ b/app/service/statistics.py @@ -84,7 +84,8 @@ def create_zeroed_stats_dicts(): def _update_statuses_from_row(update_dict, row): - update_dict['requested'] += row.count + if row.status != 'cancelled': + update_dict['requested'] += row.count if row.status in ('delivered', 'sent'): update_dict['delivered'] += row.count elif row.status in ( diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 0e55d67dd..07a5d5c09 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -39,7 +39,7 @@ NewStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'key StatsRow('letter', 'virus-scan-failed', 1), StatsRow('letter', 'permanent-failure', 1), StatsRow('letter', 'cancelled', 1), - ], [4, 0, 4], [0, 0, 0], [4, 0, 3]), + ], [4, 0, 4], [0, 0, 0], [3, 0, 3]), 'convert_sent_to_delivered': ([ StatsRow('sms', 'sending', 1), StatsRow('sms', 'delivered', 1), From 3559063b9110aa04b773dfc56db05297e4ce472f Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Fri, 11 Jan 2019 14:44:23 +0000 Subject: [PATCH 092/118] Add letter logo for North Somerset council --- .../versions/0249_another_letter_org.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 
100644 migrations/versions/0249_another_letter_org.py diff --git a/migrations/versions/0249_another_letter_org.py b/migrations/versions/0249_another_letter_org.py new file mode 100644 index 000000000..e4423ede8 --- /dev/null +++ b/migrations/versions/0249_another_letter_org.py @@ -0,0 +1,35 @@ +"""empty message + +Revision ID: 0249_another_letter_org +Revises: 0248_enable_choose_postage + +""" + +# revision identifiers, used by Alembic. +revision = '0249_another_letter_org' +down_revision = '0248_enable_choose_postage' + +from alembic import op + + +NEW_ORGANISATIONS = [ + ('521', 'North Somerset Council', 'north-somerset'), +] + + +def upgrade(): + for numeric_id, name, filename in NEW_ORGANISATIONS: + op.execute(""" + INSERT + INTO dvla_organisation + VALUES ('{}', '{}', '{}') + """.format(numeric_id, name, filename)) + + +def downgrade(): + for numeric_id, _, _ in NEW_ORGANISATIONS: + op.execute(""" + DELETE + FROM dvla_organisation + WHERE id = '{}' + """.format(numeric_id)) From a9b755b08cc11da64905a4666b43d821783f6d9f Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Fri, 11 Jan 2019 09:23:05 +0000 Subject: [PATCH 093/118] Move letters which can't be opened to invalid PDF bucket If a precompiled letter can't be opened (e.g. because it isn't a valid PDF) we were setting its billable units to 0, but not moving it to the invalid PDF bucket. If a precompiled letter failed sanitisation, we were moving it to the invalid PDF bucket but not setting its billable units to 0. This commit makes sure that we always set the billable units to 0 and move the PDF to the right bucket if it fails sanitisation or can't be opened. 
--- app/celery/letters_pdf_tasks.py | 29 ++++++++------ tests/app/celery/test_letters_pdf_tasks.py | 45 ++++++++++++++++++---- 2 files changed, 55 insertions(+), 19 deletions(-) diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index 485f258bf..a57be4044 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -188,7 +188,12 @@ def process_virus_scan_passed(self, filename): scan_pdf_object = s3.get_s3_object(current_app.config['LETTERS_SCAN_BUCKET_NAME'], filename) old_pdf = scan_pdf_object.get()['Body'].read() - billable_units = _get_page_count(notification, old_pdf) + try: + billable_units = _get_page_count(notification, old_pdf) + except PdfReadError: + _move_invalid_letter_and_update_status(notification.reference, filename, scan_pdf_object) + return + new_pdf = _sanitise_precompiled_pdf(self, notification, old_pdf) # TODO: Remove this once CYSP update their template to not cross over the margins @@ -198,12 +203,7 @@ def process_virus_scan_passed(self, filename): if not new_pdf: current_app.logger.info('Invalid precompiled pdf received {} ({})'.format(notification.id, filename)) - - notification.status = NOTIFICATION_VALIDATION_FAILED - dao_update_notification(notification) - - move_scan_to_invalid_pdf_bucket(filename) - scan_pdf_object.delete() + _move_invalid_letter_and_update_status(notification.reference, filename, scan_pdf_object) return else: current_app.logger.info( @@ -233,14 +233,19 @@ def _get_page_count(notification, old_pdf): return billable_units except PdfReadError as e: current_app.logger.exception(msg='Invalid PDF received for notification_id: {}'.format(notification.id)) - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0 - ) raise e +def _move_invalid_letter_and_update_status(notification_reference, filename, scan_pdf_object): + move_scan_to_invalid_pdf_bucket(filename) + scan_pdf_object.delete() + + 
update_letter_pdf_status( + reference=notification_reference, + status=NOTIFICATION_VALIDATION_FAILED, + billable_units=0) + + def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter): target_bucket_config = 'TEST_LETTERS_BUCKET_NAME' if is_test_letter else 'LETTERS_PDF_BUCKET_NAME' target_bucket_name = current_app.config[target_bucket_config] diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py index 13815ce05..ce7c9cd79 100644 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ b/tests/app/celery/test_letters_pdf_tasks.py @@ -23,7 +23,6 @@ from app.celery.letters_pdf_tasks import ( process_virus_scan_failed, process_virus_scan_error, replay_letters_in_error, - _get_page_count, _sanitise_precompiled_pdf ) from app.letters.utils import get_letter_pdf_filename, ScanErrorType @@ -417,6 +416,7 @@ def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails( process_virus_scan_passed(filename) assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED + assert sample_letter_notification.billable_units == 0 mock_sanitise.assert_called_once_with( ANY, sample_letter_notification, @@ -432,13 +432,44 @@ def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails( ) -def test_get_page_count_set_notification_to_permanent_failure_when_not_pdf( - sample_letter_notification +@freeze_time('2018-01-01 18:00') +@mock_s3 +@pytest.mark.parametrize('key_type,is_test_letter', [ + (KEY_TYPE_NORMAL, False), (KEY_TYPE_TEST, True) +]) +def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened( + sample_letter_notification, mocker, key_type, is_test_letter ): - with pytest.raises(expected_exception=PdfReadError): - _get_page_count(sample_letter_notification, b'pdf_content') - updated_notification = Notification.query.filter_by(id=sample_letter_notification.id).first() - assert updated_notification.status == NOTIFICATION_VALIDATION_FAILED + filename = 
'NOTIFY.{}'.format(sample_letter_notification.reference) + source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME'] + target_bucket_name = current_app.config['INVALID_PDF_BUCKET_NAME'] + + conn = boto3.resource('s3', region_name='eu-west-1') + conn.create_bucket(Bucket=source_bucket_name) + conn.create_bucket(Bucket=target_bucket_name) + + s3 = boto3.client('s3', region_name='eu-west-1') + s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content') + + sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK + sample_letter_notification.key_type = key_type + mock_move_s3 = mocker.patch('app.letters.utils._move_s3_object') + + mock_get_page_count = mocker.patch('app.celery.letters_pdf_tasks._get_page_count', side_effect=PdfReadError) + mock_sanitise = mocker.patch('app.celery.letters_pdf_tasks._sanitise_precompiled_pdf') + + process_virus_scan_passed(filename) + + mock_sanitise.assert_not_called() + mock_get_page_count.assert_called_once_with( + sample_letter_notification, b'pdf_content' + ) + mock_move_s3.assert_called_once_with( + source_bucket_name, filename, + target_bucket_name, filename + ) + assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED + assert sample_letter_notification.billable_units == 0 def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker): From c3c9d1eac987cd6b443641a7d0ec9fba148c005f Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Fri, 11 Jan 2019 17:09:42 +0000 Subject: [PATCH 094/118] Add unit tests. Fix data types in result set. 
--- app/dao/fact_notification_status_dao.py | 4 +-- app/service/rest.py | 1 - .../dao/test_fact_notification_status_dao.py | 36 +++++++++++++++---- 3 files changed, 32 insertions(+), 9 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 7231dc7d8..f5466bbec 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -351,8 +351,8 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): all_stats_table.c.name, all_stats_table.c.is_precompiled_letter, all_stats_table.c.template_type, - all_stats_table.c.month, - all_stats_table.c.year, + func.cast(all_stats_table.c.month, Integer).label('month'), + func.cast(all_stats_table.c.year, Integer).label('year'), func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), ).group_by( all_stats_table.c.template_id, diff --git a/app/service/rest.py b/app/service/rest.py index e00c99aa7..4aa0447b0 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -49,7 +49,6 @@ from app.dao.services_dao import ( dao_create_service, dao_fetch_all_services, dao_fetch_all_services_by_user, - dao_fetch_monthly_historical_usage_by_template_for_service, dao_fetch_service_by_id, dao_fetch_todays_stats_for_service, dao_fetch_todays_stats_for_all_services, diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 0e541f7a9..9b1f92f63 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -341,10 +341,11 @@ def test_fetch_stats_for_all_services_by_date_range(notify_db_session): assert not results[4].count +@freeze_time('2018-01-04 14:00') def test_fetch_monthly_template_usage_for_service(sample_service): - template_one = create_template(service=sample_service, template_type='sms', template_name='one') - template_two = create_template(service=sample_service, 
template_type='email', template_name='one') - template_three = create_template(service=sample_service, template_type='letter', template_name='one') + template_one = create_template(service=sample_service, template_type='sms', template_name='1_one') + template_two = create_template(service=sample_service, template_type='email', template_name='2_two') + template_three = create_template(service=sample_service, template_type='letter', template_name='3_three') create_ft_notification_status(bst_date=date(2018, 1, 1), service=sample_service, @@ -353,15 +354,38 @@ def test_fetch_monthly_template_usage_for_service(sample_service): create_ft_notification_status(bst_date=date(2018, 2, 1), service=sample_service, template=template_two, - count=3) + count=4) create_ft_notification_status(bst_date=date(2018, 3, 1), service=sample_service, template=template_three, count=5) - + create_notification(template=template_one) results = fetch_monthly_template_usage_for_service( datetime(2017, 4, 1), datetime(2018, 3, 31), sample_service.id ) - print(results) assert len(results) == 3 + + assert results[0].template_id == template_one.id + assert results[0].name == template_one.name + assert results[0].is_precompiled_letter is False + assert results[0].template_type == template_one.template_type + assert results[0].month == 1 + assert results[0].year == 2018 + assert results[0].count == 3 + + assert results[1].template_id == template_two.id + assert results[1].name == template_two.name + assert results[1].is_precompiled_letter is False + assert results[1].template_type == template_two.template_type + assert results[1].month == 2 + assert results[1].year == 2018 + assert results[1].count == 4 + + assert results[2].template_id == template_three.id + assert results[2].name == template_three.name + assert results[2].is_precompiled_letter is False + assert results[2].template_type == template_three.template_type + assert results[2].month == 3 + assert results[2].year == 2018 + assert 
results[2].count == 5 From 1d955e5f2d7c7e55c595a0623a5506d4c432af1a Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Fri, 11 Jan 2019 21:23:48 +0000 Subject: [PATCH 095/118] Update pytest-xdist from 1.24.1 to 1.26.0 --- requirements_for_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_for_test.txt b/requirements_for_test.txt index 8e750c8e0..32931cbff 100644 --- a/requirements_for_test.txt +++ b/requirements_for_test.txt @@ -5,7 +5,7 @@ moto==1.3.7 pytest-env==0.6.2 pytest-mock==1.10.0 pytest-cov==2.6.0 -pytest-xdist==1.24.1 +pytest-xdist==1.26.0 coveralls==1.5.1 freezegun==0.3.11 requests-mock==1.5.2 From b5a3ef9576e160c58c8715a738b397d7e0ffbb9f Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 14 Jan 2019 15:28:26 +0000 Subject: [PATCH 096/118] Added order by. Added more unit tests. Remove comments. --- app/dao/fact_notification_status_dao.py | 14 ++- app/service/rest.py | 5 - .../dao/test_fact_notification_status_dao.py | 116 ++++++++++++++---- 3 files changed, 105 insertions(+), 30 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index f5466bbec..6bb341409 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -8,7 +8,10 @@ from sqlalchemy.sql.expression import literal, extract from sqlalchemy.types import DateTime, Integer from app import db -from app.models import Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST, Service, Template +from app.models import ( + Notification, NotificationHistory, FactNotificationStatus, KEY_TYPE_TEST, Service, Template, + NOTIFICATION_CANCELLED +) from app.utils import get_london_midnight_in_utc, midnight_n_days_ago, get_london_month_from_utc_column @@ -309,6 +312,7 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): FactNotificationStatus.service_id == service_id, FactNotificationStatus.bst_date >= start_date, 
FactNotificationStatus.bst_date <= end_date, + FactNotificationStatus.notification_status != NOTIFICATION_CANCELLED ).group_by( FactNotificationStatus.template_id, Template.name, @@ -316,6 +320,10 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): Template.is_precompiled_letter, extract('month', FactNotificationStatus.bst_date).label('month'), extract('year', FactNotificationStatus.bst_date).label('year'), + ).order_by( + extract('year', FactNotificationStatus.bst_date), + extract('month', FactNotificationStatus.bst_date), + Template.name ) if start_date <= datetime.utcnow() <= end_date: @@ -335,8 +343,8 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): ).filter( Notification.created_at >= today, Notification.service_id == service_id, - # we don't want to include test keys - Notification.key_type != KEY_TYPE_TEST + Notification.key_type != KEY_TYPE_TEST, + Notification.status != NOTIFICATION_CANCELLED ).group_by( Notification.template_id, Template.hidden, diff --git a/app/service/rest.py b/app/service/rest.py index 4aa0447b0..279d00ed5 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -585,11 +585,6 @@ def get_monthly_template_usage(service_id): end_date=end_date, service_id=service_id ) - # data = dao_fetch_monthly_historical_usage_by_template_for_service( - # service_id, - # int(request.args.get('year', 'NaN')) - # ) - stats = list() for i in data: stats.append( diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 9b1f92f63..a7adffce0 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -341,51 +341,123 @@ def test_fetch_stats_for_all_services_by_date_range(notify_db_session): assert not results[4].count -@freeze_time('2018-01-04 14:00') +@freeze_time('2018-03-30 14:00') def test_fetch_monthly_template_usage_for_service(sample_service): - 
template_one = create_template(service=sample_service, template_type='sms', template_name='1_one') - template_two = create_template(service=sample_service, template_type='email', template_name='2_two') - template_three = create_template(service=sample_service, template_type='letter', template_name='3_three') + template_one = create_template(service=sample_service, template_type='sms', template_name='a') + template_two = create_template(service=sample_service, template_type='email', template_name='b') + template_three = create_template(service=sample_service, template_type='letter', template_name='c') + + create_ft_notification_status(bst_date=date(2017, 12, 10), + service=sample_service, + template=template_two, + count=3) + create_ft_notification_status(bst_date=date(2017, 12, 10), + service=sample_service, + template=template_one, + count=6) create_ft_notification_status(bst_date=date(2018, 1, 1), service=sample_service, template=template_one, - count=2) - create_ft_notification_status(bst_date=date(2018, 2, 1), - service=sample_service, - template=template_two, count=4) + create_ft_notification_status(bst_date=date(2018, 3, 1), service=sample_service, template=template_three, count=5) - create_notification(template=template_one) + create_notification(template=template_three, created_at=datetime.utcnow() - timedelta(days=1)) + create_notification(template=template_three, created_at=datetime.utcnow()) results = fetch_monthly_template_usage_for_service( datetime(2017, 4, 1), datetime(2018, 3, 31), sample_service.id ) - assert len(results) == 3 + assert len(results) == 4 assert results[0].template_id == template_one.id assert results[0].name == template_one.name assert results[0].is_precompiled_letter is False assert results[0].template_type == template_one.template_type - assert results[0].month == 1 - assert results[0].year == 2018 - assert results[0].count == 3 - + assert results[0].month == 12 + assert results[0].year == 2017 + assert results[0].count == 6 
assert results[1].template_id == template_two.id assert results[1].name == template_two.name assert results[1].is_precompiled_letter is False assert results[1].template_type == template_two.template_type + assert results[1].month == 12 + assert results[1].year == 2017 + assert results[1].count == 3 + + assert results[2].template_id == template_one.id + assert results[2].name == template_one.name + assert results[2].is_precompiled_letter is False + assert results[2].template_type == template_one.template_type + assert results[2].month == 1 + assert results[2].year == 2018 + assert results[2].count == 4 + + assert results[3].template_id == template_three.id + assert results[3].name == template_three.name + assert results[3].is_precompiled_letter is False + assert results[3].template_type == template_three.template_type + assert results[3].month == 3 + assert results[3].year == 2018 + assert results[3].count == 6 + + +@freeze_time('2018-03-30 14:00') +def test_fetch_monthly_template_usage_for_service_does_join_to_notifications_if_today_is_not_in_date_range( + sample_service +): + template_one = create_template(service=sample_service, template_type='sms', template_name='a') + template_two = create_template(service=sample_service, template_type='email', template_name='b') + create_ft_notification_status(bst_date=date(2018, 2, 1), + service=template_two.service, + template=template_two, + count=15) + create_ft_notification_status(bst_date=date(2018, 2, 2), + service=template_one.service, + template=template_one, + count=20) + create_ft_notification_status(bst_date=date(2018, 3, 1), + service=template_one.service, + template=template_one, + count=3) + create_notification(template=template_one, created_at=datetime.utcnow()) + results = fetch_monthly_template_usage_for_service( + datetime(2018, 1, 1), datetime(2018, 2, 20), template_one.service_id + ) + + assert len(results) == 2 + + assert results[0].template_id == template_one.id + assert results[0].name == 
template_one.name + assert results[0].is_precompiled_letter == template_one.is_precompiled_letter + assert results[0].template_type == template_one.template_type + assert results[0].month == 2 + assert results[0].year == 2018 + assert results[0].count == 20 + assert results[1].template_id == template_two.id + assert results[1].name == template_two.name + assert results[1].is_precompiled_letter == template_two.is_precompiled_letter + assert results[1].template_type == template_two.template_type assert results[1].month == 2 assert results[1].year == 2018 - assert results[1].count == 4 + assert results[1].count == 15 - assert results[2].template_id == template_three.id - assert results[2].name == template_three.name - assert results[2].is_precompiled_letter is False - assert results[2].template_type == template_three.template_type - assert results[2].month == 3 - assert results[2].year == 2018 - assert results[2].count == 5 + +@freeze_time('2018-03-30 14:00') +def test_fetch_monthly_template_usage_for_service_does_not_include_cancelled_status( + sample_template +): + create_ft_notification_status(bst_date=date(2018, 3, 1), + service=sample_template.service, + template=sample_template, + notification_status='cancelled', + count=15) + create_notification(template=sample_template, created_at=datetime.utcnow(), status='cancelled') + results = fetch_monthly_template_usage_for_service( + datetime(2018, 1, 1), datetime(2018, 3, 31), sample_template.service_id + ) + + assert len(results) == 0 From efad58edd8f9099f9e731830f3f5dddbedd6f3e8 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Mon, 14 Jan 2019 16:30:36 +0000 Subject: [PATCH 097/118] There is no need to have a separate table to store template monthly statistics. It's easy enough to aggregate the stats from ft_notification_status. This removes the nightly task, and all the dao methods. The next PR will remove the table. 
--- app/celery/scheduled_tasks.py | 19 - app/config.py | 5 - app/dao/services_dao.py | 74 --- app/dao/stats_template_usage_by_month_dao.py | 60 --- tests/app/celery/test_scheduled_tasks.py | 150 ------ tests/app/dao/test_services_dao.py | 436 +----------------- .../test_stats_template_usage_by_month_dao.py | 155 ------- tests/app/service/test_statistics_rest.py | 17 - 8 files changed, 1 insertion(+), 915 deletions(-) delete mode 100644 app/dao/stats_template_usage_by_month_dao.py delete mode 100644 tests/app/dao/test_stats_template_usage_by_month_dao.py diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index cde969034..818206e76 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -40,10 +40,6 @@ from app.dao.provider_details_dao import ( dao_toggle_sms_provider ) from app.dao.service_callback_api_dao import get_service_delivery_status_callback_api_for_service -from app.dao.services_dao import ( - dao_fetch_monthly_historical_stats_by_template -) -from app.dao.stats_template_usage_by_month_dao import insert_or_update_stats_for_template from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago from app.exceptions import NotificationTechnicalFailureException from app.models import ( @@ -405,21 +401,6 @@ def check_job_status(): raise JobIncompleteError("Job(s) {} have not completed.".format(job_ids)) -@notify_celery.task(name='daily-stats-template-usage-by-month') -@statsd(namespace="tasks") -def daily_stats_template_usage_by_month(): - results = dao_fetch_monthly_historical_stats_by_template() - - for result in results: - if result.template_id: - insert_or_update_stats_for_template( - result.template_id, - result.month, - result.year, - result.count - ) - - @notify_celery.task(name='raise-alert-if-no-letter-ack-file') @statsd(namespace="tasks") def letter_raise_alert_if_no_ack_file_for_zip(): diff --git a/app/config.py b/app/config.py index a0569534e..f69e30869 100644 --- a/app/config.py +++ 
b/app/config.py @@ -195,11 +195,6 @@ class Config(object): 'schedule': crontab(hour=0, minute=5), 'options': {'queue': QueueNames.PERIODIC} }, - 'daily-stats-template-usage-by-month': { - 'task': 'daily-stats-template-usage-by-month', - 'schedule': crontab(hour=0, minute=10), - 'options': {'queue': QueueNames.PERIODIC} - }, 'create-nightly-billing': { 'task': 'create-nightly-billing', 'schedule': crontab(hour=0, minute=15), diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index fb5cba297..342ba7dfb 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -11,9 +11,7 @@ from app.dao.dao_utils import ( transactional, version_class ) -from app.dao.date_util import get_financial_year from app.dao.service_sms_sender_dao import insert_service_sms_sender -from app.dao.stats_template_usage_by_month_dao import dao_get_template_usage_stats_by_service from app.models import ( AnnualBilling, ApiKey, @@ -389,75 +387,3 @@ def dao_fetch_monthly_historical_stats_by_template(): year, month ).all() - - -@statsd(namespace="dao") -def dao_fetch_monthly_historical_usage_by_template_for_service(service_id, year): - - results = dao_get_template_usage_stats_by_service(service_id, year) - - stats = [] - for result in results: - stat = type("", (), {})() - stat.template_id = result.template_id - stat.template_type = result.template_type - stat.name = str(result.name) - stat.month = result.month - stat.year = result.year - stat.count = result.count - stat.is_precompiled_letter = result.is_precompiled_letter - stats.append(stat) - - month = get_london_month_from_utc_column(Notification.created_at) - year_func = func.date_trunc("year", Notification.created_at) - start_date = datetime.combine(date.today(), time.min) - - fy_start, fy_end = get_financial_year(year) - - if fy_start < datetime.now() < fy_end: - today_results = db.session.query( - Notification.template_id, - Template.is_precompiled_letter, - Template.name, - Template.template_type, - extract('month', 
month).label('month'), - extract('year', year_func).label('year'), - func.count().label('count') - ).join( - Template, Notification.template_id == Template.id, - ).filter( - Notification.created_at >= start_date, - Notification.service_id == service_id, - # we don't want to include test keys - Notification.key_type != KEY_TYPE_TEST - ).group_by( - Notification.template_id, - Template.hidden, - Template.name, - Template.template_type, - month, - year_func - ).order_by( - Notification.template_id - ).all() - - for today_result in today_results: - add_to_stats = True - for stat in stats: - if today_result.template_id == stat.template_id and today_result.month == stat.month \ - and today_result.year == stat.year: - stat.count = stat.count + today_result.count - add_to_stats = False - - if add_to_stats: - new_stat = type("StatsTemplateUsageByMonth", (), {})() - new_stat.template_id = today_result.template_id - new_stat.template_type = today_result.template_type - new_stat.name = today_result.name - new_stat.month = int(today_result.month) - new_stat.year = int(today_result.year) - new_stat.count = today_result.count - new_stat.is_precompiled_letter = today_result.is_precompiled_letter - stats.append(new_stat) - - return stats diff --git a/app/dao/stats_template_usage_by_month_dao.py b/app/dao/stats_template_usage_by_month_dao.py deleted file mode 100644 index 541ab7193..000000000 --- a/app/dao/stats_template_usage_by_month_dao.py +++ /dev/null @@ -1,60 +0,0 @@ -from notifications_utils.statsd_decorators import statsd -from sqlalchemy import or_, and_, desc - -from app import db -from app.dao.dao_utils import transactional -from app.models import StatsTemplateUsageByMonth, Template - - -@transactional -@statsd(namespace="dao") -def insert_or_update_stats_for_template(template_id, month, year, count): - result = db.session.query( - StatsTemplateUsageByMonth - ).filter( - StatsTemplateUsageByMonth.template_id == template_id, - StatsTemplateUsageByMonth.month == month, - 
StatsTemplateUsageByMonth.year == year - ).update( - { - 'count': count - } - ) - if result == 0: - monthly_stats = StatsTemplateUsageByMonth( - template_id=template_id, - month=month, - year=year, - count=count - ) - - db.session.add(monthly_stats) - - -@statsd(namespace="dao") -def dao_get_template_usage_stats_by_service(service_id, year): - return db.session.query( - StatsTemplateUsageByMonth.template_id, - Template.name, - Template.template_type, - Template.is_precompiled_letter, - StatsTemplateUsageByMonth.month, - StatsTemplateUsageByMonth.year, - StatsTemplateUsageByMonth.count - ).join( - Template, StatsTemplateUsageByMonth.template_id == Template.id - ).filter( - Template.service_id == service_id - ).filter( - or_( - and_( - StatsTemplateUsageByMonth.month.in_([4, 5, 6, 7, 8, 9, 10, 11, 12]), - StatsTemplateUsageByMonth.year == year - ), and_( - StatsTemplateUsageByMonth.month.in_([1, 2, 3]), - StatsTemplateUsageByMonth.year == year + 1 - ) - ) - ).order_by( - desc(StatsTemplateUsageByMonth.month) - ).all() diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index b2fee242a..e97b4643c 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -1,4 +1,3 @@ -import functools from datetime import datetime, timedelta from functools import partial from unittest.mock import call, patch, PropertyMock @@ -31,7 +30,6 @@ from app.celery.scheduled_tasks import ( send_total_sent_notifications_to_performance_platform, switch_current_sms_provider_on_slow_delivery, timeout_notifications, - daily_stats_template_usage_by_month, letter_raise_alert_if_no_ack_file_for_zip, replay_created_notifications ) @@ -46,8 +44,6 @@ from app.dao.provider_details_dao import ( ) from app.exceptions import NotificationTechnicalFailureException from app.models import ( - NotificationHistory, - StatsTemplateUsageByMonth, JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, LETTER_TYPE, @@ -69,7 +65,6 @@ from 
tests.app.db import ( from tests.app.conftest import ( sample_job as create_sample_job, sample_notification_history as create_notification_history, - sample_template as create_sample_template, datetime_in_past ) @@ -806,151 +801,6 @@ def test_check_job_status_task_sets_jobs_to_error(mocker, sample_template): assert job_2.job_status == JOB_STATUS_IN_PROGRESS -def test_daily_stats_template_usage_by_month(notify_db, notify_db_session): - notification_history = functools.partial( - create_notification_history, - notify_db, - notify_db_session, - status='delivered' - ) - - template_one = create_sample_template(notify_db, notify_db_session) - template_two = create_sample_template(notify_db, notify_db_session) - - notification_history(created_at=datetime(2017, 10, 1), sample_template=template_one) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime.now(), sample_template=template_two) - - daily_stats_template_usage_by_month() - - result = db.session.query( - StatsTemplateUsageByMonth - ).order_by( - StatsTemplateUsageByMonth.year, - StatsTemplateUsageByMonth.month - ).all() - - assert len(result) == 2 - - assert result[0].template_id == template_two.id - assert result[0].month == 4 - assert result[0].year == 2016 - assert result[0].count == 2 - - assert result[1].template_id == template_one.id - assert result[1].month == 10 - assert result[1].year == 2017 - assert result[1].count == 1 - - -def test_daily_stats_template_usage_by_month_no_data(): - daily_stats_template_usage_by_month() - - results = db.session.query(StatsTemplateUsageByMonth).all() - - assert len(results) == 0 - - -def test_daily_stats_template_usage_by_month_multiple_runs(notify_db, notify_db_session): - notification_history = functools.partial( - create_notification_history, - notify_db, - notify_db_session, - status='delivered' - ) - - 
template_one = create_sample_template(notify_db, notify_db_session) - template_two = create_sample_template(notify_db, notify_db_session) - - notification_history(created_at=datetime(2017, 11, 1), sample_template=template_one) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime.now(), sample_template=template_two) - - daily_stats_template_usage_by_month() - - template_three = create_sample_template(notify_db, notify_db_session) - - notification_history(created_at=datetime(2017, 10, 1), sample_template=template_three) - notification_history(created_at=datetime(2017, 9, 1), sample_template=template_three) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime(2016, 4, 1), sample_template=template_two) - notification_history(created_at=datetime.now(), sample_template=template_two) - - daily_stats_template_usage_by_month() - - result = db.session.query( - StatsTemplateUsageByMonth - ).order_by( - StatsTemplateUsageByMonth.year, - StatsTemplateUsageByMonth.month - ).all() - - assert len(result) == 4 - - assert result[0].template_id == template_two.id - assert result[0].month == 4 - assert result[0].year == 2016 - assert result[0].count == 4 - - assert result[1].template_id == template_three.id - assert result[1].month == 9 - assert result[1].year == 2017 - assert result[1].count == 1 - - assert result[2].template_id == template_three.id - assert result[2].month == 10 - assert result[2].year == 2017 - assert result[2].count == 1 - - assert result[3].template_id == template_one.id - assert result[3].month == 11 - assert result[3].year == 2017 - assert result[3].count == 1 - - -def test_dao_fetch_monthly_historical_stats_by_template_null_template_id_not_counted(notify_db, notify_db_session): - notification_history = functools.partial( - 
create_notification_history, - notify_db, - notify_db_session, - status='delivered' - ) - - template_one = create_sample_template(notify_db, notify_db_session, template_name='1') - history = notification_history(created_at=datetime(2017, 2, 1), sample_template=template_one) - - NotificationHistory.query.filter( - NotificationHistory.id == history.id - ).update( - { - 'template_id': None - } - ) - - daily_stats_template_usage_by_month() - - result = db.session.query( - StatsTemplateUsageByMonth - ).all() - - assert len(result) == 0 - - notification_history(created_at=datetime(2017, 2, 1), sample_template=template_one) - - daily_stats_template_usage_by_month() - - result = db.session.query( - StatsTemplateUsageByMonth - ).order_by( - StatsTemplateUsageByMonth.year, - StatsTemplateUsageByMonth.month - ).all() - - assert len(result) == 1 - - def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None): if subfolder == '2018-01-11/zips_sent': return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT'] diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 024b1c019..766f2ef59 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timedelta +from datetime import datetime import pytest from freezegun import freeze_time @@ -7,7 +7,6 @@ from sqlalchemy.exc import IntegrityError, SQLAlchemyError from sqlalchemy.orm.exc import FlushError, NoResultFound from app import db -from app.celery.scheduled_tasks import daily_stats_template_usage_by_month from app.dao.inbound_numbers_dao import ( dao_set_inbound_number_to_service, dao_get_available_inbound_numbers, @@ -32,7 +31,6 @@ from app.dao.services_dao import ( dao_fetch_active_users_for_service, dao_fetch_service_by_inbound_number, dao_fetch_monthly_historical_stats_by_template, - dao_fetch_monthly_historical_usage_by_template_for_service ) from app.dao.users_dao 
import save_model_user, create_user_code from app.models import ( @@ -901,441 +899,9 @@ def test_dao_fetch_monthly_historical_stats_by_template(notify_db_session): assert result[1].count == 1 -def test_dao_fetch_monthly_historical_usage_by_template_for_service_no_stats_today( - notify_db_session, -): - service = create_service() - template_one = create_template(service=service, template_name='1') - template_two = create_template(service=service, template_name='2') - - n = create_notification(created_at=datetime(2017, 10, 1), template=template_one, status='delivered') - create_notification(created_at=datetime(2017, 4, 1), template=template_two, status='delivered') - create_notification(created_at=datetime(2017, 4, 1), template=template_two, status='delivered') - create_notification(created_at=datetime.now(), template=template_two, status='delivered') - - daily_stats_template_usage_by_month() - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - assert result[0].template_id == template_two.id - assert result[0].name == template_two.name - assert result[0].template_type == template_two.template_type - assert result[0].month == 4 - assert result[0].year == 2017 - assert result[0].count == 2 - - assert result[1].template_id == template_one.id - assert result[1].name == template_one.name - assert result[1].template_type == template_two.template_type - assert result[1].month == 10 - assert result[1].year == 2017 - assert result[1].count == 1 - - -@freeze_time("2017-11-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_add_to_historical( - notify_db_session, -): - service = create_service() - template_one = create_template(service=service, template_name='1') - template_two = create_template(service=service, template_name='2') - template_three = create_template(service=service, template_name='3') - - date = 
datetime.now() - day = date.day - month = date.month - year = date.year - - n = create_notification(created_at=datetime(2017, 9, 1), template=template_one, status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - - daily_stats_template_usage_by_month() - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 9 - assert result[0].year == 2017 - assert result[0].count == 1 - - assert result[1].template_id == template_two.id - assert result[1].name == template_two.name - assert result[1].template_type == template_two.template_type - assert result[1].month == 11 - assert result[1].year == 2017 - assert result[1].count == 2 - - create_notification( - template=template_three, - created_at=datetime.now(), - status='delivered' - ) - create_notification( - template=template_two, - created_at=datetime.now(), - status='delivered' - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 3 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 9 - assert result[0].year == 2017 - assert result[0].count == 1 - - assert result[1].template_id == template_two.id - assert result[1].name == template_two.name - assert result[1].template_type == template_two.template_type - assert result[1].month == month - assert result[1].year == year 
- assert result[1].count == 3 - - assert result[2].template_id == template_three.id - assert result[2].name == template_three.name - assert result[2].template_type == template_three.template_type - assert result[2].month == 11 - assert result[2].year == 2017 - assert result[2].count == 1 - - -@freeze_time("2017-11-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_does_add_old_notification( - notify_db_session, -): - template_one, template_three, template_two = create_email_sms_letter_template() - - date = datetime.now() - day = date.day - month = date.month - year = date.year - - n = create_notification(created_at=datetime(2017, 9, 1), template=template_one, status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - - daily_stats_template_usage_by_month() - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 9 - assert result[0].year == 2017 - assert result[0].count == 1 - - assert result[1].template_id == template_two.id - assert result[1].name == template_two.name - assert result[1].template_type == template_two.template_type - assert result[1].month == 11 - assert result[1].year == 2017 - assert result[1].count == 2 - - create_notification( - template=template_three, - created_at=datetime.utcnow() - timedelta(days=2), - status='delivered' - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - 
-@freeze_time("2017-11-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_get_this_year_only( - notify_db_session, -): - template_one, template_three, template_two = create_email_sms_letter_template() - - date = datetime.now() - day = date.day - month = date.month - year = date.year - - n = create_notification(created_at=datetime(2016, 9, 1), template=template_one, status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - create_notification(created_at=datetime(year, month, day) - timedelta(days=1), template=template_two, - status='delivered') - - daily_stats_template_usage_by_month() - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 1 - - assert result[0].template_id == template_two.id - assert result[0].name == template_two.name - assert result[0].template_type == template_two.template_type - assert result[0].month == 11 - assert result[0].year == 2017 - assert result[0].count == 2 - - create_notification( - template=template_three, - created_at=datetime.utcnow() - timedelta(days=2) - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 1 - - create_notification( - template=template_three, - created_at=datetime.utcnow() - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - def create_email_sms_letter_template(): service = create_service() template_one = create_template(service=service, template_name='1', template_type='email') template_two = create_template(service=service, template_name='2', template_type='sms') template_three = create_template(service=service, template_name='3', 
template_type='letter') return template_one, template_three, template_two - - -@freeze_time("2017-11-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_combined_historical_current( - notify_db_session, -): - template_one = create_template(service=create_service(), template_name='1') - - date = datetime.now() - day = date.day - month = date.month - year = date.year - - n = create_notification(status='delivered', created_at=datetime(year, month, day) - timedelta(days=30), - template=template_one) - - daily_stats_template_usage_by_month() - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 1 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 10 - assert result[0].year == 2017 - assert result[0].count == 1 - - create_notification( - template=template_one, - created_at=datetime.utcnow() - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 2 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 10 - assert result[0].year == 2017 - assert result[0].count == 1 - - assert result[1].template_id == template_one.id - assert result[1].name == template_one.name - assert result[1].template_type == template_one.template_type - assert result[1].month == 11 - assert result[1].year == 2017 - assert result[1].count == 1 - - -@freeze_time("2017-11-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_does_not_return_double_precision_values( - notify_db_session, -): - template_one = 
create_template(service=create_service()) - - n = create_notification( - template=template_one, - created_at=datetime.utcnow() - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.month, x.year) - ) - - assert len(result) == 1 - - assert result[0].template_id == template_one.id - assert result[0].name == template_one.name - assert result[0].template_type == template_one.template_type - assert result[0].month == 11 - assert len(str(result[0].month)) == 2 - assert result[0].year == 2017 - assert len(str(result[0].year)) == 4 - assert result[0].count == 1 - - -@freeze_time("2018-03-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_returns_financial_year( - notify_db, - notify_db_session, -): - service = create_service() - template_one = create_template(service=service, template_name='1', template_type='email') - - date = datetime.now() - day = date.day - year = date.year - - create_notification(template=template_one, status='delivered', created_at=datetime(year - 1, 1, day)) - create_notification(template=template_one, status='delivered', created_at=datetime(year - 1, 3, day)) - create_notification(template=template_one, status='delivered', created_at=datetime(year - 1, 4, day)) - create_notification(template=template_one, status='delivered', created_at=datetime(year - 1, 5, day)) - create_notification(template=template_one, status='delivered', created_at=datetime(year, 1, day)) - create_notification(template=template_one, status='delivered', created_at=datetime(year, 2, day)) - - daily_stats_template_usage_by_month() - - n = create_notification( - template=template_one, - created_at=datetime.utcnow() - ) - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2017), - key=lambda x: (x.year, x.month) - ) - - assert len(result) == 5 - - assert result[0].month == 4 - assert result[0].year == 2017 - assert 
result[1].month == 5 - assert result[1].year == 2017 - assert result[2].month == 1 - assert result[2].year == 2018 - assert result[3].month == 2 - assert result[3].year == 2018 - assert result[4].month == 3 - assert result[4].year == 2018 - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(n.service_id, 2014), - key=lambda x: (x.year, x.month) - ) - - assert len(result) == 0 - - -@freeze_time("2018-03-10 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_only_returns_for_service( - notify_db_session -): - template_one = create_template(service=create_service(), template_name='1', template_type='email') - - date = datetime.now() - day = date.day - year = date.year - - create_notification(template=template_one, created_at=datetime(year, 1, day)) - create_notification(template=template_one, created_at=datetime(year, 2, day)) - create_notification(template=template_one, created_at=datetime(year, 3, day)) - - service_two = create_service(service_name='other_service', user=create_user()) - template_two = create_template(service=service_two, template_name='1', template_type='email') - - create_notification(template=template_two) - create_notification(template=template_two) - - daily_stats_template_usage_by_month() - - x = dao_fetch_monthly_historical_usage_by_template_for_service(template_one.service_id, 2017) - - result = sorted( - x, - key=lambda x: (x.year, x.month) - ) - - assert len(result) == 3 - - result = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(service_two.id, 2017), - key=lambda x: (x.year, x.month) - ) - - assert len(result) == 1 - - -@freeze_time("2018-01-01 11:09:00.000000") -def test_dao_fetch_monthly_historical_usage_by_template_for_service_ignores_test_api_keys(notify_db_session): - service = create_service() - template_1 = create_template(service, template_name='1') - template_2 = create_template(service, template_name='2') - template_3 = create_template(service, 
template_name='3') - - create_notification(template_1, key_type=KEY_TYPE_TEST) - create_notification(template_2, key_type=KEY_TYPE_TEAM) - create_notification(template_3, key_type=KEY_TYPE_NORMAL) - - results = sorted( - dao_fetch_monthly_historical_usage_by_template_for_service(service.id, 2017), - key=lambda x: x.name - ) - - assert len(results) == 2 - # template_1 only used with test keys - assert results[0].template_id == template_2.id - assert results[0].count == 1 - - assert results[1].template_id == template_3.id - assert results[1].count == 1 diff --git a/tests/app/dao/test_stats_template_usage_by_month_dao.py b/tests/app/dao/test_stats_template_usage_by_month_dao.py deleted file mode 100644 index 676e00952..000000000 --- a/tests/app/dao/test_stats_template_usage_by_month_dao.py +++ /dev/null @@ -1,155 +0,0 @@ -from app import db -from app.dao.stats_template_usage_by_month_dao import ( - insert_or_update_stats_for_template, - dao_get_template_usage_stats_by_service -) -from app.models import StatsTemplateUsageByMonth, LETTER_TYPE, PRECOMPILED_TEMPLATE_NAME - -from tests.app.db import create_service, create_template - - -def test_create_stats_for_template(notify_db_session, sample_template): - assert StatsTemplateUsageByMonth.query.count() == 0 - - insert_or_update_stats_for_template(sample_template.id, 1, 2017, 10) - stats_by_month = StatsTemplateUsageByMonth.query.filter( - StatsTemplateUsageByMonth.template_id == sample_template.id - ).all() - - assert len(stats_by_month) == 1 - assert stats_by_month[0].template_id == sample_template.id - assert stats_by_month[0].month == 1 - assert stats_by_month[0].year == 2017 - assert stats_by_month[0].count == 10 - - -def test_update_stats_for_template(notify_db_session, sample_template): - assert StatsTemplateUsageByMonth.query.count() == 0 - - insert_or_update_stats_for_template(sample_template.id, 1, 2017, 10) - insert_or_update_stats_for_template(sample_template.id, 1, 2017, 20) - 
insert_or_update_stats_for_template(sample_template.id, 2, 2017, 30) - - stats_by_month = StatsTemplateUsageByMonth.query.filter( - StatsTemplateUsageByMonth.template_id == sample_template.id - ).order_by(StatsTemplateUsageByMonth.template_id).all() - - assert len(stats_by_month) == 2 - - assert stats_by_month[0].template_id == sample_template.id - assert stats_by_month[0].month == 1 - assert stats_by_month[0].year == 2017 - assert stats_by_month[0].count == 20 - - assert stats_by_month[1].template_id == sample_template.id - assert stats_by_month[1].month == 2 - assert stats_by_month[1].year == 2017 - assert stats_by_month[1].count == 30 - - -def test_dao_get_template_usage_stats_by_service(sample_service): - - email_template = create_template(service=sample_service, template_type="email") - - new_service = create_service(service_name="service_one") - - template_new_service = create_template(service=new_service) - - db.session.add(StatsTemplateUsageByMonth( - template_id=email_template.id, - month=4, - year=2017, - count=10 - )) - - db.session.add(StatsTemplateUsageByMonth( - template_id=template_new_service.id, - month=4, - year=2017, - count=10 - )) - - result = dao_get_template_usage_stats_by_service(sample_service.id, 2017) - - assert len(result) == 1 - - -def test_dao_get_template_usage_stats_by_service_for_precompiled_letters(sample_service): - - letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) - - precompiled_letter_template = create_template( - service=sample_service, template_name=PRECOMPILED_TEMPLATE_NAME, hidden=True, template_type=LETTER_TYPE) - - db.session.add(StatsTemplateUsageByMonth( - template_id=letter_template.id, - month=5, - year=2017, - count=10 - )) - - db.session.add(StatsTemplateUsageByMonth( - template_id=precompiled_letter_template.id, - month=4, - year=2017, - count=20 - )) - - result = dao_get_template_usage_stats_by_service(sample_service.id, 2017) - - assert len(result) == 2 - assert [ - 
(letter_template.id, 'letter Template Name', 'letter', False, 5, 2017, 10), - (precompiled_letter_template.id, PRECOMPILED_TEMPLATE_NAME, 'letter', True, 4, 2017, 20) - ] == result - - -def test_dao_get_template_usage_stats_by_service_specific_year(sample_service): - - email_template = create_template(service=sample_service, template_type="email") - - db.session.add(StatsTemplateUsageByMonth( - template_id=email_template.id, - month=3, - year=2017, - count=10 - )) - - db.session.add(StatsTemplateUsageByMonth( - template_id=email_template.id, - month=4, - year=2017, - count=10 - )) - - db.session.add(StatsTemplateUsageByMonth( - template_id=email_template.id, - month=3, - year=2018, - count=10 - )) - - db.session.add(StatsTemplateUsageByMonth( - template_id=email_template.id, - month=4, - year=2018, - count=10 - )) - - result = dao_get_template_usage_stats_by_service(sample_service.id, 2017) - - assert len(result) == 2 - - assert result[0].template_id == email_template.id - assert result[0].name == email_template.name - assert result[0].template_type == email_template.template_type - assert result[0].month == 4 - assert result[0].year == 2017 - assert result[0].count == 10 - - assert result[1].template_id == email_template.id - assert result[1].name == email_template.name - assert result[1].template_type == email_template.template_type - assert result[1].month == 3 - assert result[1].year == 2018 - assert result[1].count == 10 diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py index edf9e5b9a..519612de5 100644 --- a/tests/app/service/test_statistics_rest.py +++ b/tests/app/service/test_statistics_rest.py @@ -4,7 +4,6 @@ from datetime import datetime, date import pytest from freezegun import freeze_time -from app.celery.scheduled_tasks import daily_stats_template_usage_by_month from app.models import ( EMAIL_TYPE, SMS_TYPE, @@ -55,22 +54,6 @@ def test_get_template_usage_by_month_returns_correct_data( assert 
resp_json[1]["count"] == 1 -@freeze_time('2017-11-11 02:00') -def test_get_template_usage_by_month_returns_no_data(admin_request, sample_template): - create_notification(sample_template, created_at=datetime(2016, 4, 1), status='created') - - daily_stats_template_usage_by_month() - - create_notification(sample_template, created_at=datetime.utcnow()) - - resp_json = admin_request.get( - 'service.get_monthly_template_usage', - service_id=sample_template.service_id, - year=2015 - ) - assert resp_json['stats'] == [] - - @freeze_time('2017-11-11 02:00') def test_get_template_usage_by_month_returns_two_templates(admin_request, sample_template, sample_service): template_one = create_template( From 876346f4693b664f296b4454edb335585eb5bdec Mon Sep 17 00:00:00 2001 From: Alexey Bezhan Date: Mon, 14 Jan 2019 16:58:57 +0000 Subject: [PATCH 098/118] Add an option to group notification stats for 7 days by template Currently, admin app requests service statistics (with notification counts grouped by status) and template statistics (with counts by template) in order to display the service dashboard. Service statistics are gathered from FactNotificationStatus table (counts for the last 7 days) combined with Notification (counts for today). Template statistics are currently gathered from redis cache, which contains a separate counter per template per day. It's hard for us to maintain consistency between redis and DB counts. Currently it doesn't update the count for cancelled letters, counter resets in the middle of the day might produce a wrong result for the rest of the week and cleared redis cache can't be repopulated for services with low data retention periods). Since FactNotificationStatus already contains separate counts for each template_id we can use the existing logic with some additional filters to get separate counts for each template and status combination, which would allow us to populate the service dashboard page from one query response. 
--- app/dao/fact_notification_status_dao.py | 18 ++++++-- .../dao/test_fact_notification_status_dao.py | 46 ++++++++++++++++++- 2 files changed, 60 insertions(+), 4 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 6bb341409..44c1a7b8b 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -107,12 +107,13 @@ def fetch_notification_status_for_service_for_day(bst_day, service_id): ).all() -def fetch_notification_status_for_service_for_today_and_7_previous_days(service_id, limit_days=7): +def fetch_notification_status_for_service_for_today_and_7_previous_days(service_id, by_template=False, limit_days=7): start_date = midnight_n_days_ago(limit_days) now = datetime.utcnow() stats_for_7_days = db.session.query( FactNotificationStatus.notification_type.label('notification_type'), FactNotificationStatus.notification_status.label('status'), + *([FactNotificationStatus.template_id.label('template_id')] if by_template else []), FactNotificationStatus.notification_count.label('count') ).filter( FactNotificationStatus.service_id == service_id, @@ -123,6 +124,7 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ stats_for_today = db.session.query( Notification.notification_type.cast(db.Text), Notification.status, + *([Notification.template_id] if by_template else []), func.count().label('count') ).filter( Notification.created_at >= get_london_midnight_in_utc(now), @@ -130,14 +132,24 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ Notification.key_type != KEY_TYPE_TEST ).group_by( Notification.notification_type, + *([Notification.template_id] if by_template else []), Notification.status ) + all_stats_table = stats_for_7_days.union_all(stats_for_today).subquery() - return db.session.query( + + query = db.session.query( + *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if 
by_template else []), all_stats_table.c.notification_type, all_stats_table.c.status, func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), - ).group_by( + ) + + if by_template: + query = query.filter(all_stats_table.c.template_id == Template.id) + + return query.group_by( + *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if by_template else []), all_stats_table.c.notification_type, all_stats_table.c.status, ).all() diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index a7adffce0..7b5832130 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -2,6 +2,7 @@ from datetime import timedelta, datetime, date from uuid import UUID import pytest +import mock from app.dao.fact_notification_status_dao import ( update_fact_notification_status, @@ -188,6 +189,7 @@ def test_fetch_notification_status_for_service_for_day(notify_db_session): def test_fetch_notification_status_for_service_for_today_and_7_previous_days(notify_db_session): service_1 = create_service(service_name='service_1') sms_template = create_template(service=service_1, template_type=SMS_TYPE) + sms_template_2 = create_template(service=service_1, template_type=SMS_TYPE) email_template = create_template(service=service_1, template_type=EMAIL_TYPE) create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=10) @@ -197,6 +199,7 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days(not create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) + create_notification(sms_template_2, created_at=datetime(2018, 10, 31, 11, 0, 0)) create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 
0, 0), status='delivered') @@ -220,13 +223,54 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days(not assert results[2].notification_type == 'sms' assert results[2].status == 'created' - assert results[2].count == 2 + assert results[2].count == 3 assert results[3].notification_type == 'sms' assert results[3].status == 'delivered' assert results[3].count == 19 +@freeze_time('2018-10-31T18:00:00') +def test_fetch_notification_status_by_template_for_service_for_today_and_7_previous_days(notify_db_session): + service_1 = create_service(service_name='service_1') + sms_template = create_template(template_name='sms Template 1', service=service_1, template_type=SMS_TYPE) + sms_template_2 = create_template(template_name='sms Template 2', service=service_1, template_type=SMS_TYPE) + email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + + # create unused email template + create_template(service=service_1, template_type=EMAIL_TYPE) + + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=10) + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=11) + create_ft_notification_status(date(2018, 10, 24), 'sms', service_1, count=8) + create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, notification_status='created') + create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) + create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) + + create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) + create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') + create_notification(sms_template_2, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') + create_notification(email_template, created_at=datetime(2018, 10, 31, 13, 0, 0), status='delivered') + + # too early, shouldn't be included + create_notification(service_1.templates[0], created_at=datetime(2018, 10, 
30, 12, 0, 0), status='delivered') + + results = fetch_notification_status_for_service_for_today_and_7_previous_days(service_1.id, by_template=True) + + assert [ + ('email Template Name', False, mock.ANY, 'email', 'delivered', 1), + ('email Template Name', False, mock.ANY, 'email', 'delivered', 3), + ('letter Template Name', False, mock.ANY, 'letter', 'delivered', 5), + ('sms Template 1', False, mock.ANY, 'sms', 'created', 1), + ('sms Template Name', False, mock.ANY, 'sms', 'created', 1), + ('sms Template 1', False, mock.ANY, 'sms', 'delivered', 1), + ('sms Template 2', False, mock.ANY, 'sms', 'delivered', 1), + ('sms Template Name', False, mock.ANY, 'sms', 'delivered', 8), + ('sms Template Name', False, mock.ANY, 'sms', 'delivered', 10), + ('sms Template Name', False, mock.ANY, 'sms', 'delivered', 11), + ] == sorted(results, key=lambda x: (x.notification_type, x.status, x.name, x.count)) + + @pytest.mark.parametrize( "start_date, end_date, expected_email, expected_letters, expected_sms, expected_created_sms", [ From 5ebeb9937ad801f0db799acd573867bc168bc404 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Mon, 14 Jan 2019 17:45:56 +0000 Subject: [PATCH 099/118] Avoid call to database to get template in persist_notifications --- app/celery/tasks.py | 1 + app/notifications/process_letter_notifications.py | 1 + app/notifications/process_notifications.py | 10 ++++------ app/notifications/rest.py | 1 + app/service/send_notification.py | 1 + tests/app/notifications/test_process_notification.py | 1 + tests/app/service/test_send_one_off_notification.py | 4 ++++ 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 96d26fe7d..1948c0964 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -307,6 +307,7 @@ def save_letter( saved_notification = persist_notification( template_id=notification['template'], template_version=notification['template_version'], + template_postage=template.postage, recipient=recipient, 
service=service, personalisation=notification['personalisation'], diff --git a/app/notifications/process_letter_notifications.py b/app/notifications/process_letter_notifications.py index 94e52bbd8..06d1127bf 100644 --- a/app/notifications/process_letter_notifications.py +++ b/app/notifications/process_letter_notifications.py @@ -7,6 +7,7 @@ def create_letter_notification(letter_data, template, api_key, status, reply_to_ notification = persist_notification( template_id=template.id, template_version=template.version, + template_postage=template.postage, # we only accept addresses_with_underscores from the API (from CSV we also accept dashes, spaces etc) recipient=letter_data['personalisation']['address_line_1'], service=template.service, diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 88cec5b0d..f8850c3e9 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -32,8 +32,6 @@ from app.dao.notifications_dao import ( dao_created_scheduled_notification ) -from app.dao.templates_dao import dao_get_template_by_id - from app.v2.errors import BadRequestError from app.utils import ( cache_key_for_service_template_counter, @@ -76,7 +74,8 @@ def persist_notification( status=NOTIFICATION_CREATED, reply_to_text=None, billable_units=None, - postage=None + postage=None, + template_postage=None ): notification_created_at = created_at or datetime.utcnow() if not notification_id: @@ -116,9 +115,8 @@ def persist_notification( if postage: notification.postage = postage else: - template = dao_get_template_by_id(template_id, template_version) - if service.has_permission(CHOOSE_POSTAGE) and template.postage: - notification.postage = template.postage + if service.has_permission(CHOOSE_POSTAGE) and template_postage: + notification.postage = template_postage else: notification.postage = service.postage diff --git a/app/notifications/rest.py b/app/notifications/rest.py index 
04286688a..aa4be0ea9 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -124,6 +124,7 @@ def send_notification(notification_type): simulated = simulated_recipient(notification_form['to'], notification_type) notification_model = persist_notification(template_id=template.id, template_version=template.version, + template_postage=template.postage, recipient=request.get_json()['to'], service=authenticated_service, personalisation=notification_form.get('personalisation', None), diff --git a/app/service/send_notification.py b/app/service/send_notification.py index a00d151a4..26307b8c3 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -77,6 +77,7 @@ def send_one_off_notification(service_id, post_data): notification = persist_notification( template_id=template.id, template_version=template.version, + template_postage=template.postage, recipient=post_data['to'], service=service, personalisation=personalisation, diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 192644bde..9710146e0 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -504,6 +504,7 @@ def test_persist_letter_notification_finds_correct_postage( persist_notification( template_id=template.id, template_version=template.version, + template_postage=template.postage, recipient="Jane Doe, 10 Downing Street, London", service=service, personalisation=None, diff --git a/tests/app/service/test_send_one_off_notification.py b/tests/app/service/test_send_one_off_notification.py index 0c20611dc..b70459fc8 100644 --- a/tests/app/service/test_send_one_off_notification.py +++ b/tests/app/service/test_send_one_off_notification.py @@ -90,6 +90,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_sms( persist_mock.assert_called_once_with( template_id=template.id, template_version=template.version, + 
template_postage=None, recipient=post_data['to'], service=template.service, personalisation={'name': 'foo'}, @@ -127,6 +128,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_email( persist_mock.assert_called_once_with( template_id=template.id, template_version=template.version, + template_postage=None, recipient=post_data['to'], service=template.service, personalisation={'name': 'foo'}, @@ -153,6 +155,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_letter( template = create_template( service=service, template_type=LETTER_TYPE, + postage='first', subject="Test subject", content="Hello (( Name))\nYour thing is due soon", ) @@ -174,6 +177,7 @@ def test_send_one_off_notification_calls_persist_correctly_for_letter( persist_mock.assert_called_once_with( template_id=template.id, template_version=template.version, + template_postage='first', recipient=post_data['to'], service=template.service, personalisation=post_data['personalisation'], From 52831813d89dfb237e6cf15a0c0d51cee032bf80 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 15 Jan 2019 11:55:45 +0000 Subject: [PATCH 100/118] Change template statistics endpoint to use fact_notification_status_dao --- app/dao/fact_notification_status_dao.py | 6 +- app/template_statistics/rest.py | 16 +- .../dao/test_fact_notification_status_dao.py | 2 +- tests/app/template_statistics/test_rest.py | 177 +++--------------- 4 files changed, 44 insertions(+), 157 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 44c1a7b8b..3e4339016 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -139,7 +139,11 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ all_stats_table = stats_for_7_days.union_all(stats_for_today).subquery() query = db.session.query( - *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if by_template else 
[]), + *([ + Template.name.label("template_name"), + Template.is_precompiled_letter, + all_stats_table.c.template_id + ] if by_template else []), all_stats_table.c.notification_type, all_stats_table.c.status, func.cast(func.sum(all_stats_table.c.count), Integer).label('count'), diff --git a/app/template_statistics/rest.py b/app/template_statistics/rest.py index 1c0f3b27d..f1c2ff1f0 100644 --- a/app/template_statistics/rest.py +++ b/app/template_statistics/rest.py @@ -14,6 +14,7 @@ from app.dao.templates_dao import ( dao_get_multiple_template_details, dao_get_template_by_id_and_service_id ) +from app.dao.fact_notification_status_dao import fetch_notification_status_for_service_for_today_and_7_previous_days from app.schemas import notification_with_template_schema from app.utils import cache_key_for_service_template_usage_per_day, last_n_days @@ -39,8 +40,21 @@ def get_template_statistics_for_service_by_day(service_id): if whole_days < 0 or whole_days > 7: raise InvalidRequest({'whole_days': ['whole_days must be between 0 and 7']}, status_code=400) + data = fetch_notification_status_for_service_for_today_and_7_previous_days( + service_id, by_template=True, limit_days=whole_days + ) - return jsonify(data=_get_template_statistics_for_last_n_days(service_id, whole_days)) + return jsonify(data=[ + { + 'count': row.count, + 'template_id': str(row.template_id), + 'template_name': row.template_name, + 'template_type': row.notification_type, + 'is_precompiled_letter': row.is_precompiled_letter, + 'status': row.status + } + for row in data + ]) @template_statistics.route('/') diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 7b5832130..5267261b5 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -268,7 +268,7 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ ('sms Template Name', False, mock.ANY, 
'sms', 'delivered', 8), ('sms Template Name', False, mock.ANY, 'sms', 'delivered', 10), ('sms Template Name', False, mock.ANY, 'sms', 'delivered', 11), - ] == sorted(results, key=lambda x: (x.notification_type, x.status, x.name, x.count)) + ] == sorted(results, key=lambda x: (x.notification_type, x.status, x.template_name, x.count)) @pytest.mark.parametrize( diff --git a/tests/app/template_statistics/test_rest.py b/tests/app/template_statistics/test_rest.py index 1a37cc6e7..45659a712 100644 --- a/tests/app/template_statistics/test_rest.py +++ b/tests/app/template_statistics/test_rest.py @@ -1,15 +1,10 @@ import uuid -from datetime import datetime -from unittest.mock import Mock, call, ANY +from unittest.mock import Mock import pytest -from flask import current_app from freezegun import freeze_time -from tests.app.db import ( - create_notification, - create_template, -) +from tests.app.db import create_notification def set_up_get_all_from_hash(mock_redis, side_effect): @@ -80,169 +75,46 @@ def test_get_template_statistics_for_service_by_day_accepts_old_query_string( assert len(json_resp['data']) == 1 -@freeze_time('2018-01-01 12:00:00') -def test_get_template_statistics_for_service_by_day_gets_out_of_redis_if_available( - admin_request, - mocker, - sample_template -): - mock_redis = mocker.patch('app.template_statistics.rest.redis_store') - set_up_get_all_from_hash(mock_redis, [ - {sample_template.id: 3} - ]) - - json_resp = admin_request.get( - 'template_statistics.get_template_statistics_for_service_by_day', - service_id=sample_template.service_id, - whole_days=0 - ) - - assert len(json_resp['data']) == 1 - assert json_resp['data'][0]['count'] == 3 - assert json_resp['data'][0]['template_id'] == str(sample_template.id) - mock_redis.get_all_from_hash.assert_called_once_with( - 'service-{}-template-usage-{}'.format(sample_template.service_id, '2018-01-01') - ) - - @freeze_time('2018-01-02 12:00:00') -def 
test_get_template_statistics_for_service_by_day_goes_to_db_if_not_in_redis( +def test_get_template_statistics_for_service_by_day_goes_to_db( admin_request, mocker, sample_template ): - mock_redis = mocker.patch('app.template_statistics.rest.redis_store') # first time it is called redis returns data, second time returns none - set_up_get_all_from_hash(mock_redis, [ - {sample_template.id: 2}, - None - ]) mock_dao = mocker.patch( - 'app.template_statistics.rest.dao_get_template_usage', + 'app.template_statistics.rest.fetch_notification_status_for_service_for_today_and_7_previous_days', return_value=[ - Mock(id=sample_template.id, count=3) + Mock( + template_id=sample_template.id, + count=3, + template_name=sample_template.name, + notification_type=sample_template.template_type, + status='created', + is_precompiled_letter=False + ) ] ) - json_resp = admin_request.get( 'template_statistics.get_template_statistics_for_service_by_day', service_id=sample_template.service_id, whole_days=1 ) - assert len(json_resp['data']) == 1 - assert json_resp['data'][0]['count'] == 5 - assert json_resp['data'][0]['template_id'] == str(sample_template.id) - # first redis call - assert mock_redis.get_all_from_hash.mock_calls == [ - call('service-{}-template-usage-{}'.format(sample_template.service_id, '2018-01-01')), - call('service-{}-template-usage-{}'.format(sample_template.service_id, '2018-01-02')) - ] + assert json_resp['data'] == [{ + "template_id": str(sample_template.id), + "count": 3, + "template_name": sample_template.name, + "template_type": sample_template.template_type, + "status": "created", + "is_precompiled_letter": False + + }] # dao only called for 2nd, since redis returned values for first call mock_dao.assert_called_once_with( - str(sample_template.service_id), day=datetime(2018, 1, 2) + str(sample_template.service_id), limit_days=1, by_template=True ) - mock_redis.set_hash_and_expire.assert_called_once_with( - 
'service-{}-template-usage-{}'.format(sample_template.service_id, '2018-01-02'), - # sets the data that the dao returned - {str(sample_template.id): 3}, - current_app.config['EXPIRE_CACHE_EIGHT_DAYS'] - ) - - -def test_get_template_statistics_for_service_by_day_combines_templates_correctly( - admin_request, - mocker, - sample_service -): - t1 = create_template(sample_service, template_name='1') - t2 = create_template(sample_service, template_name='2') - t3 = create_template(sample_service, template_name='3') # noqa - mock_redis = mocker.patch('app.template_statistics.rest.redis_store') - - # first time it is called redis returns data, second time returns none - set_up_get_all_from_hash(mock_redis, [ - {t1.id: 2}, - None, - {t1.id: 1, t2.id: 4}, - ]) - mock_dao = mocker.patch( - 'app.template_statistics.rest.dao_get_template_usage', - return_value=[ - Mock(id=t1.id, count=8) - ] - ) - - json_resp = admin_request.get( - 'template_statistics.get_template_statistics_for_service_by_day', - service_id=sample_service.id, - whole_days=2 - ) - - assert len(json_resp['data']) == 2 - assert json_resp['data'][0]['template_id'] == str(t1.id) - assert json_resp['data'][0]['count'] == 11 - assert json_resp['data'][1]['template_id'] == str(t2.id) - assert json_resp['data'][1]['count'] == 4 - - assert mock_redis.get_all_from_hash.call_count == 3 - # dao only called for 2nd day - assert mock_dao.call_count == 1 - - -@freeze_time('2018-03-28 00:00:00') -def test_get_template_statistics_for_service_by_day_gets_stats_for_correct_days( - admin_request, - mocker, - sample_template -): - mock_redis = mocker.patch('app.template_statistics.rest.redis_store') - - # first time it is called redis returns data, second time returns none - set_up_get_all_from_hash(mock_redis, [ - {sample_template.id: 1}, # last weds - None, - {sample_template.id: 1}, - {sample_template.id: 1}, - {sample_template.id: 1}, - {sample_template.id: 1}, - None, - None, # current day - ]) - mock_dao = mocker.patch( - 
'app.template_statistics.rest.dao_get_template_usage', - return_value=[ - Mock(id=sample_template.id, count=2) - ] - ) - - json_resp = admin_request.get( - 'template_statistics.get_template_statistics_for_service_by_day', - service_id=sample_template.service_id, - whole_days=7 - ) - - assert len(json_resp['data']) == 1 - assert json_resp['data'][0]['count'] == 11 - assert json_resp['data'][0]['template_id'] == str(sample_template.id) - - assert mock_redis.get_all_from_hash.call_count == 8 - - assert '2018-03-21' in mock_redis.get_all_from_hash.mock_calls[0][1][0] # last wednesday - assert '2018-03-22' in mock_redis.get_all_from_hash.mock_calls[1][1][0] - assert '2018-03-23' in mock_redis.get_all_from_hash.mock_calls[2][1][0] - assert '2018-03-24' in mock_redis.get_all_from_hash.mock_calls[3][1][0] - assert '2018-03-25' in mock_redis.get_all_from_hash.mock_calls[4][1][0] - assert '2018-03-26' in mock_redis.get_all_from_hash.mock_calls[5][1][0] - assert '2018-03-27' in mock_redis.get_all_from_hash.mock_calls[6][1][0] - assert '2018-03-28' in mock_redis.get_all_from_hash.mock_calls[7][1][0] # current day (wednesday) - - mock_dao.mock_calls == [ - call(ANY, day=datetime(2018, 3, 22)), - call(ANY, day=datetime(2018, 3, 27)), - call(ANY, day=datetime(2018, 3, 28)) - ] def test_get_template_statistics_for_service_by_day_returns_empty_list_if_no_templates( @@ -250,7 +122,6 @@ def test_get_template_statistics_for_service_by_day_returns_empty_list_if_no_tem mocker, sample_service ): - mock_redis = mocker.patch('app.template_statistics.rest.redis_store') json_resp = admin_request.get( 'template_statistics.get_template_statistics_for_service_by_day', @@ -259,9 +130,7 @@ def test_get_template_statistics_for_service_by_day_returns_empty_list_if_no_tem ) assert len(json_resp['data']) == 0 - assert mock_redis.get_all_from_hash.call_count == 8 - # make sure we don't try and set any empty hashes in redis - assert mock_redis.set_hash_and_expire.call_count == 0 + # 
get_template_statistics_for_template From 3ce0024eeccc773444b2fabe5b9748c4e85415e3 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 15 Jan 2019 12:15:20 +0000 Subject: [PATCH 101/118] Remove unused functions for getting template statistics --- app/dao/notifications_dao.py | 34 ----- app/dao/templates_dao.py | 15 -- app/template_statistics/rest.py | 68 +-------- .../notification_dao/test_notification_dao.py | 2 - .../test_notification_dao_template_usage.py | 136 +----------------- tests/app/dao/test_templates_dao.py | 16 --- 6 files changed, 6 insertions(+), 265 deletions(-) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 9426a28c0..15c9e997a 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -30,7 +30,6 @@ from app.models import ( Notification, NotificationHistory, ScheduledNotification, - Template, KEY_TYPE_TEST, LETTER_TYPE, NOTIFICATION_CREATED, @@ -51,39 +50,6 @@ from app.utils import get_london_midnight_in_utc from app.utils import midnight_n_days_ago, escape_special_characters -@statsd(namespace="dao") -def dao_get_template_usage(service_id, day): - start = get_london_midnight_in_utc(day) - end = get_london_midnight_in_utc(day + timedelta(days=1)) - - notifications_aggregate_query = db.session.query( - func.count().label('count'), - Notification.template_id - ).filter( - Notification.created_at >= start, - Notification.created_at < end, - Notification.service_id == service_id, - Notification.key_type != KEY_TYPE_TEST, - ).group_by( - Notification.template_id - ).subquery() - - query = db.session.query( - Template.id, - Template.name, - Template.template_type, - Template.is_precompiled_letter, - func.coalesce(notifications_aggregate_query.c.count, 0).label('count') - ).outerjoin( - notifications_aggregate_query, - notifications_aggregate_query.c.template_id == Template.id - ).filter( - Template.service_id == service_id - ).order_by(Template.name) - - return query.all() - - 
@statsd(namespace="dao") def dao_get_last_template_usage(template_id, template_type, service_id): # By adding the service_id to the filter the performance of the query is greatly improved. diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py index e5e93199f..66cbe865a 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -129,18 +129,3 @@ def dao_get_template_versions(service_id, template_id): ).order_by( desc(TemplateHistory.version) ).all() - - -def dao_get_multiple_template_details(template_ids): - query = db.session.query( - Template.id, - Template.template_type, - Template.name, - Template.is_precompiled_letter - ).filter( - Template.id.in_(template_ids) - ).order_by( - Template.name - ) - - return query.all() diff --git a/app/template_statistics/rest.py b/app/template_statistics/rest.py index f1c2ff1f0..fc179b49a 100644 --- a/app/template_statistics/rest.py +++ b/app/template_statistics/rest.py @@ -1,25 +1,10 @@ -from flask import ( - Blueprint, - jsonify, - request, - current_app -) - -from app import redis_store -from app.dao.notifications_dao import ( - dao_get_template_usage, - dao_get_last_template_usage -) -from app.dao.templates_dao import ( - dao_get_multiple_template_details, - dao_get_template_by_id_and_service_id -) +from flask import Blueprint, jsonify, request +from app.dao.notifications_dao import dao_get_last_template_usage +from app.dao.templates_dao import dao_get_template_by_id_and_service_id from app.dao.fact_notification_status_dao import fetch_notification_status_for_service_for_today_and_7_previous_days from app.schemas import notification_with_template_schema -from app.utils import cache_key_for_service_template_usage_per_day, last_n_days from app.errors import register_errors, InvalidRequest -from collections import Counter template_statistics = Blueprint('template_statistics', __name__, @@ -67,50 +52,3 @@ def get_template_statistics_for_template_id(service_id, template_id): data = 
notification_with_template_schema.dump(notification).data return jsonify(data=data) - - -def _get_template_statistics_for_last_n_days(service_id, whole_days): - template_stats_by_id = Counter() - - # 0 whole_days = last 1 days (ie since midnight today) = today. - # 7 whole days = last 8 days (ie since midnight this day last week) = a week and a bit - for day in last_n_days(whole_days + 1): - # "{SERVICE_ID}-template-usage-{YYYY-MM-DD}" - key = cache_key_for_service_template_usage_per_day(service_id, day) - stats = redis_store.get_all_from_hash(key) - if stats: - stats = { - k.decode('utf-8'): int(v) for k, v in stats.items() - } - else: - # key didn't exist (or redis was down) - lets populate from DB. - stats = { - str(row.id): row.count for row in dao_get_template_usage(service_id, day=day) - } - # if there is data in db, but not in redis - lets put it in redis so we don't have to do - # this calc again next time. If there isn't any data, we can't put it in redis. - # Zero length hashes aren't a thing in redis. (There'll only be no data if the service has no templates) - # Nothing is stored if redis is down. 
- if stats: - redis_store.set_hash_and_expire( - key, - stats, - current_app.config['EXPIRE_CACHE_EIGHT_DAYS'] - ) - template_stats_by_id += Counter(stats) - - # attach count from stats to name/type/etc from database - template_details = dao_get_multiple_template_details(template_stats_by_id.keys()) - return [ - { - 'count': template_stats_by_id[str(template.id)], - 'template_id': str(template.id), - 'template_name': template.name, - 'template_type': template.template_type, - 'is_precompiled_letter': template.is_precompiled_letter - } - for template in template_details - # we don't want to return templates with no count to the front-end, - # but they're returned from the DB and might be put in redis like that (if there was no data that day) - if template_stats_by_id[str(template.id)] != 0 - ] diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index 197b6556a..90dc73035 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -16,7 +16,6 @@ from app.dao.notifications_dao import ( dao_get_last_template_usage, dao_get_notifications_by_to_field, dao_get_scheduled_notifications, - dao_get_template_usage, dao_timeout_notifications, dao_update_notification, dao_update_notifications_by_reference, @@ -70,7 +69,6 @@ from tests.app.db import ( def test_should_have_decorated_notifications_dao_functions(): assert dao_get_last_template_usage.__wrapped__.__name__ == 'dao_get_last_template_usage' # noqa - assert dao_get_template_usage.__wrapped__.__name__ == 'dao_get_template_usage' # noqa assert dao_create_notification.__wrapped__.__name__ == 'dao_create_notification' # noqa assert update_notification_status_by_id.__wrapped__.__name__ == 'update_notification_status_by_id' # noqa assert dao_update_notification.__wrapped__.__name__ == 'dao_update_notification' # noqa diff --git 
a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py index 88aaa783d..4006fd9c2 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_template_usage.py +++ b/tests/app/dao/notification_dao/test_notification_dao_template_usage.py @@ -1,23 +1,7 @@ -import uuid -from datetime import datetime, timedelta, date - +from datetime import datetime, timedelta import pytest -from freezegun import freeze_time - -from app.dao.notifications_dao import ( - dao_get_last_template_usage, - dao_get_template_usage -) -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - KEY_TYPE_TEAM -) -from tests.app.db import ( - create_notification, - create_service, - create_template -) +from app.dao.notifications_dao import dao_get_last_template_usage +from tests.app.db import create_notification, create_template def test_last_template_usage_should_get_right_data(sample_notification): @@ -70,117 +54,3 @@ def test_last_template_usage_should_be_able_to_get_no_template_usage_history_if_ sample_template): results = dao_get_last_template_usage(sample_template.id, 'sms', sample_template.service_id) assert not results - - -@freeze_time('2018-01-01') -def test_should_by_able_to_get_template_count(sample_template, sample_email_template): - create_notification(sample_template) - create_notification(sample_template) - create_notification(sample_template) - create_notification(sample_email_template) - create_notification(sample_email_template) - - results = dao_get_template_usage(sample_template.service_id, date.today()) - assert results[0].name == sample_email_template.name - assert results[0].template_type == sample_email_template.template_type - assert results[0].count == 2 - - assert results[1].name == sample_template.name - assert results[1].template_type == sample_template.template_type - assert results[1].count == 3 - - -@freeze_time('2018-01-01') -def 
test_template_usage_should_ignore_test_keys( - sample_team_api_key, - sample_test_api_key, - sample_api_key, - sample_template -): - - create_notification(sample_template, api_key=sample_api_key, key_type=KEY_TYPE_NORMAL) - create_notification(sample_template, api_key=sample_team_api_key, key_type=KEY_TYPE_TEAM) - create_notification(sample_template, api_key=sample_test_api_key, key_type=KEY_TYPE_TEST) - create_notification(sample_template) - - results = dao_get_template_usage(sample_template.service_id, date.today()) - assert results[0].name == sample_template.name - assert results[0].template_type == sample_template.template_type - assert results[0].count == 3 - - -def test_template_usage_should_filter_by_service(notify_db_session): - service_1 = create_service(service_name='test1') - service_2 = create_service(service_name='test2') - service_3 = create_service(service_name='test3') - - template_1 = create_template(service_1) - template_2 = create_template(service_2) # noqa - template_3a = create_template(service_3, template_name='a') - template_3b = create_template(service_3, template_name='b') # noqa - - # two for service_1, one for service_3 - create_notification(template_1) - create_notification(template_1) - - create_notification(template_3a) - - res1 = dao_get_template_usage(service_1.id, date.today()) - res2 = dao_get_template_usage(service_2.id, date.today()) - res3 = dao_get_template_usage(service_3.id, date.today()) - - assert len(res1) == 1 - assert res1[0].count == 2 - - assert len(res2) == 1 - assert res2[0].count == 0 - - assert len(res3) == 2 - assert res3[0].count == 1 - assert res3[1].count == 0 - - -def test_template_usage_should_by_able_to_get_zero_count_from_notifications_history_if_no_rows(sample_service): - results = dao_get_template_usage(sample_service.id, date.today()) - assert len(results) == 0 - - -def test_template_usage_should_by_able_to_get_zero_count_from_notifications_history_if_no_service(): - results = 
dao_get_template_usage(str(uuid.uuid4()), date.today()) - assert len(results) == 0 - - -def test_template_usage_should_by_able_to_get_template_count_for_specific_day(sample_template): - # too early - create_notification(sample_template, created_at=datetime(2017, 6, 7, 22, 59, 0)) - # just right - create_notification(sample_template, created_at=datetime(2017, 6, 7, 23, 0, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 7, 23, 0, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 8, 22, 59, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 8, 22, 59, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 8, 22, 59, 0)) - # too late - create_notification(sample_template, created_at=datetime(2017, 6, 8, 23, 0, 0)) - - results = dao_get_template_usage(sample_template.service_id, day=date(2017, 6, 8)) - - assert len(results) == 1 - assert results[0].count == 5 - - -def test_template_usage_should_by_able_to_get_template_count_for_specific_timezone_boundary(sample_template): - # too early - create_notification(sample_template, created_at=datetime(2018, 3, 24, 23, 59, 0)) - # just right - create_notification(sample_template, created_at=datetime(2018, 3, 25, 0, 0, 0)) - create_notification(sample_template, created_at=datetime(2018, 3, 25, 0, 0, 0)) - create_notification(sample_template, created_at=datetime(2018, 3, 25, 22, 59, 0)) - create_notification(sample_template, created_at=datetime(2018, 3, 25, 22, 59, 0)) - create_notification(sample_template, created_at=datetime(2018, 3, 25, 22, 59, 0)) - # too late - create_notification(sample_template, created_at=datetime(2018, 3, 25, 23, 0, 0)) - - results = dao_get_template_usage(sample_template.service_id, day=date(2018, 3, 25)) - - assert len(results) == 1 - assert results[0].count == 5 diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index cbe6c6b72..f585e2b5d 100644 --- 
a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -11,7 +11,6 @@ from app.dao.templates_dao import ( dao_get_all_templates_for_service, dao_update_template, dao_get_template_versions, - dao_get_multiple_template_details, dao_redact_template, dao_update_template_reply_to ) from app.models import ( @@ -511,21 +510,6 @@ def test_get_template_versions_is_empty_for_hidden_templates(notify_db, notify_d assert len(versions) == 0 -def test_get_multiple_template_details_returns_templates_for_list_of_ids(sample_service): - t1 = create_template(sample_service) - t2 = create_template(sample_service) - create_template(sample_service) # t3 - - res = dao_get_multiple_template_details([t1.id, t2.id]) - - assert {x.id for x in res} == {t1.id, t2.id} - # make sure correct properties are on each row - assert res[0].id - assert res[0].template_type - assert res[0].name - assert not res[0].is_precompiled_letter - - @pytest.mark.parametrize("template_type,postage", [('letter', 'third'), ('sms', 'second')]) def test_template_postage_constraint_on_create(sample_service, sample_user, template_type, postage): data = { From d36c4d8a7872a9fd38fb59bb4ec55aeac438594d Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 15 Jan 2019 14:38:45 +0000 Subject: [PATCH 102/118] Remove now unused methods that populated template usage redis cache --- app/commands.py | 54 +------------- app/notifications/process_notifications.py | 19 +---- app/utils.py | 7 -- tests/app/commands/test_populate_redis.py | 73 ------------------- .../test_process_notification.py | 48 ------------ 5 files changed, 4 insertions(+), 197 deletions(-) delete mode 100644 tests/app/commands/test_populate_redis.py diff --git a/app/commands.py b/app/commands.py index 41f58c4dd..b14e5680d 100644 --- a/app/commands.py +++ b/app/commands.py @@ -1,4 +1,3 @@ -import sys import functools import uuid from datetime import datetime, timedelta @@ -9,10 +8,9 @@ import flask from click_datetime import Datetime 
as click_dt from flask import current_app, json from sqlalchemy.orm.exc import NoResultFound -from sqlalchemy import func from notifications_utils.statsd_decorators import statsd -from app import db, DATETIME_FORMAT, encryption, redis_store +from app import db, DATETIME_FORMAT, encryption from app.celery.scheduled_tasks import send_total_sent_notifications_to_performance_platform from app.celery.service_callback_tasks import send_delivery_status_to_service from app.celery.letters_pdf_tasks import create_letters_pdf @@ -34,11 +32,7 @@ from app.dao.services_dao import ( from app.dao.users_dao import delete_model_user, delete_user_verify_codes from app.models import PROVIDERS, User, Notification from app.performance_platform.processing_time import send_processing_time_for_start_and_end -from app.utils import ( - cache_key_for_service_template_usage_per_day, - get_london_midnight_in_utc, - get_midnight_for_day_before, -) +from app.utils import get_london_midnight_in_utc, get_midnight_for_day_before @click.group(name='command', help='Additional commands') @@ -430,50 +424,6 @@ def migrate_data_to_ft_billing(start_date, end_date): current_app.logger.info('Total inserted/updated records = {}'.format(total_updated)) -@notify_command() -@click.option('-s', '--service_id', required=True, type=click.UUID) -@click.option('-d', '--day', required=True, type=click_dt(format='%Y-%m-%d')) -def populate_redis_template_usage(service_id, day): - """ - Recalculate and replace the stats in redis for a day. - To be used if redis data is lost for some reason. 
- """ - if not current_app.config['REDIS_ENABLED']: - current_app.logger.error('Cannot populate redis template usage - redis not enabled') - sys.exit(1) - - # the day variable is set by click to be midnight of that day - start_time = get_london_midnight_in_utc(day) - end_time = get_london_midnight_in_utc(day + timedelta(days=1)) - - usage = { - str(row.template_id): row.count - for row in db.session.query( - Notification.template_id, - func.count().label('count') - ).filter( - Notification.service_id == service_id, - Notification.created_at >= start_time, - Notification.created_at < end_time - ).group_by( - Notification.template_id - ) - } - current_app.logger.info('Populating usage dict for service {} day {}: {}'.format( - service_id, - day, - usage.items()) - ) - if usage: - key = cache_key_for_service_template_usage_per_day(service_id, day) - redis_store.set_hash_and_expire( - key, - usage, - current_app.config['EXPIRE_CACHE_EIGHT_DAYS'], - raise_exception=True - ) - - @notify_command(name='rebuild-ft-billing-for-day') @click.option('-s', '--service_id', required=False, type=click.UUID) @click.option('-d', '--day', help="The date to recalculate, as YYYY-MM-DD", required=True, diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 8fc2f15f6..c71fb1631 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -9,7 +9,7 @@ from notifications_utils.recipients import ( validate_and_format_phone_number, format_email_address ) -from notifications_utils.timezones import convert_bst_to_utc, convert_utc_to_bst +from notifications_utils.timezones import convert_bst_to_utc from app import redis_store from app.celery import provider_tasks @@ -35,11 +35,7 @@ from app.dao.notifications_dao import ( from app.dao.templates_dao import dao_get_template_by_id from app.v2.errors import BadRequestError -from app.utils import ( - cache_key_for_service_template_counter, - 
cache_key_for_service_template_usage_per_day, - get_template_instance, -) +from app.utils import cache_key_for_service_template_counter, get_template_instance def create_content_for_notification(template, personalisation): @@ -127,23 +123,12 @@ def persist_notification( if redis_store.get_all_from_hash(cache_key_for_service_template_counter(service.id)): redis_store.increment_hash_value(cache_key_for_service_template_counter(service.id), template_id) - increment_template_usage_cache(service.id, template_id, notification_created_at) - current_app.logger.info( "{} {} created at {}".format(notification_type, notification_id, notification_created_at) ) return notification -def increment_template_usage_cache(service_id, template_id, created_at): - key = cache_key_for_service_template_usage_per_day(service_id, convert_utc_to_bst(created_at)) - redis_store.increment_hash_value(key, template_id) - # set key to expire in eight days - we don't know if we've just created the key or not, so must assume that we - # have and reset the expiry. 
Eight days is longer than any notification is in the notifications table, so we'll - # always capture the full week's numbers - redis_store.expire(key, current_app.config['EXPIRE_CACHE_EIGHT_DAYS']) - - def send_notification_to_queue(notification, research_mode, queue=None): if research_mode or notification.key_type == KEY_TYPE_TEST: queue = QueueNames.RESEARCH_MODE diff --git a/app/utils.py b/app/utils.py index b00a53bda..25bbab968 100644 --- a/app/utils.py +++ b/app/utils.py @@ -72,13 +72,6 @@ def cache_key_for_service_template_counter(service_id, limit_days=7): return "{}-template-counter-limit-{}-days".format(service_id, limit_days) -def cache_key_for_service_template_usage_per_day(service_id, datetime): - """ - You should pass a BST datetime into this function - """ - return "service-{}-template-usage-{}".format(service_id, datetime.date().isoformat()) - - def get_public_notify_type_text(notify_type, plural=False): from app.models import (SMS_TYPE, UPLOAD_DOCUMENT, PRECOMPILED_LETTER) notify_type_text = notify_type diff --git a/tests/app/commands/test_populate_redis.py b/tests/app/commands/test_populate_redis.py deleted file mode 100644 index 25001642a..000000000 --- a/tests/app/commands/test_populate_redis.py +++ /dev/null @@ -1,73 +0,0 @@ -from datetime import datetime - -from freezegun import freeze_time -import pytest - -from app.commands import populate_redis_template_usage - -from tests.conftest import set_config -from tests.app.db import create_notification, create_template, create_service - - -def test_populate_redis_template_usage_does_nothing_if_redis_disabled(mocker, notify_api, sample_service): - mock_redis = mocker.patch('app.commands.redis_store') - with set_config(notify_api, 'REDIS_ENABLED', False): - with pytest.raises(SystemExit) as exit_signal: - populate_redis_template_usage.callback.__wrapped__(sample_service.id, datetime.utcnow()) - - assert mock_redis.mock_calls == [] - # sys.exit with nonzero exit code - assert exit_signal.value.code != 
0 - - -def test_populate_redis_template_usage_does_nothing_if_no_data(mocker, notify_api, sample_service): - mock_redis = mocker.patch('app.commands.redis_store') - with set_config(notify_api, 'REDIS_ENABLED', True): - populate_redis_template_usage.callback.__wrapped__(sample_service.id, datetime.utcnow()) - - assert mock_redis.mock_calls == [] - - -@freeze_time('2017-06-12') -def test_populate_redis_template_usage_only_populates_for_today(mocker, notify_api, sample_template): - mock_redis = mocker.patch('app.commands.redis_store') - # created at in utc - create_notification(sample_template, created_at=datetime(2017, 6, 9, 23, 0, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 9, 23, 0, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 10, 0, 0, 0)) - create_notification(sample_template, created_at=datetime(2017, 6, 10, 23, 0, 0)) # actually on 11th BST - - with set_config(notify_api, 'REDIS_ENABLED', True): - populate_redis_template_usage.callback.__wrapped__(sample_template.service_id, datetime(2017, 6, 10)) - - mock_redis.set_hash_and_expire.assert_called_once_with( - 'service-{}-template-usage-2017-06-10'.format(sample_template.service_id), - {str(sample_template.id): 3}, - notify_api.config['EXPIRE_CACHE_EIGHT_DAYS'], - raise_exception=True - ) - - -@freeze_time('2017-06-12') -def test_populate_redis_template_usage_only_populates_for_given_service(mocker, notify_api, notify_db_session): - mock_redis = mocker.patch('app.commands.redis_store') - # created at in utc - s1 = create_service(service_name='a') - s2 = create_service(service_name='b') - t1 = create_template(s1) - t2 = create_template(s2) - - create_notification(t1, created_at=datetime(2017, 6, 10)) - create_notification(t1, created_at=datetime(2017, 6, 10)) - - create_notification(t2, created_at=datetime(2017, 6, 10)) - - with set_config(notify_api, 'REDIS_ENABLED', True): - populate_redis_template_usage.callback.__wrapped__(s1.id, datetime(2017, 6, 10)) - - 
mock_redis.set_hash_and_expire.assert_called_once_with( - 'service-{}-template-usage-2017-06-10'.format(s1.id), - {str(t1.id): 2}, - notify_api.config['EXPIRE_CACHE_EIGHT_DAYS'], - raise_exception=True - ) diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 192644bde..5d02d7bee 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -213,7 +213,6 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key, mocker) @freeze_time("2016-01-01 11:09:00.061258") def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(sample_template, sample_api_key, mocker): mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mock_incr_hash_value = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value') mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None) mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value=None) @@ -229,16 +228,11 @@ def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(sa reference="ref" ) mock_incr.assert_not_called() - mock_incr_hash_value.assert_called_once_with( - "service-{}-template-usage-2016-01-01".format(sample_template.service_id), - sample_template.id - ) @freeze_time("2016-01-01 11:09:00.061258") def test_persist_notification_increments_cache_if_key_exists(sample_template, sample_api_key, mocker): mock_incr = mocker.patch('app.notifications.process_notifications.redis_store.incr') - mock_incr_hash_value = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value') mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=1) mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value={sample_template.id, 1}) @@ 
-255,10 +249,6 @@ def test_persist_notification_increments_cache_if_key_exists(sample_template, sa reference="ref2") mock_incr.assert_called_once_with(str(sample_template.service_id) + "-2016-01-01-count", ) - assert mock_incr_hash_value.mock_calls == [ - call("{}-template-counter-limit-7-days".format(sample_template.service_id), sample_template.id), - call("service-{}-template-usage-2016-01-01".format(sample_template.service_id), sample_template.id), - ] @pytest.mark.parametrize(( @@ -516,44 +506,6 @@ def test_persist_letter_notification_finds_correct_postage( assert persisted_notification.postage == expected_postage -@pytest.mark.parametrize('utc_time, day_in_key', [ - ('2016-01-01 23:00:00', '2016-01-01'), - ('2016-06-01 22:59:00', '2016-06-01'), - ('2016-06-01 23:00:00', '2016-06-02'), -]) -def test_persist_notification_increments_and_expires_redis_template_usage( - utc_time, - day_in_key, - sample_template, - sample_api_key, - mocker -): - mock_incr_hash_value = mocker.patch('app.notifications.process_notifications.redis_store.increment_hash_value') - mock_expire = mocker.patch('app.notifications.process_notifications.redis_store.expire') - mocker.patch('app.notifications.process_notifications.redis_store.get', return_value=None) - mocker.patch('app.notifications.process_notifications.redis_store.get_all_from_hash', return_value=None) - - with freeze_time(utc_time): - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient='+447111111122', - service=sample_template.service, - personalisation={}, - notification_type='sms', - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - ) - mock_incr_hash_value.assert_called_once_with( - 'service-{}-template-usage-{}'.format(str(sample_template.service_id), day_in_key), - sample_template.id - ) - mock_expire.assert_called_once_with( - 'service-{}-template-usage-{}'.format(str(sample_template.service_id), day_in_key), - 
current_app.config['EXPIRE_CACHE_EIGHT_DAYS'] - ) - - def test_persist_notification_with_billable_units_stores_correct_info( sample_template, ): From ac3832a91860f182b40d4831019b3792ff7ecb04 Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Tue, 15 Jan 2019 14:46:40 +0000 Subject: [PATCH 103/118] Remove old redis template cache --- app/notifications/process_notifications.py | 4 +--- app/utils.py | 4 ---- tests/app/notifications/test_process_notification.py | 6 ------ 3 files changed, 1 insertion(+), 13 deletions(-) diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index c71fb1631..8afc5859b 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -35,7 +35,7 @@ from app.dao.notifications_dao import ( from app.dao.templates_dao import dao_get_template_by_id from app.v2.errors import BadRequestError -from app.utils import cache_key_for_service_template_counter, get_template_instance +from app.utils import get_template_instance def create_content_for_notification(template, personalisation): @@ -120,8 +120,6 @@ def persist_notification( if key_type != KEY_TYPE_TEST: if redis_store.get(redis.daily_limit_cache_key(service.id)): redis_store.incr(redis.daily_limit_cache_key(service.id)) - if redis_store.get_all_from_hash(cache_key_for_service_template_counter(service.id)): - redis_store.increment_hash_value(cache_key_for_service_template_counter(service.id), template_id) current_app.logger.info( "{} {} created at {}".format(notification_type, notification_id, notification_created_at) diff --git a/app/utils.py b/app/utils.py index 25bbab968..d8916341f 100644 --- a/app/utils.py +++ b/app/utils.py @@ -68,10 +68,6 @@ def get_london_month_from_utc_column(column): ) -def cache_key_for_service_template_counter(service_id, limit_days=7): - return "{}-template-counter-limit-{}-days".format(service_id, limit_days) - - def get_public_notify_type_text(notify_type, plural=False): from 
app.models import (SMS_TYPE, UPLOAD_DOCUMENT, PRECOMPILED_LETTER) notify_type_text = notify_type diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 5d02d7bee..9ab89c162 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -1,13 +1,11 @@ import datetime import uuid -from unittest.mock import call import pytest from boto3.exceptions import Boto3Error from sqlalchemy.exc import SQLAlchemyError from freezegun import freeze_time from collections import namedtuple -from flask import current_app from app.models import ( Notification, @@ -25,7 +23,6 @@ from app.notifications.process_notifications import ( simulated_recipient ) from notifications_utils.recipients import validate_and_format_phone_number, validate_and_format_email_address -from app.utils import cache_key_for_service_template_counter from app.v2.errors import BadRequestError from tests.app.conftest import sample_api_key as create_api_key @@ -172,8 +169,6 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key, mocker) assert Notification.query.count() == 0 assert NotificationHistory.query.count() == 0 mocked_redis = mocker.patch('app.notifications.process_notifications.redis_store.get') - mock_service_template_cache = mocker.patch( - 'app.notifications.process_notifications.redis_store.get_all_from_hash') n_id = uuid.uuid4() created_at = datetime.datetime(2016, 11, 11, 16, 8, 18) persist_notification( @@ -200,7 +195,6 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key, mocker) assert persisted_notification.job_row_number == 10 assert persisted_notification.created_at == created_at mocked_redis.assert_called_once_with(str(sample_job.service_id) + "-2016-01-01-count") - mock_service_template_cache.assert_called_once_with(cache_key_for_service_template_counter(sample_job.service_id)) assert 
persisted_notification.client_reference == "ref from client" assert persisted_notification.reference is None assert persisted_notification.international is False From a4d89359c527e59bc09dd58c9dbea63042e8dc98 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 15 Jan 2019 16:13:38 +0000 Subject: [PATCH 104/118] Adding a filter to exclude test keys for the template monthly usage query. Added a test. --- app/dao/fact_notification_status_dao.py | 3 ++- .../dao/test_fact_notification_status_dao.py | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 6bb341409..c90e1685c 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -312,7 +312,8 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): FactNotificationStatus.service_id == service_id, FactNotificationStatus.bst_date >= start_date, FactNotificationStatus.bst_date <= end_date, - FactNotificationStatus.notification_status != NOTIFICATION_CANCELLED + FactNotificationStatus.key_type != KEY_TYPE_TEST, + FactNotificationStatus.notification_status != NOTIFICATION_CANCELLED, ).group_by( FactNotificationStatus.template_id, Template.name, diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index a7adffce0..33579596c 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -461,3 +461,21 @@ def test_fetch_monthly_template_usage_for_service_does_not_include_cancelled_sta ) assert len(results) == 0 + + +@freeze_time('2018-03-30 14:00') +def test_fetch_monthly_template_usage_for_service_does_not_include_test_notifications( + sample_template +): + create_ft_notification_status(bst_date=date(2018, 3, 1), + service=sample_template.service, + template=sample_template, + notification_status='delivered', + 
key_type='test', + count=15) + create_notification(template=sample_template, created_at=datetime.utcnow(), status='cancelled') + results = fetch_monthly_template_usage_for_service( + datetime(2018, 1, 1), datetime(2018, 3, 31), sample_template.service_id + ) + + assert len(results) == 0 From 3dca36ecfc03ae79805294e24bdf88d6b81168ad Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 15 Jan 2019 16:16:19 +0000 Subject: [PATCH 105/118] Actually test the right thing :) --- tests/app/dao/test_fact_notification_status_dao.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 33579596c..c84f65b06 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -473,7 +473,10 @@ def test_fetch_monthly_template_usage_for_service_does_not_include_test_notifica notification_status='delivered', key_type='test', count=15) - create_notification(template=sample_template, created_at=datetime.utcnow(), status='cancelled') + create_notification(template=sample_template, + created_at=datetime.utcnow(), + status='delivered', + key_type='test',) results = fetch_monthly_template_usage_for_service( datetime(2018, 1, 1), datetime(2018, 3, 31), sample_template.service_id ) From e148eca6ff033206a6642ad96808ade904a77985 Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Tue, 15 Jan 2019 16:55:56 +0000 Subject: [PATCH 106/118] Drop stats_template_usage_by_month table as it is no longer needed. 
--- app/models.py | 42 ------------------- .../0250_drop_stats_template_table.py | 36 ++++++++++++++++ 2 files changed, 36 insertions(+), 42 deletions(-) create mode 100644 migrations/versions/0250_drop_stats_template_table.py diff --git a/app/models.py b/app/models.py index 555a27ddd..b4ef0b8d3 100644 --- a/app/models.py +++ b/app/models.py @@ -1834,48 +1834,6 @@ class AuthType(db.Model): name = db.Column(db.String, primary_key=True) -class StatsTemplateUsageByMonth(db.Model): - __tablename__ = "stats_template_usage_by_month" - - template_id = db.Column( - UUID(as_uuid=True), - db.ForeignKey('templates.id'), - unique=False, - index=True, - nullable=False, - primary_key=True - ) - month = db.Column( - db.Integer, - nullable=False, - index=True, - unique=False, - primary_key=True, - default=datetime.datetime.month - ) - year = db.Column( - db.Integer, - nullable=False, - index=True, - unique=False, - primary_key=True, - default=datetime.datetime.year - ) - count = db.Column( - db.Integer, - nullable=False, - default=0 - ) - - def serialize(self): - return { - 'template_id': str(self.template_id), - 'month': self.month, - 'year': self.year, - 'count': self.count - } - - class DailySortedLetter(db.Model): __tablename__ = "daily_sorted_letter" diff --git a/migrations/versions/0250_drop_stats_template_table.py b/migrations/versions/0250_drop_stats_template_table.py new file mode 100644 index 000000000..f44af5384 --- /dev/null +++ b/migrations/versions/0250_drop_stats_template_table.py @@ -0,0 +1,36 @@ +""" + +Revision ID: 0250_drop_stats_template_table +Revises: 0249_another_letter_org +Create Date: 2019-01-15 16:47:08.049369 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0250_drop_stats_template_table' +down_revision = '0249_another_letter_org' + + +def upgrade(): + op.drop_index('ix_stats_template_usage_by_month_month', table_name='stats_template_usage_by_month') + 
op.drop_index('ix_stats_template_usage_by_month_template_id', table_name='stats_template_usage_by_month') + op.drop_index('ix_stats_template_usage_by_month_year', table_name='stats_template_usage_by_month') + op.drop_table('stats_template_usage_by_month') + + +def downgrade(): + op.create_table('stats_template_usage_by_month', + sa.Column('template_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('month', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('year', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('count', sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint(['template_id'], ['templates.id'], + name='stats_template_usage_by_month_template_id_fkey'), + sa.PrimaryKeyConstraint('template_id', 'month', 'year', name='stats_template_usage_by_month_pkey') + ) + op.create_index('ix_stats_template_usage_by_month_year', 'stats_template_usage_by_month', ['year'], unique=False) + op.create_index('ix_stats_template_usage_by_month_template_id', 'stats_template_usage_by_month', ['template_id'], + unique=False) + op.create_index('ix_stats_template_usage_by_month_month', 'stats_template_usage_by_month', ['month'], unique=False) From 9ab97d34816148eb689e2861f32f7fccc8a16afd Mon Sep 17 00:00:00 2001 From: Pea Tyczynska Date: Wed, 16 Jan 2019 16:57:57 +0000 Subject: [PATCH 107/118] Return notification postage in response for .post_precompiled_letter_notification --- app/v2/notifications/post_notifications.py | 3 ++- tests/app/v2/notifications/test_post_letter_notifications.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index b40e4e4b6..511155a7e 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -94,7 +94,8 @@ def post_precompiled_letter_notification(): resp = { 'id': notification.id, - 'reference': 
notification.client_reference + 'reference': notification.client_reference, + 'postage': notification.postage } return jsonify(resp), 201 diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index db6cd4159..22232aa73 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -510,7 +510,7 @@ def test_post_precompiled_letter_notification_returns_201( assert notification_history.postage == expected_postage resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json == {'id': str(notification.id), 'reference': 'letter-reference'} + assert resp_json == {'id': str(notification.id), 'reference': 'letter-reference', 'postage': expected_postage} def test_post_letter_notification_throws_error_for_invalid_postage(client, notify_user, mocker): From 4427827b2ff7cc790d1e3400a9eeca7b8c22b991 Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Tue, 15 Jan 2019 17:42:14 +0000 Subject: [PATCH 108/118] Handle celery PIDs more reliably This addresses some problems that existed in the previous approach: 1. There was a race condition that could occur between the time we were looking for the existence of the .pid files and actually reading them. 2. 
If for some reason the .pid file was left behind after a process had died, the script would never know because we do: kill -s ${1} ${APP_PID} || true --- scripts/run_multi_worker_app_paas.sh | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/scripts/run_multi_worker_app_paas.sh b/scripts/run_multi_worker_app_paas.sh index 5ddc933ec..f195e59dd 100755 --- a/scripts/run_multi_worker_app_paas.sh +++ b/scripts/run_multi_worker_app_paas.sh @@ -54,7 +54,7 @@ function on_exit { # https://unix.stackexchange.com/a/298942/230401 PROCESS_COUNT="${#APP_PIDS[@]}" if [[ "${PROCESS_COUNT}" -eq "0" ]]; then - echo "No more .pid files found, exiting" + echo "No celery process is running any more, exiting" return 0 fi @@ -66,21 +66,18 @@ function on_exit { } function get_celery_pids { - if [[ $(ls /home/vcap/app/celery*.pid) ]]; then - APP_PIDS=`cat /home/vcap/app/celery*.pid` - else - APP_PIDS=() - fi + # get the PIDs of the process whose parent is the root process + # print only pid and their command, get the ones with "celery" in their name + # and keep only these PIDs + APP_PIDS=$(pgrep -P 1 | xargs ps -o pid=,command= -p | grep celery | cut -f1 -d/) } function send_signal_to_celery_processes { # refresh pids to account for the case that some workers may have terminated but others not get_celery_pids # send signal to all remaining apps - for APP_PID in ${APP_PIDS}; do - echo "Sending signal ${1} to process with pid ${APP_PID}" - kill -s ${1} ${APP_PID} || true - done + echo ${APP_PIDS} | tr -d '\n' | tr -s ' ' | xargs echo "Sending signal ${1} to processes with pids: " + echo ${APP_PIDS} | xargs kill -s ${1} } function start_application { From b23851226066d2968a02560f20f56bc07347fdb7 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 17 Jan 2019 17:05:14 +0000 Subject: [PATCH 109/118] Add 5 new letter logos --- .../versions/0251_another_letter_org.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 
migrations/versions/0251_another_letter_org.py diff --git a/migrations/versions/0251_another_letter_org.py b/migrations/versions/0251_another_letter_org.py new file mode 100644 index 000000000..2344da9d5 --- /dev/null +++ b/migrations/versions/0251_another_letter_org.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: 0251_another_letter_org +Revises: 0250_drop_stats_template_table + +""" + +# revision identifiers, used by Alembic. +revision = '0251_another_letter_org' +down_revision = '0250_drop_stats_template_table' + +from alembic import op + + +NEW_ORGANISATIONS = [ + ('522', 'Anglesey Council', 'anglesey'), + ('523', 'Angus Council', 'angus'), + ('524', 'Cheshire East Council', 'cheshire-east'), + ('525', 'Newham Council', 'newham'), + ('526', 'Warwickshire Council', 'warwickshire'), +] + + +def upgrade(): + for numeric_id, name, filename in NEW_ORGANISATIONS: + op.execute(""" + INSERT + INTO dvla_organisation + VALUES ('{}', '{}', '{}') + """.format(numeric_id, name, filename)) + + +def downgrade(): + for numeric_id, _, _ in NEW_ORGANISATIONS: + op.execute(""" + DELETE + FROM dvla_organisation + WHERE id = '{}' + """.format(numeric_id)) From 6ac1f39fd0a8becc270e0b727ac983ad6b88609c Mon Sep 17 00:00:00 2001 From: Rebecca Law Date: Thu, 17 Jan 2019 17:20:21 +0000 Subject: [PATCH 110/118] Remove dao_fetch_monthly_historical_stats_by_template, a query using NotificationHistory that is no longer used. 
--- app/dao/services_dao.py | 29 +++-------------------------- tests/app/dao/test_services_dao.py | 26 -------------------------- 2 files changed, 3 insertions(+), 52 deletions(-) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 342ba7dfb..52536c116 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -1,8 +1,8 @@ import uuid -from datetime import date, datetime, timedelta, time +from datetime import date, datetime, timedelta from notifications_utils.statsd_decorators import statsd -from sqlalchemy import asc, func, extract +from sqlalchemy import asc, func from sqlalchemy.orm import joinedload from flask import current_app @@ -35,7 +35,7 @@ from app.models import ( SMS_TYPE, LETTER_TYPE, ) -from app.utils import get_london_month_from_utc_column, get_london_midnight_in_utc, midnight_n_days_ago +from app.utils import get_london_midnight_in_utc, midnight_n_days_ago DEFAULT_SERVICE_PERMISSIONS = [ SMS_TYPE, @@ -364,26 +364,3 @@ def dao_fetch_active_users_for_service(service_id): ) return query.all() - - -@statsd(namespace="dao") -def dao_fetch_monthly_historical_stats_by_template(): - month = get_london_month_from_utc_column(NotificationHistory.created_at) - year = func.date_trunc("year", NotificationHistory.created_at) - end_date = datetime.combine(date.today(), time.min) - - return db.session.query( - NotificationHistory.template_id, - extract('month', month).label('month'), - extract('year', year).label('year'), - func.count().label('count') - ).filter( - NotificationHistory.created_at < end_date - ).group_by( - NotificationHistory.template_id, - month, - year - ).order_by( - year, - month - ).all() diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 766f2ef59..4a1ec0b4b 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -30,7 +30,6 @@ from app.dao.services_dao import ( dao_resume_service, dao_fetch_active_users_for_service, 
dao_fetch_service_by_inbound_number, - dao_fetch_monthly_historical_stats_by_template, ) from app.dao.users_dao import save_model_user, create_user_code from app.models import ( @@ -874,31 +873,6 @@ def _assert_service_permissions(service_permissions, expected): assert set(expected) == set(p.permission for p in service_permissions) -def test_dao_fetch_monthly_historical_stats_by_template(notify_db_session): - service = create_service() - template_one = create_template(service=service, template_name='1') - template_two = create_template(service=service, template_name='2') - - create_notification(created_at=datetime(2017, 10, 1), template=template_one, status='delivered') - create_notification(created_at=datetime(2016, 4, 1), template=template_two, status='delivered') - create_notification(created_at=datetime(2016, 4, 1), template=template_two, status='delivered') - create_notification(created_at=datetime.now(), template=template_two, status='delivered') - - result = sorted(dao_fetch_monthly_historical_stats_by_template(), key=lambda x: (x.month, x.year)) - - assert len(result) == 2 - - assert result[0].template_id == template_two.id - assert result[0].month == 4 - assert result[0].year == 2016 - assert result[0].count == 2 - - assert result[1].template_id == template_one.id - assert result[1].month == 10 - assert result[1].year == 2017 - assert result[1].count == 1 - - def create_email_sms_letter_template(): service = create_service() template_one = create_template(service=service, template_name='1', template_type='email') From d3d56a322459452787e0d4d2890823f9cb45283f Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Mon, 14 Jan 2019 17:22:41 +0000 Subject: [PATCH 111/118] separate nightly tasks and other scheduled tasks. 
other tasks is anything that is run on a different frequency than nightly --- app/celery/nightly_tasks.py | 317 +++++++++++++++++++++++ app/celery/scheduled_tasks.py | 306 +--------------------- app/commands.py | 2 +- app/config.py | 3 +- tests/app/celery/test_scheduled_tasks.py | 18 ++ tests/app/celery/test_tasks.py | 19 -- 6 files changed, 340 insertions(+), 325 deletions(-) create mode 100644 app/celery/nightly_tasks.py diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py new file mode 100644 index 000000000..a917ebfd0 --- /dev/null +++ b/app/celery/nightly_tasks.py @@ -0,0 +1,317 @@ +from datetime import ( + datetime, + timedelta +) + +import pytz +from flask import current_app +from notifications_utils.statsd_decorators import statsd +from sqlalchemy import func +from sqlalchemy.exc import SQLAlchemyError + +from app import notify_celery, performance_platform_client, zendesk_client +from app.aws import s3 +from app.celery.service_callback_tasks import ( + send_delivery_status_to_service, + create_delivery_status_callback_data, +) +from app.config import QueueNames +from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago +from app.dao.jobs_dao import ( + dao_get_jobs_older_than_data_retention, + dao_archive_job +) +from app.dao.notifications_dao import ( + dao_timeout_notifications, + delete_notifications_created_more_than_a_week_ago_by_type, +) +from app.dao.service_callback_api_dao import get_service_delivery_status_callback_api_for_service +from app.exceptions import NotificationTechnicalFailureException +from app.models import ( + Notification, + NOTIFICATION_SENDING, + LETTER_TYPE, + KEY_TYPE_NORMAL +) +from app.performance_platform import total_sent_notifications, processing_time + + +@notify_celery.task(name="remove_csv_files") +@statsd(namespace="tasks") +def remove_csv_files(job_types): + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) + for job in jobs: + 
s3.remove_job_from_s3(job.service_id, job.id) + dao_archive_job(job) + current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) + + +@notify_celery.task(name="delete-sms-notifications") +@statsd(namespace="tasks") +def delete_sms_notifications_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_notifications_created_more_than_a_week_ago_by_type('sms') + current_app.logger.info( + "Delete {} job started {} finished {} deleted {} sms notifications".format( + 'sms', + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError: + current_app.logger.exception("Failed to delete sms notifications") + raise + + +@notify_celery.task(name="delete-email-notifications") +@statsd(namespace="tasks") +def delete_email_notifications_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_notifications_created_more_than_a_week_ago_by_type('email') + current_app.logger.info( + "Delete {} job started {} finished {} deleted {} email notifications".format( + 'email', + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError: + current_app.logger.exception("Failed to delete email notifications") + raise + + +@notify_celery.task(name="delete-letter-notifications") +@statsd(namespace="tasks") +def delete_letter_notifications_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_notifications_created_more_than_a_week_ago_by_type('letter') + current_app.logger.info( + "Delete {} job started {} finished {} deleted {} letter notifications".format( + 'letter', + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError: + current_app.logger.exception("Failed to delete letter notifications") + raise + + +@notify_celery.task(name='timeout-sending-notifications') +@statsd(namespace="tasks") +def timeout_notifications(): + technical_failure_notifications, temporary_failure_notifications = \ + 
dao_timeout_notifications(current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD')) + + notifications = technical_failure_notifications + temporary_failure_notifications + for notification in notifications: + # queue callback task only if the service_callback_api exists + service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id) + if service_callback_api: + encrypted_notification = create_delivery_status_callback_data(notification, service_callback_api) + send_delivery_status_to_service.apply_async([str(notification.id), encrypted_notification], + queue=QueueNames.CALLBACKS) + + current_app.logger.info( + "Timeout period reached for {} notifications, status has been updated.".format(len(notifications))) + if technical_failure_notifications: + message = "{} notifications have been updated to technical-failure because they " \ + "have timed out and are still in created.Notification ids: {}".format( + len(technical_failure_notifications), [str(x.id) for x in technical_failure_notifications]) + raise NotificationTechnicalFailureException(message) + + +@notify_celery.task(name='send-daily-performance-platform-stats') +@statsd(namespace="tasks") +def send_daily_performance_platform_stats(): + if performance_platform_client.active: + yesterday = datetime.utcnow() - timedelta(days=1) + send_total_sent_notifications_to_performance_platform(yesterday) + processing_time.send_processing_time_to_performance_platform() + + +def send_total_sent_notifications_to_performance_platform(day): + count_dict = total_sent_notifications.get_total_sent_notifications_for_day(day) + email_sent_count = count_dict.get('email').get('count') + sms_sent_count = count_dict.get('sms').get('count') + letter_sent_count = count_dict.get('letter').get('count') + start_date = count_dict.get('start_date') + + current_app.logger.info( + "Attempting to update Performance Platform for {} with {} emails, {} text messages and {} letters" + 
.format(start_date, email_sent_count, sms_sent_count, letter_sent_count) + ) + + total_sent_notifications.send_total_notifications_sent_for_day_stats( + start_date, + 'sms', + sms_sent_count + ) + + total_sent_notifications.send_total_notifications_sent_for_day_stats( + start_date, + 'email', + email_sent_count + ) + + total_sent_notifications.send_total_notifications_sent_for_day_stats( + start_date, + 'letter', + letter_sent_count + ) + + +@notify_celery.task(name="delete-inbound-sms") +@statsd(namespace="tasks") +def delete_inbound_sms_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_inbound_sms_created_more_than_a_week_ago() + current_app.logger.info( + "Delete inbound sms job started {} finished {} deleted {} inbound sms notifications".format( + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError: + current_app.logger.exception("Failed to delete inbound sms notifications") + raise + + +@notify_celery.task(name="remove_transformed_dvla_files") +@statsd(namespace="tasks") +def remove_transformed_dvla_files(): + jobs = dao_get_jobs_older_than_data_retention(notification_types=[LETTER_TYPE]) + for job in jobs: + s3.remove_transformed_dvla_file(job.id) + current_app.logger.info("Transformed dvla file for job {} has been removed from s3.".format(job.id)) + + +@notify_celery.task(name="delete_dvla_response_files") +@statsd(namespace="tasks") +def delete_dvla_response_files_older_than_seven_days(): + try: + start = datetime.utcnow() + bucket_objects = s3.get_s3_bucket_objects( + current_app.config['DVLA_RESPONSE_BUCKET_NAME'], + 'root/dispatch' + ) + older_than_seven_days = s3.filter_s3_bucket_objects_within_date_range(bucket_objects) + + for f in older_than_seven_days: + s3.remove_s3_object(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], f['Key']) + + current_app.logger.info( + "Delete dvla response files started {} finished {} deleted {} files".format( + start, + datetime.utcnow(), + len(older_than_seven_days) + ) + 
) + except SQLAlchemyError: + current_app.logger.exception("Failed to delete dvla response files") + raise + + +@notify_celery.task(name="raise-alert-if-letter-notifications-still-sending") +@statsd(namespace="tasks") +def raise_alert_if_letter_notifications_still_sending(): + today = datetime.utcnow().date() + + # Do nothing on the weekend + if today.isoweekday() in [6, 7]: + return + + if today.isoweekday() in [1, 2]: + offset_days = 4 + else: + offset_days = 2 + still_sending = Notification.query.filter( + Notification.notification_type == LETTER_TYPE, + Notification.status == NOTIFICATION_SENDING, + Notification.key_type == KEY_TYPE_NORMAL, + func.date(Notification.sent_at) <= today - timedelta(days=offset_days) + ).count() + + if still_sending: + message = "There are {} letters in the 'sending' state from {}".format( + still_sending, + (today - timedelta(days=offset_days)).strftime('%A %d %B') + ) + # Only send alerts in production + if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: + zendesk_client.create_ticket( + subject="[{}] Letters still sending".format(current_app.config['NOTIFY_ENVIRONMENT']), + message=message, + ticket_type=zendesk_client.TYPE_INCIDENT + ) + else: + current_app.logger.info(message) + + +@notify_celery.task(name='raise-alert-if-no-letter-ack-file') +@statsd(namespace="tasks") +def letter_raise_alert_if_no_ack_file_for_zip(): + # get a list of zip files since yesterday + zip_file_set = set() + + for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], + subfolder=datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', + suffix='.TXT'): + subname = key.split('/')[-1] # strip subfolder in name + zip_file_set.add(subname.upper().rstrip('.TXT')) + + # get acknowledgement file + ack_file_set = set() + + yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format + + for key in 
s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], + subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday): + ack_file_set.add(key) + + today_str = datetime.utcnow().strftime('%Y%m%d') + + ack_content_set = set() + for key in ack_file_set: + if today_str in key: + content = s3.get_s3_file(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], key) + for zip_file in content.split('\n'): # each line + s = zip_file.split('|') + ack_content_set.add(s[0].upper()) + + message = ( + "Letter ack file does not contain all zip files sent. " + "Missing ack for zip files: {}, " + "pdf bucket: {}, subfolder: {}, " + "ack bucket: {}" + ).format( + str(sorted(zip_file_set - ack_content_set)), + current_app.config['LETTERS_PDF_BUCKET_NAME'], + datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', + current_app.config['DVLA_RESPONSE_BUCKET_NAME'] + ) + # strip empty element before comparison + ack_content_set.discard('') + zip_file_set.discard('') + + if len(zip_file_set - ack_content_set) > 0: + if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: + zendesk_client.create_ticket( + subject="Letter acknowledge error", + message=message, + ticket_type=zendesk_client.TYPE_INCIDENT + ) + current_app.logger.error(message) + + if len(ack_content_set - zip_file_set) > 0: + current_app.logger.info( + "letter ack contains zip that is not for today: {}".format(ack_content_set - zip_file_set) + ) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 818206e76..af072d91b 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -3,34 +3,20 @@ from datetime import ( timedelta ) -import pytz from flask import current_app from notifications_utils.statsd_decorators import statsd -from sqlalchemy import and_, func +from sqlalchemy import and_ from sqlalchemy.exc import SQLAlchemyError from app import notify_celery -from app import performance_platform_client, 
zendesk_client -from app.aws import s3 -from app.celery.service_callback_tasks import ( - send_delivery_status_to_service, - create_delivery_status_callback_data, -) from app.celery.tasks import process_job from app.config import QueueNames, TaskNames -from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago from app.dao.invited_org_user_dao import delete_org_invitations_created_more_than_two_days_ago from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago -from app.dao.jobs_dao import ( - dao_set_scheduled_jobs_to_pending, - dao_get_jobs_older_than_data_retention, - dao_archive_job -) +from app.dao.jobs_dao import dao_set_scheduled_jobs_to_pending from app.dao.jobs_dao import dao_update_job from app.dao.notifications_dao import ( - dao_timeout_notifications, is_delivery_slow_for_provider, - delete_notifications_created_more_than_a_week_ago_by_type, dao_get_scheduled_notifications, set_scheduled_notification_to_processed, notifications_not_yet_sent @@ -39,35 +25,18 @@ from app.dao.provider_details_dao import ( get_current_provider, dao_toggle_sms_provider ) -from app.dao.service_callback_api_dao import get_service_delivery_status_callback_api_for_service from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago -from app.exceptions import NotificationTechnicalFailureException from app.models import ( Job, - Notification, - NOTIFICATION_SENDING, - LETTER_TYPE, JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, SMS_TYPE, EMAIL_TYPE, - KEY_TYPE_NORMAL ) from app.notifications.process_notifications import send_notification_to_queue -from app.performance_platform import total_sent_notifications, processing_time from app.v2.errors import JobIncompleteError -@notify_celery.task(name="remove_csv_files") -@statsd(namespace="tasks") -def remove_csv_files(job_types): - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) - for job in jobs: - s3.remove_job_from_s3(job.service_id, job.id) 
- dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) - - @notify_celery.task(name="run-scheduled-jobs") @statsd(namespace="tasks") def run_scheduled_jobs(): @@ -109,63 +78,6 @@ def delete_verify_codes(): raise -@notify_celery.task(name="delete-sms-notifications") -@statsd(namespace="tasks") -def delete_sms_notifications_older_than_seven_days(): - try: - start = datetime.utcnow() - deleted = delete_notifications_created_more_than_a_week_ago_by_type('sms') - current_app.logger.info( - "Delete {} job started {} finished {} deleted {} sms notifications".format( - 'sms', - start, - datetime.utcnow(), - deleted - ) - ) - except SQLAlchemyError: - current_app.logger.exception("Failed to delete sms notifications") - raise - - -@notify_celery.task(name="delete-email-notifications") -@statsd(namespace="tasks") -def delete_email_notifications_older_than_seven_days(): - try: - start = datetime.utcnow() - deleted = delete_notifications_created_more_than_a_week_ago_by_type('email') - current_app.logger.info( - "Delete {} job started {} finished {} deleted {} email notifications".format( - 'email', - start, - datetime.utcnow(), - deleted - ) - ) - except SQLAlchemyError: - current_app.logger.exception("Failed to delete email notifications") - raise - - -@notify_celery.task(name="delete-letter-notifications") -@statsd(namespace="tasks") -def delete_letter_notifications_older_than_seven_days(): - try: - start = datetime.utcnow() - deleted = delete_notifications_created_more_than_a_week_ago_by_type('letter') - current_app.logger.info( - "Delete {} job started {} finished {} deleted {} letter notifications".format( - 'letter', - start, - datetime.utcnow(), - deleted - ) - ) - except SQLAlchemyError: - current_app.logger.exception("Failed to delete letter notifications") - raise - - @notify_celery.task(name="delete-invitations") @statsd(namespace="tasks") def delete_invitations(): @@ -181,70 +93,6 @@ def delete_invitations(): raise 
-@notify_celery.task(name='timeout-sending-notifications') -@statsd(namespace="tasks") -def timeout_notifications(): - technical_failure_notifications, temporary_failure_notifications = \ - dao_timeout_notifications(current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD')) - - notifications = technical_failure_notifications + temporary_failure_notifications - for notification in notifications: - # queue callback task only if the service_callback_api exists - service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id) - if service_callback_api: - encrypted_notification = create_delivery_status_callback_data(notification, service_callback_api) - send_delivery_status_to_service.apply_async([str(notification.id), encrypted_notification], - queue=QueueNames.CALLBACKS) - - current_app.logger.info( - "Timeout period reached for {} notifications, status has been updated.".format(len(notifications))) - if technical_failure_notifications: - message = "{} notifications have been updated to technical-failure because they " \ - "have timed out and are still in created.Notification ids: {}".format( - len(technical_failure_notifications), [str(x.id) for x in technical_failure_notifications]) - raise NotificationTechnicalFailureException(message) - - -@notify_celery.task(name='send-daily-performance-platform-stats') -@statsd(namespace="tasks") -def send_daily_performance_platform_stats(): - if performance_platform_client.active: - yesterday = datetime.utcnow() - timedelta(days=1) - send_total_sent_notifications_to_performance_platform(yesterday) - processing_time.send_processing_time_to_performance_platform() - - -def send_total_sent_notifications_to_performance_platform(day): - count_dict = total_sent_notifications.get_total_sent_notifications_for_day(day) - email_sent_count = count_dict.get('email').get('count') - sms_sent_count = count_dict.get('sms').get('count') - letter_sent_count = 
count_dict.get('letter').get('count') - start_date = count_dict.get('start_date') - - current_app.logger.info( - "Attempting to update Performance Platform for {} with {} emails, {} text messages and {} letters" - .format(start_date, email_sent_count, sms_sent_count, letter_sent_count) - ) - - total_sent_notifications.send_total_notifications_sent_for_day_stats( - start_date, - 'sms', - sms_sent_count - ) - - total_sent_notifications.send_total_notifications_sent_for_day_stats( - start_date, - 'email', - email_sent_count - ) - - total_sent_notifications.send_total_notifications_sent_for_day_stats( - start_date, - 'letter', - letter_sent_count - ) - - @notify_celery.task(name='switch-current-sms-provider-on-slow-delivery') @statsd(namespace="tasks") def switch_current_sms_provider_on_slow_delivery(): @@ -273,95 +121,6 @@ def switch_current_sms_provider_on_slow_delivery(): dao_toggle_sms_provider(current_provider.identifier) -@notify_celery.task(name="delete-inbound-sms") -@statsd(namespace="tasks") -def delete_inbound_sms_older_than_seven_days(): - try: - start = datetime.utcnow() - deleted = delete_inbound_sms_created_more_than_a_week_ago() - current_app.logger.info( - "Delete inbound sms job started {} finished {} deleted {} inbound sms notifications".format( - start, - datetime.utcnow(), - deleted - ) - ) - except SQLAlchemyError: - current_app.logger.exception("Failed to delete inbound sms notifications") - raise - - -@notify_celery.task(name="remove_transformed_dvla_files") -@statsd(namespace="tasks") -def remove_transformed_dvla_files(): - jobs = dao_get_jobs_older_than_data_retention(notification_types=[LETTER_TYPE]) - for job in jobs: - s3.remove_transformed_dvla_file(job.id) - current_app.logger.info("Transformed dvla file for job {} has been removed from s3.".format(job.id)) - - -@notify_celery.task(name="delete_dvla_response_files") -@statsd(namespace="tasks") -def delete_dvla_response_files_older_than_seven_days(): - try: - start = datetime.utcnow() - 
bucket_objects = s3.get_s3_bucket_objects( - current_app.config['DVLA_RESPONSE_BUCKET_NAME'], - 'root/dispatch' - ) - older_than_seven_days = s3.filter_s3_bucket_objects_within_date_range(bucket_objects) - - for f in older_than_seven_days: - s3.remove_s3_object(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], f['Key']) - - current_app.logger.info( - "Delete dvla response files started {} finished {} deleted {} files".format( - start, - datetime.utcnow(), - len(older_than_seven_days) - ) - ) - except SQLAlchemyError: - current_app.logger.exception("Failed to delete dvla response files") - raise - - -@notify_celery.task(name="raise-alert-if-letter-notifications-still-sending") -@statsd(namespace="tasks") -def raise_alert_if_letter_notifications_still_sending(): - today = datetime.utcnow().date() - - # Do nothing on the weekend - if today.isoweekday() in [6, 7]: - return - - if today.isoweekday() in [1, 2]: - offset_days = 4 - else: - offset_days = 2 - still_sending = Notification.query.filter( - Notification.notification_type == LETTER_TYPE, - Notification.status == NOTIFICATION_SENDING, - Notification.key_type == KEY_TYPE_NORMAL, - func.date(Notification.sent_at) <= today - timedelta(days=offset_days) - ).count() - - if still_sending: - message = "There are {} letters in the 'sending' state from {}".format( - still_sending, - (today - timedelta(days=offset_days)).strftime('%A %d %B') - ) - # Only send alerts in production - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - zendesk_client.create_ticket( - subject="[{}] Letters still sending".format(current_app.config['NOTIFY_ENVIRONMENT']), - message=message, - ticket_type=zendesk_client.TYPE_INCIDENT - ) - else: - current_app.logger.info(message) - - @notify_celery.task(name='check-job-status') @statsd(namespace="tasks") def check_job_status(): @@ -401,67 +160,6 @@ def check_job_status(): raise JobIncompleteError("Job(s) {} have not completed.".format(job_ids)) 
-@notify_celery.task(name='raise-alert-if-no-letter-ack-file') -@statsd(namespace="tasks") -def letter_raise_alert_if_no_ack_file_for_zip(): - # get a list of zip files since yesterday - zip_file_set = set() - - for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], - subfolder=datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', - suffix='.TXT'): - subname = key.split('/')[-1] # strip subfolder in name - zip_file_set.add(subname.upper().rstrip('.TXT')) - - # get acknowledgement file - ack_file_set = set() - - yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format - - for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], - subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday): - ack_file_set.add(key) - - today_str = datetime.utcnow().strftime('%Y%m%d') - - ack_content_set = set() - for key in ack_file_set: - if today_str in key: - content = s3.get_s3_file(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], key) - for zip_file in content.split('\n'): # each line - s = zip_file.split('|') - ack_content_set.add(s[0].upper()) - - message = ( - "Letter ack file does not contain all zip files sent. 
" - "Missing ack for zip files: {}, " - "pdf bucket: {}, subfolder: {}, " - "ack bucket: {}" - ).format( - str(sorted(zip_file_set - ack_content_set)), - current_app.config['LETTERS_PDF_BUCKET_NAME'], - datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', - current_app.config['DVLA_RESPONSE_BUCKET_NAME'] - ) - # strip empty element before comparison - ack_content_set.discard('') - zip_file_set.discard('') - - if len(zip_file_set - ack_content_set) > 0: - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - zendesk_client.create_ticket( - subject="Letter acknowledge error", - message=message, - ticket_type=zendesk_client.TYPE_INCIDENT - ) - current_app.logger.error(message) - - if len(ack_content_set - zip_file_set) > 0: - current_app.logger.info( - "letter ack contains zip that is not for today: {}".format(ack_content_set - zip_file_set) - ) - - @notify_celery.task(name='replay-created-notifications') @statsd(namespace="tasks") def replay_created_notifications(): diff --git a/app/commands.py b/app/commands.py index b14e5680d..c977f333b 100644 --- a/app/commands.py +++ b/app/commands.py @@ -11,7 +11,7 @@ from sqlalchemy.orm.exc import NoResultFound from notifications_utils.statsd_decorators import statsd from app import db, DATETIME_FORMAT, encryption -from app.celery.scheduled_tasks import send_total_sent_notifications_to_performance_platform +from app.celery.nightly_tasks import send_total_sent_notifications_to_performance_platform from app.celery.service_callback_tasks import send_delivery_status_to_service from app.celery.letters_pdf_tasks import create_letters_pdf from app.config import QueueNames diff --git a/app/config.py b/app/config.py index f69e30869..b07d04b13 100644 --- a/app/config.py +++ b/app/config.py @@ -159,6 +159,7 @@ class Config(object): CELERY_TASK_SERIALIZER = 'json' CELERY_IMPORTS = ('app.celery.tasks', 'app.celery.scheduled_tasks', 'app.celery.reporting_tasks') CELERYBEAT_SCHEDULE = { + # 
app/celery/scheduled_tasks.py 'run-scheduled-jobs': { 'task': 'run-scheduled-jobs', 'schedule': crontab(minute=1), @@ -189,7 +190,7 @@ class Config(object): 'schedule': crontab(minute='0, 15, 30, 45'), 'options': {'queue': QueueNames.PERIODIC} }, - # nightly tasks: + # app/celery/nightly_tasks.py 'timeout-sending-notifications': { 'task': 'timeout-sending-notifications', 'schedule': crontab(hour=0, minute=5), diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index e97b4643c..a120741e7 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -905,3 +905,21 @@ def test_replay_created_notifications(notify_db_session, sample_service, mocker) queue='send-email-tasks') sms_delivery_queue.assert_called_once_with([str(old_sms.id)], queue="send-sms-tasks") + + +def test_check_job_status_task_does_not_raise_error(sample_template): + create_job( + template=sample_template, + notification_count=3, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_FINISHED) + create_job( + template=sample_template, + notification_count=3, + created_at=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_FINISHED) + + check_job_status() diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 84ad84cab..13c6f9b4f 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -15,7 +15,6 @@ from notifications_utils.columns import Row from app import (encryption, DATETIME_FORMAT) from app.celery import provider_tasks from app.celery import tasks -from app.celery.scheduled_tasks import check_job_status from app.celery.tasks import ( process_job, process_row, @@ -1396,24 +1395,6 @@ def 
test_send_inbound_sms_to_service_does_not_retries_if_request_returns_404(not mocked.call_count == 0 -def test_check_job_status_task_does_not_raise_error(sample_template): - create_job( - template=sample_template, - notification_count=3, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_FINISHED) - create_job( - template=sample_template, - notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_FINISHED) - - check_job_status() - - def test_process_incomplete_job_sms(mocker, sample_template): mocker.patch('app.celery.tasks.s3.get_job_from_s3', return_value=load_example_csv('multiple_sms')) From d783e2b23632e7f944c41118d0eddd532e646fa3 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 16 Jan 2019 17:32:19 +0000 Subject: [PATCH 112/118] move tests from test_scheduled_tasks to test_nightly_tasks --- tests/app/celery/test_nightly_tasks.py | 583 ++++++++++++++++++++++ tests/app/celery/test_scheduled_tasks.py | 597 +---------------------- 2 files changed, 587 insertions(+), 593 deletions(-) create mode 100644 tests/app/celery/test_nightly_tasks.py diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py new file mode 100644 index 000000000..0720b4b89 --- /dev/null +++ b/tests/app/celery/test_nightly_tasks.py @@ -0,0 +1,583 @@ +from datetime import datetime, timedelta +from functools import partial +from unittest.mock import call, patch, PropertyMock + +import pytest +import pytz +from flask import current_app +from freezegun import freeze_time +from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient + +from app.celery import nightly_tasks +from app.celery.nightly_tasks import ( + delete_dvla_response_files_older_than_seven_days, + 
delete_email_notifications_older_than_seven_days, + delete_inbound_sms_older_than_seven_days, + delete_letter_notifications_older_than_seven_days, + delete_sms_notifications_older_than_seven_days, + raise_alert_if_letter_notifications_still_sending, + _remove_csv_files, + remove_transformed_dvla_files, + s3, + send_daily_performance_platform_stats, + send_total_sent_notifications_to_performance_platform, + timeout_notifications, + letter_raise_alert_if_no_ack_file_for_zip, +) +from app.celery.service_callback_tasks import create_delivery_status_callback_data +from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient +from app.config import QueueNames +from app.exceptions import NotificationTechnicalFailureException +from app.models import ( + LETTER_TYPE, + SMS_TYPE, + EMAIL_TYPE +) +from app.utils import get_london_midnight_in_utc +from tests.app.aws.test_s3 import single_s3_object_stub +from tests.app.db import ( + create_notification, + create_service, + create_template, + create_job, + create_service_callback_api, + create_service_data_retention +) + +from tests.app.conftest import ( + sample_job as create_sample_job, + sample_notification_history as create_notification_history, + datetime_in_past +) + + +def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None): + if subfolder == '2018-01-11/zips_sent': + return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT'] + if subfolder == 'root/dispatch': + return ['root/dispatch/NOTIFY.20180111175733.ACK.txt'] + + +def mock_s3_get_list_diff(bucket_name, subfolder='', suffix='', last_modified=None): + if subfolder == '2018-01-11/zips_sent': + return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT', 'NOTIFY.20180111175009.ZIP.TXT', + 'NOTIFY.20180111175010.ZIP.TXT'] + if subfolder == 'root/dispatch': + return ['root/dispatch/NOTIFY.20180111175733.ACK.txt'] + + +@freeze_time('2016-10-18T10:00:00') +def 
test_will_remove_csv_files_for_jobs_older_than_seven_days( + notify_db, notify_db_session, mocker, sample_template +): + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3') + + seven_days_ago = datetime.utcnow() - timedelta(days=7) + just_under_seven_days = seven_days_ago + timedelta(seconds=1) + eight_days_ago = seven_days_ago - timedelta(days=1) + nine_days_ago = eight_days_ago - timedelta(days=1) + just_under_nine_days = nine_days_ago + timedelta(seconds=1) + nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) + + create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago, archived=True) + job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) + job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) + dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) + create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) + + _remove_csv_files(job_types=[sample_template.template_type]) + + assert s3.remove_job_from_s3.call_args_list == [ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id), + ] + assert job1_to_delete.archived is True + assert dont_delete_me_1.archived is False + + +@freeze_time('2016-10-18T10:00:00') +def test_will_remove_csv_files_for_jobs_older_than_retention_period( + notify_db, notify_db_session, mocker +): + """ + Jobs older than retention period are deleted, but only two day's worth (two-day window) + """ + mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3') + service_1 = create_service(service_name='service 1') + service_2 = create_service(service_name='service 2') + create_service_data_retention(service_id=service_1.id, notification_type=SMS_TYPE, days_of_retention=3) + 
create_service_data_retention(service_id=service_2.id, notification_type=EMAIL_TYPE, days_of_retention=30) + sms_template_service_1 = create_template(service=service_1) + email_template_service_1 = create_template(service=service_1, template_type='email') + + sms_template_service_2 = create_template(service=service_2) + email_template_service_2 = create_template(service=service_2, template_type='email') + + four_days_ago = datetime.utcnow() - timedelta(days=4) + eight_days_ago = datetime.utcnow() - timedelta(days=8) + thirty_one_days_ago = datetime.utcnow() - timedelta(days=31) + + _create_job = partial( + create_sample_job, + notify_db, + notify_db_session, + ) + + job1_to_delete = _create_job(service=service_1, template=sms_template_service_1, created_at=four_days_ago) + job2_to_delete = _create_job(service=service_1, template=email_template_service_1, created_at=eight_days_ago) + _create_job(service=service_1, template=email_template_service_1, created_at=four_days_ago) + + _create_job(service=service_2, template=email_template_service_2, created_at=eight_days_ago) + job3_to_delete = _create_job(service=service_2, template=email_template_service_2, created_at=thirty_one_days_ago) + job4_to_delete = _create_job(service=service_2, template=sms_template_service_2, created_at=eight_days_ago) + + _remove_csv_files(job_types=[SMS_TYPE, EMAIL_TYPE]) + + s3.remove_job_from_s3.assert_has_calls([ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id), + call(job3_to_delete.service_id, job3_to_delete.id), + call(job4_to_delete.service_id, job4_to_delete.id) + ], any_order=True) + + +@freeze_time('2017-01-01 10:00:00') +def test_remove_csv_files_filters_by_type(mocker, sample_service): + mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3') + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + letter_template = create_template(service=sample_service, 
template_type=LETTER_TYPE) + sms_template = create_template(service=sample_service, template_type=SMS_TYPE) + + eight_days_ago = datetime.utcnow() - timedelta(days=8) + + job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) + create_job(template=sms_template, created_at=eight_days_ago) + + _remove_csv_files(job_types=[LETTER_TYPE]) + + assert s3.remove_job_from_s3.call_args_list == [ + call(job_to_delete.service_id, job_to_delete.id), + ] + + +def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): + mocked = mocker.patch('app.celery.nightly_tasks.delete_notifications_created_more_than_a_week_ago_by_type') + delete_sms_notifications_older_than_seven_days() + mocked.assert_called_once_with('sms') + + +def test_should_call_delete_email_notifications_more_than_week_in_task(notify_api, mocker): + mocked_notifications = mocker.patch( + 'app.celery.nightly_tasks.delete_notifications_created_more_than_a_week_ago_by_type') + delete_email_notifications_older_than_seven_days() + mocked_notifications.assert_called_once_with('email') + + +def test_should_call_delete_letter_notifications_more_than_week_in_task(notify_api, mocker): + mocked = mocker.patch('app.celery.nightly_tasks.delete_notifications_created_more_than_a_week_ago_by_type') + delete_letter_notifications_older_than_seven_days() + mocked.assert_called_once_with('letter') + + +def test_update_status_of_notifications_after_timeout(notify_api, sample_template): + with notify_api.test_request_context(): + not1 = create_notification( + template=sample_template, + status='sending', + created_at=datetime.utcnow() - timedelta( + seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) + not2 = create_notification( + template=sample_template, + status='created', + created_at=datetime.utcnow() - timedelta( + seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) + not3 = create_notification( + template=sample_template, + 
status='pending', + created_at=datetime.utcnow() - timedelta( + seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) + with pytest.raises(NotificationTechnicalFailureException) as e: + timeout_notifications() + assert str(not2.id) in e.value.message + assert not1.status == 'temporary-failure' + assert not2.status == 'technical-failure' + assert not3.status == 'temporary-failure' + + +def test_not_update_status_of_notification_before_timeout(notify_api, sample_template): + with notify_api.test_request_context(): + not1 = create_notification( + template=sample_template, + status='sending', + created_at=datetime.utcnow() - timedelta( + seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') - 10)) + timeout_notifications() + assert not1.status == 'sending' + + +def test_should_not_update_status_of_letter_notifications(client, sample_letter_template): + created_at = datetime.utcnow() - timedelta(days=5) + not1 = create_notification(template=sample_letter_template, status='sending', created_at=created_at) + not2 = create_notification(template=sample_letter_template, status='created', created_at=created_at) + + timeout_notifications() + + assert not1.status == 'sending' + assert not2.status == 'created' + + +def test_timeout_notifications_sends_status_update_to_service(client, sample_template, mocker): + callback_api = create_service_callback_api(service=sample_template.service) + mocked = mocker.patch('app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async') + notification = create_notification( + template=sample_template, + status='sending', + created_at=datetime.utcnow() - timedelta( + seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) + timeout_notifications() + + encrypted_data = create_delivery_status_callback_data(notification, callback_api) + mocked.assert_called_once_with([str(notification.id), encrypted_data], queue=QueueNames.CALLBACKS) + + +def 
test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker): + send_mock = mocker.patch( + 'app.celery.nightly_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa + + with patch.object( + PerformancePlatformClient, + 'active', + new_callable=PropertyMock + ) as mock_active: + mock_active.return_value = False + send_daily_performance_platform_stats() + + assert send_mock.call_count == 0 + + +@freeze_time("2016-01-11 12:30:00") +def test_send_total_sent_notifications_to_performance_platform_calls_with_correct_totals( + notify_db, + notify_db_session, + sample_template, + mocker +): + perf_mock = mocker.patch( + 'app.celery.nightly_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa + + notification_history = partial( + create_notification_history, + notify_db, + notify_db_session, + sample_template, + status='delivered' + ) + + notification_history(notification_type='email') + notification_history(notification_type='sms') + + # Create some notifications for the day before + yesterday = datetime(2016, 1, 10, 15, 30, 0, 0) + with freeze_time(yesterday): + notification_history(notification_type='sms') + notification_history(notification_type='sms') + notification_history(notification_type='email') + notification_history(notification_type='email') + notification_history(notification_type='email') + + with patch.object( + PerformancePlatformClient, + 'active', + new_callable=PropertyMock + ) as mock_active: + mock_active.return_value = True + send_total_sent_notifications_to_performance_platform(yesterday) + + perf_mock.assert_has_calls([ + call(get_london_midnight_in_utc(yesterday), 'sms', 2), + call(get_london_midnight_in_utc(yesterday), 'email', 3) + ]) + + +def test_should_call_delete_inbound_sms_older_than_seven_days(notify_api, mocker): + mocker.patch('app.celery.nightly_tasks.delete_inbound_sms_created_more_than_a_week_ago') + delete_inbound_sms_older_than_seven_days() + assert 
nightly_tasks.delete_inbound_sms_created_more_than_a_week_ago.call_count == 1 + + +@freeze_time('2017-01-01 10:00:00') +def test_remove_dvla_transformed_files_removes_expected_files(mocker, sample_service): + mocker.patch('app.celery.nightly_tasks.s3.remove_transformed_dvla_file') + + letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) + + job = partial(create_job, template=letter_template) + + seven_days_ago = datetime.utcnow() - timedelta(days=7) + just_under_seven_days = seven_days_ago + timedelta(seconds=1) + just_over_seven_days = seven_days_ago - timedelta(seconds=1) + eight_days_ago = seven_days_ago - timedelta(days=1) + nine_days_ago = eight_days_ago - timedelta(days=1) + ten_days_ago = nine_days_ago - timedelta(days=1) + just_under_nine_days = nine_days_ago + timedelta(seconds=1) + just_over_nine_days = nine_days_ago - timedelta(seconds=1) + just_over_ten_days = ten_days_ago - timedelta(seconds=1) + + job(created_at=just_under_seven_days) + job(created_at=just_over_seven_days) + job_to_delete_1 = job(created_at=eight_days_ago) + job_to_delete_2 = job(created_at=nine_days_ago) + job_to_delete_3 = job(created_at=just_under_nine_days) + job_to_delete_4 = job(created_at=just_over_nine_days) + job(created_at=just_over_ten_days) + remove_transformed_dvla_files() + + s3.remove_transformed_dvla_file.assert_has_calls([ + call(job_to_delete_1.id), + call(job_to_delete_2.id), + call(job_to_delete_3.id), + call(job_to_delete_4.id), + ], any_order=True) + + +def test_remove_dvla_transformed_files_does_not_remove_files(mocker, sample_service): + mocker.patch('app.celery.nightly_tasks.s3.remove_transformed_dvla_file') + + letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) + + job = partial(create_job, template=letter_template) + + yesterday = datetime.utcnow() - timedelta(days=1) + six_days_ago = datetime.utcnow() - timedelta(days=6) + seven_days_ago = six_days_ago - timedelta(days=1) + 
just_over_nine_days = seven_days_ago - timedelta(days=2, seconds=1) + + job(created_at=yesterday) + job(created_at=six_days_ago) + job(created_at=seven_days_ago) + job(created_at=just_over_nine_days) + + remove_transformed_dvla_files() + + s3.remove_transformed_dvla_file.assert_has_calls([]) + + +@freeze_time("2016-01-01 11:00:00") +def test_delete_dvla_response_files_older_than_seven_days_removes_old_files(notify_api, mocker): + AFTER_SEVEN_DAYS = datetime_in_past(days=8) + single_page_s3_objects = [{ + "Contents": [ + single_s3_object_stub('bar/foo1.txt', AFTER_SEVEN_DAYS), + single_s3_object_stub('bar/foo2.txt', AFTER_SEVEN_DAYS), + ] + }] + mocker.patch( + 'app.celery.nightly_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"] + ) + remove_s3_mock = mocker.patch('app.celery.nightly_tasks.s3.remove_s3_object') + + delete_dvla_response_files_older_than_seven_days() + + remove_s3_mock.assert_has_calls([ + call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][0]["Key"]), + call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][1]["Key"]) + ]) + + +@freeze_time("2016-01-01 11:00:00") +def test_delete_dvla_response_files_older_than_seven_days_does_not_remove_files(notify_api, mocker): + START_DATE = datetime_in_past(days=9) + JUST_BEFORE_START_DATE = datetime_in_past(days=9, seconds=1) + END_DATE = datetime_in_past(days=7) + JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1) + + single_page_s3_objects = [{ + "Contents": [ + single_s3_object_stub('bar/foo1.txt', JUST_BEFORE_START_DATE), + single_s3_object_stub('bar/foo2.txt', START_DATE), + single_s3_object_stub('bar/foo3.txt', END_DATE), + single_s3_object_stub('bar/foo4.txt', JUST_AFTER_END_DATE), + ] + }] + mocker.patch( + 'app.celery.nightly_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"] + ) + remove_s3_mock = mocker.patch('app.celery.nightly_tasks.s3.remove_s3_object') 
+ delete_dvla_response_files_older_than_seven_days() + + remove_s3_mock.assert_not_called() + + +@freeze_time("2018-01-17 17:00:00") +def test_alert_if_letter_notifications_still_sending(sample_letter_template, mocker): + two_days_ago = datetime(2018, 1, 15, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=two_days_ago) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + mock_create_ticket.assert_called_once_with( + subject="[test] Letters still sending", + message="There are 1 letters in the 'sending' state from Monday 15 January", + ticket_type=ZendeskClient.TYPE_INCIDENT + ) + + +def test_alert_if_letter_notifications_still_sending_a_day_ago_no_alert(sample_letter_template, mocker): + today = datetime.utcnow() + one_day_ago = today - timedelta(days=1) + create_notification(template=sample_letter_template, status='sending', sent_at=one_day_ago) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + assert not mock_create_ticket.called + + +@freeze_time("2018-01-17 17:00:00") +def test_alert_if_letter_notifications_still_sending_only_alerts_sending(sample_letter_template, mocker): + two_days_ago = datetime(2018, 1, 15, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=two_days_ago) + create_notification(template=sample_letter_template, status='delivered', sent_at=two_days_ago) + create_notification(template=sample_letter_template, status='failed', sent_at=two_days_ago) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + mock_create_ticket.assert_called_once_with( + subject="[test] Letters still sending", + message="There are 1 letters in the 'sending' state from Monday 15 January", + 
ticket_type='incident' + ) + + +@freeze_time("2018-01-17 17:00:00") +def test_alert_if_letter_notifications_still_sending_alerts_for_older_than_offset(sample_letter_template, mocker): + three_days_ago = datetime(2018, 1, 14, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=three_days_ago) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + mock_create_ticket.assert_called_once_with( + subject="[test] Letters still sending", + message="There are 1 letters in the 'sending' state from Monday 15 January", + ticket_type='incident' + ) + + +@freeze_time("2018-01-14 17:00:00") +def test_alert_if_letter_notifications_still_sending_does_nothing_on_the_weekend(sample_letter_template, mocker): + yesterday = datetime(2018, 1, 13, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + assert not mock_create_ticket.called + + +@freeze_time("2018-01-15 17:00:00") +def test_monday_alert_if_letter_notifications_still_sending_reports_thursday_letters(sample_letter_template, mocker): + thursday = datetime(2018, 1, 11, 13, 30) + yesterday = datetime(2018, 1, 14, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=thursday) + create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + mock_create_ticket.assert_called_once_with( + subject="[test] Letters still sending", + message="There are 1 letters in the 'sending' state from Thursday 11 January", + ticket_type='incident' + ) + + +@freeze_time("2018-01-16 17:00:00") +def 
test_tuesday_alert_if_letter_notifications_still_sending_reports_friday_letters(sample_letter_template, mocker): + friday = datetime(2018, 1, 12, 13, 30) + yesterday = datetime(2018, 1, 14, 13, 30) + create_notification(template=sample_letter_template, status='sending', sent_at=friday) + create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) + + mock_create_ticket = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + raise_alert_if_letter_notifications_still_sending() + + mock_create_ticket.assert_called_once_with( + subject="[test] Letters still sending", + message="There are 1 letters in the 'sending' state from Friday 12 January", + ticket_type='incident' + ) + + +@freeze_time('2018-01-11T23:00:00') +def test_letter_not_raise_alert_if_ack_files_match_zip_list(mocker, notify_db): + mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_match) + mock_get_file = mocker.patch("app.aws.s3.get_s3_file", + return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' + 'NOTIFY.20180111175008.ZIP|20180111175734') + + letter_raise_alert_if_no_ack_file_for_zip() + + yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # Datatime format on AWS + subfoldername = datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent' + assert mock_file_list.call_count == 2 + assert mock_file_list.call_args_list == [ + call(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], subfolder=subfoldername, suffix='.TXT'), + call(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], subfolder='root/dispatch', + suffix='.ACK.txt', last_modified=yesterday), + ] + assert mock_get_file.call_count == 1 + + +@freeze_time('2018-01-11T23:00:00') +def test_letter_raise_alert_if_ack_files_not_match_zip_list(mocker, notify_db): + mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_diff) + mock_get_file = mocker.patch("app.aws.s3.get_s3_file", + 
return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' + 'NOTIFY.20180111175008.ZIP|20180111175734') + mock_zendesk = mocker.patch("app.celery.nightly_tasks.zendesk_client.create_ticket") + + letter_raise_alert_if_no_ack_file_for_zip() + + assert mock_file_list.call_count == 2 + assert mock_get_file.call_count == 1 + + message = "Letter ack file does not contain all zip files sent. " \ + "Missing ack for zip files: {}, " \ + "pdf bucket: {}, subfolder: {}, " \ + "ack bucket: {}".format(str(['NOTIFY.20180111175009.ZIP', 'NOTIFY.20180111175010.ZIP']), + current_app.config['LETTERS_PDF_BUCKET_NAME'], + datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', + current_app.config['DVLA_RESPONSE_BUCKET_NAME']) + + mock_zendesk.assert_called_once_with( + subject="Letter acknowledge error", + message=message, + ticket_type='incident' + ) + + +@freeze_time('2018-01-11T23:00:00') +def test_letter_not_raise_alert_if_no_files_do_not_cause_error(mocker, notify_db): + mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=None) + mock_get_file = mocker.patch("app.aws.s3.get_s3_file", + return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' + 'NOTIFY.20180111175008.ZIP|20180111175734') + + letter_raise_alert_if_no_ack_file_for_zip() + + assert mock_file_list.call_count == 2 + assert mock_get_file.call_count == 0 diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index a120741e7..bf4eb9507 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -1,40 +1,20 @@ from datetime import datetime, timedelta -from functools import partial -from unittest.mock import call, patch, PropertyMock +from unittest.mock import call import pytest -import pytz -from flask import current_app from freezegun import freeze_time -from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient from app import db from app.celery import scheduled_tasks from 
app.celery.scheduled_tasks import ( check_job_status, - delete_dvla_response_files_older_than_seven_days, - delete_email_notifications_older_than_seven_days, - delete_inbound_sms_older_than_seven_days, delete_invitations, - delete_notifications_created_more_than_a_week_ago_by_type, - delete_letter_notifications_older_than_seven_days, - delete_sms_notifications_older_than_seven_days, delete_verify_codes, - raise_alert_if_letter_notifications_still_sending, - remove_csv_files, - remove_transformed_dvla_files, run_scheduled_jobs, - s3, - send_daily_performance_platform_stats, send_scheduled_notifications, - send_total_sent_notifications_to_performance_platform, switch_current_sms_provider_on_slow_delivery, - timeout_notifications, - letter_raise_alert_if_no_ack_file_for_zip, replay_created_notifications ) -from app.celery.service_callback_tasks import create_delivery_status_callback_data -from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient from app.config import QueueNames, TaskNames from app.dao.jobs_dao import dao_get_job_by_id from app.dao.notifications_dao import dao_get_scheduled_notifications @@ -42,31 +22,19 @@ from app.dao.provider_details_dao import ( dao_update_provider_details, get_current_provider ) -from app.exceptions import NotificationTechnicalFailureException from app.models import ( JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, - LETTER_TYPE, - SMS_TYPE, - EMAIL_TYPE + JOB_STATUS_FINISHED, ) -from app.utils import get_london_midnight_in_utc from app.v2.errors import JobIncompleteError -from tests.app.aws.test_s3 import single_s3_object_stub + from tests.app.db import ( create_notification, - create_service, create_template, create_job, - create_service_callback_api, - create_service_data_retention -) - -from tests.app.conftest import ( - sample_job as create_sample_job, - sample_notification_history as create_notification_history, - datetime_in_past ) +from tests.app.conftest import sample_job as 
create_sample_job def _create_slow_delivery_notification(template, provider='mmg'): @@ -82,31 +50,6 @@ def _create_slow_delivery_notification(template, provider='mmg'): ) -@pytest.mark.skip(reason="This doesn't actually test the celery task wraps the function") -def test_should_have_decorated_tasks_functions(): - """ - TODO: This test needs to be reviewed as this doesn't actually - test that the celery task is wrapping the function. We're also - running similar tests elsewhere which also need review. - """ - assert delete_verify_codes.__wrapped__.__name__ == 'delete_verify_codes' - assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == \ - 'delete_notifications_created_more_than_a_week_ago_by_type' - assert timeout_notifications.__wrapped__.__name__ == 'timeout_notifications' - assert delete_invitations.__wrapped__.__name__ == 'delete_invitations' - assert run_scheduled_jobs.__wrapped__.__name__ == 'run_scheduled_jobs' - assert remove_csv_files.__wrapped__.__name__ == 'remove_csv_files' - assert send_daily_performance_platform_stats.__wrapped__.__name__ == 'send_daily_performance_platform_stats' - assert switch_current_sms_provider_on_slow_delivery.__wrapped__.__name__ == \ - 'switch_current_sms_provider_on_slow_delivery' - assert delete_inbound_sms_older_than_seven_days.__wrapped__.__name__ == \ - 'delete_inbound_sms_older_than_seven_days' - assert remove_transformed_dvla_files.__wrapped__.__name__ == \ - 'remove_transformed_dvla_files' - assert delete_dvla_response_files_older_than_seven_days.__wrapped__.__name__ == \ - 'delete_dvla_response_files_older_than_seven_days' - - @pytest.fixture(scope='function') def prepare_current_provider(restore_provider_details): initial_provider = get_current_provider('sms') @@ -115,25 +58,6 @@ def prepare_current_provider(restore_provider_details): db.session.commit() -def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): - mocked = 
mocker.patch('app.celery.scheduled_tasks.delete_notifications_created_more_than_a_week_ago_by_type') - delete_sms_notifications_older_than_seven_days() - mocked.assert_called_once_with('sms') - - -def test_should_call_delete_email_notifications_more_than_week_in_task(notify_api, mocker): - mocked_notifications = mocker.patch( - 'app.celery.scheduled_tasks.delete_notifications_created_more_than_a_week_ago_by_type') - delete_email_notifications_older_than_seven_days() - mocked_notifications.assert_called_once_with('email') - - -def test_should_call_delete_letter_notifications_more_than_week_in_task(notify_api, mocker): - mocked = mocker.patch('app.celery.scheduled_tasks.delete_notifications_created_more_than_a_week_ago_by_type') - delete_letter_notifications_older_than_seven_days() - mocked.assert_called_once_with('letter') - - def test_should_call_delete_codes_on_delete_verify_codes_task(notify_api, mocker): mocker.patch('app.celery.scheduled_tasks.delete_codes_older_created_more_than_a_day_ago') delete_verify_codes() @@ -146,67 +70,6 @@ def test_should_call_delete_invotations_on_delete_invitations_task(notify_api, m assert scheduled_tasks.delete_invitations_created_more_than_two_days_ago.call_count == 1 -def test_update_status_of_notifications_after_timeout(notify_api, sample_template): - with notify_api.test_request_context(): - not1 = create_notification( - template=sample_template, - status='sending', - created_at=datetime.utcnow() - timedelta( - seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) - not2 = create_notification( - template=sample_template, - status='created', - created_at=datetime.utcnow() - timedelta( - seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) - not3 = create_notification( - template=sample_template, - status='pending', - created_at=datetime.utcnow() - timedelta( - seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) - with 
pytest.raises(NotificationTechnicalFailureException) as e: - timeout_notifications() - assert str(not2.id) in e.value.message - assert not1.status == 'temporary-failure' - assert not2.status == 'technical-failure' - assert not3.status == 'temporary-failure' - - -def test_not_update_status_of_notification_before_timeout(notify_api, sample_template): - with notify_api.test_request_context(): - not1 = create_notification( - template=sample_template, - status='sending', - created_at=datetime.utcnow() - timedelta( - seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') - 10)) - timeout_notifications() - assert not1.status == 'sending' - - -def test_should_not_update_status_of_letter_notifications(client, sample_letter_template): - created_at = datetime.utcnow() - timedelta(days=5) - not1 = create_notification(template=sample_letter_template, status='sending', created_at=created_at) - not2 = create_notification(template=sample_letter_template, status='created', created_at=created_at) - - timeout_notifications() - - assert not1.status == 'sending' - assert not2.status == 'created' - - -def test_timeout_notifications_sends_status_update_to_service(client, sample_template, mocker): - callback_api = create_service_callback_api(service=sample_template.service) - mocked = mocker.patch('app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async') - notification = create_notification( - template=sample_template, - status='sending', - created_at=datetime.utcnow() - timedelta( - seconds=current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD') + 10)) - timeout_notifications() - - encrypted_data = create_delivery_status_callback_data(notification, callback_api) - mocked.assert_called_once_with([str(notification.id), encrypted_data], queue=QueueNames.CALLBACKS) - - def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker): mocked = mocker.patch('app.celery.tasks.process_job.apply_async') @@ -258,143 +121,6 
@@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ ]) -@freeze_time('2016-10-18T10:00:00') -def test_will_remove_csv_files_for_jobs_older_than_seven_days( - notify_db, notify_db_session, mocker, sample_template -): - """ - Jobs older than seven days are deleted, but only two day's worth (two-day window) - """ - mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') - - seven_days_ago = datetime.utcnow() - timedelta(days=7) - just_under_seven_days = seven_days_ago + timedelta(seconds=1) - eight_days_ago = seven_days_ago - timedelta(days=1) - nine_days_ago = eight_days_ago - timedelta(days=1) - just_under_nine_days = nine_days_ago + timedelta(seconds=1) - nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - - create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago, archived=True) - job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) - job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) - dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) - create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) - - remove_csv_files(job_types=[sample_template.template_type]) - - assert s3.remove_job_from_s3.call_args_list == [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - ] - assert job1_to_delete.archived is True - assert dont_delete_me_1.archived is False - - -@freeze_time('2016-10-18T10:00:00') -def test_will_remove_csv_files_for_jobs_older_than_retention_period( - notify_db, notify_db_session, mocker -): - """ - Jobs older than retention period are deleted, but only two day's worth (two-day window) - """ - mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') - service_1 = create_service(service_name='service 1') - service_2 = create_service(service_name='service 2') - 
create_service_data_retention(service_id=service_1.id, notification_type=SMS_TYPE, days_of_retention=3) - create_service_data_retention(service_id=service_2.id, notification_type=EMAIL_TYPE, days_of_retention=30) - sms_template_service_1 = create_template(service=service_1) - email_template_service_1 = create_template(service=service_1, template_type='email') - - sms_template_service_2 = create_template(service=service_2) - email_template_service_2 = create_template(service=service_2, template_type='email') - - four_days_ago = datetime.utcnow() - timedelta(days=4) - eight_days_ago = datetime.utcnow() - timedelta(days=8) - thirty_one_days_ago = datetime.utcnow() - timedelta(days=31) - - _create_job = partial( - create_sample_job, - notify_db, - notify_db_session, - ) - - job1_to_delete = _create_job(service=service_1, template=sms_template_service_1, created_at=four_days_ago) - job2_to_delete = _create_job(service=service_1, template=email_template_service_1, created_at=eight_days_ago) - _create_job(service=service_1, template=email_template_service_1, created_at=four_days_ago) - - _create_job(service=service_2, template=email_template_service_2, created_at=eight_days_ago) - job3_to_delete = _create_job(service=service_2, template=email_template_service_2, created_at=thirty_one_days_ago) - job4_to_delete = _create_job(service=service_2, template=sms_template_service_2, created_at=eight_days_ago) - - remove_csv_files(job_types=[SMS_TYPE, EMAIL_TYPE]) - - s3.remove_job_from_s3.assert_has_calls([ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id), - call(job4_to_delete.service_id, job4_to_delete.id) - ], any_order=True) - - -def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker): - send_mock = mocker.patch( - 'app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa - - with 
patch.object( - PerformancePlatformClient, - 'active', - new_callable=PropertyMock - ) as mock_active: - mock_active.return_value = False - send_daily_performance_platform_stats() - - assert send_mock.call_count == 0 - - -@freeze_time("2016-01-11 12:30:00") -def test_send_total_sent_notifications_to_performance_platform_calls_with_correct_totals( - notify_db, - notify_db_session, - sample_template, - mocker -): - perf_mock = mocker.patch( - 'app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa - - notification_history = partial( - create_notification_history, - notify_db, - notify_db_session, - sample_template, - status='delivered' - ) - - notification_history(notification_type='email') - notification_history(notification_type='sms') - - # Create some notifications for the day before - yesterday = datetime(2016, 1, 10, 15, 30, 0, 0) - with freeze_time(yesterday): - notification_history(notification_type='sms') - notification_history(notification_type='sms') - notification_history(notification_type='email') - notification_history(notification_type='email') - notification_history(notification_type='email') - - with patch.object( - PerformancePlatformClient, - 'active', - new_callable=PropertyMock - ) as mock_active: - mock_active.return_value = True - send_total_sent_notifications_to_performance_platform(yesterday) - - perf_mock.assert_has_calls([ - call(get_london_midnight_in_utc(yesterday), 'sms', 2), - call(get_london_midnight_in_utc(yesterday), 'email', 3) - ]) - - def test_switch_providers_on_slow_delivery_switches_once_then_does_not_switch_if_already_switched( notify_api, mocker, @@ -440,245 +166,6 @@ def test_should_send_all_scheduled_notifications_to_deliver_queue(sample_templat assert not scheduled_notifications -def test_should_call_delete_inbound_sms_older_than_seven_days(notify_api, mocker): - mocker.patch('app.celery.scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago') - 
delete_inbound_sms_older_than_seven_days() - assert scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago.call_count == 1 - - -@freeze_time('2017-01-01 10:00:00') -def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') - """ - Jobs older than seven days are deleted, but only two day's worth (two-day window) - """ - letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) - sms_template = create_template(service=sample_service, template_type=SMS_TYPE) - - eight_days_ago = datetime.utcnow() - timedelta(days=8) - - job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) - create_job(template=sms_template, created_at=eight_days_ago) - - remove_csv_files(job_types=[LETTER_TYPE]) - - assert s3.remove_job_from_s3.call_args_list == [ - call(job_to_delete.service_id, job_to_delete.id), - ] - - -@freeze_time('2017-01-01 10:00:00') -def test_remove_dvla_transformed_files_removes_expected_files(mocker, sample_service): - mocker.patch('app.celery.scheduled_tasks.s3.remove_transformed_dvla_file') - - letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) - - job = partial(create_job, template=letter_template) - - seven_days_ago = datetime.utcnow() - timedelta(days=7) - just_under_seven_days = seven_days_ago + timedelta(seconds=1) - just_over_seven_days = seven_days_ago - timedelta(seconds=1) - eight_days_ago = seven_days_ago - timedelta(days=1) - nine_days_ago = eight_days_ago - timedelta(days=1) - ten_days_ago = nine_days_ago - timedelta(days=1) - just_under_nine_days = nine_days_ago + timedelta(seconds=1) - just_over_nine_days = nine_days_ago - timedelta(seconds=1) - just_over_ten_days = ten_days_ago - timedelta(seconds=1) - - job(created_at=just_under_seven_days) - job(created_at=just_over_seven_days) - job_to_delete_1 = job(created_at=eight_days_ago) - job_to_delete_2 = job(created_at=nine_days_ago) - 
job_to_delete_3 = job(created_at=just_under_nine_days) - job_to_delete_4 = job(created_at=just_over_nine_days) - job(created_at=just_over_ten_days) - remove_transformed_dvla_files() - - s3.remove_transformed_dvla_file.assert_has_calls([ - call(job_to_delete_1.id), - call(job_to_delete_2.id), - call(job_to_delete_3.id), - call(job_to_delete_4.id), - ], any_order=True) - - -def test_remove_dvla_transformed_files_does_not_remove_files(mocker, sample_service): - mocker.patch('app.celery.scheduled_tasks.s3.remove_transformed_dvla_file') - - letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) - - job = partial(create_job, template=letter_template) - - yesterday = datetime.utcnow() - timedelta(days=1) - six_days_ago = datetime.utcnow() - timedelta(days=6) - seven_days_ago = six_days_ago - timedelta(days=1) - just_over_nine_days = seven_days_ago - timedelta(days=2, seconds=1) - - job(created_at=yesterday) - job(created_at=six_days_ago) - job(created_at=seven_days_ago) - job(created_at=just_over_nine_days) - - remove_transformed_dvla_files() - - s3.remove_transformed_dvla_file.assert_has_calls([]) - - -@freeze_time("2016-01-01 11:00:00") -def test_delete_dvla_response_files_older_than_seven_days_removes_old_files(notify_api, mocker): - AFTER_SEVEN_DAYS = datetime_in_past(days=8) - single_page_s3_objects = [{ - "Contents": [ - single_s3_object_stub('bar/foo1.txt', AFTER_SEVEN_DAYS), - single_s3_object_stub('bar/foo2.txt', AFTER_SEVEN_DAYS), - ] - }] - mocker.patch( - 'app.celery.scheduled_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"] - ) - remove_s3_mock = mocker.patch('app.celery.scheduled_tasks.s3.remove_s3_object') - - delete_dvla_response_files_older_than_seven_days() - - remove_s3_mock.assert_has_calls([ - call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], single_page_s3_objects[0]["Contents"][0]["Key"]), - call(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], 
single_page_s3_objects[0]["Contents"][1]["Key"]) - ]) - - -@freeze_time("2016-01-01 11:00:00") -def test_delete_dvla_response_files_older_than_seven_days_does_not_remove_files(notify_api, mocker): - START_DATE = datetime_in_past(days=9) - JUST_BEFORE_START_DATE = datetime_in_past(days=9, seconds=1) - END_DATE = datetime_in_past(days=7) - JUST_AFTER_END_DATE = END_DATE + timedelta(seconds=1) - - single_page_s3_objects = [{ - "Contents": [ - single_s3_object_stub('bar/foo1.txt', JUST_BEFORE_START_DATE), - single_s3_object_stub('bar/foo2.txt', START_DATE), - single_s3_object_stub('bar/foo3.txt', END_DATE), - single_s3_object_stub('bar/foo4.txt', JUST_AFTER_END_DATE), - ] - }] - mocker.patch( - 'app.celery.scheduled_tasks.s3.get_s3_bucket_objects', return_value=single_page_s3_objects[0]["Contents"] - ) - remove_s3_mock = mocker.patch('app.celery.scheduled_tasks.s3.remove_s3_object') - delete_dvla_response_files_older_than_seven_days() - - remove_s3_mock.assert_not_called() - - -@freeze_time("2018-01-17 17:00:00") -def test_alert_if_letter_notifications_still_sending(sample_letter_template, mocker): - two_days_ago = datetime(2018, 1, 15, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=two_days_ago) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - raise_alert_if_letter_notifications_still_sending() - - mock_create_ticket.assert_called_once_with( - subject="[test] Letters still sending", - message="There are 1 letters in the 'sending' state from Monday 15 January", - ticket_type=ZendeskClient.TYPE_INCIDENT - ) - - -def test_alert_if_letter_notifications_still_sending_a_day_ago_no_alert(sample_letter_template, mocker): - today = datetime.utcnow() - one_day_ago = today - timedelta(days=1) - create_notification(template=sample_letter_template, status='sending', sent_at=one_day_ago) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - 
raise_alert_if_letter_notifications_still_sending() - assert not mock_create_ticket.called - - -@freeze_time("2018-01-17 17:00:00") -def test_alert_if_letter_notifications_still_sending_only_alerts_sending(sample_letter_template, mocker): - two_days_ago = datetime(2018, 1, 15, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=two_days_ago) - create_notification(template=sample_letter_template, status='delivered', sent_at=two_days_ago) - create_notification(template=sample_letter_template, status='failed', sent_at=two_days_ago) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - raise_alert_if_letter_notifications_still_sending() - - mock_create_ticket.assert_called_once_with( - subject="[test] Letters still sending", - message="There are 1 letters in the 'sending' state from Monday 15 January", - ticket_type='incident' - ) - - -@freeze_time("2018-01-17 17:00:00") -def test_alert_if_letter_notifications_still_sending_alerts_for_older_than_offset(sample_letter_template, mocker): - three_days_ago = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=three_days_ago) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - raise_alert_if_letter_notifications_still_sending() - - mock_create_ticket.assert_called_once_with( - subject="[test] Letters still sending", - message="There are 1 letters in the 'sending' state from Monday 15 January", - ticket_type='incident' - ) - - -@freeze_time("2018-01-14 17:00:00") -def test_alert_if_letter_notifications_still_sending_does_nothing_on_the_weekend(sample_letter_template, mocker): - yesterday = datetime(2018, 1, 13, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - 
raise_alert_if_letter_notifications_still_sending() - - assert not mock_create_ticket.called - - -@freeze_time("2018-01-15 17:00:00") -def test_monday_alert_if_letter_notifications_still_sending_reports_thursday_letters(sample_letter_template, mocker): - thursday = datetime(2018, 1, 11, 13, 30) - yesterday = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=thursday) - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - raise_alert_if_letter_notifications_still_sending() - - mock_create_ticket.assert_called_once_with( - subject="[test] Letters still sending", - message="There are 1 letters in the 'sending' state from Thursday 11 January", - ticket_type='incident' - ) - - -@freeze_time("2018-01-16 17:00:00") -def test_tuesday_alert_if_letter_notifications_still_sending_reports_friday_letters(sample_letter_template, mocker): - friday = datetime(2018, 1, 12, 13, 30) - yesterday = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=friday) - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) - - mock_create_ticket = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - raise_alert_if_letter_notifications_still_sending() - - mock_create_ticket.assert_called_once_with( - subject="[test] Letters still sending", - message="There are 1 letters in the 'sending' state from Friday 12 January", - ticket_type='incident' - ) - - def test_check_job_status_task_raises_job_incomplete_error(mocker, sample_template): mock_celery = mocker.patch('app.celery.tasks.notify_celery.send_task') job = create_job(template=sample_template, notification_count=3, @@ -801,82 +288,6 @@ def test_check_job_status_task_sets_jobs_to_error(mocker, sample_template): assert job_2.job_status 
== JOB_STATUS_IN_PROGRESS -def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None): - if subfolder == '2018-01-11/zips_sent': - return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT'] - if subfolder == 'root/dispatch': - return ['root/dispatch/NOTIFY.20180111175733.ACK.txt'] - - -def mock_s3_get_list_diff(bucket_name, subfolder='', suffix='', last_modified=None): - if subfolder == '2018-01-11/zips_sent': - return ['NOTIFY.20180111175007.ZIP.TXT', 'NOTIFY.20180111175008.ZIP.TXT', 'NOTIFY.20180111175009.ZIP.TXT', - 'NOTIFY.20180111175010.ZIP.TXT'] - if subfolder == 'root/dispatch': - return ['root/dispatch/NOTIFY.20180111175733.ACK.txt'] - - -@freeze_time('2018-01-11T23:00:00') -def test_letter_not_raise_alert_if_ack_files_match_zip_list(mocker, notify_db): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_match) - mock_get_file = mocker.patch("app.aws.s3.get_s3_file", - return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' - 'NOTIFY.20180111175008.ZIP|20180111175734') - - letter_raise_alert_if_no_ack_file_for_zip() - - yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # Datatime format on AWS - subfoldername = datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent' - assert mock_file_list.call_count == 2 - assert mock_file_list.call_args_list == [ - call(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], subfolder=subfoldername, suffix='.TXT'), - call(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], subfolder='root/dispatch', - suffix='.ACK.txt', last_modified=yesterday), - ] - assert mock_get_file.call_count == 1 - - -@freeze_time('2018-01-11T23:00:00') -def test_letter_raise_alert_if_ack_files_not_match_zip_list(mocker, notify_db): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_diff) - mock_get_file = mocker.patch("app.aws.s3.get_s3_file", - 
return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' - 'NOTIFY.20180111175008.ZIP|20180111175734') - mock_zendesk = mocker.patch("app.celery.scheduled_tasks.zendesk_client.create_ticket") - - letter_raise_alert_if_no_ack_file_for_zip() - - assert mock_file_list.call_count == 2 - assert mock_get_file.call_count == 1 - - message = "Letter ack file does not contain all zip files sent. " \ - "Missing ack for zip files: {}, " \ - "pdf bucket: {}, subfolder: {}, " \ - "ack bucket: {}".format(str(['NOTIFY.20180111175009.ZIP', 'NOTIFY.20180111175010.ZIP']), - current_app.config['LETTERS_PDF_BUCKET_NAME'], - datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent', - current_app.config['DVLA_RESPONSE_BUCKET_NAME']) - - mock_zendesk.assert_called_once_with( - subject="Letter acknowledge error", - message=message, - ticket_type='incident' - ) - - -@freeze_time('2018-01-11T23:00:00') -def test_letter_not_raise_alert_if_no_files_do_not_cause_error(mocker, notify_db): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=None) - mock_get_file = mocker.patch("app.aws.s3.get_s3_file", - return_value='NOTIFY.20180111175007.ZIP|20180111175733\n' - 'NOTIFY.20180111175008.ZIP|20180111175734') - - letter_raise_alert_if_no_ack_file_for_zip() - - assert mock_file_list.call_count == 2 - assert mock_get_file.call_count == 0 - - def test_replay_created_notifications(notify_db_session, sample_service, mocker): email_delivery_queue = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async') sms_delivery_queue = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async') From 754c65a6a258dd126c809d820d11805bd6a8a4fb Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Wed, 16 Jan 2019 14:11:03 +0000 Subject: [PATCH 113/118] create cronitor decorator that alerts if tasks fail make a decorator that pings cronitor before and after each task run. Designed for use with nightly tasks, so we have visibility if they fail. 
We have a bunch of cronitor monitors set up - 5 character keys that go into a URL that we then make a GET to with a self-explanatory url path (run/fail/complete). the cronitor URLs are defined in the credentials repo as a dictionary of celery task names to URL slugs. If the name passed in to the decorator isn't in that dict, it won't run. to use it, all you need to do is call `@cronitor(my_task_name)` instead of `@notify_celery.task`, and make sure that the task name and the matching slug are included in the credentials repo (or locally, json dumped and stored in the CRONITOR_KEYS environment variable) --- app/celery/nightly_tasks.py | 29 +++++++++++++- app/celery/reporting_tasks.py | 3 ++ app/config.py | 23 ++++++----- app/cronitor.py | 50 ++++++++++++++++++++++++ manifest-api-base.yml | 1 + manifest-delivery-base.yml | 1 + tests/app/celery/test_reporting_tasks.py | 4 -- 7 files changed, 96 insertions(+), 15 deletions(-) create mode 100644 app/cronitor.py diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index a917ebfd0..0c9be4e8b 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -30,15 +30,30 @@ from app.exceptions import NotificationTechnicalFailureException from app.models import ( Notification, NOTIFICATION_SENDING, + EMAIL_TYPE, + SMS_TYPE, LETTER_TYPE, KEY_TYPE_NORMAL ) from app.performance_platform import total_sent_notifications, processing_time +from app.cronitor import cronitor -@notify_celery.task(name="remove_csv_files") +@notify_celery.task(name="remove_sms_email_jobs") +@cronitor("remove_sms_email_jobs") @statsd(namespace="tasks") -def remove_csv_files(job_types): +def remove_sms_email_csv_files(job_types): + _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) + + +@notify_celery.task(name="remove_letter_jobs") +@cronitor("remove_letter_jobs") +@statsd(namespace="tasks") +def remove_letter_csv_files(job_types): + _remove_csv_files([LETTER_TYPE]) + + +def _remove_csv_files(job_types): jobs = 
dao_get_jobs_older_than_data_retention(notification_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) @@ -47,6 +62,7 @@ def remove_csv_files(job_types): @notify_celery.task(name="delete-sms-notifications") +@cronitor("delete-sms-notifications") @statsd(namespace="tasks") def delete_sms_notifications_older_than_seven_days(): try: @@ -66,6 +82,7 @@ def delete_sms_notifications_older_than_seven_days(): @notify_celery.task(name="delete-email-notifications") +@cronitor("delete-email-notifications") @statsd(namespace="tasks") def delete_email_notifications_older_than_seven_days(): try: @@ -85,6 +102,7 @@ def delete_email_notifications_older_than_seven_days(): @notify_celery.task(name="delete-letter-notifications") +@cronitor("delete-letter-notifications") @statsd(namespace="tasks") def delete_letter_notifications_older_than_seven_days(): try: @@ -104,6 +122,7 @@ def delete_letter_notifications_older_than_seven_days(): @notify_celery.task(name='timeout-sending-notifications') +@cronitor('timeout-sending-notifications') @statsd(namespace="tasks") def timeout_notifications(): technical_failure_notifications, temporary_failure_notifications = \ @@ -128,6 +147,7 @@ def timeout_notifications(): @notify_celery.task(name='send-daily-performance-platform-stats') +@cronitor('send-daily-performance-platform-stats') @statsd(namespace="tasks") def send_daily_performance_platform_stats(): if performance_platform_client.active: @@ -168,6 +188,7 @@ def send_total_sent_notifications_to_performance_platform(day): @notify_celery.task(name="delete-inbound-sms") +@cronitor("delete-inbound-sms") @statsd(namespace="tasks") def delete_inbound_sms_older_than_seven_days(): try: @@ -186,6 +207,7 @@ def delete_inbound_sms_older_than_seven_days(): @notify_celery.task(name="remove_transformed_dvla_files") +@cronitor("remove_transformed_dvla_files") @statsd(namespace="tasks") def remove_transformed_dvla_files(): jobs = 
dao_get_jobs_older_than_data_retention(notification_types=[LETTER_TYPE]) @@ -194,6 +216,7 @@ def remove_transformed_dvla_files(): current_app.logger.info("Transformed dvla file for job {} has been removed from s3.".format(job.id)) +# TODO: remove me, i'm not being run by anything @notify_celery.task(name="delete_dvla_response_files") @statsd(namespace="tasks") def delete_dvla_response_files_older_than_seven_days(): @@ -221,6 +244,7 @@ def delete_dvla_response_files_older_than_seven_days(): @notify_celery.task(name="raise-alert-if-letter-notifications-still-sending") +@cronitor("raise-alert-if-letter-notifications-still-sending") @statsd(namespace="tasks") def raise_alert_if_letter_notifications_still_sending(): today = datetime.utcnow().date() @@ -257,6 +281,7 @@ def raise_alert_if_letter_notifications_still_sending(): @notify_celery.task(name='raise-alert-if-no-letter-ack-file') +@cronitor('raise-alert-if-no-letter-ack-file') @statsd(namespace="tasks") def letter_raise_alert_if_no_ack_file_for_zip(): # get a list of zip files since yesterday diff --git a/app/celery/reporting_tasks.py b/app/celery/reporting_tasks.py index 4d14c0e64..80c5d1bc0 100644 --- a/app/celery/reporting_tasks.py +++ b/app/celery/reporting_tasks.py @@ -4,6 +4,7 @@ from flask import current_app from notifications_utils.statsd_decorators import statsd from app import notify_celery +from app.cronitor import cronitor from app.dao.fact_billing_dao import ( fetch_billing_data_for_day, update_fact_billing @@ -12,6 +13,7 @@ from app.dao.fact_notification_status_dao import fetch_notification_status_for_d @notify_celery.task(name="create-nightly-billing") +@cronitor("create-nightly-billing") @statsd(namespace="tasks") def create_nightly_billing(day_start=None): # day_start is a datetime.date() object. e.g. 
@@ -34,6 +36,7 @@ def create_nightly_billing(day_start=None): @notify_celery.task(name="create-nightly-notification-status") +@cronitor("create-nightly-notification-status") @statsd(namespace="tasks") def create_nightly_notification_status(day_start=None): # day_start is a datetime.date() object. e.g. diff --git a/app/config.py b/app/config.py index b07d04b13..7ce5cfb0f 100644 --- a/app/config.py +++ b/app/config.py @@ -5,10 +5,6 @@ import json from celery.schedules import crontab from kombu import Exchange, Queue -from app.models import ( - EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, -) - if os.environ.get('VCAP_SERVICES'): # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate # standard environment variables from it @@ -108,6 +104,10 @@ class Config(object): DEBUG = False NOTIFY_LOG_PATH = os.getenv('NOTIFY_LOG_PATH') + # Cronitor + CRONITOR_ENABLED = False + CRONITOR_KEYS = json.loads(os.environ.get('CRONITOR_KEYS', '{}')) + ########################### # Default config values ### ########################### @@ -157,7 +157,12 @@ class Config(object): CELERY_TIMEZONE = 'Europe/London' CELERY_ACCEPT_CONTENT = ['json'] CELERY_TASK_SERIALIZER = 'json' - CELERY_IMPORTS = ('app.celery.tasks', 'app.celery.scheduled_tasks', 'app.celery.reporting_tasks') + CELERY_IMPORTS = ( + 'app.celery.tasks', + 'app.celery.scheduled_tasks', + 'app.celery.reporting_tasks', + 'app.celery.nightly_tasks', + ) CELERYBEAT_SCHEDULE = { # app/celery/scheduled_tasks.py 'run-scheduled-jobs': { @@ -238,17 +243,15 @@ class Config(object): 'options': {'queue': QueueNames.PERIODIC} }, 'remove_sms_email_jobs': { - 'task': 'remove_csv_files', + 'task': 'remove_sms_email_jobs', 'schedule': crontab(hour=4, minute=0), 'options': {'queue': QueueNames.PERIODIC}, - 'kwargs': {'job_types': [EMAIL_TYPE, SMS_TYPE]} }, 'remove_letter_jobs': { - 'task': 'remove_csv_files', + 'task': 'remove_letter_jobs', 'schedule': crontab(hour=4, minute=20), # this has to run AFTER 
remove_transformed_dvla_files # since we mark jobs as archived 'options': {'queue': QueueNames.PERIODIC}, - 'kwargs': {'job_types': [LETTER_TYPE]} }, 'raise-alert-if-letter-notifications-still-sending': { 'task': 'raise-alert-if-letter-notifications-still-sending', @@ -436,6 +439,8 @@ class Live(Config): API_RATE_LIMIT_ENABLED = True CHECK_PROXY_HEADER = True + CRONITOR_ENABLED = True + class CloudFoundryConfig(Config): pass diff --git a/app/cronitor.py b/app/cronitor.py new file mode 100644 index 000000000..7f496fe1e --- /dev/null +++ b/app/cronitor.py @@ -0,0 +1,50 @@ +import requests +from functools import wraps +from flask import current_app + + +def cronitor(task_name): + # check if task_name is in config + def decorator(func): + def ping_cronitor(command): + if not current_app.config['CRONITOR_ENABLED']: + return + + task_slug = current_app.config['CRONITOR_KEYS'].get(task_name) + if not task_slug: + current_app.logger.error( + 'Cronitor enabled but task_name {} not found in environment'.format(task_name) + ) + + if command not in {'run', 'complete', 'fail'}: + raise ValueError('command {} not a valid cronitor command'.format(command)) + + resp = requests.get( + 'https://cronitor.link/{}/{}'.format(task_slug, command), + # cronitor limits msg to 1000 characters + params={ + 'host': current_app.config['API_HOST_NAME'], + } + ) + if resp.status_code != 200: + current_app.logger.warning('Cronitor API returned {} for task {}, body {}'.format( + resp.status_code, + task_name, + resp.text + )) + + @wraps(func) + def inner_decorator(*args, **kwargs): + ping_cronitor('run') + try: + ret = func(*args, **kwargs) + status = 'complete' + return ret + except Exception: + status = 'fail' + raise + finally: + ping_cronitor(status) + + return inner_decorator + return decorator diff --git a/manifest-api-base.yml b/manifest-api-base.yml index 10096f97e..3ef91e6bd 100644 --- a/manifest-api-base.yml +++ b/manifest-api-base.yml @@ -22,6 +22,7 @@ env: SECRET_KEY: null 
ROUTE_SECRET_KEY_1: null ROUTE_SECRET_KEY_2: null + CRONITOR_KEYS: null PERFORMANCE_PLATFORM_ENDPOINTS: null diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index 5ce75e7fc..1751b3e66 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -20,6 +20,7 @@ env: SECRET_KEY: null ROUTE_SECRET_KEY_1: null ROUTE_SECRET_KEY_2: null + CRONITOR_KEYS: null PERFORMANCE_PLATFORM_ENDPOINTS: null diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 8918a33ce..ade175db2 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -20,10 +20,6 @@ from app import db from tests.app.db import create_service, create_template, create_notification -def test_reporting_should_have_decorated_tasks_functions(): - assert create_nightly_billing.__wrapped__.__name__ == 'create_nightly_billing' - - def mocker_get_rate( non_letter_rates, letter_rates, notification_type, date, crown=None, rate_multiplier=None, post_class="second" ): From e1760adcd3c785dbaa891df15880f678b6fe4efd Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Fri, 18 Jan 2019 15:29:04 +0000 Subject: [PATCH 114/118] suppress cronitor request errors --- app/cronitor.py | 24 +++++---- tests/app/test_cronitor.py | 101 +++++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 11 deletions(-) create mode 100644 tests/app/test_cronitor.py diff --git a/app/cronitor.py b/app/cronitor.py index 7f496fe1e..83a12f61f 100644 --- a/app/cronitor.py +++ b/app/cronitor.py @@ -15,22 +15,24 @@ def cronitor(task_name): current_app.logger.error( 'Cronitor enabled but task_name {} not found in environment'.format(task_name) ) + return if command not in {'run', 'complete', 'fail'}: raise ValueError('command {} not a valid cronitor command'.format(command)) - resp = requests.get( - 'https://cronitor.link/{}/{}'.format(task_slug, command), - # cronitor limits msg to 1000 characters - params={ - 'host': 
current_app.config['API_HOST_NAME'], - } - ) - if resp.status_code != 200: - current_app.logger.warning('Cronitor API returned {} for task {}, body {}'.format( - resp.status_code, + try: + resp = requests.get( + 'https://cronitor.link/{}/{}'.format(task_slug, command), + # cronitor limits msg to 1000 characters + params={ + 'host': current_app.config['API_HOST_NAME'], + } + ) + resp.raise_for_status() + except requests.RequestException as e: + current_app.logger.warning('Cronitor API failed for task {} due to {}'.format( task_name, - resp.text + repr(e) )) @wraps(func) diff --git a/tests/app/test_cronitor.py b/tests/app/test_cronitor.py new file mode 100644 index 000000000..8e1aaa6b4 --- /dev/null +++ b/tests/app/test_cronitor.py @@ -0,0 +1,101 @@ +from urllib import parse + +import requests +import pytest + +from app.cronitor import cronitor + +from tests.conftest import set_config_values + + +def _cronitor_url(key, command): + return parse.urlunparse(parse.ParseResult( + scheme='https', + netloc='cronitor.link', + path='{}/{}'.format(key, command), + params='', + query=parse.urlencode({'host': 'http://localhost:6011'}), + fragment='' + )) + + +RUN_LINK = _cronitor_url('secret', 'run') +FAIL_LINK = _cronitor_url('secret', 'fail') +COMPLETE_LINK = _cronitor_url('secret', 'complete') + + +@cronitor('hello') +def successful_task(): + return 1 + + +@cronitor('hello') +def crashing_task(): + raise ValueError + + +def test_cronitor_sends_run_and_complete(notify_api, rmock): + rmock.get(RUN_LINK, status_code=200) + rmock.get(COMPLETE_LINK, status_code=200) + + with set_config_values(notify_api, { + 'CRONITOR_ENABLED': True, + 'CRONITOR_KEYS': {'hello': 'secret'} + }): + assert successful_task() == 1 + + assert rmock.call_count == 2 + assert rmock.request_history[0].url == RUN_LINK + assert rmock.request_history[1].url == COMPLETE_LINK + + +def test_cronitor_sends_run_and_fail_if_exception(notify_api, rmock): + rmock.get(RUN_LINK, status_code=200) + rmock.get(FAIL_LINK, 
status_code=200) + + with set_config_values(notify_api, { + 'CRONITOR_ENABLED': True, + 'CRONITOR_KEYS': {'hello': 'secret'} + }): + with pytest.raises(ValueError): + crashing_task() + + assert rmock.call_count == 2 + assert rmock.request_history[0].url == RUN_LINK + assert rmock.request_history[1].url == FAIL_LINK + + +def test_cronitor_does_nothing_if_cronitor_not_enabled(notify_api, rmock): + with set_config_values(notify_api, { + 'CRONITOR_ENABLED': False, + 'CRONITOR_KEYS': {'hello': 'secret'} + }): + assert successful_task() == 1 + + assert rmock.called is False + + +def test_cronitor_does_nothing_if_name_not_recognised(notify_api, rmock, caplog): + with set_config_values(notify_api, { + 'CRONITOR_ENABLED': True, + 'CRONITOR_KEYS': {'not-hello': 'other'} + }): + assert successful_task() == 1 + + error_log = caplog.records[0] + assert error_log.levelname == 'ERROR' + assert error_log.msg == 'Cronitor enabled but task_name hello not found in environment' + assert rmock.called is False + + +def test_cronitor_doesnt_crash_if_request_fails(notify_api, rmock): + rmock.get(RUN_LINK, exc=requests.exceptions.ConnectTimeout) + rmock.get(COMPLETE_LINK, status_code=500) + + with set_config_values(notify_api, { + 'CRONITOR_ENABLED': True, + 'CRONITOR_KEYS': {'hello': 'secret'} + }): + assert successful_task() == 1 + + assert rmock.call_count == 2 From f5198bf71dea97284472a5a34ce5cfed6628a597 Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 22 Jan 2019 10:31:37 +0000 Subject: [PATCH 115/118] remove unnecessary job_types arg from remove_csv_files celery tasks --- app/celery/nightly_tasks.py | 4 +- tests/app/celery/test_nightly_tasks.py | 69 +++++++++++--------------- 2 files changed, 30 insertions(+), 43 deletions(-) diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 0c9be4e8b..e452befd1 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -42,14 +42,14 @@ from app.cronitor import cronitor 
@notify_celery.task(name="remove_sms_email_jobs") @cronitor("remove_sms_email_jobs") @statsd(namespace="tasks") -def remove_sms_email_csv_files(job_types): +def remove_sms_email_csv_files(): _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) @notify_celery.task(name="remove_letter_jobs") @cronitor("remove_letter_jobs") @statsd(namespace="tasks") -def remove_letter_csv_files(job_types): +def remove_letter_csv_files(): _remove_csv_files([LETTER_TYPE]) diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 0720b4b89..93e047448 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -16,7 +16,8 @@ from app.celery.nightly_tasks import ( delete_letter_notifications_older_than_seven_days, delete_sms_notifications_older_than_seven_days, raise_alert_if_letter_notifications_still_sending, - _remove_csv_files, + remove_letter_csv_files, + remove_sms_email_csv_files, remove_transformed_dvla_files, s3, send_daily_performance_platform_stats, @@ -44,11 +45,7 @@ from tests.app.db import ( create_service_data_retention ) -from tests.app.conftest import ( - sample_job as create_sample_job, - sample_notification_history as create_notification_history, - datetime_in_past -) +from tests.app.conftest import datetime_in_past def mock_s3_get_list_match(bucket_name, subfolder='', suffix='', last_modified=None): @@ -82,13 +79,13 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days( just_under_nine_days = nine_days_ago + timedelta(seconds=1) nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago, archived=True) - job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) - job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) - dont_delete_me_1 = create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) - 
create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) + create_job(sample_template, created_at=nine_days_one_second_ago, archived=True) + job1_to_delete = create_job(sample_template, created_at=eight_days_ago) + job2_to_delete = create_job(sample_template, created_at=just_under_nine_days) + dont_delete_me_1 = create_job(sample_template, created_at=seven_days_ago) + create_job(sample_template, created_at=just_under_seven_days) - _remove_csv_files(job_types=[sample_template.template_type]) + remove_sms_email_csv_files() assert s3.remove_job_from_s3.call_args_list == [ call(job1_to_delete.service_id, job1_to_delete.id), @@ -120,21 +117,15 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period( eight_days_ago = datetime.utcnow() - timedelta(days=8) thirty_one_days_ago = datetime.utcnow() - timedelta(days=31) - _create_job = partial( - create_sample_job, - notify_db, - notify_db_session, - ) + job1_to_delete = create_job(sms_template_service_1, created_at=four_days_ago) + job2_to_delete = create_job(email_template_service_1, created_at=eight_days_ago) + create_job(email_template_service_1, created_at=four_days_ago) - job1_to_delete = _create_job(service=service_1, template=sms_template_service_1, created_at=four_days_ago) - job2_to_delete = _create_job(service=service_1, template=email_template_service_1, created_at=eight_days_ago) - _create_job(service=service_1, template=email_template_service_1, created_at=four_days_ago) + create_job(email_template_service_2, created_at=eight_days_ago) + job3_to_delete = create_job(email_template_service_2, created_at=thirty_one_days_ago) + job4_to_delete = create_job(sms_template_service_2, created_at=eight_days_ago) - _create_job(service=service_2, template=email_template_service_2, created_at=eight_days_ago) - job3_to_delete = _create_job(service=service_2, template=email_template_service_2, created_at=thirty_one_days_ago) - job4_to_delete = _create_job(service=service_2, 
template=sms_template_service_2, created_at=eight_days_ago) - - _remove_csv_files(job_types=[SMS_TYPE, EMAIL_TYPE]) + remove_sms_email_csv_files() s3.remove_job_from_s3.assert_has_calls([ call(job1_to_delete.service_id, job1_to_delete.id), @@ -158,7 +149,7 @@ def test_remove_csv_files_filters_by_type(mocker, sample_service): job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) create_job(template=sms_template, created_at=eight_days_ago) - _remove_csv_files(job_types=[LETTER_TYPE]) + remove_letter_csv_files() assert s3.remove_job_from_s3.call_args_list == [ call(job_to_delete.service_id, job_to_delete.id), @@ -265,30 +256,26 @@ def test_send_total_sent_notifications_to_performance_platform_calls_with_correc notify_db, notify_db_session, sample_template, + sample_email_template, mocker ): + sms = sample_template + email = sample_email_template + perf_mock = mocker.patch( 'app.celery.nightly_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats') # noqa - notification_history = partial( - create_notification_history, - notify_db, - notify_db_session, - sample_template, - status='delivered' - ) - - notification_history(notification_type='email') - notification_history(notification_type='sms') + create_notification(email, status='delivered') + create_notification(sms, status='delivered') # Create some notifications for the day before yesterday = datetime(2016, 1, 10, 15, 30, 0, 0) with freeze_time(yesterday): - notification_history(notification_type='sms') - notification_history(notification_type='sms') - notification_history(notification_type='email') - notification_history(notification_type='email') - notification_history(notification_type='email') + create_notification(sms, status='delivered') + create_notification(sms, status='delivered') + create_notification(email, status='delivered') + create_notification(email, status='delivered') + create_notification(email, status='delivered') with patch.object( 
PerformancePlatformClient, From afcdf1f9a1ea8a324f0ca19abb07e096c4914a06 Mon Sep 17 00:00:00 2001 From: Toby Lorne Date: Wed, 23 Jan 2019 14:26:43 +0000 Subject: [PATCH 116/118] Exit if celery processes are not running In 4427827b2ff7cc790d1e3400a9eeca7b8c22b991 and celery monitoring was changed from using PID files to actually looking at processes. If celery workers get OOM killed (for instance) the container init script would not restart them, this is because `get_celery_pids` would not contain any processes that contained the string celery. This would cause the pipe to fail (-o pipefail). APP_PIDS would not get updated but the script would continue to run. This caused the script to not restart the celery processes. We think the correct behaviour when celery processes are killed (i.e. there are no more celery processes running in a container) is to kill the container. The PaaS should then schedule new ones which may remediate the cause of the celery processes being killed. Upon detection of no celery processes running, some diagnostic information from the environment is sent to the logs, e.g.: ``` CF_INSTANCE_ADDR=10.0.32.4:61012 CF_INSTANCE_INTERNAL_IP=10.255.184.9 CF_INSTANCE_GUID=81c57dbc-e706-411e-6a5f-2013 CF_INSTANCE_PORT=61012 CF_INSTANCE_IP=10.0.32.4 ``` Then the script (which is the container entrypoint) exits 1. 
Co-author: @servingupaces @tlwr --- scripts/run_multi_worker_app_paas.sh | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/scripts/run_multi_worker_app_paas.sh b/scripts/run_multi_worker_app_paas.sh index f195e59dd..b44ab28d3 100755 --- a/scripts/run_multi_worker_app_paas.sh +++ b/scripts/run_multi_worker_app_paas.sh @@ -69,7 +69,10 @@ function get_celery_pids { # get the PIDs of the process whose parent is the root process # print only pid and their command, get the ones with "celery" in their name # and keep only these PIDs + + set +o pipefail # so grep returning no matches does not premature fail pipe APP_PIDS=$(pgrep -P 1 | xargs ps -o pid=,command= -p | grep celery | cut -f1 -d/) + set -o pipefail # pipefail should be set everywhere else } function send_signal_to_celery_processes { @@ -98,9 +101,28 @@ function start_logs_tail { echo "tail pid: ${LOGS_TAIL_PID}" } +function ensure_celery_is_running { + if [ "${APP_PIDS}" = "" ]; then + echo "There are no celery processes running, this container is bad" + + echo "Exporting CF information for diagnosis" + + env | grep CF + + echo "Sleeping 15 seconds for logs to get shipped" + + sleep 15 + + exit 1 + fi +} + function run { while true; do get_celery_pids + + ensure_celery_is_running + for APP_PID in ${APP_PIDS}; do kill -0 ${APP_PID} 2&>/dev/null || return 1 done From fa4cff5eb75ec47851002b2aefade133ce6b240c Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Wed, 23 Jan 2019 16:00:00 +0000 Subject: [PATCH 117/118] Bump sender memory to 3GB --- manifest-delivery-base.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index 1751b3e66..bf136b1df 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -68,7 +68,7 @@ applications: - name: notify-delivery-worker-sender command: scripts/run_multi_worker_app_paas.sh celery multi start 3 -c 10 -A run_celery.notify_celery --loglevel=INFO -Q 
send-sms-tasks,send-email-tasks - memory: 2G + memory: 3G env: NOTIFY_APP_NAME: delivery-worker-sender From 3528aab25ba17c4fc38c7d38170c1bdcc80efc7a Mon Sep 17 00:00:00 2001 From: Athanasios Voutsadakis Date: Wed, 23 Jan 2019 16:23:58 +0000 Subject: [PATCH 118/118] Kill the other processes started by the script We use exec to start awslogs_agent and then a tail to print logs to stdout. CF docs[1] recommend to use exec to start processes which seems to imply that as long as there are commands running the container will remain up and running. This commit ensures that if there are no celery tasks running we will kill any other processes that we have started, so that the container will no longer be considered healthy by cloudfoundry and will be replaced. 1: https://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html#start-commands --- scripts/run_multi_worker_app_paas.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/run_multi_worker_app_paas.sh b/scripts/run_multi_worker_app_paas.sh index b44ab28d3..6824923ea 100755 --- a/scripts/run_multi_worker_app_paas.sh +++ b/scripts/run_multi_worker_app_paas.sh @@ -113,6 +113,10 @@ function ensure_celery_is_running { sleep 15 + echo "Killing awslogs_agent and tail" + kill -9 ${AWSLOGS_AGENT_PID} + kill -9 ${LOGS_TAIL_PID} + exit 1 fi }