From 5045590d75e1110d7d4ae89b86c5ad934cfe390b Mon Sep 17 00:00:00 2001 From: Leo Hemsted Date: Tue, 11 Jun 2019 13:57:17 +0100 Subject: [PATCH 1/9] allow you to pass in date to send perf stats make it easier to replay sending data for a day if it failed the first time round --- app/celery/nightly_tasks.py | 14 ++++++++++---- app/performance_platform/processing_time.py | 11 +++++------ .../performance_platform/test_processing_time.py | 4 ++-- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 97c1fa59a..73086fb40 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -150,11 +150,17 @@ def timeout_notifications(): @notify_celery.task(name='send-daily-performance-platform-stats') @cronitor('send-daily-performance-platform-stats') @statsd(namespace="tasks") -def send_daily_performance_platform_stats(): +def send_daily_performance_platform_stats(date=None): + # date is a string in the format of "YYYY-MM-DD" + if date is None: + date = (datetime.utcnow() - timedelta(days=1)).date() + else: + date = datetime.strptime(date, "%Y-%m-%d").date() + if performance_platform_client.active: - yesterday = datetime.utcnow() - timedelta(days=1) - send_total_sent_notifications_to_performance_platform(bst_date=yesterday.date()) - processing_time.send_processing_time_to_performance_platform() + + send_total_sent_notifications_to_performance_platform(bst_date=date) + processing_time.send_processing_time_to_performance_platform(bst_date=date) def send_total_sent_notifications_to_performance_platform(bst_date): diff --git a/app/performance_platform/processing_time.py b/app/performance_platform/processing_time.py index ee2bfde1a..756c08bc1 100644 --- a/app/performance_platform/processing_time.py +++ b/app/performance_platform/processing_time.py @@ -1,16 +1,15 @@ -from datetime import datetime +from datetime import timedelta from flask import current_app -from app.utils import 
get_midnight_for_day_before, get_london_midnight_in_utc +from app.utils import get_london_midnight_in_utc from app.dao.notifications_dao import dao_get_total_notifications_sent_per_day_for_performance_platform from app import performance_platform_client -def send_processing_time_to_performance_platform(): - today = datetime.utcnow() - start_time = get_midnight_for_day_before(today) - end_time = get_london_midnight_in_utc(today) +def send_processing_time_to_performance_platform(bst_date): + start_time = get_london_midnight_in_utc(bst_date) + end_time = get_london_midnight_in_utc(bst_date + timedelta(days=1)) send_processing_time_for_start_and_end(start_time, end_time) diff --git a/tests/app/performance_platform/test_processing_time.py b/tests/app/performance_platform/test_processing_time.py index 2458e866d..010c50097 100644 --- a/tests/app/performance_platform/test_processing_time.py +++ b/tests/app/performance_platform/test_processing_time.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, date from freezegun import freeze_time @@ -19,7 +19,7 @@ def test_send_processing_time_to_performance_platform_generates_correct_calls(mo create_notification(sample_template, created_at=created_at, sent_at=created_at + timedelta(seconds=15)) create_notification(sample_template, created_at=datetime.utcnow() - timedelta(days=2)) - send_processing_time_to_performance_platform() + send_processing_time_to_performance_platform(date(2016, 10, 17)) send_mock.assert_any_call(datetime(2016, 10, 16, 23, 0), 'messages-total', 2) send_mock.assert_any_call(datetime(2016, 10, 16, 23, 0), 'messages-within-10-secs', 1) From b6fdb269e4bcb2c589f2e6bae1fd0118df96acf2 Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 12 Jun 2019 10:34:15 +0100 Subject: [PATCH 2/9] Return all required org and services info for user MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The admin app now needs to know a few 
extra things about orgs and services in order to list them. At the moment it does this by making multiple API calls. This commit adds extra fields to the existing response. Once the admin app is using these fields we’ll be able to remove: - `response['services_without_organisations']` - `response['organisations']['services']` --- app/user/rest.py | 15 ++- tests/app/user/test_rest.py | 199 ++++++++++++++++++++++++------------ 2 files changed, 145 insertions(+), 69 deletions(-) diff --git a/app/user/rest.py b/app/user/rest.py index 456008c30..872735de2 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -515,7 +515,11 @@ def get_orgs_and_services(user): } for service in org.services if service.active and service in user.services - ] + ], + 'count_of_live_services': len([ + service for service in org.services + if service.active and not service.restricted + ]), } for org in user.organisations if org.active ], @@ -534,5 +538,14 @@ def get_orgs_and_services(user): service.organisation not in user.organisations ) ) + ], + 'services': [ + { + 'id': service.id, + 'name': service.name, + 'restricted': service.restricted, + 'organisation': service.organisation.id if service.organisation else None, + } + for service in user.services if service.active ] } diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 35e3809ce..8a51d8968 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -835,38 +835,63 @@ def test_get_orgs_and_services_nests_services(admin_request, sample_user): resp = admin_request.get('user.get_organisations_and_services_for_user', user_id=sample_user.id) - assert resp == { - 'organisations': [ - { - 'name': org1.name, - 'id': str(org1.id), - 'services': [ - { - 'name': service1.name, - 'id': str(service1.id), - 'restricted': False, - }, - { - 'name': service2.name, - 'id': str(service2.id), - 'restricted': False, - } - ] - }, - { - 'name': org2.name, - 'id': str(org2.id), - 'services': [] - } - ], - 
'services_without_organisations': [ - { - 'name': service3.name, - 'id': str(service3.id), - 'restricted': False, - } - ] + assert set(resp.keys()) == { + 'organisations', + 'services_without_organisations', + 'services', } + assert resp['organisations'] == [ + { + 'name': org1.name, + 'id': str(org1.id), + 'services': [ + { + 'name': service1.name, + 'id': str(service1.id), + 'restricted': False, + }, + { + 'name': service2.name, + 'id': str(service2.id), + 'restricted': False, + } + ], + 'count_of_live_services': 2, + }, + { + 'name': org2.name, + 'id': str(org2.id), + 'services': [], + 'count_of_live_services': 0, + }, + ] + assert resp['services_without_organisations'] == [ + { + 'name': service3.name, + 'id': str(service3.id), + 'restricted': False, + } + ] + assert resp['services'] == [ + { + 'name': service1.name, + 'id': str(service1.id), + 'restricted': False, + 'organisation': str(org1.id), + }, + { + 'name': service2.name, + 'id': str(service2.id), + 'restricted': False, + 'organisation': str(org1.id), + }, + { + 'name': service3.name, + 'id': str(service3.id), + 'restricted': False, + 'organisation': None, + }, + ] def test_get_orgs_and_services_only_returns_active(admin_request, sample_user): @@ -890,28 +915,52 @@ def test_get_orgs_and_services_only_returns_active(admin_request, sample_user): resp = admin_request.get('user.get_organisations_and_services_for_user', user_id=sample_user.id) - assert resp == { - 'organisations': [ - { - 'name': org1.name, - 'id': str(org1.id), - 'services': [ - { - 'name': service1.name, - 'id': str(service1.id), - 'restricted': False, - } - ] - } - ], - 'services_without_organisations': [ - { - 'name': service4.name, - 'id': str(service4.id), - 'restricted': False, - } - ] + assert set(resp.keys()) == { + 'organisations', + 'services_without_organisations', + 'services', } + assert resp['organisations'] == [ + { + 'name': org1.name, + 'id': str(org1.id), + 'services': [ + { + 'name': service1.name, + 'id': 
str(service1.id), + 'restricted': False, + } + ], + 'count_of_live_services': 1, + } + ] + assert resp['services_without_organisations'] == [ + { + 'name': service4.name, + 'id': str(service4.id), + 'restricted': False, + } + ] + assert resp['services'] == [ + { + 'name': service1.name, + 'id': str(service1.id), + 'restricted': False, + 'organisation': str(org1.id) + }, + { + 'name': service3.name, + 'id': str(service3.id), + 'restricted': False, + 'organisation': str(org2.id) + }, + { + 'name': service4.name, + 'id': str(service4.id), + 'restricted': False, + 'organisation': None, + }, + ] def test_get_orgs_and_services_only_shows_users_orgs_and_services(admin_request, sample_user): @@ -932,23 +981,37 @@ def test_get_orgs_and_services_only_shows_users_orgs_and_services(admin_request, resp = admin_request.get('user.get_organisations_and_services_for_user', user_id=sample_user.id) - assert resp == { - 'organisations': [ - { - 'name': org2.name, - 'id': str(org2.id), - 'services': [] - } - ], - # service1 belongs to org1, but the user doesn't know about org1 - 'services_without_organisations': [ - { - 'name': service1.name, - 'id': str(service1.id), - 'restricted': False, - } - ] + assert set(resp.keys()) == { + 'organisations', + 'services_without_organisations', + 'services', } + assert resp['organisations'] == [ + { + 'name': org2.name, + 'id': str(org2.id), + 'services': [], + 'count_of_live_services': 0, + } + ] + # service1 belongs to org1, but the user doesn't know about org1 + assert resp['services_without_organisations'] == [ + { + 'name': service1.name, + 'id': str(service1.id), + 'restricted': False, + } + ] + # 'services' always returns the org_id no matter whether the user + # belongs to that org or not + assert resp['services'] == [ + { + 'name': service1.name, + 'id': str(service1.id), + 'restricted': False, + 'organisation': str(org1.id), + } + ] def test_find_users_by_email_finds_user_by_partial_email(notify_db, client): From 
d974ab3b868846f47bc8d4749a58e19d3ce651ff Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Wed, 12 Jun 2019 13:15:25 +0100 Subject: [PATCH 3/9] Return count of live services on organisations too This makes it consistent, so the admin app can always rely on that property being available. --- app/models.py | 8 ++++++++ app/user/rest.py | 5 +---- tests/app/organisation/test_rest.py | 4 ++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/app/models.py b/app/models.py index 0465214f4..cf3bc47a7 100644 --- a/app/models.py +++ b/app/models.py @@ -367,6 +367,13 @@ class Organisation(db.Model): nullable=True, ) + @property + def live_services(self): + return [ + service for service in self.services + if service.active and not service.restricted + ] + def serialize(self): return { "id": str(self.id), @@ -384,6 +391,7 @@ class Organisation(db.Model): domain.domain for domain in self.domains ], "request_to_go_live_notes": self.request_to_go_live_notes, + "count_of_live_services": len(self.live_services), } diff --git a/app/user/rest.py b/app/user/rest.py index 872735de2..444dac914 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -516,10 +516,7 @@ def get_orgs_and_services(user): for service in org.services if service.active and service in user.services ], - 'count_of_live_services': len([ - service for service in org.services - if service.active and not service.restricted - ]), + 'count_of_live_services': len(org.live_services), } for org in user.organisations if org.active ], diff --git a/tests/app/organisation/test_rest.py b/tests/app/organisation/test_rest.py index 4c231e50b..26a1155fc 100644 --- a/tests/app/organisation/test_rest.py +++ b/tests/app/organisation/test_rest.py @@ -26,8 +26,10 @@ def test_get_all_organisations(admin_request, notify_db_session): assert len(response) == 2 assert response[0]['name'] == 'active org' assert response[0]['active'] is True + assert response[0]['count_of_live_services'] == 0 assert response[1]['name'] == 
'inactive org' assert response[1]['active'] is False + assert response[1]['count_of_live_services'] == 0 def test_get_organisation_by_id(admin_request, notify_db_session): @@ -53,6 +55,7 @@ def test_get_organisation_by_id(admin_request, notify_db_session): 'email_branding_id', 'domains', 'request_to_go_live_notes', + 'count_of_live_services', } assert response['id'] == str(org.id) assert response['name'] == 'test_org_1' @@ -66,6 +69,7 @@ def test_get_organisation_by_id(admin_request, notify_db_session): assert response['email_branding_id'] is None assert response['domains'] == [] assert response['request_to_go_live_notes'] is None + assert response['count_of_live_services'] == 0 def test_get_organisation_by_id_returns_domains(admin_request, notify_db_session): From 57705fd6fe4fea7dae283112509ed3b4c6c3e65f Mon Sep 17 00:00:00 2001 From: Andy Paine Date: Fri, 14 Jun 2019 15:22:18 +0100 Subject: [PATCH 4/9] AUTO: Explicitly include FIRETEXT_URL in manifest - We are explicit about MMG_URL but not FIRETEXT_URL - credentials has already been updated (checked by doing make generate-manifest for all envs) --- manifest.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/manifest.yml.j2 b/manifest.yml.j2 index 6d8f56c99..62fddcf0d 100644 --- a/manifest.yml.j2 +++ b/manifest.yml.j2 @@ -84,6 +84,7 @@ applications: MMG_INBOUND_SMS_AUTH: '{{ MMG_INBOUND_SMS_AUTH | tojson }}' MMG_INBOUND_SMS_USERNAME: '{{ MMG_INBOUND_SMS_USERNAME | tojson }}' + FIRETEXT_URL: '{{ FIRETEXT_URL }}' FIRETEXT_API_KEY: '{{ FIRETEXT_API_KEY }}' LOADTESTING_API_KEY: '{{ LOADTESTING_API_KEY }}' FIRETEXT_INBOUND_SMS_AUTH: '{{ FIRETEXT_INBOUND_SMS_AUTH | tojson }}' From 8977f5193e3fd90bf1f5f1781ad9f4ff1bd9983e Mon Sep 17 00:00:00 2001 From: Chris Hill-Scott Date: Thu, 13 Jun 2019 16:43:34 +0100 Subject: [PATCH 5/9] Add columns for on whose behalf agreement signed MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is changing because we’re going to introduce 
accepting contracts and MoUs online. Previously --- We had one column for who signed the agreement, which is foreign keyed to the user table. This is still relevant, because there will always be a user who is clicking the button. Now --- We add two new fields for the name and email address of the person on whose behalf the agreement is being accepted. This person: - is different from the one signing the agreement - won’t necessarily have a Notify account --- app/models.py | 4 ++++ .../0296_agreement_signed_by_person.py | 22 +++++++++++++++++++ tests/app/organisation/test_rest.py | 8 +++++++ 3 files changed, 34 insertions(+) create mode 100644 migrations/versions/0296_agreement_signed_by_person.py diff --git a/app/models.py b/app/models.py index cf3bc47a7..320aa33de 100644 --- a/app/models.py +++ b/app/models.py @@ -344,6 +344,8 @@ class Organisation(db.Model): db.ForeignKey('users.id'), nullable=True, ) + agreement_signed_on_behalf_of_name = db.Column(db.String(255), nullable=True) + agreement_signed_on_behalf_of_email_address = db.Column(db.String(255), nullable=True) agreement_signed_version = db.Column(db.Float, nullable=True) crown = db.Column(db.Boolean, nullable=True) organisation_type = db.Column(db.String(255), nullable=True) @@ -386,6 +388,8 @@ class Organisation(db.Model): "agreement_signed": self.agreement_signed, "agreement_signed_at": self.agreement_signed_at, "agreement_signed_by_id": self.agreement_signed_by_id, + "agreement_signed_on_behalf_of_name": self.agreement_signed_on_behalf_of_name, + "agreement_signed_on_behalf_of_email_address": self.agreement_signed_on_behalf_of_email_address, "agreement_signed_version": self.agreement_signed_version, "domains": [ domain.domain for domain in self.domains diff --git a/migrations/versions/0296_agreement_signed_by_person.py b/migrations/versions/0296_agreement_signed_by_person.py new file mode 100644 index 000000000..f10d97c1b --- /dev/null +++ b/migrations/versions/0296_agreement_signed_by_person.py @@ -0,0 
+1,22 @@ +""" + +Revision ID: 0296_agreement_signed_by_person +Revises: 0295_api_key_constraint +Create Date: 2019-06-13 16:40:32.982607 + +""" +from alembic import op +import sqlalchemy as sa + +revision = '0296_agreement_signed_by_person' +down_revision = '0295_api_key_constraint' + + +def upgrade(): + op.add_column('organisation', sa.Column('agreement_signed_on_behalf_of_email_address', sa.String(length=255), nullable=True)) + op.add_column('organisation', sa.Column('agreement_signed_on_behalf_of_name', sa.String(length=255), nullable=True)) + + +def downgrade(): + op.drop_column('organisation', 'agreement_signed_on_behalf_of_name') + op.drop_column('organisation', 'agreement_signed_on_behalf_of_email_address') diff --git a/tests/app/organisation/test_rest.py b/tests/app/organisation/test_rest.py index 26a1155fc..d99670d7a 100644 --- a/tests/app/organisation/test_rest.py +++ b/tests/app/organisation/test_rest.py @@ -51,6 +51,8 @@ def test_get_organisation_by_id(admin_request, notify_db_session): 'agreement_signed_at', 'agreement_signed_by_id', 'agreement_signed_version', + 'agreement_signed_on_behalf_of_name', + 'agreement_signed_on_behalf_of_email_address', 'letter_branding_id', 'email_branding_id', 'domains', @@ -70,6 +72,8 @@ def test_get_organisation_by_id(admin_request, notify_db_session): assert response['domains'] == [] assert response['request_to_go_live_notes'] is None assert response['count_of_live_services'] == 0 + assert response['agreement_signed_on_behalf_of_name'] is None + assert response['agreement_signed_on_behalf_of_email_address'] is None def test_get_organisation_by_id_returns_domains(admin_request, notify_db_session): @@ -197,6 +201,8 @@ def test_post_update_organisation_updates_fields( 'active': False, 'agreement_signed': agreement_signed, 'crown': crown, + 'agreement_signed_on_behalf_of_name': 'Firstname Lastname', + 'agreement_signed_on_behalf_of_email_address': 'test@example.com', } assert org.agreement_signed is None assert org.crown 
is None @@ -217,6 +223,8 @@ def test_post_update_organisation_updates_fields( assert organisation[0].agreement_signed == agreement_signed assert organisation[0].crown == crown assert organisation[0].domains == [] + assert organisation[0].agreement_signed_on_behalf_of_name == 'Firstname Lastname' + assert organisation[0].agreement_signed_on_behalf_of_email_address == 'test@example.com' @pytest.mark.parametrize('domain_list', ( From 3d01276ce2fda068ba3a38b7c8ccdc691fa38263 Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 11 Jun 2019 11:00:04 +0100 Subject: [PATCH 6/9] Log exception and set precompiled letter to tech-failure if S3 errors The `process_virus_scan_passed` task now catches S3 errors - if these occur, it logs an exception and puts the letter in a `technical-failure` state. We don't retry the task, because the most common reason for failure would be the letter not being in the expected S3 bucket, in which case retrying would make no difference. --- app/celery/letters_pdf_tasks.py | 51 ++++++++++------- tests/app/celery/test_letters_pdf_tasks.py | 66 ++++++++++++++++++++++ 2 files changed, 97 insertions(+), 20 deletions(-) diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index b164e64f8..4cd0caf1d 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -210,7 +210,7 @@ def process_virus_scan_passed(self, filename): try: billable_units = _get_page_count(notification, old_pdf) except PdfReadError: - _move_invalid_letter_and_update_status(notification.reference, filename, scan_pdf_object) + _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) return new_pdf = _sanitise_precompiled_pdf(self, notification, old_pdf) @@ -222,7 +222,7 @@ def process_virus_scan_passed(self, filename): if not new_pdf: current_app.logger.info('Invalid precompiled pdf received {} ({})'.format(notification.id, filename)) - _move_invalid_letter_and_update_status(notification.reference, filename, 
scan_pdf_object) + _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) return else: current_app.logger.info( @@ -230,18 +230,23 @@ def process_virus_scan_passed(self, filename): current_app.logger.info('notification id {} ({}) sanitised and ready to send'.format(notification.id, filename)) - _upload_pdf_to_test_or_live_pdf_bucket( - new_pdf, - filename, - is_test_letter=is_test_key) + try: + _upload_pdf_to_test_or_live_pdf_bucket( + new_pdf, + filename, + is_test_letter=is_test_key) - update_letter_pdf_status( - reference=reference, - status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, - billable_units=billable_units - ) - - scan_pdf_object.delete() + update_letter_pdf_status( + reference=reference, + status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, + billable_units=billable_units + ) + scan_pdf_object.delete() + except BotoClientError: + current_app.logger.exception( + "Error uploading letter to live pdf bucket for notification: {}".format(notification.id) + ) + update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) def _get_page_count(notification, old_pdf): @@ -255,14 +260,20 @@ def _get_page_count(notification, old_pdf): raise e -def _move_invalid_letter_and_update_status(notification_reference, filename, scan_pdf_object): - move_scan_to_invalid_pdf_bucket(filename) - scan_pdf_object.delete() +def _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object): + try: + move_scan_to_invalid_pdf_bucket(filename) + scan_pdf_object.delete() - update_letter_pdf_status( - reference=notification_reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0) + update_letter_pdf_status( + reference=notification.reference, + status=NOTIFICATION_VALIDATION_FAILED, + billable_units=0) + except BotoClientError: + current_app.logger.exception( + "Error when moving letter with id {} to invalid PDF bucket".format(notification.id) + ) + 
update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter): diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py index 8396347e6..f99af3e80 100644 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ b/tests/app/celery/test_letters_pdf_tasks.py @@ -23,6 +23,7 @@ from app.celery.letters_pdf_tasks import ( process_virus_scan_failed, process_virus_scan_error, replay_letters_in_error, + _move_invalid_letter_and_update_status, _sanitise_precompiled_pdf ) from app.letters.utils import get_letter_pdf_filename, ScanErrorType @@ -46,6 +47,10 @@ from tests.conftest import set_config_values def test_should_have_decorated_tasks_functions(): assert create_letters_pdf.__wrapped__.__name__ == 'create_letters_pdf' + assert collate_letter_pdfs_for_day.__wrapped__.__name__ == 'collate_letter_pdfs_for_day' + assert process_virus_scan_passed.__wrapped__.__name__ == 'process_virus_scan_passed' + assert process_virus_scan_failed.__wrapped__.__name__ == 'process_virus_scan_failed' + assert process_virus_scan_error.__wrapped__.__name__ == 'process_virus_scan_error' @pytest.mark.parametrize('personalisation', [{'name': 'test'}, None]) @@ -517,6 +522,67 @@ def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened( assert sample_letter_notification.billable_units == 0 +@mock_s3 +def test_process_virus_scan_passed_logs_error_and_sets_tech_failure_if_s3_error_uploading_to_live_bucket( + mocker, + sample_letter_notification, +): + mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + + sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK + filename = 'NOTIFY.{}'.format(sample_letter_notification.reference) + + source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME'] + conn = boto3.resource('s3', region_name='eu-west-1') + conn.create_bucket(Bucket=source_bucket_name) + + s3 = 
boto3.client('s3', region_name='eu-west-1') + s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content') + + mocker.patch('app.celery.letters_pdf_tasks._get_page_count', return_value=1) + mocker.patch('app.celery.letters_pdf_tasks._sanitise_precompiled_pdf', return_value=b'pdf_content') + + error_response = { + 'Error': { + 'Code': 'InvalidParameterValue', + 'Message': 'some error message from amazon', + 'Type': 'Sender' + } + } + mocker.patch('app.celery.letters_pdf_tasks._upload_pdf_to_test_or_live_pdf_bucket', + side_effect=ClientError(error_response, 'operation_name')) + + process_virus_scan_passed(filename) + + assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE + mock_logger.assert_called_once_with( + 'Error uploading letter to live pdf bucket for notification: {}'.format(sample_letter_notification.id) + ) + + +def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error( + mocker, + sample_letter_notification, +): + error_response = { + 'Error': { + 'Code': 'InvalidParameterValue', + 'Message': 'some error message from amazon', + 'Type': 'Sender' + } + } + mocker.patch('app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket', + side_effect=ClientError(error_response, 'operation_name')) + mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + + _move_invalid_letter_and_update_status(sample_letter_notification, 'filename', mocker.Mock()) + + assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE + mock_logger.assert_called_once_with( + 'Error when moving letter with id {} to invalid PDF bucket'.format(sample_letter_notification.id) + ) + + def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker): filename = 'NOTIFY.{}'.format(sample_letter_notification.reference) sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK From a2f324ad7eef71961a82e8f8cba3264be6b239ab Mon Sep 17 00:00:00 2001 
From: Katie Smith Date: Tue, 11 Jun 2019 13:16:34 +0100 Subject: [PATCH 7/9] Add scheduled task to find precompiled letters in wrong state Added a task which runs twice a day on weekdays and checks for letters that have been in the state of `pending-virus-check` for over 90 minutes. This is just logging an exception for now, not trying to fix things, since we will need to manually check where the issue was. --- app/celery/scheduled_tasks.py | 17 ++++++++++++- app/config.py | 5 ++++ app/dao/notifications_dao.py | 12 +++++++++ tests/app/celery/test_scheduled_tasks.py | 31 +++++++++++++++++++++++- 4 files changed, 63 insertions(+), 2 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index f19aaf135..ad955f12c 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -19,7 +19,8 @@ from app.dao.notifications_dao import ( is_delivery_slow_for_provider, dao_get_scheduled_notifications, set_scheduled_notification_to_processed, - notifications_not_yet_sent + notifications_not_yet_sent, + dao_precompiled_letters_still_pending_virus_check, ) from app.dao.provider_details_dao import ( get_current_provider, @@ -178,3 +179,17 @@ def replay_created_notifications(): for n in notifications_to_resend: send_notification_to_queue(notification=n, research_mode=n.service.research_mode) + + +@notify_celery.task(name='check-precompiled-letter-state') +@statsd(namespace="tasks") +def check_precompiled_letter_state(): + letters = dao_precompiled_letters_still_pending_virus_check() + letter_ids = [str(letter.id) for letter in letters] + + if len(letters) > 0: + current_app.logger.exception( + '{} precompiled letters have been pending-virus-check for over 90 minutes. 
Notifications: {}'.format( + len(letters), + letter_ids) + ) diff --git a/app/config.py b/app/config.py index 50889727a..3690944a9 100644 --- a/app/config.py +++ b/app/config.py @@ -276,6 +276,11 @@ class Config(object): 'schedule': crontab(hour=23, minute=00), 'options': {'queue': QueueNames.PERIODIC} }, + 'check-precompiled-letter-state': { + 'task': 'check-precompiled-letter-state', + 'schedule': crontab(day_of_week='mon-fri', hour='9,15', minute=0), + 'options': {'queue': QueueNames.PERIODIC} + }, } CELERY_QUEUES = [] diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 35873cf66..069a8ea2a 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -692,6 +692,18 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ return notifications +def dao_precompiled_letters_still_pending_virus_check(): + ninety_minutes_ago = datetime.utcnow() - timedelta(seconds=5400) + + notifications = Notification.query.filter( + Notification.created_at < ninety_minutes_ago, + Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK + ).order_by( + Notification.created_at + ).all() + return notifications + + def guess_notification_type(search_term): if set(search_term) & set(string.ascii_letters + '@'): return EMAIL_TYPE diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index bf4eb9507..95eb7498f 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -13,7 +13,8 @@ from app.celery.scheduled_tasks import ( run_scheduled_jobs, send_scheduled_notifications, switch_current_sms_provider_on_slow_delivery, - replay_created_notifications + replay_created_notifications, + check_precompiled_letter_state, ) from app.config import QueueNames, TaskNames from app.dao.jobs_dao import dao_get_job_by_id @@ -26,6 +27,8 @@ from app.models import ( JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR, JOB_STATUS_FINISHED, + 
NOTIFICATION_DELIVERED, + NOTIFICATION_PENDING_VIRUS_CHECK, ) from app.v2.errors import JobIncompleteError @@ -334,3 +337,29 @@ def test_check_job_status_task_does_not_raise_error(sample_template): job_status=JOB_STATUS_FINISHED) check_job_status() + + +@freeze_time("2019-05-30 14:00:00") +def test_check_precompiled_letter_state(mocker, sample_letter_template): + mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + + create_notification(template=sample_letter_template, + status=NOTIFICATION_PENDING_VIRUS_CHECK, + created_at=datetime.utcnow() - timedelta(seconds=5400)) + create_notification(template=sample_letter_template, + status=NOTIFICATION_DELIVERED, + created_at=datetime.utcnow() - timedelta(seconds=6000)) + noti_1 = create_notification(template=sample_letter_template, + status=NOTIFICATION_PENDING_VIRUS_CHECK, + created_at=datetime.utcnow() - timedelta(seconds=5401)) + noti_2 = create_notification(template=sample_letter_template, + status=NOTIFICATION_PENDING_VIRUS_CHECK, + created_at=datetime.utcnow() - timedelta(seconds=70000)) + + check_precompiled_letter_state() + + mock_logger.assert_called_once_with( + "2 precompiled letters have been pending-virus-check for over 90 minutes. Notifications: ['{}', '{}']".format( + noti_2.id, + noti_1.id) + ) From c518f6ca76f3f7c47d8ec1f6e4cac13d340e116c Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Tue, 11 Jun 2019 15:13:06 +0100 Subject: [PATCH 8/9] Add scheduled task to find old letters which still have 'created' status Added a scheduled task to run once a day and check if there were any letters from before 17.30 that still have a status of 'created'. This logs an exception instead of trying to fix the error because the fix will be different depending on which bucket the letter is in. 
--- app/celery/scheduled_tasks.py | 15 +++++++++ app/config.py | 5 +++ app/dao/notifications_dao.py | 16 +++++++++- tests/app/celery/test_scheduled_tasks.py | 39 ++++++++++++++++++++++++ 4 files changed, 74 insertions(+), 1 deletion(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index ad955f12c..ebef57b40 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -21,6 +21,7 @@ from app.dao.notifications_dao import ( set_scheduled_notification_to_processed, notifications_not_yet_sent, dao_precompiled_letters_still_pending_virus_check, + dao_old_letters_with_created_status, ) from app.dao.provider_details_dao import ( get_current_provider, @@ -193,3 +194,17 @@ def check_precompiled_letter_state(): len(letters), letter_ids) ) + + +@notify_celery.task(name='check-templated-letter-state') +@statsd(namespace="tasks") +def check_templated_letter_state(): + letters = dao_old_letters_with_created_status() + letter_ids = [str(letter.id) for letter in letters] + + if len(letters) > 0: + current_app.logger.exception( + "{} letters were created before 17.30 yesterday and still have 'created' state. 
Notifications: {}".format( + len(letters), + letter_ids) + ) diff --git a/app/config.py b/app/config.py index 3690944a9..2c5ee54ae 100644 --- a/app/config.py +++ b/app/config.py @@ -259,6 +259,11 @@ class Config(object): # since we mark jobs as archived 'options': {'queue': QueueNames.PERIODIC}, }, + 'check-templated-letter-state': { + 'task': 'check-templated-letter-state', + 'schedule': crontab(day_of_week='mon-fri', hour=9, minute=0), + 'options': {'queue': QueueNames.PERIODIC} + }, 'raise-alert-if-letter-notifications-still-sending': { 'task': 'raise-alert-if-letter-notifications-still-sending', 'schedule': crontab(hour=16, minute=30), diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 069a8ea2a..882189ae3 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -14,7 +14,7 @@ from notifications_utils.recipients import ( try_validate_and_format_phone_number ) from notifications_utils.statsd_decorators import statsd -from notifications_utils.timezones import convert_utc_to_bst +from notifications_utils.timezones import convert_bst_to_utc, convert_utc_to_bst from sqlalchemy import (desc, func, asc) from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound @@ -692,6 +692,20 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ return notifications +def dao_old_letters_with_created_status(): + yesterday_bst = convert_utc_to_bst(datetime.utcnow()) - timedelta(days=1) + last_processing_deadline = yesterday_bst.replace(hour=17, minute=30, second=0, microsecond=0) + + notifications = Notification.query.filter( + Notification.created_at < convert_bst_to_utc(last_processing_deadline), + Notification.notification_type == LETTER_TYPE, + Notification.status == NOTIFICATION_CREATED + ).order_by( + Notification.created_at + ).all() + return notifications + + def dao_precompiled_letters_still_pending_virus_check(): ninety_minutes_ago = datetime.utcnow() - 
timedelta(seconds=5400) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 95eb7498f..0b57b489f 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -15,6 +15,7 @@ from app.celery.scheduled_tasks import ( switch_current_sms_provider_on_slow_delivery, replay_created_notifications, check_precompiled_letter_state, + check_templated_letter_state, ) from app.config import QueueNames, TaskNames from app.dao.jobs_dao import dao_get_job_by_id @@ -363,3 +364,41 @@ def test_check_precompiled_letter_state(mocker, sample_letter_template): noti_2.id, noti_1.id) ) + + +@freeze_time("2019-05-30 14:00:00") +def test_check_templated_letter_state_during_bst(mocker, sample_letter_template): + mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + + noti_1 = create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 1, 12, 0)) + noti_2 = create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 16, 29)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 16, 30)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 17, 29)) + create_notification(template=sample_letter_template, status='delivered', created_at=datetime(2019, 5, 28, 10, 0)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 30, 10, 0)) + + check_templated_letter_state() + + mock_logger.assert_called_once_with( + "2 letters were created before 17.30 yesterday and still have 'created' state. 
" + "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + ) + + +@freeze_time("2019-01-30 14:00:00") +def test_check_templated_letter_state_during_utc(mocker, sample_letter_template): + mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + + noti_1 = create_notification(template=sample_letter_template, created_at=datetime(2018, 12, 1, 12, 0)) + noti_2 = create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 17, 29)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 17, 30)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 18, 29)) + create_notification(template=sample_letter_template, status='delivered', created_at=datetime(2019, 1, 29, 10, 0)) + create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 30, 10, 0)) + + check_templated_letter_state() + + mock_logger.assert_called_once_with( + "2 letters were created before 17.30 yesterday and still have 'created' state. " + "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + ) From a790acc09129c9e21f5fc7a501a83da9457e9a6c Mon Sep 17 00:00:00 2001 From: Katie Smith Date: Mon, 17 Jun 2019 13:52:36 +0100 Subject: [PATCH 9/9] Create a Zendesk ticket for letters in the wrong state This creates a Zendesk ticket if either the `check_precompiled_letter_state` or `check_templated_letter_state` tasks fail. 
--- app/celery/scheduled_tasks.py | 40 ++++++++++++++++-------- tests/app/celery/test_scheduled_tasks.py | 39 ++++++++++++++++------- 2 files changed, 55 insertions(+), 24 deletions(-) diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index ebef57b40..1cf508b31 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -8,7 +8,7 @@ from notifications_utils.statsd_decorators import statsd from sqlalchemy import and_ from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery +from app import notify_celery, zendesk_client from app.celery.tasks import process_job from app.config import QueueNames, TaskNames from app.dao.invited_org_user_dao import delete_org_invitations_created_more_than_two_days_ago @@ -186,25 +186,39 @@ def replay_created_notifications(): @statsd(namespace="tasks") def check_precompiled_letter_state(): letters = dao_precompiled_letters_still_pending_virus_check() - letter_ids = [str(letter.id) for letter in letters] if len(letters) > 0: - current_app.logger.exception( - '{} precompiled letters have been pending-virus-check for over 90 minutes. Notifications: {}'.format( - len(letters), - letter_ids) - ) + letter_ids = [str(letter.id) for letter in letters] + + msg = "{} precompiled letters have been pending-virus-check for over 90 minutes. 
" \ + "Notifications: {}".format(len(letters), letter_ids) + + current_app.logger.exception(msg) + + if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: + zendesk_client.create_ticket( + subject="[{}] Letters still pending virus check".format(current_app.config['NOTIFY_ENVIRONMENT']), + message=msg, + ticket_type=zendesk_client.TYPE_INCIDENT + ) @notify_celery.task(name='check-templated-letter-state') @statsd(namespace="tasks") def check_templated_letter_state(): letters = dao_old_letters_with_created_status() - letter_ids = [str(letter.id) for letter in letters] if len(letters) > 0: - current_app.logger.exception( - "{} letters were created before 17.30 yesterday and still have 'created' state. Notifications: {}".format( - len(letters), - letter_ids) - ) + letter_ids = [str(letter.id) for letter in letters] + + msg = "{} letters were created before 17.30 yesterday and still have 'created' status. " \ + "Notifications: {}".format(len(letters), letter_ids) + + current_app.logger.exception(msg) + + if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: + zendesk_client.create_ticket( + subject="[{}] Letters still in 'created' status".format(current_app.config['NOTIFY_ENVIRONMENT']), + message=msg, + ticket_type=zendesk_client.TYPE_INCIDENT + ) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 0b57b489f..baf108380 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -62,7 +62,7 @@ def prepare_current_provider(restore_provider_details): db.session.commit() -def test_should_call_delete_codes_on_delete_verify_codes_task(notify_api, mocker): +def test_should_call_delete_codes_on_delete_verify_codes_task(notify_db_session, mocker): mocker.patch('app.celery.scheduled_tasks.delete_codes_older_created_more_than_a_day_ago') delete_verify_codes() assert scheduled_tasks.delete_codes_older_created_more_than_a_day_ago.call_count 
== 1 @@ -343,6 +343,7 @@ def test_check_job_status_task_does_not_raise_error(sample_template): @freeze_time("2019-05-30 14:00:00") def test_check_precompiled_letter_state(mocker, sample_letter_template): mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + mock_create_ticket = mocker.patch('app.celery.nightly_tasks.zendesk_client.create_ticket') create_notification(template=sample_letter_template, status=NOTIFICATION_PENDING_VIRUS_CHECK, @@ -359,16 +360,21 @@ def test_check_precompiled_letter_state(mocker, sample_letter_template): check_precompiled_letter_state() - mock_logger.assert_called_once_with( - "2 precompiled letters have been pending-virus-check for over 90 minutes. Notifications: ['{}', '{}']".format( - noti_2.id, - noti_1.id) + message = "2 precompiled letters have been pending-virus-check for over 90 minutes. " \ + "Notifications: ['{}', '{}']".format(noti_2.id, noti_1.id) + + mock_logger.assert_called_once_with(message) + mock_create_ticket.assert_called_with( + message=message, + subject='[test] Letters still pending virus check', + ticket_type='incident' ) @freeze_time("2019-05-30 14:00:00") def test_check_templated_letter_state_during_bst(mocker, sample_letter_template): mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + mock_create_ticket = mocker.patch('app.celery.nightly_tasks.zendesk_client.create_ticket') noti_1 = create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 1, 12, 0)) noti_2 = create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 16, 29)) @@ -379,15 +385,21 @@ def test_check_templated_letter_state_during_bst(mocker, sample_letter_template) check_templated_letter_state() - mock_logger.assert_called_once_with( - "2 letters were created before 17.30 yesterday and still have 'created' state. 
" - "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + message = "2 letters were created before 17.30 yesterday and still have 'created' status. " \ + "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + + mock_logger.assert_called_once_with(message) + mock_create_ticket.assert_called_with( + message=message, + subject="[test] Letters still in 'created' status", + ticket_type='incident' ) @freeze_time("2019-01-30 14:00:00") def test_check_templated_letter_state_during_utc(mocker, sample_letter_template): mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') + mock_create_ticket = mocker.patch('app.celery.nightly_tasks.zendesk_client.create_ticket') noti_1 = create_notification(template=sample_letter_template, created_at=datetime(2018, 12, 1, 12, 0)) noti_2 = create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 17, 29)) @@ -398,7 +410,12 @@ def test_check_templated_letter_state_during_utc(mocker, sample_letter_template) check_templated_letter_state() - mock_logger.assert_called_once_with( - "2 letters were created before 17.30 yesterday and still have 'created' state. " - "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + message = "2 letters were created before 17.30 yesterday and still have 'created' status. " \ + "Notifications: ['{}', '{}']".format(noti_1.id, noti_2.id) + + mock_logger.assert_called_once_with(message) + mock_create_ticket.assert_called_with( + message=message, + subject="[test] Letters still in 'created' status", + ticket_type='incident' )