2018-10-16 15:08:15 +01:00
|
|
|
import io
|
2017-12-13 15:52:38 +00:00
|
|
|
import math
|
2018-03-19 13:52:01 +00:00
|
|
|
from datetime import datetime
|
2018-10-18 16:01:59 +01:00
|
|
|
from uuid import UUID
|
2019-03-21 15:40:24 +00:00
|
|
|
from hashlib import sha512
|
|
|
|
|
from base64 import urlsafe_b64encode
|
2017-12-19 14:18:05 +00:00
|
|
|
|
2018-10-16 15:08:15 +01:00
|
|
|
from PyPDF2.utils import PdfReadError
|
2018-03-19 13:52:01 +00:00
|
|
|
from botocore.exceptions import ClientError as BotoClientError
|
2017-12-19 14:18:05 +00:00
|
|
|
from flask import current_app
|
2018-10-16 15:08:15 +01:00
|
|
|
from notifications_utils.pdf import pdf_page_count
|
2017-12-11 11:00:27 +00:00
|
|
|
from requests import (
|
|
|
|
|
post as requests_post,
|
|
|
|
|
RequestException
|
|
|
|
|
)
|
2018-08-24 15:12:02 +01:00
|
|
|
from celery.exceptions import MaxRetriesExceededError
|
2018-03-19 13:52:01 +00:00
|
|
|
from notifications_utils.statsd_decorators import statsd
|
2018-08-21 18:02:17 +01:00
|
|
|
from notifications_utils.s3 import s3upload
|
2017-12-11 11:00:27 +00:00
|
|
|
|
|
|
|
|
from app import notify_celery
|
|
|
|
|
from app.aws import s3
|
2017-12-19 14:18:05 +00:00
|
|
|
from app.config import QueueNames, TaskNames
|
2017-12-13 15:52:38 +00:00
|
|
|
from app.dao.notifications_dao import (
|
|
|
|
|
get_notification_by_id,
|
|
|
|
|
update_notification_status_by_id,
|
2018-01-15 17:00:00 +00:00
|
|
|
dao_update_notification,
|
2018-03-23 12:04:37 +00:00
|
|
|
dao_get_notification_by_reference,
|
2018-01-15 17:00:00 +00:00
|
|
|
dao_get_notifications_by_references,
|
2018-03-19 13:52:01 +00:00
|
|
|
dao_update_notifications_by_reference,
|
2017-12-13 15:52:38 +00:00
|
|
|
)
|
2018-04-03 12:31:52 +01:00
|
|
|
from app.errors import VirusScanError
|
2018-03-19 13:52:01 +00:00
|
|
|
from app.letters.utils import (
|
|
|
|
|
get_reference_from_filename,
|
2018-08-21 18:02:17 +01:00
|
|
|
get_folder_name,
|
2018-03-23 15:27:24 +00:00
|
|
|
upload_letter_pdf,
|
2018-08-21 18:02:17 +01:00
|
|
|
ScanErrorType,
|
2018-10-16 17:20:34 +01:00
|
|
|
move_failed_pdf,
|
|
|
|
|
move_scan_to_invalid_pdf_bucket,
|
2018-08-21 18:02:17 +01:00
|
|
|
move_error_pdf_to_scan_bucket,
|
2018-06-27 16:40:30 +01:00
|
|
|
get_file_names_from_error_bucket
|
|
|
|
|
)
|
2018-03-23 12:04:37 +00:00
|
|
|
from app.models import (
|
|
|
|
|
KEY_TYPE_TEST,
|
|
|
|
|
NOTIFICATION_CREATED,
|
|
|
|
|
NOTIFICATION_DELIVERED,
|
2018-08-21 18:02:17 +01:00
|
|
|
NOTIFICATION_TECHNICAL_FAILURE,
|
2018-10-18 16:01:59 +01:00
|
|
|
NOTIFICATION_VALIDATION_FAILED,
|
|
|
|
|
NOTIFICATION_VIRUS_SCAN_FAILED,
|
2018-03-23 12:04:37 +00:00
|
|
|
)
|
2019-04-05 10:26:18 +01:00
|
|
|
from app.cronitor import cronitor
|
2017-12-11 11:00:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@notify_celery.task(bind=True, name="create-letters-pdf", max_retries=15, default_retry_delay=300)
@statsd(namespace="tasks")
def create_letters_pdf(self, notification_id):
    """Render a letter notification to PDF via template preview and upload it to S3.

    Also stores the billable-unit count (sheets) on the notification. Transient
    HTTP/S3 failures are retried; once retries are exhausted the notification is
    marked as a technical failure.

    :param notification_id: id of the letter notification to render.
    """
    try:
        notification = get_notification_by_id(notification_id, _raise=True)
        pdf_data, billable_units = get_letters_pdf(
            notification.template,
            contact_block=notification.reply_to_text,
            filename=notification.service.letter_branding and notification.service.letter_branding.filename,
            values=notification.personalisation
        )

        upload_letter_pdf(notification, pdf_data)

        notification.billable_units = billable_units
        dao_update_notification(notification)

        current_app.logger.info(
            'Letter notification reference {reference}: billable units set to {billable_units}'.format(
                reference=str(notification.reference), billable_units=billable_units))

    except (RequestException, BotoClientError):
        try:
            current_app.logger.exception(
                "Letters PDF notification creation for id: {} failed".format(notification_id)
            )
            self.retry(queue=QueueNames.RETRY)
        except MaxRetriesExceededError:
            current_app.logger.error(
                "RETRY FAILED: task create_letters_pdf failed for notification {}".format(notification_id),
            )
            # Use the shared status constant (as the rest of this module does)
            # instead of the raw 'technical-failure' string.
            update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE)
|
|
|
|
|
|
|
|
|
|
|
2018-10-23 15:52:44 +01:00
|
|
|
def get_letters_pdf(template, contact_block, filename, values):
    """POST the letter details to the template preview service.

    Returns a tuple of (rendered PDF bytes, billable units), where billable
    units are sheets of paper (two pages per sheet, rounded up).
    """
    payload = {
        'letter_contact_block': contact_block,
        'template': {
            "subject": template.subject,
            "content": template.content
        },
        'values': values,
        'filename': filename,
    }

    api_host = current_app.config['TEMPLATE_PREVIEW_API_HOST']
    api_key = current_app.config['TEMPLATE_PREVIEW_API_KEY']
    response = requests_post(
        '{}/print.pdf'.format(api_host),
        json=payload,
        headers={'Authorization': 'Token {}'.format(api_key)}
    )
    response.raise_for_status()

    # Letters are printed double-sided, so two pages fit on one billable sheet.
    pages_per_sheet = 2
    page_count = int(response.headers.get("X-pdf-page-count", 0))
    billable_units = math.ceil(page_count / pages_per_sheet)

    return response.content, billable_units
|
2017-12-19 14:18:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@notify_celery.task(name='collate-letter-pdfs-for-day')
@cronitor("collate-letter-pdfs-for-day")
def collate_letter_pdfs_for_day(date=None):
    """Batch the day's letter PDFs and send each batch to be zipped and FTPed.

    :param date: "YYYY-MM-DD" S3 subfolder to collate; defaults to today (UTC).
    """
    if not date:
        # Using the truncated date is ok because UTC to BST does not make a difference to the date,
        # since it is triggered mid afternoon.
        date = datetime.utcnow().strftime("%Y-%m-%d")

    letter_pdfs = sorted(
        s3.get_s3_bucket_objects(
            current_app.config['LETTERS_PDF_BUCKET_NAME'],
            subfolder=date
        ),
        key=lambda letter: letter['Key']
    )
    for i, letters in enumerate(group_letters(letter_pdfs)):
        filenames = [letter['Key'] for letter in letters]

        # Deterministic digest of the batch contents, so re-running produces the
        # same upload filename for the same set of letters.
        # (Renamed from `hash`, which shadowed the builtin.)
        letters_hash = urlsafe_b64encode(sha512(''.join(filenames).encode()).digest())[:20].decode()
        # eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP
        dvla_filename = 'NOTIFY.{date}.{num:03}.{hash}.ZIP'.format(
            date=date,
            num=i + 1,
            hash=letters_hash
        )

        current_app.logger.info(
            'Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes'.format(
                len(filenames),
                dvla_filename,
                sum(letter['Size'] for letter in letters)
            )
        )
        notify_celery.send_task(
            name=TaskNames.ZIP_AND_SEND_LETTER_PDFS,
            kwargs={
                'filenames_to_zip': filenames,
                'upload_filename': dvla_filename
            },
            queue=QueueNames.PROCESS_FTP,
            compression='zlib'
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def group_letters(letter_pdfs):
    """
    Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size.
    If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on it's own.
    If there are no files, will just exit (rather than yielding an empty list).
    """
    current_batch = []
    current_batch_size = 0

    for letter in letter_pdfs:
        # Only include PDFs whose notification is still awaiting despatch.
        if not (letter['Key'].lower().endswith('.pdf') and letter_in_created_state(letter['Key'])):
            continue

        over_size = current_batch_size + letter['Size'] > current_app.config['MAX_LETTER_PDF_ZIP_FILESIZE']
        over_count = len(current_batch) >= current_app.config['MAX_LETTER_PDF_COUNT_PER_ZIP']
        if over_size or over_count:
            # Flush the batch before it would exceed either zip limit.
            yield current_batch
            current_batch = []
            current_batch_size = 0

        current_batch.append(letter)
        current_batch_size += letter['Size']

    if current_batch:
        yield current_batch
|
2018-01-15 17:00:00 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def letter_in_created_state(filename):
    """Return True if the notification matching this PDF's reference is still
    in the created state; otherwise log (if a match exists) and return False.
    """
    # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF'
    subfolder = filename.split('/')[0]
    ref = get_reference_from_filename(filename)
    matches = dao_get_notifications_by_references([ref])

    if not matches:
        return False

    if matches[0].status == NOTIFICATION_CREATED:
        return True

    current_app.logger.info('Collating letters for {} but notification with reference {} already in {}'.format(
        subfolder,
        ref,
        matches[0].status
    ))
    return False
|
2018-03-19 13:52:01 +00:00
|
|
|
|
|
|
|
|
|
2018-08-21 18:02:17 +01:00
|
|
|
@notify_celery.task(bind=True, name='process-virus-scan-passed', max_retries=15, default_retry_delay=300)
def process_virus_scan_passed(self, filename):
    """Process a precompiled letter PDF that passed the virus scan.

    Counts pages for billing, sanitises the PDF via template preview, uploads
    the result to the test or live letters bucket, updates the notification's
    status and deletes the original from the scan bucket. PDFs that cannot be
    parsed or fail sanitisation are moved to the invalid-PDF bucket instead.

    :param filename: key of the PDF in the letters scan bucket.
    """
    reference = get_reference_from_filename(filename)
    notification = dao_get_notification_by_reference(reference)
    current_app.logger.info('notification id {} Virus scan passed: {}'.format(notification.id, filename))

    # Letters submitted with a test API key are not despatched for real.
    is_test_key = notification.key_type == KEY_TYPE_TEST

    scan_pdf_object = s3.get_s3_object(current_app.config['LETTERS_SCAN_BUCKET_NAME'], filename)
    old_pdf = scan_pdf_object.get()['Body'].read()

    try:
        billable_units = _get_page_count(notification, old_pdf)
    except PdfReadError:
        # Unparseable PDF: quarantine it and mark the letter as validation-failed.
        _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object)
        return

    # Returns None when template preview rejects the PDF as invalid (HTTP 400).
    new_pdf = _sanitise_precompiled_pdf(self, notification, old_pdf)

    # TODO: Remove this once CYSP update their template to not cross over the margins
    if notification.service_id == UUID('fe44178f-3b45-4625-9f85-2264a36dd9ec'):  # CYSP
        # Check your state pension submit letters with good addresses and notify tags, so just use their supplied pdf
        new_pdf = old_pdf

    if not new_pdf:
        current_app.logger.info('Invalid precompiled pdf received {} ({})'.format(notification.id, filename))
        _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object)
        return
    else:
        current_app.logger.info(
            "Validation was successful for precompiled pdf {} ({})".format(notification.id, filename))

    current_app.logger.info('notification id {} ({}) sanitised and ready to send'.format(notification.id, filename))

    try:
        _upload_pdf_to_test_or_live_pdf_bucket(
            new_pdf,
            filename,
            is_test_letter=is_test_key)

        # Test-key letters go straight to "delivered"; live letters await despatch.
        update_letter_pdf_status(
            reference=reference,
            status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED,
            billable_units=billable_units
        )
        scan_pdf_object.delete()
    except BotoClientError:
        current_app.logger.exception(
            "Error uploading letter to live pdf bucket for notification: {}".format(notification.id)
        )
        update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
|
2018-08-21 18:02:17 +01:00
|
|
|
|
|
|
|
|
|
2018-10-16 15:08:15 +01:00
|
|
|
def _get_page_count(notification, old_pdf):
    """Return the billable units for a precompiled letter PDF.

    Billable units are sheets of paper: two pages per sheet, rounded up.

    :param notification: notification the PDF belongs to (used for logging).
    :param old_pdf: raw PDF bytes.
    :raises PdfReadError: if the PDF cannot be parsed.
    """
    try:
        pages = pdf_page_count(io.BytesIO(old_pdf))
    except PdfReadError:
        current_app.logger.exception(msg='Invalid PDF received for notification_id: {}'.format(notification.id))
        # Bare `raise` re-raises the active exception with its original traceback.
        raise

    pages_per_sheet = 2
    billable_units = math.ceil(pages / pages_per_sheet)
    return billable_units
|
|
|
|
|
|
|
|
|
|
|
2019-06-11 11:00:04 +01:00
|
|
|
def _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object):
    """Quarantine an invalid letter PDF and fail the notification.

    Moves the file from the scan bucket to the invalid-PDF bucket and marks the
    notification as validation-failed; if the S3 move itself fails, the
    notification is marked as a technical failure instead.
    """
    try:
        move_scan_to_invalid_pdf_bucket(filename)
        scan_pdf_object.delete()

        update_letter_pdf_status(
            reference=notification.reference,
            status=NOTIFICATION_VALIDATION_FAILED,
            billable_units=0,
        )
    except BotoClientError:
        error_message = "Error when moving letter with id {} to invalid PDF bucket".format(notification.id)
        current_app.logger.exception(error_message)
        update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
|
2019-01-11 09:23:05 +00:00
|
|
|
|
|
|
|
|
|
2018-08-21 18:02:17 +01:00
|
|
|
def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter):
    """Upload a finished letter PDF to S3.

    Test-key letters go to the test letters bucket; everything else goes to the
    live letters bucket, under a date-based folder.
    """
    if is_test_letter:
        bucket_name = current_app.config['TEST_LETTERS_BUCKET_NAME']
    else:
        bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME']

    upload_location = get_folder_name(datetime.utcnow(), is_test_letter) + filename

    s3upload(
        filedata=pdf_data,
        region=current_app.config['AWS_REGION'],
        bucket_name=bucket_name,
        file_location=upload_location,
    )
|
|
|
|
|
|
|
|
|
|
|
2018-10-18 16:43:14 +01:00
|
|
|
def _sanitise_precompiled_pdf(self, notification, precompiled_pdf):
    """POST a precompiled PDF to template preview's sanitise endpoint.

    Returns the sanitised PDF bytes, or None when the endpoint rejects the PDF
    as invalid (HTTP 400). Other request failures are retried via the calling
    task (`self`); once retries are exhausted the notification is marked as a
    technical failure and the exception re-raised.

    :param self: the bound celery task, used for retrying.
    :param notification: notification the PDF belongs to.
    :param precompiled_pdf: raw PDF bytes to sanitise.
    """
    try:
        resp = requests_post(
            '{}/precompiled/sanitise'.format(
                current_app.config['TEMPLATE_PREVIEW_API_HOST']
            ),
            data=precompiled_pdf,
            headers={'Authorization': 'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY']),
                     'Service-ID': str(notification.service_id),
                     'Notification-ID': str(notification.id)}
        )
        resp.raise_for_status()
        return resp.content
    except RequestException as ex:
        # A 400 means the PDF failed validation — not transient, so don't retry.
        # The caller treats a None return as "invalid PDF".
        if ex.response is not None and ex.response.status_code == 400:
            current_app.logger.info(
                "sanitise_precompiled_pdf validation error for notification: {}".format(notification.id)
            )
            return None

        # Anything else (timeout, connection error, 5xx): retry the task.
        try:
            current_app.logger.exception(
                "sanitise_precompiled_pdf failed for notification: {}".format(notification.id)
            )
            self.retry(queue=QueueNames.RETRY)
        except MaxRetriesExceededError:
            current_app.logger.error(
                "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}".format(notification.id),
            )

            notification.status = NOTIFICATION_TECHNICAL_FAILURE
            dao_update_notification(notification)
            raise
|
|
|
|
|
|
2018-03-19 13:52:01 +00:00
|
|
|
|
2018-03-20 14:56:42 +00:00
|
|
|
@notify_celery.task(name='process-virus-scan-failed')
def process_virus_scan_failed(filename):
    """Handle a letter PDF that failed the virus scan.

    Quarantines the file, marks the notification as virus-scan-failed, then
    raises so the failure is visible in task monitoring.
    """
    move_failed_pdf(filename, ScanErrorType.FAILURE)

    letter_reference = get_reference_from_filename(filename)
    letter_notification = dao_get_notification_by_reference(letter_reference)
    notifications_updated = update_letter_pdf_status(
        letter_reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0
    )

    if notifications_updated != 1:
        message = "There should only be one letter notification for each reference. Found {} notifications".format(
            notifications_updated
        )
        raise Exception(message)

    scan_error = VirusScanError(
        'notification id {} Virus scan failed: {}'.format(letter_notification.id, filename)
    )
    current_app.logger.exception(scan_error)
    raise scan_error
|
2018-04-03 12:31:52 +01:00
|
|
|
|
2018-03-19 13:52:01 +00:00
|
|
|
|
2018-03-23 15:27:24 +00:00
|
|
|
@notify_celery.task(name='process-virus-scan-error')
def process_virus_scan_error(filename):
    """Handle a letter PDF whose virus scan errored (rather than failed).

    Quarantines the file, marks the notification as a technical failure, then
    raises so the error is visible in task monitoring.
    """
    move_failed_pdf(filename, ScanErrorType.ERROR)

    letter_reference = get_reference_from_filename(filename)
    letter_notification = dao_get_notification_by_reference(letter_reference)
    notifications_updated = update_letter_pdf_status(
        letter_reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0
    )

    if notifications_updated != 1:
        message = "There should only be one letter notification for each reference. Found {} notifications".format(
            notifications_updated
        )
        raise Exception(message)

    scan_error = VirusScanError(
        'notification id {} Virus scan error: {}'.format(letter_notification.id, filename)
    )
    current_app.logger.exception(scan_error)
    raise scan_error
|
2018-04-03 12:31:52 +01:00
|
|
|
|
2018-03-23 15:27:24 +00:00
|
|
|
|
2018-10-24 15:13:11 +01:00
|
|
|
def update_letter_pdf_status(reference, status, billable_units):
    """Set status and billable units on all notifications with this reference.

    Returns the number of rows updated (the first element of the dao result).
    """
    changes = {
        'status': status,
        'billable_units': billable_units,
        'updated_at': datetime.utcnow()
    }
    result = dao_update_notifications_by_reference(
        references=[reference],
        update_dict=changes
    )
    return result[0]
|
2018-06-27 16:40:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def replay_letters_in_error(filename=None):
    """Replay letters that ended up in the ERROR directory.

    With a filename, replays just that file; otherwise replays every file in
    the error bucket.
    """
    # This method can be used to replay letters that end up in the ERROR directory.
    # We had an incident where clamAV was not processing the virus scan.
    if filename:
        move_error_pdf_to_scan_bucket(filename)
        # call task to add the filename to anti virus queue
        current_app.logger.info("Calling scan_file for: {}".format(filename))
        _request_virus_scan(filename)
    else:
        error_files = get_file_names_from_error_bucket()
        for item in error_files:
            moved_file_name = item.key.split('/')[1]
            current_app.logger.info("Calling scan_file for: {}".format(moved_file_name))
            move_error_pdf_to_scan_bucket(moved_file_name)
            # call task to add the filename to anti virus queue
            _request_virus_scan(moved_file_name)


def _request_virus_scan(filename):
    # Queue the file for a real antivirus scan, or skip straight to the
    # scan-passed task when antivirus is disabled.
    if current_app.config['ANTIVIRUS_ENABLED']:
        notify_celery.send_task(
            name=TaskNames.SCAN_FILE,
            kwargs={'filename': filename},
            queue=QueueNames.ANTIVIRUS,
        )
    else:
        # stub out antivirus in dev
        process_virus_scan_passed.apply_async(
            kwargs={'filename': filename},
            queue=QueueNames.LETTERS,
        )
|