Files
notifications-admin/app/main/s3_client.py
Chris Hill-Scott 68292d2299 Add endpoints to serve the agreement
Rather than making users contact us to get the agreement, we should just
let them download it, when we know which version to send them.

This commit adds two endpoints:
- one to serve a page which links to the agreement
- one to serve the agreement itself

These pages are not linked to anywhere because the underlying files
don’t exist yet. So I haven’t bothered putting real content on the page
yet either. I imagine the deploy sequence will be:

1. Upload the files to the buckets in each environment
2. Deploy this code through each environment, checking the links work
3. Make another PR to start linking to the endpoints added by this
   commit
2018-03-27 11:35:17 +01:00

124 lines
3.9 KiB
Python

import uuid
import botocore
from boto3 import resource
from flask import current_app
from notifications_utils.s3 import s3upload as utils_s3upload
# S3 key template for CSV uploads: service-<service_id>-notify/<upload_id>.csv
FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'
# Prefix marking an object as a not-yet-persisted upload owned by one user.
TEMP_TAG = 'temp-{user_id}_'
# S3 key template for logo uploads. upload_logo() formats this with
# temp=, unique_id= and filename=, so the template must consume all three;
# without the {filename} placeholder the original filename was silently
# dropped from the key (str.format ignores unused keyword arguments).
LOGO_LOCATION_STRUCTURE = '{temp}{unique_id}-{filename}'
def get_s3_object(bucket_name, filename):
    """Return a boto3 Object handle for *filename* in *bucket_name*."""
    return resource('s3').Object(bucket_name, filename)
def delete_s3_object(filename):
    """Delete *filename* from the logo upload bucket."""
    logo_bucket = current_app.config['LOGO_UPLOAD_BUCKET_NAME']
    get_s3_object(logo_bucket, filename).delete()
def rename_s3_object(old_name, new_name):
    """Rename an object in the logo bucket by copying then deleting.

    S3 has no native rename: the object is copied to *new_name*
    and the original key is removed afterwards.
    """
    bucket_name = current_app.config['LOGO_UPLOAD_BUCKET_NAME']
    copy_source = '{}/{}'.format(bucket_name, old_name)
    get_s3_object(bucket_name, new_name).copy_from(CopySource=copy_source)
    delete_s3_object(old_name)
def get_s3_objects_filter_by_prefix(prefix):
    """Return an iterable of logo-bucket objects whose keys start with *prefix*."""
    logo_bucket = resource('s3').Bucket(
        current_app.config['LOGO_UPLOAD_BUCKET_NAME']
    )
    return logo_bucket.objects.filter(Prefix=prefix)
def get_temp_truncated_filename(filename, user_id):
    """Strip this user's temp prefix from *filename* and return the rest.

    Note: the slice always removes len(prefix) characters — callers are
    expected to have checked that *filename* actually carries the prefix.
    """
    prefix_length = len(TEMP_TAG.format(user_id=user_id))
    return filename[prefix_length:]
def s3upload(service_id, filedata, region):
    """Upload a CSV for *service_id* and return the generated upload id.

    *filedata* is a mapping whose 'data' entry holds the file contents.
    """
    new_upload_id = str(uuid.uuid4())
    file_location = FILE_LOCATION_STRUCTURE.format(service_id, new_upload_id)
    utils_s3upload(
        filedata=filedata['data'],
        region=region,
        bucket_name=current_app.config['CSV_UPLOAD_BUCKET_NAME'],
        file_location=file_location,
    )
    return new_upload_id
def s3download(service_id, upload_id):
    """Fetch a previously uploaded CSV and return its contents as text.

    Logs and re-raises botocore ClientError if the object can't be fetched.
    """
    file_location = FILE_LOCATION_STRUCTURE.format(service_id, upload_id)
    try:
        csv_object = get_s3_object(
            current_app.config['CSV_UPLOAD_BUCKET_NAME'], file_location
        )
        return csv_object.get()['Body'].read().decode('utf-8')
    except botocore.exceptions.ClientError as error:
        current_app.logger.error("Unable to download s3 file {}".format(
            file_location))
        raise error
def get_mou(organisation_is_crown):
    """Return flask send_file-style kwargs for the agreement PDF.

    Crown and non-crown organisations get different documents; on any
    S3 client error the failure is logged and the exception re-raised.
    """
    bucket = current_app.config['MOU_BUCKET_NAME']
    if organisation_is_crown:
        filename = 'crown.pdf'
        suffix = ''
    else:
        filename = 'non-crown.pdf'
        suffix = ' (non-crown)'
    attachment_filename = 'GOV.UK Notify data sharing and financial agreement{}.pdf'.format(
        suffix
    )
    try:
        mou_object = get_s3_object(bucket, filename)
        return {
            'filename_or_fp': mou_object.get()['Body'],
            'attachment_filename': attachment_filename,
            'as_attachment': True,
        }
    except botocore.exceptions.ClientError as exception:
        current_app.logger.error("Unable to download s3 file {}/{}".format(
            bucket, filename
        ))
        raise exception
def upload_logo(filename, filedata, region, user_id):
    """Upload a logo as a temp object and return its generated S3 key.

    The key carries the uploading user's temp tag so the object can
    later be persisted (persist_logo) or cleaned up
    (delete_temp_files_created_by).
    """
    temp_object_key = LOGO_LOCATION_STRUCTURE.format(
        temp=TEMP_TAG.format(user_id=user_id),
        unique_id=str(uuid.uuid4()),
        filename=filename,
    )
    utils_s3upload(
        filedata=filedata,
        region=region,
        bucket_name=current_app.config['LOGO_UPLOAD_BUCKET_NAME'],
        file_location=temp_object_key,
        content_type='image/png',
    )
    return temp_object_key
def persist_logo(filename, user_id):
    """Promote a temp logo to a permanent key and return the new name.

    A filename without this user's temp prefix is assumed to already be
    persisted and is returned unchanged.
    """
    if not filename.startswith(TEMP_TAG.format(user_id=user_id)):
        return filename
    persisted_filename = get_temp_truncated_filename(
        filename=filename, user_id=user_id
    )
    rename_s3_object(filename, persisted_filename)
    return persisted_filename
def delete_temp_files_created_by(user_id):
    """Delete every temp logo object tagged with this user's id."""
    user_prefix = TEMP_TAG.format(user_id=user_id)
    for temp_object in get_s3_objects_filter_by_prefix(user_prefix):
        delete_s3_object(temp_object.key)
def delete_temp_file(filename):
    """Delete *filename*, refusing anything that doesn't look temporary.

    Raises ValueError if the name lacks the temp marker.
    """
    # TEMP_TAG[:5] is the literal 'temp-' part of the tag, before the user id.
    looks_temporary = filename.startswith(TEMP_TAG[:5])
    if not looks_temporary:
        raise ValueError('Not a temp file: {}'.format(filename))
    delete_s3_object(filename)