Refactor s3upload

s3upload function has been moved to notifications-utils.
https://github.com/alphagov/notifications-utils/pull/138 must be merged first.
This commit is contained in:
Rebecca Law
2017-04-06 10:27:13 +01:00
parent f8618f8989
commit 65a91c440b
3 changed files with 5 additions and 56 deletions

View File

@@ -1,21 +0,0 @@
import uuid
import boto3
from itsdangerous import URLSafeSerializer
from flask import current_app
def add_notification_to_queue(service_id, template_id, type_, notification):
    """Encrypt a notification payload and push it onto the service's SQS queue.

    The queue is created (or fetched, if it already exists) under the name
    ``<NOTIFICATION_QUEUE_PREFIX>_<service_id>``. The payload is serialized
    with itsdangerous using the app's SECRET_KEY and DANGEROUS_SALT, and
    routing metadata is attached as SQS string message attributes.

    :param service_id: id of the service the notification belongs to
    :param template_id: id of the template used for the notification
    :param type_: notification type label (stored as the 'type' attribute)
    :param notification: payload dict to be serialized and enqueued
    :return: the freshly generated notification id (uuid4 string)
    """
    queue_name = "{}_{}".format(
        current_app.config['NOTIFICATION_QUEUE_PREFIX'], str(service_id))
    queue = boto3.resource(
        'sqs', region_name=current_app.config['AWS_REGION']
    ).create_queue(QueueName=queue_name)

    notification_id = str(uuid.uuid4())
    serializer = URLSafeSerializer(current_app.config.get('SECRET_KEY'))
    encrypted = serializer.dumps(
        notification, current_app.config.get('DANGEROUS_SALT'))

    def _string_attr(value):
        # SQS message attributes must declare an explicit data type.
        return {'StringValue': value, 'DataType': 'String'}

    queue.send_message(
        MessageBody=encrypted,
        MessageAttributes={
            'type': _string_attr(type_),
            'notification_id': _string_attr(notification_id),
            'service_id': _string_attr(str(service_id)),
            'template_id': _string_attr(str(template_id)),
        })
    return notification_id

View File

@@ -1,7 +1,5 @@
import random
import botocore
from boto3 import resource
from datetime import (datetime)
from flask import current_app
@@ -43,6 +41,7 @@ from app.models import (
from app.notifications.process_notifications import persist_notification
from app.service.utils import service_allowed_to_send_to
from app.statsd_decorators import statsd
from notifications_utils.s3 import s3upload
@notify_celery.task(name="process-job")
@@ -272,7 +271,7 @@ def persist_letter(
handle_exception(self, notification, notification_id, e)
@notify_celery.task(bind=True, name="build-dvla-file", countdown=30, max_retries=15, default_retry_delay=300)
@notify_celery.task(bind=True, name="build-dvla-file", countdown=30, max_retries=15, default_retry_delay=30)
@statsd(namespace="tasks")
def build_dvla_file(self, job_id):
try:
@@ -285,6 +284,7 @@ def build_dvla_file(self, job_id):
file_location="{}-dvla-job.text".format(job_id)
)
dao_update_job_status(job_id, JOB_STATUS_READY_TO_SEND)
notify_celery.send_task("aggregrate-dvla-files", ([str(job_id)], ), queue='aggregate-dvla-files')
else:
current_app.logger.info("All notifications for job {} are not persisted".format(job_id))
self.retry(queue="retry", exc="All notifications for job {} are not persisted".format(job_id))
@@ -322,37 +322,6 @@ def create_dvla_file_contents(job_id):
return file_contents
def s3upload(filedata, region, bucket_name, file_location):
    """Upload ``filedata`` to S3, creating the bucket first if it is missing.

    :param filedata: raw contents to store as the object body
    :param region: AWS region, used only when the bucket must be created
    :param bucket_name: target S3 bucket
    :param file_location: object key to write within the bucket
    :return: a freshly generated upload id (not derived from the S3 response)
    :raises botocore.exceptions.ClientError: if the bucket existence check
        fails for any reason other than the bucket not existing
    """
    _s3 = resource('s3')

    # EAFP existence check: head_bucket raises ClientError, and a 404 code
    # means the bucket does not exist yet.
    exists = True
    try:
        _s3.meta.client.head_bucket(Bucket=bucket_name)
    except botocore.exceptions.ClientError as e:
        # NOTE(review): assumes the error code is always numeric here —
        # head_bucket reports HTTP status codes such as '404'/'403'; a
        # non-numeric code would raise ValueError. Confirm against usage.
        error_code = int(e.response['Error']['Code'])
        if error_code == 404:
            exists = False
        else:
            current_app.logger.error(
                "Unable to create s3 bucket {}".format(bucket_name))
            raise e

    if not exists:
        _s3.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region})

    upload_id = create_uuid()
    key = _s3.Object(bucket_name, file_location)
    # Server-side encryption is always requested for uploaded objects.
    key.put(Body=filedata, ServerSideEncryption='AES256')
    return upload_id
def handle_exception(task, notification, notification_id, exc):
if not get_notification_by_id(notification_id):
retry_msg = '{task} notification for job {job} row number {row} and notification id {noti}'.format(