Updated to retrieve the CSV upload from the new bucket.

Fix test errors.
This commit is contained in:
Nicholas Staples
2016-04-07 13:44:04 +01:00
parent 74700334e6
commit 143d1b0db8
9 changed files with 88 additions and 45 deletions
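In outline: job CSVs now live in a single per-environment bucket (CSV_UPLOAD_BUCKET_NAME from Flask config) under a key derived from the service and job ids, instead of a per-job bucket_name/file_name stored on the Job model. A minimal sketch of the new key layout, using illustrative ids:

    # Sketch of the key layout this commit introduces; the ids below are illustrative.
    FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'

    service_id = '6e1d34dc-0000-0000-0000-000000000001'  # hypothetical service id
    job_id = 'c3a2f7b8-0000-0000-0000-000000000002'      # hypothetical job id

    # The bucket is read from current_app.config['CSV_UPLOAD_BUCKET_NAME'] at runtime;
    # here it is hard-coded to the development value for illustration.
    bucket_name = 'development-notifications-csv-upload'
    file_location = FILE_LOCATION_STRUCTURE.format(service_id, job_id)
    # -> 'service-6e1d34dc-0000-0000-0000-000000000001-notify/c3a2f7b8-0000-0000-0000-000000000002.csv'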

View File

@@ -1,16 +1,23 @@
 from boto3 import resource
+from flask import current_app
+
+FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv'


-def get_s3_job_object(bucket_name, job_id):
+def get_s3_job_object(bucket_name, file_location):
     s3 = resource('s3')
-    return s3.Object(bucket_name, '{}.csv'.format(job_id))
+    return s3.Object(bucket_name, file_location)


-def get_job_from_s3(bucket_name, job_id):
-    obj = get_s3_job_object(bucket_name, job_id)
+def get_job_from_s3(service_id, job_id):
+    bucket_name = current_app.config['CSV_UPLOAD_BUCKET_NAME']
+    file_location = FILE_LOCATION_STRUCTURE.format(service_id, job_id)
+    obj = get_s3_job_object(bucket_name, file_location)
     return obj.get()['Body'].read().decode('utf-8')


-def remove_job_from_s3(bucket_name, job_id):
-    obj = get_s3_job_object(bucket_name, job_id)
+def remove_job_from_s3(service_id, job_id):
+    bucket_name = current_app.config['CSV_UPLOAD_BUCKET_NAME']
+    file_location = FILE_LOCATION_STRUCTURE.format(service_id, job_id)
+    obj = get_s3_job_object(bucket_name, file_location)
     return obj.delete()
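Callers now pass (service_id, job_id) and the helpers resolve the bucket from Flask config, so they need an application context. A rough usage sketch; the create_app factory and the app.aws.s3 import path are assumptions, not shown in this diff:

    # Rough usage sketch; create_app and the app.aws.s3 import path are assumptions.
    from app import create_app
    from app.aws import s3

    service_id = '6e1d34dc-0000-0000-0000-000000000001'  # hypothetical ids
    job_id = 'c3a2f7b8-0000-0000-0000-000000000002'

    app = create_app()
    with app.app_context():  # current_app.config['CSV_UPLOAD_BUCKET_NAME'] must be readable
        csv_body = s3.get_job_from_s3(str(service_id), str(job_id))  # decoded CSV text
        s3.remove_job_from_s3(str(service_id), str(job_id))          # deletes the object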

View File

@@ -146,7 +146,7 @@ def process_job(job_id):
     )
     for recipient, personalisation in RecipientCSV(
-        s3.get_job_from_s3(job.bucket_name, job_id),
+        s3.get_job_from_s3(str(service.id), str(job_id)),
         template_type=template.template_type,
         placeholders=template.placeholders
     ).recipients_and_personalisation:
@@ -191,7 +191,7 @@ def process_job(job_id):
@notify_celery.task(name="remove-job") @notify_celery.task(name="remove-job")
def remove_job(job_id): def remove_job(job_id):
job = dao_get_job_by_id(job_id) job = dao_get_job_by_id(job_id)
s3.remove_job_from_s3(job.bucket_name, job_id) s3.remove_job_from_s3(job.service.id, str(job_id))
current_app.logger.info("Job {} has been removed from s3.".format(job_id)) current_app.logger.info("Job {} has been removed from s3.".format(job_id))

View File

@@ -171,8 +171,6 @@ class Job(db.Model):
     id = db.Column(UUID(as_uuid=True), primary_key=True)
     original_file_name = db.Column(db.String, nullable=False)
-    bucket_name = db.Column(db.String, nullable=False)
-    file_name = db.Column(db.String, nullable=False)
     service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), index=True, unique=False, nullable=False)
     service = db.relationship('Service', backref=db.backref('jobs', lazy='dynamic'))
     template_id = db.Column(db.BigInteger, db.ForeignKey('templates.id'), index=True, unique=False)

View File

@@ -80,17 +80,26 @@ class Config(object):
     TWILIO_NUMBER = os.getenv('TWILIO_NUMBER')
     FIRETEXT_NUMBER = os.getenv('FIRETEXT_NUMBER')
     FIRETEXT_API_KEY = os.getenv("FIRETEXT_API_KEY")
+    CSV_UPLOAD_BUCKET_NAME = '{}-notifications-csv-upload'.format(
+        os.environ['NOTIFY_API_ENVIRONMENT'].split('config.')[1].lower())


 class Development(Config):
     DEBUG = True


+class Preview(Config):
+    pass
+
+
 class Test(Development):
     pass


 configs = {
+    'development': 'config.Development',
+    'test': 'config.Test',
     'live': 'config_live.Live',
     'staging': 'config_staging.Staging',
-    'preview': 'config.Config'
+    'preview': 'config.Preview'
 }
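CSV_UPLOAD_BUCKET_NAME is derived from NOTIFY_API_ENVIRONMENT, which holds the dotted config path. A small worked example of the derivation (the environment value is illustrative):

    # Worked example of the bucket-name derivation; the environment value is illustrative.
    import os

    os.environ['NOTIFY_API_ENVIRONMENT'] = 'config.Development'

    CSV_UPLOAD_BUCKET_NAME = '{}-notifications-csv-upload'.format(
        os.environ['NOTIFY_API_ENVIRONMENT'].split('config.')[1].lower())

    print(CSV_UPLOAD_BUCKET_NAME)  # development-notifications-csv-upload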

View File

@@ -0,0 +1,28 @@
"""empty message
Revision ID: 0046_remove_bucketname
Revises: 0045_template_stats_update_time
Create Date: 2016-04-07 12:23:55.050714
"""
# revision identifiers, used by Alembic.
revision = '0046_remove_bucketname'
down_revision = '0045_template_stats_update_time'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'file_name')
op.drop_column('jobs', 'bucket_name')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('bucket_name', sa.VARCHAR(), autoincrement=False, nullable=False))
op.add_column('jobs', sa.Column('file_name', sa.VARCHAR(), autoincrement=False, nullable=False))
### end Alembic commands ###

View File

@@ -76,8 +76,10 @@ def test_should_process_sms_job(sample_job, mocker, mock_celery_remove_job):
     mocker.patch('app.celery.tasks.create_uuid', return_value="uuid")

     process_job(sample_job.id)

-    s3.get_job_from_s3.assert_called_once_with(sample_job.bucket_name, sample_job.id)
+    s3.get_job_from_s3.assert_called_once_with(
+        str(sample_job.service.id),
+        str(sample_job.id)
+    )
     assert encryption.encrypt.call_args[0][0]['to'] == '+441234123123'
     assert encryption.encrypt.call_args[0][0]['personalisation'] == {}
     tasks.send_sms.apply_async.assert_called_once_with(
@@ -191,7 +193,10 @@ def test_should_process_sms_job_if_exactly_on_send_limits(notify_db,
     process_job(job.id)

-    s3.get_job_from_s3.assert_called_once_with(job.bucket_name, job.id)
+    s3.get_job_from_s3.assert_called_once_with(
+        str(job.service.id),
+        str(job.id)
+    )
     job = jobs_dao.dao_get_job_by_id(job.id)
     assert job.status == 'finished'
     tasks.send_email.apply_async.assert_called_with(
@@ -212,7 +217,10 @@ def test_should_not_create_send_task_for_empty_file(sample_job, mocker, mock_cel
     process_job(sample_job.id)

-    s3.get_job_from_s3.assert_called_once_with(sample_job.bucket_name, sample_job.id)
+    s3.get_job_from_s3.assert_called_once_with(
+        str(sample_job.service.id),
+        str(sample_job.id)
+    )
     job = jobs_dao.dao_get_job_by_id(sample_job.id)
     assert job.status == 'finished'
     tasks.send_sms.apply_async.assert_not_called
@@ -227,7 +235,10 @@ def test_should_process_email_job(sample_email_job, mocker, mock_celery_remove_j
     process_job(sample_email_job.id)

-    s3.get_job_from_s3.assert_called_once_with(sample_email_job.bucket_name, sample_email_job.id)
+    s3.get_job_from_s3.assert_called_once_with(
+        str(sample_email_job.service.id),
+        str(sample_email_job.id)
+    )
     assert encryption.encrypt.call_args[0][0]['to'] == 'test@test.com'
     assert encryption.encrypt.call_args[0][0]['personalisation'] == {}
     tasks.send_email.apply_async.assert_called_once_with(
@@ -256,8 +267,8 @@ def test_should_process_all_sms_job(sample_job,
     process_job(sample_job_with_placeholdered_template.id)

     s3.get_job_from_s3.assert_called_once_with(
-        sample_job_with_placeholdered_template.bucket_name,
-        sample_job_with_placeholdered_template.id
+        str(sample_job_with_placeholdered_template.service.id),
+        str(sample_job_with_placeholdered_template.id)
     )
     assert encryption.encrypt.call_args[0][0]['to'] == '+441234123120'
     assert encryption.encrypt.call_args[0][0]['personalisation'] == {'name': 'chris'}
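These assertions rely on s3.get_job_from_s3 being patched where tasks.py imports it. A minimal sketch of that pattern; the patch target and the CSV body are assumptions inferred from the mocker.patch calls visible in this diff:

    # Minimal sketch of the mock/assert pattern; patch target and CSV body are assumptions.
    from app.celery.tasks import process_job

    def test_process_job_reads_csv_by_service_and_job(sample_job, mocker):
        mock_get = mocker.patch(
            'app.celery.tasks.s3.get_job_from_s3',
            return_value='phone number\n+441234123123'
        )
        # (encryption and the send_* tasks would also need patching, as in the tests above)
        process_job(sample_job.id)
        mock_get.assert_called_once_with(str(sample_job.service.id), str(sample_job.id))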

View File

@@ -44,17 +44,21 @@ def sample_user(notify_db,
                 notify_db_session,
                 mobile_numnber="+447700900986",
                 email="notify@digital.cabinet-office.gov.uk"):
-    data = {
-        'name': 'Test User',
-        'email_address': email,
-        'password': 'password',
-        'mobile_number': mobile_numnber,
-        'state': 'active'
-    }
-    usr = User.query.filter_by(email_address=email).first()
-    if not usr:
-        usr = User(**data)
-        save_model_user(usr)
+    try:
+        data = {
+            'name': 'Test User',
+            'email_address': email,
+            'password': 'password',
+            'mobile_number': mobile_numnber,
+            'state': 'active'
+        }
+        usr = User.query.filter_by(email_address=email).first()
+        if not usr:
+            usr = User(**data)
+            save_model_user(usr)
+    except Exception:
+        import traceback
+        traceback.print_exc()
     return usr
@@ -211,14 +215,11 @@ def sample_job(notify_db,
     template = sample_template(notify_db, notify_db_session,
                                service=service)
     job_id = uuid.uuid4()
-    bucket_name = 'service-{}-notify'.format(service.id)
-    file_name = '{}.csv'.format(job_id)
     data = {
         'id': uuid.uuid4(),
         'service_id': service.id,
+        'service': service,
         'template_id': template.id,
-        'bucket_name': bucket_name,
-        'file_name': file_name,
         'original_file_name': 'some.csv',
         'notification_count': notification_count,
         'created_at': created_at
@@ -255,14 +256,11 @@ def sample_email_job(notify_db,
                                           notify_db_session,
                                           service=service)
     job_id = uuid.uuid4()
-    bucket_name = 'service-{}-notify'.format(service.id)
-    file_name = '{}.csv'.format(job_id)
     data = {
         'id': uuid.uuid4(),
         'service_id': service.id,
+        'service': service,
         'template_id': template.id,
-        'bucket_name': bucket_name,
-        'file_name': file_name,
         'original_file_name': 'some.csv',
         'notification_count': 1
     }

View File

@@ -15,14 +15,10 @@ def test_create_job(sample_template):
     assert Job.query.count() == 0
     job_id = uuid.uuid4()
-    bucket_name = 'service-{}-notify'.format(sample_template.service.id)
-    file_name = '{}.csv'.format(job_id)
     data = {
         'id': job_id,
         'service_id': sample_template.service.id,
         'template_id': sample_template.id,
-        'bucket_name': bucket_name,
-        'file_name': file_name,
         'original_file_name': 'some.csv',
         'notification_count': 1
     }

View File

@@ -99,8 +99,6 @@ def test_create_job(notify_api, sample_template, mocker):
         'service': str(sample_template.service.id),
         'template': sample_template.id,
         'original_file_name': 'thisisatest.csv',
-        'bucket_name': 'service-{}-notify'.format(sample_template.service.id),
-        'file_name': '{}.csv'.format(job_id),
         'notification_count': 1
     }
     path = '/service/{}/job'.format(sample_template.service.id)
@@ -153,10 +151,8 @@ def test_create_job_returns_400_if_missing_data(notify_api, sample_template, moc
     app.celery.tasks.process_job.apply_async.assert_not_called()
     assert resp_json['result'] == 'error'
     assert 'Missing data for required field.' in resp_json['message']['original_file_name']
-    assert 'Missing data for required field.' in resp_json['message']['file_name']
     assert 'Missing data for required field.' in resp_json['message']['notification_count']
     assert 'Missing data for required field.' in resp_json['message']['id']
-    assert 'Missing data for required field.' in resp_json['message']['bucket_name']


 def test_create_job_returns_404_if_missing_service(notify_api, sample_template, mocker):