Create a new task to build the DVLA file.

This will transform each notification in a job to a row in a file.
The file is then uploaded to S3.
The files will later be aggregated by the notifications-ftp app to send to DVLA.

The method to upload the file to S3 should be pulled into the notifications-utils package.
It is the same method used in notifications-admin.
This commit is contained in:
Rebecca Law
2017-03-15 15:26:58 +00:00
parent ea4214c7d5
commit 140179b4b6
6 changed files with 169 additions and 20 deletions

View File

@@ -1,7 +1,8 @@
from datetime import datetime
import uuid
from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL
from app.dao.jobs_dao import dao_create_job
from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL, Job
from app.dao.users_dao import save_model_user
from app.dao.notifications_dao import dao_create_notification
from app.dao.templates_dao import dao_create_template
@@ -105,3 +106,30 @@ def create_notification(
notification = Notification(**data)
dao_create_notification(notification)
return notification
def create_job(template,
               notification_count=1,
               created_at=None,
               job_status='pending',
               scheduled_for=None,
               processing_started=None,
               original_file_name='some.csv'):
    """Create and persist a Job built from the given template (test factory).

    Service, template version and creator are all derived from *template*;
    `created_at` defaults to the current UTC time when not supplied.
    Returns the persisted Job instance.
    """
    job = Job(
        id=uuid.uuid4(),
        service_id=template.service_id,
        service=template.service,
        template_id=template.id,
        template_version=template.version,
        original_file_name=original_file_name,
        notification_count=notification_count,
        created_at=created_at or datetime.utcnow(),
        created_by=template.created_by,
        job_status=job_status,
        scheduled_for=scheduled_for,
        processing_started=processing_started,
    )
    dao_create_job(job)
    return job