add broadcast_event table

It's clear that we need a way to track updates to a broadcast message.
It's also clear that we'll need some kind of audit log that captures
exactly what was sent out in a message.

This commit adds a new database table, `broadcast_event`, which maps 1:1
with the CAP XML we send to the CBCs. We'll create one of these rows just
before sending a message out.

The main driver for this was that cancel and update messages need to
contain a list of references to all previous messages that they're
amending. Each reference takes the format `{sender},{identifier},{sent_timestamp}`,
and the identifier itself needs to be unique for each message.
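
To make that concrete, here's a rough sketch of what one reference might look like. This isn't part of the commit; the sender value and the use of the event's UUID as the identifier are assumptions for illustration only.

# illustrative only: assumed sender value and UUID-as-identifier
sender = 'broadcasts@notifications.service.gov.uk'
identifier = 'f2a0894a-56f9-4aa7-9cd1-9e3a84524c45'
sent_timestamp = '2020-07-24T12:46:28+01:00'
reference = f'{sender},{identifier},{sent_timestamp}'
# -> 'broadcasts@notifications.service.gov.uk,f2a0894a-56f9-4aa7-9cd1-9e3a84524c45,2020-07-24T12:46:28+01:00'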
Leo Hemsted
2020-07-24 12:46:28 +01:00
parent 5dc8b43242
commit 36ae5fadf6
4 changed files with 136 additions and 1 deletion


@@ -1,5 +1,5 @@
 from app import db
-from app.models import BroadcastMessage
+from app.models import BroadcastMessage, BroadcastEvent
 from app.dao.dao_utils import transactional
@@ -28,3 +28,16 @@ def dao_get_broadcast_messages_for_service(service_id):
    return BroadcastMessage.query.filter(
        BroadcastMessage.service_id == service_id
    ).order_by(BroadcastMessage.created_at)

def dao_get_earlier_events_for_broadcast_event(broadcast_event_id):
    """
    This is used to build up the references list.
    """
    this_event = BroadcastEvent.query.get(broadcast_event_id)

    return BroadcastEvent.query.filter(
        BroadcastEvent.broadcast_message_id == this_event.broadcast_message_id,
        BroadcastEvent.sent_at < this_event.sent_at
    ).order_by(
        BroadcastEvent.sent_at.asc()
    )
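
As a sketch of how this query might feed the `<references>` value for an update or cancel event. The caller, the use of event ids as identifiers, and the timestamp formatting are assumptions for illustration, not part of this commit.

# hypothetical caller in the send path, using the dao function above
earlier_events = dao_get_earlier_events_for_broadcast_event(broadcast_event.id)

references = ' '.join(
    f'{event.transmitted_sender},{event.id},{event.sent_at.isoformat()}'
    for event in earlier_events
)
# `references` is then dropped into the <references> element of the CAP XML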


@@ -2278,3 +2278,81 @@ class BroadcastMessage(db.Model):
            'approved_by_id': str(self.approved_by_id),
            'cancelled_by_id': str(self.cancelled_by_id),
        }

class BroadcastEventMessageType:
    ALERT = 'alert'
    UPDATE = 'update'
    CANCEL = 'cancel'

    MESSAGE_TYPES = [ALERT, UPDATE, CANCEL]


class BroadcastEvent(db.Model):
    """
    This table represents a single CAP XML blob that we sent to the mobile network providers.

    We should be able to create the complete CAP message without joining from this to any other tables, eg
    template, service, or broadcast_message.

    The only exception to this is that we will have to join to itself to find other broadcast_events with the
    same broadcast_message_id when building up the `<references>` xml field for updating/cancelling an existing message.

    As such, this shouldn't have foreign keys to things that can change or be deleted.
    """
    __tablename__ = 'broadcast_event'

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)

    # TODO: do we need this? or should we just join via broadcast_message.
    service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'))
    service = db.relationship('Service')

    broadcast_message_id = db.Column(UUID(as_uuid=True), db.ForeignKey('broadcast_message.id'), nullable=False)
    broadcast_message = db.relationship('BroadcastMessage', backref='events')

    # this is used for <sent> in the cap xml
    sent_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)

    # msgType. alert, cancel, or update. (other options in the spec are "ack" and "error")
    message_type = db.Column(db.String, nullable=False)

    # reckon: this will be json containing {'headline': '...', 'description': '...', 'title': '...'}. anything that
    # isn't hardcoded in utils/cbc proxy
    transmitted_content = db.Column(
        JSONB(none_as_null=True),
        nullable=True,
        default=lambda: {'headline': '', 'description': '', 'title': ''}
    )

    # unsubstantiated reckon: even if we're sending a cancel, we'll still need to provide areas
    transmitted_areas = db.Column(JSONB(none_as_null=True), nullable=False, default=list)

    transmitted_sender = db.Column(db.String(), nullable=False)

    # TODO: do we need this?
    transmitted_starts_at = db.Column(db.DateTime, nullable=True)
    transmitted_finishes_at = db.Column(db.DateTime, nullable=True)

    # @property
    # def reference(self):
    #     # TODO: write this `from_event` function
    #     return BroadcastMessageTemplate.from_event(self.serialize()).reference
    def serialize(self):
        return {
            'id': str(self.id),
            'service_id': str(self.service_id),

            # 'reference': self.reference,

            'broadcast_message_id': str(self.broadcast_message_id),

            'sent_at': self.sent_at.strftime(DATETIME_FORMAT),
            'message_type': self.message_type,

            'transmitted_content': self.transmitted_content,
            'transmitted_areas': self.transmitted_areas,
            'transmitted_sender': self.transmitted_sender,
            'transmitted_starts_at': self.transmitted_starts_at.strftime(DATETIME_FORMAT) if self.transmitted_starts_at else None,
            'transmitted_finishes_at': self.transmitted_finishes_at.strftime(DATETIME_FORMAT) if self.transmitted_finishes_at else None,
        }
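
Tying this back to the commit message ("we'll create one of these just before sending out"), here's a minimal sketch of how a row might get created at send time. The send path, the sender value, the example content, and the starts_at/finishes_at/areas fields read off broadcast_message are assumptions for illustration; only the BroadcastEvent columns themselves come from this commit.

from app import db
from app.models import BroadcastEvent, BroadcastEventMessageType

# hypothetical send path: snapshot everything the CAP XML needs before dispatching it
event = BroadcastEvent(
    service_id=broadcast_message.service_id,
    broadcast_message_id=broadcast_message.id,
    message_type=BroadcastEventMessageType.ALERT,
    transmitted_content={
        'headline': 'Flood warning',  # example content only
        'description': 'A flood warning has been issued for your area.',
        'title': 'Emergency alert',
    },
    transmitted_areas=broadcast_message.areas,
    transmitted_sender='broadcasts@notifications.service.gov.uk',  # assumed sender value
    transmitted_starts_at=broadcast_message.starts_at,      # assumed field on broadcast_message
    transmitted_finishes_at=broadcast_message.finishes_at,  # assumed field on broadcast_message
)
db.session.add(event)
db.session.commit()
# the CAP XML can then be built from event.serialize() alone, per the docstring above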


@@ -0,0 +1,43 @@
"""
Revision ID: 0326_broadcast_event
Revises: 0325_int_letter_rates_fix
Create Date: 2020-07-24 12:40:35.809523
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0326_broadcast_event'
down_revision = '0325_int_letter_rates_fix'
def upgrade():
    op.create_table('broadcast_event',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('broadcast_message_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('sent_at', sa.DateTime(), nullable=False),
        sa.Column('message_type', sa.String(), nullable=False),
        sa.Column('transmitted_content', postgresql.JSONB(none_as_null=True, astext_type=sa.Text()), nullable=True),
        sa.Column('transmitted_areas', postgresql.JSONB(none_as_null=True, astext_type=sa.Text()), nullable=False),
        sa.Column('transmitted_sender', sa.String(), nullable=False),
        sa.Column('transmitted_starts_at', sa.DateTime(), nullable=True),
        sa.Column('transmitted_finishes_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['broadcast_message_id'], ['broadcast_message.id'], ),
        sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # this shouldn't be nullable. it defaults to `[]` in python.
    op.alter_column('broadcast_message', 'areas', existing_type=postgresql.JSONB(astext_type=sa.Text()), nullable=False)

    # this can't be nullable. it defaults to 'draft' in python.
    op.alter_column('broadcast_message', 'status', existing_type=sa.VARCHAR(), nullable=False)
    op.create_foreign_key(None, 'broadcast_message', 'broadcast_status_type', ['status'], ['name'])


def downgrade():
    op.drop_constraint('broadcast_message_status_fkey', 'broadcast_message', type_='foreignkey')
    op.alter_column('broadcast_message', 'status', existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('broadcast_message', 'areas', existing_type=postgresql.JSONB(astext_type=sa.Text()), nullable=True)
    op.drop_table('broadcast_event')


@@ -117,6 +117,7 @@ def notify_db_session(notify_db, sms_providers):
"organisation_types", "organisation_types",
"service_permission_types", "service_permission_types",
"auth_type", "auth_type",
"broadcast_status_type",
"invite_status_type", "invite_status_type",
"service_callback_type"]: "service_callback_type"]:
notify_db.engine.execute(tbl.delete()) notify_db.engine.execute(tbl.delete())