Files
notifications-api/app/models.py

2378 lines
74 KiB
Python
Raw Permalink Normal View History

import itertools
2016-01-28 11:42:13 +00:00
import uuid
2021-03-10 13:55:06 +00:00
from flask import current_app, url_for
2023-04-12 13:30:13 -04:00
from sqlalchemy import CheckConstraint, Index, UniqueConstraint
from sqlalchemy.dialects.postgresql import JSON, JSONB, UUID
2021-03-10 13:55:06 +00:00
from sqlalchemy.ext.associationproxy import association_proxy
2025-01-10 11:21:39 -08:00
from sqlalchemy.ext.declarative import DeclarativeMeta, declared_attr
2023-01-06 10:02:23 -05:00
from sqlalchemy.orm import validates
2021-03-10 13:55:06 +00:00
from sqlalchemy.orm.collections import attribute_mapped_collection
from app import db, encryption
from app.enums import (
AgreementStatus,
AgreementType,
AuthType,
BrandType,
CallbackType,
CodeType,
InvitedUserStatus,
JobStatus,
KeyType,
NotificationStatus,
NotificationType,
OrganizationType,
PermissionType,
RecipientType,
ServicePermissionType,
TemplateProcessType,
TemplateType,
)
2021-03-10 13:55:06 +00:00
from app.hashing import check_hash, hashpw
from app.history_meta import Versioned
from app.utils import (
DATETIME_FORMAT,
DATETIME_FORMAT_NO_TIMEZONE,
get_dt_string_or_none,
2024-05-23 13:59:51 -07:00
utc_now,
)
from notifications_utils.clients.encryption.encryption_client import EncryptionError
from notifications_utils.recipients import (
InvalidEmailError,
InvalidPhoneError,
try_validate_and_format_phone_number,
validate_email_address,
validate_phone_number,
)
from notifications_utils.template import PlainTextEmailTemplate, SMSMessageTemplate
def filter_null_value_fields(obj):
    """Return a copy of mapping *obj* with every None-valued entry removed."""
    return {key: value for key, value in obj.items() if value is not None}
# Maps each application enum class to the name of the PostgreSQL ENUM type
# that backs it in the database.  Used by enum_column() when declaring columns.
_enum_column_names = {
    AuthType: "auth_types",
    BrandType: "brand_types",
    OrganizationType: "organization_types",
    ServicePermissionType: "service_permission_types",
    RecipientType: "recipient_types",
    CallbackType: "callback_types",
    KeyType: "key_types",
    TemplateType: "template_types",
    TemplateProcessType: "template_process_types",
    NotificationType: "notification_types",
    JobStatus: "job_statuses",
    CodeType: "code_types",
    NotificationStatus: "notify_statuses",
    InvitedUserStatus: "invited_user_statuses",
    PermissionType: "permission_types",
    AgreementType: "agreement_types",
    AgreementStatus: "agreement_statuses",
}
def enum_column(enum_type, **kwargs):
    """Declare a db.Column backed by the named PostgreSQL ENUM for *enum_type*.

    The database-side type name comes from _enum_column_names; the stored
    values are the enum members' ``.value`` strings.  Any extra keyword
    arguments are forwarded to ``db.Column`` unchanged.
    """
    sa_enum = db.Enum(
        enum_type,
        name=_enum_column_names[enum_type],
        values_callable=lambda members: [member.value for member in members],
    )
    return db.Column(sa_enum, **kwargs)
class HistoryModel:
    """Mixin for history-table models that mirror a primary model's columns."""

    @classmethod
    def from_original(cls, original):
        """Build a new history row populated from *original*."""
        instance = cls()
        instance.update_from_original(original)
        return instance

    def update_from_original(self, original):
        """Copy every same-named attribute from *original* onto this row.

        Some columns are mapped under attribute names that differ from the
        underlying db column, so anything *original* does not expose is
        skipped (with a debug log) rather than guessed at; subclasses deal
        with those oddities/properties themselves.
        """
        for column in self.__table__.columns:
            if not hasattr(original, column.name):
                current_app.logger.debug(
                    "{} has no column {} to copy from".format(original, column.name)
                )
                continue
            setattr(self, column.name, getattr(original, column.name))
class User(db.Model):
    """A person who can sign in and manage services and organizations."""

    __tablename__ = "users"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = db.Column(db.String, nullable=False, index=True, unique=False)
    email_address = db.Column(db.String(255), nullable=False, index=True, unique=True)
    login_uuid = db.Column(db.Text, nullable=True, index=True, unique=True)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=True,
        onupdate=utc_now,
    )
    # Stored hashed; see the `password` property below.
    _password = db.Column(db.String, index=False, unique=False, nullable=False)
    mobile_number = db.Column(db.String, index=False, unique=False, nullable=True)
    password_changed_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        default=utc_now,
    )
    logged_in_at = db.Column(db.DateTime, nullable=True)
    failed_login_count = db.Column(db.Integer, nullable=False, default=0)
    state = db.Column(db.String, nullable=False, default="pending")
    platform_admin = db.Column(db.Boolean, nullable=False, default=False)
    current_session_id = db.Column(UUID(as_uuid=True), nullable=True)
    auth_type = enum_column(AuthType, index=True, nullable=False, default=AuthType.SMS)
    email_access_validated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        default=utc_now,
    )
    preferred_timezone = db.Column(
        db.Text,
        nullable=True,
        index=False,
        unique=False,
        default="US/Eastern",
    )

    # either email auth or a mobile number must be provided
    # NOTE(review): this constraint is a bare expression - it is not attached
    # to the table (not in __table_args__) and its text interpolates Python
    # enum attribute names into SQL, so it has no effect as written.  Confirm
    # intent and fix via a migration; left untouched here to match the schema.
    CheckConstraint(
        "auth_type in (AuthType.EMAIL, AuthType.WEBAUTHN) or mobile_number is not null"
    )

    services = db.relationship("Service", secondary="user_to_service", backref="users")
    organizations = db.relationship(
        "Organization",
        secondary="user_to_organization",
        backref="users",
    )

    @validates("mobile_number")
    def validate_mobile_number(self, key, number):
        """Validate and normalise a mobile number; None passes through untouched."""
        try:
            if number is not None:
                return validate_phone_number(number, international=True)
        except InvalidPhoneError as err:
            raise ValueError(str(err)) from err

    @property
    def password(self):
        # Write-only attribute: plaintext is never stored or readable.
        raise AttributeError("Password not readable")

    @property
    def can_use_webauthn(self):
        """Platform admins, webauthn-auth users, and members of the Notify
        service itself are allowed to use webauthn."""
        if self.platform_admin:
            return True
        if self.auth_type == AuthType.WEBAUTHN:
            return True
        return any(
            str(service.id) == current_app.config["NOTIFY_SERVICE_ID"]
            for service in self.services
        )

    @password.setter
    def password(self, password):
        self._password = hashpw(password)

    def check_password(self, password):
        return check_hash(password, self._password)

    def get_permissions(self, service_id=None):
        """Return this user's permissions.

        With *service_id*: a flat list of permissions for that service.
        Without: a dict of {service_id (str): [permissions]}.
        """
        # Imported locally to avoid a circular import at module load time.
        from app.dao.permissions_dao import permission_dao

        if service_id:
            return [
                x.permission
                for x in permission_dao.get_permissions_by_user_id_and_service_id(
                    self.id, service_id
                )
            ]

        retval = {}
        for x in permission_dao.get_permissions_by_user_id(self.id):
            service_id = str(x.service_id)
            if service_id not in retval:
                retval[service_id] = []
            retval[service_id].append(x.permission)
        return retval

    def serialize(self):
        """Full representation used by the admin API."""
        return {
            "id": self.id,
            "name": self.name,
            "email_address": self.email_address,
            "auth_type": self.auth_type,
            "current_session_id": self.current_session_id,
            "failed_login_count": self.failed_login_count,
            "email_access_validated_at": self.email_access_validated_at.strftime(
                DATETIME_FORMAT
            ),
            "logged_in_at": get_dt_string_or_none(self.logged_in_at),
            "mobile_number": self.mobile_number,
            "organizations": [x.id for x in self.organizations if x.active],
            "password_changed_at": self.password_changed_at.strftime(
                DATETIME_FORMAT_NO_TIMEZONE
            ),
            "permissions": self.get_permissions(),
            "platform_admin": self.platform_admin,
            "services": [x.id for x in self.services if x.active],
            "can_use_webauthn": self.can_use_webauthn,
            "state": self.state,
            "preferred_timezone": self.preferred_timezone,
        }

    def serialize_for_users_list(self):
        """Minimal representation for user-list endpoints."""
        return {
            "id": self.id,
            "name": self.name,
            "email_address": self.email_address,
            "mobile_number": self.mobile_number,
        }
class ServiceUser(db.Model):
    """Association row linking a user to a service they belong to."""

    __tablename__ = "user_to_service"

    user_id = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), primary_key=True)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        primary_key=True,
    )

    # NOTE(review): this unique constraint duplicates the composite primary
    # key above - presumably kept for the existing named index in the
    # database; confirm against migrations before removing.
    __table_args__ = (
        UniqueConstraint(
            "user_id",
            "service_id",
            name="uix_user_to_service",
        ),
    )
2023-07-10 11:06:29 -07:00
# Many-to-many association between users and organizations.
user_to_organization = db.Table(
    "user_to_organization",
    db.Model.metadata,
    db.Column("user_id", UUID(as_uuid=True), db.ForeignKey("users.id")),
    db.Column("organization_id", UUID(as_uuid=True), db.ForeignKey("organization.id")),
    UniqueConstraint("user_id", "organization_id", name="uix_user_to_organization"),
)
# Grants a (user, service) pair access to a template folder.  The composite
# foreign keys ensure the user actually belongs to the service and that the
# folder belongs to the same service.
user_folder_permissions = db.Table(
    "user_folder_permissions",
    db.Model.metadata,
    db.Column("user_id", UUID(as_uuid=True), primary_key=True),
    db.Column(
        "template_folder_id",
        UUID(as_uuid=True),
        db.ForeignKey("template_folder.id"),
        primary_key=True,
    ),
    db.Column("service_id", UUID(as_uuid=True), primary_key=True),
    db.ForeignKeyConstraint(
        ["user_id", "service_id"],
        ["user_to_service.user_id", "user_to_service.service_id"],
    ),
    db.ForeignKeyConstraint(
        ["template_folder_id", "service_id"],
        ["template_folder.id", "template_folder.service_id"],
    ),
)
class EmailBranding(db.Model):
    """Visual branding (logo, colour, text) applied to a service's emails."""

    __tablename__ = "email_branding"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Hex colour string, e.g. "#005ea5".
    colour = db.Column(db.String(7), nullable=True)
    logo = db.Column(db.String(255), nullable=True)
    name = db.Column(db.String(255), unique=True, nullable=False)
    text = db.Column(db.String(255), nullable=True)
    brand_type = enum_column(
        BrandType,
        index=True,
        nullable=False,
        default=BrandType.ORG,
    )

    def serialize(self):
        """Dict representation for API responses."""
        serialized = {
            "id": str(self.id),
            "colour": self.colour,
            "logo": self.logo,
            "name": self.name,
            "text": self.text,
            "brand_type": self.brand_type,
        }
        return serialized
# Association between services and their email branding.
service_email_branding = db.Table(
    "service_email_branding",
    db.Model.metadata,
    # service_id is a primary key as you can only have one email branding per service
    db.Column(
        "service_id",
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        primary_key=True,
        nullable=False,
    ),
    db.Column(
        "email_branding_id",
        UUID(as_uuid=True),
        db.ForeignKey("email_branding.id"),
        nullable=False,
    ),
)
class Domain(db.Model):
    """An email domain owned by an organization, used for matching users."""

    __tablename__ = "domain"

    domain = db.Column(db.String(255), primary_key=True)
    organization_id = db.Column(
        "organization_id",
        UUID(as_uuid=True),
        db.ForeignKey("organization.id"),
        nullable=False,
    )
2023-07-10 11:06:29 -07:00
class Organization(db.Model):
    """An organization (e.g. an agency) that owns services and agreements."""

    __tablename__ = "organization"

    id = db.Column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=False
    )
    name = db.Column(db.String(255), nullable=False, unique=True, index=True)
    active = db.Column(db.Boolean, nullable=False, default=True)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        onupdate=utc_now,
    )
    agreement_signed = db.Column(db.Boolean, nullable=True)
    agreement_signed_at = db.Column(db.DateTime, nullable=True)
    agreement_signed_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        nullable=True,
    )
    agreement_signed_by = db.relationship("User")
    agreement_signed_on_behalf_of_name = db.Column(db.String(255), nullable=True)
    agreement_signed_on_behalf_of_email_address = db.Column(
        db.String(255), nullable=True
    )
    agreement_signed_version = db.Column(db.Float, nullable=True)
    organization_type = enum_column(OrganizationType, unique=False, nullable=True)
    request_to_go_live_notes = db.Column(db.Text)
    domains = db.relationship("Domain")
    email_branding = db.relationship("EmailBranding")
    email_branding_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("email_branding.id"),
        nullable=True,
    )
    notes = db.Column(db.Text, nullable=True)
    purchase_order_number = db.Column(db.String(255), nullable=True)
    billing_contact_names = db.Column(db.Text, nullable=True)
    billing_contact_email_addresses = db.Column(db.Text, nullable=True)
    billing_reference = db.Column(db.String(255), nullable=True)

    @property
    def live_services(self):
        """Services that are active and out of trial mode."""
        return [
            service
            for service in self.services
            if service.active and not service.restricted
        ]

    @property
    def domain_list(self):
        return [domain.domain for domain in self.domains]

    @property
    def agreement(self):
        """The first ACTIVE agreement, or None if there is none."""
        return next(
            (
                agreement
                for agreement in self.agreements
                if agreement.status == AgreementStatus.ACTIVE
            ),
            None,
        )

    @property
    def agreement_active(self):
        # `agreement` only ever returns ACTIVE agreements, so this reduces to
        # an existence check.
        return self.agreement is not None

    @property
    def has_mou(self):
        try:
            return self.agreement.type == AgreementType.MOU
        except AttributeError:
            # No active agreement.
            return False

    def serialize(self):
        """Full representation used by the admin API."""
        return {
            "id": str(self.id),
            "name": self.name,
            "active": self.active,
            "organization_type": self.organization_type,
            "email_branding_id": self.email_branding_id,
            "agreement_signed": self.agreement_signed,
            "agreement_signed_at": self.agreement_signed_at,
            "agreement_signed_by_id": self.agreement_signed_by_id,
            "agreement_signed_on_behalf_of_name": self.agreement_signed_on_behalf_of_name,
            "agreement_signed_on_behalf_of_email_address": self.agreement_signed_on_behalf_of_email_address,
            "agreement_signed_version": self.agreement_signed_version,
            "domains": self.domain_list,
            "request_to_go_live_notes": self.request_to_go_live_notes,
            "count_of_live_services": len(self.live_services),
            "notes": self.notes,
            "purchase_order_number": self.purchase_order_number,
            "billing_contact_names": self.billing_contact_names,
            "billing_contact_email_addresses": self.billing_contact_email_addresses,
            "billing_reference": self.billing_reference,
        }

    def serialize_for_list(self):
        """Compact representation for organization-list endpoints."""
        return {
            "name": self.name,
            "id": str(self.id),
            "active": self.active,
            "count_of_live_services": len(self.live_services),
            "domains": self.domain_list,
            "organization_type": self.organization_type,
        }
class Service(db.Model, Versioned):
    """A sending service: owns templates, API keys, senders, and settings."""

    __tablename__ = "services"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = db.Column(db.String(255), nullable=False, unique=True)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=True,
        onupdate=utc_now,
    )
    active = db.Column(
        db.Boolean,
        index=False,
        unique=False,
        nullable=False,
        default=True,
    )
    # Daily message limit.
    message_limit = db.Column(db.BigInteger, index=False, unique=False, nullable=False)
    total_message_limit = db.Column(
        db.BigInteger,
        index=False,
        unique=False,
        nullable=False,
    )
    # True while the service is in trial mode.
    restricted = db.Column(db.Boolean, index=False, unique=False, nullable=False)
    email_from = db.Column(db.Text, index=False, unique=True, nullable=False)
    created_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )
    created_by = db.relationship("User", foreign_keys=[created_by_id])
    prefix_sms = db.Column(db.Boolean, nullable=False, default=True)
    organization_type = enum_column(OrganizationType, unique=False, nullable=True)
    rate_limit = db.Column(db.Integer, index=False, nullable=False, default=3000)
    contact_link = db.Column(db.String(255), nullable=True, unique=False)
    volume_sms = db.Column(db.Integer(), nullable=True, unique=False)
    volume_email = db.Column(db.Integer(), nullable=True, unique=False)
    consent_to_research = db.Column(db.Boolean, nullable=True)
    count_as_live = db.Column(db.Boolean, nullable=False, default=True)
    go_live_user_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        nullable=True,
    )
    go_live_user = db.relationship("User", foreign_keys=[go_live_user_id])
    go_live_at = db.Column(db.DateTime, nullable=True)
    organization_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("organization.id"),
        index=True,
        nullable=True,
    )
    organization = db.relationship("Organization", backref="services")
    notes = db.Column(db.Text, nullable=True)
    purchase_order_number = db.Column(db.String(255), nullable=True)
    billing_contact_names = db.Column(db.Text, nullable=True)
    billing_contact_email_addresses = db.Column(db.Text, nullable=True)
    billing_reference = db.Column(db.String(255), nullable=True)

    email_branding = db.relationship(
        "EmailBranding",
        secondary=service_email_branding,
        uselist=False,
        backref=db.backref("services", lazy="dynamic"),
    )

    @classmethod
    def from_json(cls, data):
        """
        Assumption: data has been validated appropriately.
        Returns a Service object based on the provided data. Deserialises created_by to created_by_id as marshmallow
        would.
        """
        # validate json with marshmallow
        fields = data.copy()
        fields["created_by_id"] = fields.pop("created_by")
        return cls(**fields)

    def get_inbound_number(self):
        """Return this service's active inbound number, or None."""
        if self.inbound_number and self.inbound_number.active:
            return self.inbound_number.number

    def get_default_sms_sender(self):
        """Return the default SMS sender string, or None if there is none.

        Mirrors get_default_reply_to_email_address rather than raising
        IndexError when no default sender exists.
        """
        # notify-api-1513 let's try a minimalistic fix
        # to see if we can get the right numbers back
        default_sms_sender = [
            x
            for x in self.service_sms_senders
            if x.is_default and x.service_id == self.id
        ]
        current_app.logger.info(
            f"#notify-api-1513 senders for service {self.name} are {self.service_sms_senders}"
        )
        return default_sms_sender[0].sms_sender if default_sms_sender else None

    def get_default_reply_to_email_address(self):
        """Return the default reply-to email address, or None."""
        default_reply_to = [x for x in self.reply_to_email_addresses if x.is_default]
        return default_reply_to[0].email_address if default_reply_to else None

    def has_permission(self, permission):
        return permission in [p.permission for p in self.permissions]

    def serialize_for_org_dashboard(self):
        """Compact representation for an organization's dashboard."""
        return {
            "id": str(self.id),
            "name": self.name,
            "active": self.active,
            "restricted": self.restricted,
        }
class AnnualBilling(db.Model):
    """Per-service, per-financial-year free SMS fragment allowance."""

    __tablename__ = "annual_billing"

    id = db.Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=False,
    )
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=False,
        index=True,
        nullable=False,
    )
    # NOTE(review): default=True on an Integer column inserts 1, which is not
    # a meaningful year - this looks like a copy/paste bug, but removing the
    # default would change insert behaviour, so it is only flagged here.
    financial_year_start = db.Column(
        db.Integer,
        nullable=False,
        default=True,
        unique=False,
    )
    free_sms_fragment_limit = db.Column(
        db.Integer,
        nullable=False,
        index=False,
        unique=False,
    )
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        onupdate=utc_now,
    )
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        default=utc_now,
    )
    # Removed a bare, unattached UniqueConstraint("financial_year_start",
    # "service_id", name="ix_annual_billing_service_id") that was previously
    # declared here: constraints not listed in __table_args__ are never
    # applied, and __table_args__ below already enforces the same pair.
    service = db.relationship(
        Service,
        backref=db.backref("annual_billing", uselist=True),
    )

    __table_args__ = (
        UniqueConstraint(
            "service_id",
            "financial_year_start",
            name="uix_service_id_financial_year_start",
        ),
    )

    def serialize_free_sms_items(self):
        """Just the allowance fields, for billing endpoints."""
        return {
            "free_sms_fragment_limit": self.free_sms_fragment_limit,
            "financial_year_start": self.financial_year_start,
        }

    def serialize(self):
        """Full representation, including a nested service summary."""

        def serialize_service():
            return {"id": str(self.service_id), "name": self.service.name}

        return {
            "id": str(self.id),
            "free_sms_fragment_limit": self.free_sms_fragment_limit,
            "service_id": self.service_id,
            "financial_year_start": self.financial_year_start,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
            "service": serialize_service() if self.service else None,
        }
class InboundNumber(db.Model):
    """A phone number that can receive inbound SMS, optionally assigned to a service."""

    __tablename__ = "inbound_numbers"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    number = db.Column(db.String(255), unique=True, nullable=False)
    provider = db.Column(db.String(), nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=True,
        index=True,
        nullable=True,
    )
    service = db.relationship(
        Service,
        backref=db.backref("inbound_number", uselist=False),
    )
    active = db.Column(
        db.Boolean,
        index=False,
        unique=False,
        nullable=False,
        default=True,
    )
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        default=utc_now,
        nullable=False,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        onupdate=utc_now,
    )

    def serialize(self):
        """Dict representation, including a nested service summary if assigned."""

        def serialize_service():
            return {"id": str(self.service_id), "name": self.service.name}

        return {
            "id": str(self.id),
            "number": self.number,
            "provider": self.provider,
            "service": serialize_service() if self.service else None,
            "active": self.active,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class ServiceSmsSender(db.Model):
    """An SMS sender ID (up to 11 chars) configured for a service."""

    __tablename__ = "service_sms_senders"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    sms_sender = db.Column(db.String(11), nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
        unique=False,
    )
    service = db.relationship(
        Service,
        backref=db.backref("service_sms_senders", uselist=True),
    )
    is_default = db.Column(db.Boolean, nullable=False, default=True)
    archived = db.Column(db.Boolean, nullable=False, default=False)
    inbound_number_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("inbound_numbers.id"),
        unique=True,
        index=True,
        nullable=True,
    )
    # NOTE(review): the backref name "inbound_number" on InboundNumber reads
    # oddly (InboundNumber.inbound_number -> ServiceSmsSender); confirm it is
    # intentional before renaming, as callers may rely on it.
    inbound_number = db.relationship(
        InboundNumber,
        backref=db.backref("inbound_number", uselist=False),
    )
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        default=utc_now,
        nullable=False,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        onupdate=utc_now,
    )

    def get_reply_to_text(self):
        """Sender formatted for display as a reply-to value."""
        return try_validate_and_format_phone_number(self.sms_sender)

    def serialize(self):
        """Dict representation for API responses."""
        return {
            "id": str(self.id),
            "sms_sender": self.sms_sender,
            "service_id": str(self.service_id),
            "is_default": self.is_default,
            "archived": self.archived,
            "inbound_number_id": (
                str(self.inbound_number_id) if self.inbound_number_id else None
            ),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class ServicePermission(db.Model):
    """A single permission flag granted to a service."""

    __tablename__ = "service_permissions"

    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        primary_key=True,
        index=True,
        nullable=False,
    )
    permission = enum_column(
        ServicePermissionType,
        index=True,
        primary_key=True,
        nullable=False,
    )
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        default=utc_now,
        nullable=False,
    )
    service_permission_types = db.relationship(
        Service,
        backref=db.backref("permissions", cascade="all, delete-orphan"),
    )

    def __repr__(self):
        return "<{} has service permission: {}>".format(
            self.service_id, self.permission
        )
class ServiceGuestList(db.Model):
    """A recipient (email or mobile) a trial-mode service is allowed to message."""

    # Table keeps its legacy name for schema compatibility.
    __tablename__ = "service_whitelist"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
    )
    service = db.relationship("Service", backref="guest_list")
    recipient_type = enum_column(RecipientType, nullable=False)
    recipient = db.Column(db.String(255), nullable=False)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT time, not once at import time.
    created_at = db.Column(db.DateTime, default=utc_now)

    @classmethod
    def from_string(cls, service_id, recipient_type, recipient):
        """Build a guest-list entry, validating *recipient* against its type.

        Raises ValueError for an unknown recipient type or an invalid phone
        number / email address.
        """
        instance = cls(service_id=service_id, recipient_type=recipient_type)
        try:
            if recipient_type == RecipientType.MOBILE:
                instance.recipient = validate_phone_number(
                    recipient, international=True
                )
            elif recipient_type == RecipientType.EMAIL:
                instance.recipient = validate_email_address(recipient)
            else:
                raise ValueError("Invalid recipient type")
        except (InvalidPhoneError, InvalidEmailError) as err:
            # Both validation failures map to the same caller-facing error;
            # chain the original exception for easier debugging.
            raise ValueError('Invalid guest list: "{}"'.format(recipient)) from err
        else:
            return instance

    def __repr__(self):
        return "Recipient {} of type: {}".format(self.recipient, self.recipient_type)
class ServiceInboundApi(db.Model, Versioned):
    """Callback endpoint a service registers to receive inbound SMS."""

    __tablename__ = "service_inbound_api"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
        unique=True,
    )
    service = db.relationship("Service", backref="inbound_api")
    url = db.Column(db.String(), nullable=False)
    # Stored encrypted; see the `bearer_token` property below.
    _bearer_token = db.Column("bearer_token", db.String(), nullable=False)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        default=utc_now,
        nullable=False,
    )
    updated_at = db.Column(db.DateTime, nullable=True)
    updated_by = db.relationship("User")
    updated_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )

    @property
    def bearer_token(self):
        return encryption.decrypt(self._bearer_token)

    @bearer_token.setter
    def bearer_token(self, bearer_token):
        if bearer_token:
            self._bearer_token = encryption.encrypt(str(bearer_token))

    def serialize(self):
        """Dict representation; never exposes the bearer token."""
        return {
            "id": str(self.id),
            "service_id": str(self.service_id),
            "url": self.url,
            "updated_by_id": str(self.updated_by_id),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class ServiceCallbackApi(db.Model, Versioned):
    """Callback endpoint a service registers for delivery-status (etc.) updates."""

    __tablename__ = "service_callback_api"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
    )
    service = db.relationship("Service", backref="service_callback_api")
    url = db.Column(db.String(), nullable=False)
    callback_type = enum_column(CallbackType, nullable=True)
    # Stored encrypted; see the `bearer_token` property below.
    _bearer_token = db.Column("bearer_token", db.String(), nullable=False)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        default=utc_now,
        nullable=False,
    )
    updated_at = db.Column(db.DateTime, nullable=True)
    updated_by = db.relationship("User")
    updated_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )

    # One callback per type per service.
    __table_args__ = (
        UniqueConstraint(
            "service_id",
            "callback_type",
            name="uix_service_callback_type",
        ),
    )

    @property
    def bearer_token(self):
        return encryption.decrypt(self._bearer_token)

    @bearer_token.setter
    def bearer_token(self, bearer_token):
        if bearer_token:
            self._bearer_token = encryption.encrypt(str(bearer_token))

    def serialize(self):
        """Dict representation; never exposes the bearer token."""
        return {
            "id": str(self.id),
            "service_id": str(self.service_id),
            "url": self.url,
            "updated_by_id": str(self.updated_by_id),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class ApiKey(db.Model, Versioned):
    """An API key a service uses to authenticate against the public API."""

    __tablename__ = "api_keys"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = db.Column(db.String(255), nullable=False)
    # Stored encrypted; see the `secret` property below.
    _secret = db.Column("secret", db.String(255), unique=True, nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
    )
    service = db.relationship("Service", backref="api_keys")
    key_type = enum_column(KeyType, index=True, nullable=False)
    # Set when the key is revoked.
    expiry_date = db.Column(db.DateTime)
    # BUGFIX: pass the callable (utc_now, not utc_now()) so the timestamp is
    # evaluated per row at INSERT/UPDATE time, not once at import time.
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=True,
        onupdate=utc_now,
    )
    created_by = db.relationship("User")
    created_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )

    # Key names must be unique per service among non-revoked keys only.
    __table_args__ = (
        Index(
            "uix_service_to_key_name",
            "service_id",
            "name",
            unique=True,
            postgresql_where=expiry_date.is_(None),
        ),
    )

    @property
    def secret(self):
        return encryption.decrypt(self._secret)

    @secret.setter
    def secret(self, secret):
        if secret:
            self._secret = encryption.encrypt(str(secret))
2016-01-13 09:25:46 +00:00
class TemplateFolder(db.Model):
    """A (possibly nested) folder that groups a service's templates."""

    __tablename__ = "template_folder"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        nullable=False,
    )
    name = db.Column(db.String, nullable=False)
    # Self-referential: NULL parent_id means a top-level folder.
    parent_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("template_folder.id"),
        nullable=True,
    )

    service = db.relationship("Service", backref="all_template_folders")
    parent = db.relationship("TemplateFolder", remote_side=[id], backref="subfolders")
    users = db.relationship(
        "ServiceUser",
        uselist=True,
        backref=db.backref(
            "folders", foreign_keys="user_folder_permissions.c.template_folder_id"
        ),
        secondary="user_folder_permissions",
        primaryjoin="TemplateFolder.id == user_folder_permissions.c.template_folder_id",
    )

    __table_args__ = (UniqueConstraint("id", "service_id", name="ix_id_service_id"), {})

    def serialize(self):
        """Dict representation, including the ids of users with access."""
        return {
            "id": self.id,
            "name": self.name,
            "parent_id": self.parent_id,
            "service_id": self.service_id,
            "users_with_permission": self.get_users_with_permission(),
        }

    def is_parent_of(self, other):
        """Return True if this folder is an ancestor of *other*.

        Walks *other*'s parent chain upwards; relies on the tree being
        acyclic (a cycle would loop forever).
        """
        while other.parent is not None:
            if other.parent == self:
                return True
            other = other.parent
        return False

    def get_users_with_permission(self):
        """Return the user ids (as strings) permitted to see this folder."""
        service_users = self.users
        users_with_permission = [
            str(service_user.user_id) for service_user in service_users
        ]
        return users_with_permission
# Association table linking templates to folders.
template_folder_map = db.Table(
    "template_folder_map",
    db.Model.metadata,
    # template_id is a primary key as a template can only belong in one folder
    db.Column("template_id", UUID(as_uuid=True), db.ForeignKey("templates.id"), primary_key=True, nullable=False),
    db.Column("template_folder_id", UUID(as_uuid=True), db.ForeignKey("template_folder.id"), nullable=False),
)
class TemplateBase(db.Model):
    """Columns and behaviour shared by Template and TemplateHistory."""

    __abstract__ = True

    def __init__(self, **kwargs):
        # Assign template_type before the other columns so it is available
        # while the rest of the kwargs are applied.
        if "template_type" in kwargs:
            self.template_type = kwargs.pop("template_type")
        super().__init__(**kwargs)

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = db.Column(db.String(255), nullable=False)
    template_type = enum_column(TemplateType, nullable=False)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Pass the callable (not its result) so the timestamp is evaluated
        # per-row at insert time rather than once at class-definition time.
        default=utc_now,
    )
    updated_at = db.Column(db.DateTime, onupdate=utc_now)
    content = db.Column(db.Text, nullable=False)
    archived = db.Column(db.Boolean, nullable=False, default=False)
    hidden = db.Column(db.Boolean, nullable=False, default=False)
    subject = db.Column(db.Text)

    @declared_attr
    def service_id(cls):
        return db.Column(
            UUID(as_uuid=True), db.ForeignKey("services.id"), index=True, nullable=False
        )

    @declared_attr
    def created_by_id(cls):
        return db.Column(
            UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False
        )

    @declared_attr
    def created_by(cls):
        return db.relationship("User")

    @declared_attr
    def process_type(cls):
        return enum_column(
            TemplateProcessType,
            index=True,
            nullable=False,
            default=TemplateProcessType.NORMAL,
        )

    redact_personalisation = association_proxy(
        "template_redacted", "redact_personalisation"
    )

    # TODO: possibly unnecessary after removing letters
    @property
    def reply_to(self):
        return None

    @reply_to.setter
    def reply_to(self, value):
        # Only None is accepted; templates cannot have a custom sender.
        if value is not None:
            raise ValueError(
                "Unable to set sender for {} template".format(self.template_type)
            )

    def get_reply_to_text(self):
        """Return the service's default reply-to for this template's channel, or None."""
        if self.template_type == TemplateType.EMAIL:
            return self.service.get_default_reply_to_email_address()
        elif self.template_type == TemplateType.SMS:
            return try_validate_and_format_phone_number(
                self.service.get_default_sms_sender()
            )
        else:
            return None

    def _as_utils_template(self):
        # Wrap this row in the matching notifications-utils template class.
        if self.template_type == TemplateType.EMAIL:
            return PlainTextEmailTemplate(self.__dict__)
        elif self.template_type == TemplateType.SMS:
            return SMSMessageTemplate(self.__dict__)
        else:
            raise ValueError(f"{self.template_type} is an invalid template type.")

    def _as_utils_template_with_personalisation(self, values):
        """Return a utils template with ``values`` applied as personalisation."""
        template = self._as_utils_template()
        template.values = values
        return template

    def serialize_for_v2(self):
        """Serialize this template for the public v2 API."""
        serialized = {
            "id": str(self.id),
            "type": self.template_type,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
            "created_by": self.created_by.email_address,
            "version": self.version,
            "body": self.content,
            # Only email templates have a subject line.
            "subject": (
                self.subject if self.template_type == TemplateType.EMAIL else None
            ),
            "name": self.name,
            "personalisation": {
                key: {
                    "required": True,
                }
                for key in self._as_utils_template().placeholders
            },
        }
        return serialized
class Template(TemplateBase):
    """The current (live) version of a service's template."""

    __tablename__ = "templates"

    service = db.relationship("Service", backref="templates")
    version = db.Column(db.Integer, default=0, nullable=False)

    folder = db.relationship(
        "TemplateFolder",
        secondary=template_folder_map,
        uselist=False,
        # eagerly load the folder whenever the template object is fetched
        lazy="joined",
        backref=db.backref("templates"),
    )

    def get_link(self):
        """Return the external API URL for this template."""
        return url_for(
            "template.get_template_by_id_and_service_id",
            template_id=self.id,
            service_id=self.service_id,
            _external=True,
        )

    @classmethod
    def from_json(cls, data, folder):
        """
        Assumption: data has been validated appropriately.

        Returns a Template object based on the provided data.
        """
        # Copy so the caller's dict is not mutated, and remap the key names
        # that differ between the JSON payload and the model's columns.
        fields = dict(data, folder=folder)
        fields["created_by_id"] = fields.pop("created_by")
        fields["service_id"] = fields.pop("service")
        return cls(**fields)
class TemplateRedacted(db.Model):
    """Per-template flag controlling whether personalisation is redacted."""

    __tablename__ = "template_redacted"

    template_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("templates.id"),
        primary_key=True,
        nullable=False,
    )
    redact_personalisation = db.Column(db.Boolean, nullable=False, default=False)
    updated_at = db.Column(
        db.DateTime,
        nullable=False,
        # Pass the callable (not its result) so each row gets an insert-time
        # timestamp rather than the value of utc_now() at import time.
        default=utc_now,
    )
    updated_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        nullable=False,
        index=True,
    )
    updated_by = db.relationship("User")

    # uselist=False as this is a one-to-one relationship
    template = db.relationship(
        "Template",
        uselist=False,
        backref=db.backref("template_redacted", uselist=False),
    )
class TemplateHistory(TemplateBase):
    """An immutable snapshot of a template, one row per (id, version)."""

    __tablename__ = "templates_history"

    service = db.relationship("Service")
    version = db.Column(db.Integer, primary_key=True, nullable=False)

    @declared_attr
    def template_redacted(cls):
        return db.relationship(
            "TemplateRedacted",
            foreign_keys=[cls.id],
            primaryjoin="TemplateRedacted.template_id == TemplateHistory.id",
        )

    def get_link(self):
        """Return the external API URL for this specific template version."""
        return url_for(
            "template.get_template_by_id_and_service_id",
            service_id=self.service.id,
            template_id=self.id,
            version=self.version,
            _external=True,
        )
class ProviderDetails(db.Model):
    """A delivery provider for a notification channel, and whether it is active."""

    __tablename__ = "provider_details"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    display_name = db.Column(db.String, nullable=False)
    identifier = db.Column(db.String, nullable=False)
    notification_type = enum_column(NotificationType, nullable=False)
    active = db.Column(db.Boolean, default=False, nullable=False)
    version = db.Column(db.Integer, default=1, nullable=False)
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Pass the callable (not its result) so the timestamp is taken at each
        # update rather than fixed once at import time.
        onupdate=utc_now,
    )
    created_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=True,
    )
    created_by = db.relationship("User")
    supports_international = db.Column(db.Boolean, nullable=False, default=False)
class ProviderDetailsHistory(db.Model, HistoryModel):
    """Versioned history of ProviderDetails rows, keyed by (id, version)."""

    __tablename__ = "provider_details_history"

    id = db.Column(UUID(as_uuid=True), primary_key=True, nullable=False)
    display_name = db.Column(db.String, nullable=False)
    identifier = db.Column(db.String, nullable=False)
    notification_type = enum_column(NotificationType, nullable=False)
    active = db.Column(db.Boolean, nullable=False)
    version = db.Column(db.Integer, primary_key=True, nullable=False)
    # Pass the callable (not its result) so the timestamp is evaluated on each
    # update rather than fixed once at import time.
    updated_at = db.Column(db.DateTime, nullable=True, onupdate=utc_now)
    created_by_id = db.Column(
        UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=True
    )
    created_by = db.relationship("User")
    supports_international = db.Column(db.Boolean, nullable=False, default=False)
2016-01-15 11:12:05 +00:00
class Job(db.Model):
    """A batch send created from an uploaded file of recipients."""

    __tablename__ = "jobs"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    original_file_name = db.Column(db.String, nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        unique=False,
        nullable=False,
    )
    service = db.relationship("Service", backref=db.backref("jobs", lazy="dynamic"))
    template_id = db.Column(
        UUID(as_uuid=True), db.ForeignKey("templates.id"), index=True, unique=False
    )
    template = db.relationship("Template", backref=db.backref("jobs", lazy="dynamic"))
    # Pin the template version so later template edits don't change this job.
    template_version = db.Column(db.Integer, nullable=False)
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        # Pass the callable (not its result) so each row gets an insert-time
        # timestamp rather than the value of utc_now() at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=True,
        onupdate=utc_now,
    )
    # Progress counters updated as the job is processed.
    notification_count = db.Column(db.Integer, nullable=False)
    notifications_sent = db.Column(db.Integer, nullable=False, default=0)
    notifications_delivered = db.Column(db.Integer, nullable=False, default=0)
    notifications_failed = db.Column(db.Integer, nullable=False, default=0)

    processing_started = db.Column(
        db.DateTime, index=False, unique=False, nullable=True
    )
    processing_finished = db.Column(
        db.DateTime, index=False, unique=False, nullable=True
    )
    created_by = db.relationship("User")
    created_by_id = db.Column(
        UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=True
    )
    scheduled_for = db.Column(db.DateTime, index=True, unique=False, nullable=True)
    job_status = enum_column(
        JobStatus,
        index=True,
        nullable=False,
        default=JobStatus.PENDING,
    )
    archived = db.Column(db.Boolean, nullable=False, default=False)
2016-01-21 17:29:24 +00:00
class VerifyCode(db.Model):
    """A short-lived verification code sent to a user, stored only as a hash."""

    __tablename__ = "verify_codes"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = db.Column(
        UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False
    )
    user = db.relationship("User", backref=db.backref("verify_codes", lazy="dynamic"))
    # Hashed via app.hashing.hashpw; the plaintext code is never persisted.
    _code = db.Column(db.String, nullable=False)
    code_type = enum_column(CodeType, index=False, unique=False, nullable=False)
    expiry_datetime = db.Column(db.DateTime, nullable=False)
    code_used = db.Column(db.Boolean, default=False)
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        # Pass the callable (not its result) so each row gets an insert-time
        # timestamp rather than the value of utc_now() at import time.
        default=utc_now,
    )

    @property
    def code(self):
        # Only a hash is stored, so the code can never be read back.
        raise AttributeError("Code not readable")

    @code.setter
    def code(self, cde):
        # Store a one-way hash of the supplied code.
        self._code = hashpw(cde)

    def check_code(self, cde):
        """Return whether ``cde`` matches the stored hashed code."""
        return check_hash(cde, self._code)
Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. I've also verified this manually, just to be on the safe side. - No data. 
It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort 
(cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index 
Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); 
CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a sepatate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx
2022-05-19 11:28:44 +01:00
class NotificationAllTimeView(db.Model):
"""
WARNING: this view is a union of rows in "notifications" and
"notification_history". Any query on this view will query both
tables and therefore rely on *both* sets of indices.
"""
2023-08-29 14:54:30 -07:00
__tablename__ = "notifications_all_time_view"
Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. I've also verified this manually, just to be on the safe side. - No data. 
It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort 
(cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index 
Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); 
CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a separate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx
2022-05-19 11:28:44 +01:00
# Tell alembic not to create this as a table. We have a migration where we manually set this up as a view.
# This is custom logic we apply - not built-in logic. See `migrations/env.py`
__table_args__ = {"info": {"managed_by_alembic": False}}
Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. I've also verified this manually, just to be on the safe side. - No data. 
It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort 
(cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index 
Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); 
CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a separate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx
2022-05-19 11:28:44 +01:00
# Read-only mapping over the notifications_all_time_view DB view (UNION of
# notifications + notification_history). Because rows are never inserted via
# this model, columns omit defaults/constraints — they only describe retrieval.
id = db.Column(UUID(as_uuid=True), primary_key=True)
job_id = db.Column(UUID(as_uuid=True))
job_row_number = db.Column(db.Integer)
service_id = db.Column(UUID(as_uuid=True))
template_id = db.Column(UUID(as_uuid=True))
template_version = db.Column(db.Integer)
api_key_id = db.Column(UUID(as_uuid=True))
key_type = db.Column(db.String)
# Number of billable message fragments (SMS segments etc.).
billable_units = db.Column(db.Integer)
notification_type = enum_column(NotificationType)
Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. I've also verified this manually, just to be on the safe side. - No data. 
It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort 
(cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index 
Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); 
CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a sepatate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx
2022-05-19 11:28:44 +01:00
created_at = db.Column(db.DateTime)
sent_at = db.Column(db.DateTime)
sent_by = db.Column(db.String)
updated_at = db.Column(db.DateTime)
# DB column is named "notification_status"; the Python attribute is "status"
# (the `key` kwarg), matching the live/history model attribute name.
# NOTE: `default` has no effect on a read-only view mapping — it only applies
# when writing rows, which never happens through this model.
status = enum_column(
    NotificationStatus,
    name="notification_status",
    nullable=True,
    default=NotificationStatus.CREATED,
    key="status",
)
Use notification view for status / billing tasks This fixes a bug where (letter) notifications left in sending would temporarily get excluded from billing and status calculations once the service retention period had elapsed, and then get included once again when they finally get marked as delivered.* Status and billing tasks shouldn't need to have knowledge about which table their data is in and getting this wrong is the fundamental cause of the bug here. Adding a view across both tables abstracts this away while keeping the query complexity the same. Using a view also has the added benefit that we no longer need to care when the status / billing tasks run in comparison to the deletion task, since we will retrieve the same data irrespective (see below for a more detailed discussion on data integrity). *Such a scenario is rare but has happened. A New View ========== I've included all the columns that are shared between the two tables, even though only a subset are actually needed. Having extra columns has no impact and may be useful in future. Although the view isn't actually a table, SQLAlchemy appears to wrap it without any issues, noting that the package doesn't have any direct support for "view models". Because we're never inserting data, we don't need most of the kwargs when defining columns.* *Note that the "default" kwarg doesn't affect data that's retrieved, only data that's written (if no value is set). Data Integrity ============== The (new) tests cover the main scenarios. We need to be careful with how the view interacts with the deletion / archiving task. There are two concerns here: - Duplicates. The deletion task inserts before it deletes [^1], so we could end up double counting. It turns out this isn't a problem because a Postgres UNION is an implicit "DISTINCT" [^2]. I've also verified this manually, just to be on the safe side. - No data. 
It's conceivable that the query will check the history table just before the insertion, then check the notifications table just after the deletion. It turns out this isn't a problem either because the whole query sees the same DB snapshot [^3][^4].* *I can't think of a way to test this as it's a race condition, but I'm confident the Postgres docs are accurate. Performance =========== I copied the relevant (non-PII) columns from Production for data going back to 2022-04-01. I then ran several tests. Queries using the new view still make use of indices on a per-table basis, as the following query plan illustrates: QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ GroupAggregate (cost=1130820.02..1135353.89 rows=46502 width=97) (actual time=629.863..756.703 rows=72 loops=1) Group Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international -> Sort (cost=1130820.02..1131401.28 rows=232506 width=85) (actual time=629.756..708.914 rows=217563 loops=1) Sort Key: notifications_all_time_view.template_id, notifications_all_time_view.sent_by, notifications_all_time_view.rate_multiplier, notifications_all_time_view.international Sort Method: external merge Disk: 9320kB -> Subquery Scan on notifications_all_time_view (cost=1088506.43..1098969.20 rows=232506 width=85) (actual time=416.118..541.669 rows=217563 loops=1) -> Unique (cost=1088506.43..1096644.14 rows=232506 width=725) (actual time=416.115..513.065 rows=217563 loops=1) -> Sort 
(cost=1088506.43..1089087.70 rows=232506 width=725) (actual time=416.115..451.190 rows=217563 loops=1) Sort Key: notifications_no_pii.id, notifications_no_pii.job_id, notifications_no_pii.service_id, notifications_no_pii.template_id, notifications_no_pii.key_type, notifications_no_pii.billable_units, notifications_no_pii.notification_type, notifications_no_pii.created_at, notifications_no_pii.sent_by, notifications_no_pii.notification_status, notifications_no_pii.international, notifications_no_pii.rate_multiplier, notifications_no_pii.postage Sort Method: external merge Disk: 23936kB -> Append (cost=114.42..918374.12 rows=232506 width=725) (actual time=2.051..298.229 rows=217563 loops=1) -> Bitmap Heap Scan on notifications_no_pii (cost=114.42..8557.55 rows=2042 width=113) (actual time=1.405..1.442 rows=0 loops=1) Recheck Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: ((key_type)::text = ANY ('{normal,team}'::text[])) -> Bitmap Index Scan on ix_notifications_no_piiservice_id_composite (cost=0.00..113.91 rows=2202 width=0) (actual time=1.402..1.439 rows=0 loops=1) Index Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND (notification_type = 'sms'::notification_type) AND (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) -> Index Scan using ix_notifications_history_no_pii_service_id_composite on notifications_history_no_pii (cost=0.70..906328.97 rows=230464 width=113) (actual time=0.645..281.612 rows=217563 loops=1) Index 
Cond: ((service_id = 'c5956607-20b1-48b4-8983-85d11404e61f'::uuid) AND ((key_type)::text = ANY ('{normal,team}'::text[])) AND (notification_type = 'sms'::notification_type) AND (created_at >= '2022-05-01 23:00:00'::timestamp without time zone) AND (created_at < '2022-05-02 23:00:00'::timestamp without time zone)) Filter: (notification_status = ANY ('{sending,sent,delivered,pending,temporary-failure,permanent-failure}'::text[])) Planning Time: 18.032 ms Execution Time: 759.001 ms (21 rows) Queries using the new view appear to be slower than without, but the differences I've seen are minimal: the original queries execute in seconds locally and in Production, so it's not a big issue. Notes: Performance ================== I downloaded a minimal set of columns for testing: \copy ( select id, notification_type, key_type, created_at, service_id, template_id, sent_by, rate_multiplier, international, billable_units, postage, job_id, notification_status from notifications ) to 'notifications.csv' delimiter ',' csv header; CREATE TABLE notifications_no_pii ( id uuid NOT NULL, notification_type public.notification_type NOT NULL, key_type character varying(255) NOT NULL, created_at timestamp without time zone NOT NULL, service_id uuid, template_id uuid, sent_by character varying, rate_multiplier numeric, international boolean, billable_units integer NOT NULL, postage character varying, job_id uuid, notification_status text ); copy notifications_no_pii from '/Users/ben.thorner/Desktop/notifications.csv' delimiter ',' csv header; CREATE INDEX ix_notifications_no_piicreated_at ON notifications_no_pii USING btree (created_at); CREATE INDEX ix_notifications_no_piijob_id ON notifications_no_pii USING btree (job_id); CREATE INDEX ix_notifications_no_piinotification_type_composite ON notifications_no_pii USING btree (notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piiservice_created_at ON notifications_no_pii USING btree (service_id, created_at); 
CREATE INDEX ix_notifications_no_piiservice_id_composite ON notifications_no_pii USING btree (service_id, notification_type, notification_status, created_at); CREATE INDEX ix_notifications_no_piitemplate_id ON notifications_no_pii USING btree (template_id); And similarly for the history table. I then created a sepatate view across both of these temporary tables using just these columns. To test performance I created some queries that reflect what is run by the billing [^5] and status [^6] tasks e.g. explain analyze select template_id, sent_by, rate_multiplier, international, sum(billable_units), count(*) from notifications_all_time_view where notification_status in ('sending', 'sent', 'delivered', 'pending', 'temporary-failure', 'permanent-failure') and key_type in ('normal', 'team') and created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' group by 1,2,3,4; explain analyze select template_id, job_id, key_type, notification_status, count(*) from notifications_all_time_view where created_at >= '2022-05-01 23:00' and created_at < '2022-05-02 23:00' and notification_type = 'sms' and service_id = 'c5956607-20b1-48b4-8983-85d11404e61f' and key_type in ('normal', 'team') group by 1,2,3,4; Between running queries I restarted my local database and also ran a command to purge disk caches [^7]. I tested on a few services: - c5956607-20b1-48b4-8983-85d11404e61f on 2022-05-02 (high volume) - 0cc696c6-b792-409d-99e9-64232f461b0f on 2022-04-06 (highest volume) - 01135db6-7819-4121-8b97-4aa2d741e372 on 2022-04-14 (very low volume) All execution results are of the same magnitude using the view compared to the worst case of either table on its own. 
[^1]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/notifications_dao.py#L389 [^2]: https://stackoverflow.com/questions/49925/what-is-the-difference-between-union-and-union-all [^3]: https://www.postgresql.org/docs/current/transaction-iso.html [^4]: https://dba.stackexchange.com/questions/210485/can-sub-selects-change-in-one-single-query-in-a-read-committed-transaction [^5]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_billing_dao.py#L471 [^6]: https://github.com/alphagov/notifications-api/blob/00a04ebf54c97fc695f013de0a497e5490ddb558/app/dao/fact_notification_status_dao.py#L58 [^7]: https://stackoverflow.com/questions/28845524/echo-3-proc-sys-vm-drop-caches-on-mac-osx
2022-05-19 11:28:44 +01:00
reference = db.Column(db.String)
client_reference = db.Column(db.String)
international = db.Column(db.Boolean)
phone_prefix = db.Column(db.String)
rate_multiplier = db.Column(db.Numeric(asdecimal=False))
created_by_id = db.Column(UUID(as_uuid=True))
document_download_count = db.Column(db.Integer)
class Notification(db.Model):
    """A live email or SMS notification.

    Rows are periodically archived into ``NotificationHistory`` once the
    owning service's data-retention period has elapsed, so queries that must
    span all time have to consider both tables (or the all-time view).
    """

    __tablename__ = "notifications"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Recipient address: an email address or phone number, depending on
    # notification_type.
    to = db.Column(db.String, nullable=False)
    normalised_to = db.Column(db.String, nullable=True)

    job_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("jobs.id"),
        index=True,
        unique=False,
    )
    job = db.relationship("Job", backref=db.backref("notifications", lazy="dynamic"))
    job_row_number = db.Column(db.Integer, nullable=True)

    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=False,
    )
    service = db.relationship("Service")

    # Template is referenced through a composite FK onto templates_history
    # (see __table_args__) so a notification always points at the exact
    # template version it was sent with.
    template_id = db.Column(UUID(as_uuid=True), index=True, unique=False)
    template_version = db.Column(db.Integer, nullable=False)

    template = db.relationship("TemplateHistory")
    api_key_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("api_keys.id"),
        unique=False,
    )
    api_key = db.relationship("ApiKey")
    key_type = enum_column(KeyType, unique=False, nullable=False)
    # Number of billable message fragments (SMS segments etc.).
    billable_units = db.Column(db.Integer, nullable=False, default=0)
    notification_type = enum_column(NotificationType, nullable=False)

    created_at = db.Column(db.DateTime, index=True, unique=False, nullable=False)
    sent_at = db.Column(db.DateTime, index=False, unique=False, nullable=True)
    sent_by = db.Column(db.String, nullable=True)

    message_cost = db.Column(db.Float, nullable=True, default=0.0)
    updated_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=True,
        # BUGFIX: pass the callable, not its result. ``onupdate=utc_now()``
        # evaluated once at import time, stamping every subsequent UPDATE
        # with the same frozen timestamp. SQLAlchemy invokes a callable
        # ``onupdate`` at statement-execution time instead.
        onupdate=utc_now,
    )
    # DB column is named "notification_status"; the Python attribute is
    # "status" (the `key` kwarg).
    status = enum_column(
        NotificationStatus,
        name="notification_status",
        nullable=True,
        default=NotificationStatus.CREATED,
        key="status",
    )
    # Provider-side reference for the message.
    reference = db.Column(db.String, nullable=True, index=True)
    # Caller-supplied reference, echoed back in the public API.
    client_reference = db.Column(db.String, index=True, nullable=True)
    # Encrypted at rest; read/write through the ``personalisation`` property.
    _personalisation = db.Column(db.String, nullable=True)

    international = db.Column(db.Boolean, nullable=False, default=False)
    phone_prefix = db.Column(db.String, nullable=True)
    rate_multiplier = db.Column(db.Numeric(asdecimal=False), nullable=True)

    created_by = db.relationship("User")
    created_by_id = db.Column(
        UUID(as_uuid=True), db.ForeignKey("users.id"), nullable=True
    )
    reply_to_text = db.Column(db.String, nullable=True)
    document_download_count = db.Column(db.Integer, nullable=True)

    provider_response = db.Column(db.Text, nullable=True)
    carrier = db.Column(db.Text, nullable=True)
    message_id = db.Column(db.Text, nullable=True)

    # queue_name = db.Column(db.Text, nullable=True)

    __table_args__ = (
        db.ForeignKeyConstraint(
            ["template_id", "template_version"],
            ["templates_history.id", "templates_history.version"],
        ),
        UniqueConstraint(
            "job_id", "job_row_number", name="uq_notifications_job_row_number"
        ),
        Index(
            "ix_notifications_notification_type_composite",
            "notification_type",
            "status",
            "created_at",
        ),
        Index("ix_notifications_service_created_at", "service_id", "created_at"),
        Index(
            "ix_notifications_service_id_composite",
            "service_id",
            "notification_type",
            "status",
            "created_at",
        ),
    )

    @property
    def personalisation(self):
        """Decrypt and return the personalisation dict.

        Returns an empty dict when no personalisation is stored or when
        decryption fails (the failure is logged rather than raised, so a bad
        row cannot break callers that only display the notification).
        """
        if self._personalisation:
            try:
                return encryption.decrypt(self._personalisation)
            except EncryptionError:
                current_app.logger.exception(
                    "Error decrypting notification.personalisation, returning empty dict",
                )
        return {}

    @personalisation.setter
    def personalisation(self, personalisation):
        # Normalise falsy input (None) to {} so the stored ciphertext always
        # decrypts to a dict.
        self._personalisation = encryption.encrypt(personalisation or {})

    def completed_at(self):
        """Return the formatted completion timestamp, or None if in flight.

        Relies on ``updated_at`` having been set when the terminal status was
        recorded.
        """
        if self.status in NotificationStatus.completed_types():
            return self.updated_at.strftime(DATETIME_FORMAT)
        return None

    @staticmethod
    def substitute_status(status_or_statuses):
        """
        static function that takes a status or list of statuses and substitutes our new failure types if it finds
        the deprecated one

        > IN
        'failed'
        < OUT
        ['technical-failure', 'temporary-failure', 'permanent-failure']
        -
        > IN
        ['failed', 'created', 'accepted']
        < OUT
        ['technical-failure', 'temporary-failure', 'permanent-failure', 'created', 'sending']
        -
        > IN
        'delivered'
        < OUT
        ['received']

        :param status_or_statuses: a single status or list of statuses
        :return: a single status or list with the current failure statuses substituted for 'failure'
        """

        def _substitute_status_str(_status):
            # The deprecated umbrella status expands to all concrete failures.
            return (
                NotificationStatus.failed_types()
                if _status == NotificationStatus.FAILED
                else [_status]
            )

        def _substitute_status_seq(_statuses):
            # set() deduplicates when 'failed' overlaps with explicit failures.
            return list(
                set(
                    itertools.chain.from_iterable(
                        _substitute_status_str(status) for status in _statuses
                    )
                )
            )

        if isinstance(status_or_statuses, str):
            return _substitute_status_str(status_or_statuses)
        return _substitute_status_seq(status_or_statuses)

    @property
    def content(self):
        """Template body with this notification's personalisation filled in."""
        return self.template._as_utils_template_with_personalisation(
            self.personalisation
        ).content_with_placeholders_filled_in

    @property
    def subject(self):
        """Rendered subject line, or None for template types without one."""
        template_object = self.template._as_utils_template_with_personalisation(
            self.personalisation
        )
        return getattr(template_object, "subject", None)

    @property
    def formatted_status(self):
        """Human-readable status string, varying by notification type.

        Falls back to the raw status value for statuses not in the map.
        """
        return {
            NotificationType.EMAIL: {
                NotificationStatus.FAILED: "Failed",
                NotificationStatus.TECHNICAL_FAILURE: "Technical failure",
                NotificationStatus.TEMPORARY_FAILURE: "Inbox not accepting messages right now",
                # NOTE(review): "doesnt" is missing an apostrophe — likely a
                # typo in the user-facing copy, but kept byte-identical here
                # because downstream snapshots may depend on it; confirm
                # before changing.
                NotificationStatus.PERMANENT_FAILURE: "Email address doesnt exist",
                NotificationStatus.DELIVERED: "Delivered",
                NotificationStatus.SENDING: "Sending",
                NotificationStatus.CREATED: "Sending",
                NotificationStatus.SENT: "Delivered",
            },
            NotificationType.SMS: {
                NotificationStatus.FAILED: "Failed",
                NotificationStatus.TECHNICAL_FAILURE: "Technical failure",
                NotificationStatus.TEMPORARY_FAILURE: "Unable to find carrier response -- still looking",
                NotificationStatus.PERMANENT_FAILURE: "Unable to find carrier response.",
                NotificationStatus.DELIVERED: "Delivered",
                NotificationStatus.PENDING: "Pending",
                NotificationStatus.SENDING: "Sending",
                NotificationStatus.CREATED: "Sending",
                NotificationStatus.SENT: "Sent internationally",
            },
        }[self.template.template_type].get(self.status, self.status)

    def get_created_by_name(self):
        """Name of the creating user, or None if API-created."""
        if self.created_by:
            return self.created_by.name
        else:
            return None

    def get_created_by_email_address(self):
        """Email of the creating user, or None if API-created."""
        if self.created_by:
            return self.created_by.email_address
        else:
            return None

    def serialize_for_redis(self, obj):
        """Flatten a SQLAlchemy model instance into a dict of Redis-safe values.

        :param obj: any SQLAlchemy declarative instance (normally a
            Notification).
        :raises ValueError: if *obj* is not a SQLAlchemy declarative instance.
        """
        if isinstance(obj.__class__, DeclarativeMeta):
            fields = {}
            for column in obj.__table__.columns:
                if column.name == "notification_status":
                    # DB column "notification_status" maps to attribute
                    # "status" (see the column definition above).
                    new_name = "status"
                    value = getattr(obj, new_name)
                elif column.name == "created_at":
                    if isinstance(obj.created_at, str):
                        value = obj.created_at
                    else:
                        # BUGFIX: a stray trailing comma previously wrapped
                        # this in a one-element tuple.
                        value = obj.created_at.strftime("%Y-%m-%d %H:%M:%S")
                elif column.name in ["sent_at", "completed_at"]:
                    value = None
                elif column.name.endswith("_id"):
                    # UUIDs are not Redis-serializable; stringify them.
                    # NOTE(review): a None id becomes the string "None" —
                    # confirm downstream consumers expect this.
                    value = getattr(obj, column.name)
                    value = str(value)
                else:
                    value = getattr(obj, column.name)
                if column.name in ["message_id", "api_key_id"]:
                    pass  # do nothing because we don't have the message id yet
                else:
                    fields[column.name] = value

            return fields
        raise ValueError("Provided object is not a SQLAlchemy instance")

    def serialize_for_csv(self):
        """Row dict for the CSV report download (empty strings, not Nones)."""
        serialized = {
            # Convert 0-based row index to the 1-based row number users see.
            "row_number": (
                "" if self.job_row_number is None else self.job_row_number + 1
            ),
            "recipient": self.to,
            "client_reference": self.client_reference or "",
            "template_name": self.template.name,
            "template_type": self.template.template_type,
            "job_name": self.job.original_file_name if self.job else "",
            "carrier": self.carrier,
            "provider_response": self.provider_response,
            "status": self.formatted_status,
            "created_at": self.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            "created_by_name": self.get_created_by_name(),
            "created_by_email_address": self.get_created_by_email_address(),
        }
        return serialized

    def serialize(self):
        """Public-API representation of this notification.

        The line_*/postcode fields are always None (letter support removed);
        exactly one of email_address / phone_number is set, by type.
        """
        template_dict = {
            "version": self.template.version,
            "id": self.template.id,
            "uri": self.template.get_link(),
        }

        serialized = {
            "id": self.id,
            "reference": self.client_reference,
            "email_address": (
                self.to if self.notification_type == NotificationType.EMAIL else None
            ),
            "phone_number": (
                self.to if self.notification_type == NotificationType.SMS else None
            ),
            "line_1": None,
            "line_2": None,
            "line_3": None,
            "line_4": None,
            "line_5": None,
            "line_6": None,
            "postcode": None,
            "type": self.notification_type,
            "status": self.status,
            "provider_response": self.provider_response,
            "carrier": self.carrier,
            "template": template_dict,
            "body": self.content,
            "subject": self.subject,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "created_by_name": self.get_created_by_name(),
            "sent_at": get_dt_string_or_none(self.sent_at),
            "completed_at": self.completed_at(),
            "scheduled_for": None,
        }

        return serialized
class NotificationHistory(db.Model, HistoryModel):
2023-08-29 14:54:30 -07:00
__tablename__ = "notification_history"
# Archived copy of a Notification, written when a row ages out of the live
# table. Mirrors Notification's columns minus recipient/PII fields.
id = db.Column(UUID(as_uuid=True), primary_key=True)

job_id = db.Column(
    UUID(as_uuid=True),
    db.ForeignKey("jobs.id"),
    index=True,
    unique=False,
)
job = db.relationship("Job")
job_row_number = db.Column(db.Integer, nullable=True)

service_id = db.Column(
    UUID(as_uuid=True),
    db.ForeignKey("services.id"),
    unique=False,
)
service = db.relationship("Service")
# Composite FK to templates_history (see __table_args__) pins the exact
# template version used.
template_id = db.Column(UUID(as_uuid=True), unique=False)
template_version = db.Column(db.Integer, nullable=False)

api_key_id = db.Column(
    UUID(as_uuid=True),
    db.ForeignKey("api_keys.id"),
    unique=False,
)
api_key = db.relationship("ApiKey")
key_type = enum_column(KeyType, unique=False, nullable=False)
billable_units = db.Column(db.Integer, nullable=False, default=0)
notification_type = enum_column(NotificationType, nullable=False)
created_at = db.Column(db.DateTime, unique=False, nullable=False)
sent_at = db.Column(db.DateTime, index=False, unique=False, nullable=True)
sent_by = db.Column(db.String, nullable=True)

message_cost = db.Column(db.Float, nullable=True, default=0.0)

updated_at = db.Column(
    db.DateTime,
    index=False,
    unique=False,
    nullable=True,
    # BUGFIX: pass the callable, not its result. ``onupdate=utc_now()`` was
    # evaluated once at import time, stamping every UPDATE with the same
    # frozen timestamp; SQLAlchemy invokes a callable per statement.
    onupdate=utc_now,
)
# DB column "notification_status" exposed as attribute "status".
status = enum_column(
    NotificationStatus,
    name="notification_status",
    nullable=True,
    default=NotificationStatus.CREATED,
    key="status",
)
reference = db.Column(db.String, nullable=True, index=True)
client_reference = db.Column(db.String, nullable=True)
international = db.Column(db.Boolean, nullable=True, default=False)

phone_prefix = db.Column(db.String, nullable=True)
rate_multiplier = db.Column(db.Numeric(asdecimal=False), nullable=True)

# Plain UUID (no FK) — users may be deleted after archival.
created_by_id = db.Column(UUID(as_uuid=True), nullable=True)

document_download_count = db.Column(db.Integer, nullable=True)

__table_args__ = (
    db.ForeignKeyConstraint(
        ["template_id", "template_version"],
        ["templates_history.id", "templates_history.version"],
    ),
    Index(
        "ix_notification_history_service_id_composite",
        "service_id",
        "key_type",
        "notification_type",
        "created_at",
    ),
)
@classmethod
def from_original(cls, notification):
history = super().from_original(notification)
history.status = notification.status
return history
def update_from_original(self, original):
super().update_from_original(original)
self.status = original.status
class InvitedUser(db.Model):
    """An invitation for a user (by email) to join a service."""

    __tablename__ = "invited_users"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email_address = db.Column(db.String(255), nullable=False)
    # The user who sent the invitation.
    user_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )
    from_user = db.relationship("User")
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        unique=False,
    )
    service = db.relationship("Service")
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        # Pass the callable so each row gets its own INSERT-time timestamp;
        # utc_now() would be evaluated once at import time.
        default=utc_now,
    )
    status = enum_column(
        InvitedUserStatus,
        nullable=False,
        default=InvitedUserStatus.PENDING,
    )
    # Comma-separated permission names; see get_permissions().
    permissions = db.Column(db.String, nullable=False)
    auth_type = enum_column(AuthType, index=True, nullable=False, default=AuthType.SMS)
    folder_permissions = db.Column(
        JSONB(none_as_null=True), nullable=False, default=list
    )

    # would like to have used properties for this but haven't found a way to make them
    # play nice with marshmallow yet
    def get_permissions(self):
        """Return the stored comma-separated permissions as a list of strings."""
        return self.permissions.split(",")
2023-07-10 11:06:29 -07:00
class InvitedOrganizationUser(db.Model):
    """An invitation for a user (by email) to join an organization."""

    __tablename__ = "invited_organization_users"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email_address = db.Column(db.String(255), nullable=False)
    invited_by_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        nullable=False,
    )
    invited_by = db.relationship("User")
    organization_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("organization.id"),
        nullable=False,
    )
    organization = db.relationship("Organization")
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable, not utc_now(): evaluated per-INSERT rather than once
        # at import time.
        default=utc_now,
    )
    status = enum_column(
        InvitedUserStatus,
        nullable=False,
        default=InvitedUserStatus.PENDING,
    )

    def serialize(self):
        """Return a JSON-serializable dict describing this invitation."""
        return {
            "id": str(self.id),
            "email_address": self.email_address,
            "invited_by": str(self.invited_by_id),
            "organization": str(self.organization_id),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "status": self.status,
        }
class Permission(db.Model):
    """A single permission granted to a user, optionally scoped to a service.

    The (service, user, permission) triple is unique.
    """

    __tablename__ = "permissions"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Service id is optional, if the service is omitted we will assume the permission is not service specific.
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        unique=False,
        nullable=True,
    )
    service = db.relationship("Service")
    user_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("users.id"),
        index=True,
        nullable=False,
    )
    user = db.relationship("User")
    permission = enum_column(PermissionType, index=False, unique=False, nullable=False)
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )

    __table_args__ = (
        UniqueConstraint(
            "service_id",
            "user_id",
            "permission",
            name="uix_service_user_permission",
        ),
    )
class Event(db.Model):
    """A free-form audit event with an arbitrary JSON payload in ``data``."""

    __tablename__ = "events"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    event_type = db.Column(db.String(255), nullable=False)
    created_at = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    data = db.Column(JSON, nullable=False)
2017-04-25 09:53:43 +01:00
class Rate(db.Model):
    """A rate for a notification type, effective from ``valid_from``."""

    __tablename__ = "rates"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Start of the period during which this rate applies.
    valid_from = db.Column(db.DateTime, nullable=False)
    rate = db.Column(db.Float(asdecimal=False), nullable=False)
    notification_type = enum_column(NotificationType, index=True, nullable=False)

    def __str__(self):
        return f"{self.rate} {self.notification_type} {self.valid_from}"
2017-05-24 08:57:11 +01:00
2017-05-09 11:22:05 +01:00
2017-05-22 11:26:47 +01:00
class InboundSms(db.Model):
    """An SMS received on a service's inbound number.

    The message body is stored encrypted in the ``content`` column; use the
    ``content`` property to read/write the plaintext.
    """

    __tablename__ = "inbound_sms"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        nullable=False,
    )
    service = db.relationship("Service", backref="inbound_sms")

    notify_number = db.Column(
        db.String,
        nullable=False,
    )  # the service's number, that the msg was sent to
    user_number = db.Column(
        db.String,
        nullable=False,
        index=True,
    )  # the end user's number, that the msg was sent from

    provider_date = db.Column(db.DateTime)
    provider_reference = db.Column(db.String)
    provider = db.Column(db.String, nullable=False)
    # Ciphertext column; exposed as plaintext via the ``content`` property.
    _content = db.Column("content", db.String, nullable=False)

    @property
    def content(self):
        """Decrypt and return the stored message body."""
        return encryption.decrypt(self._content)

    @content.setter
    def content(self, content):
        self._content = encryption.encrypt(content)

    def serialize(self):
        """Return a JSON-serializable dict (with the content decrypted)."""
        return {
            "id": str(self.id),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "service_id": str(self.service_id),
            "notify_number": self.notify_number,
            "user_number": self.user_number,
            "content": self.content,
        }
class InboundSmsHistory(db.Model, HistoryModel):
    """History copy of an InboundSms row.

    Keeps provider metadata only — the user's number and the message
    content are not carried over.
    """

    __tablename__ = "inbound_sms_history"

    id = db.Column(UUID(as_uuid=True), primary_key=True)
    created_at = db.Column(db.DateTime, index=True, unique=False, nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        index=True,
        unique=False,
    )
    service = db.relationship("Service")
    # The service's number that the message was sent to.
    notify_number = db.Column(db.String, nullable=False)
    provider_date = db.Column(db.DateTime)
    provider_reference = db.Column(db.String)
    provider = db.Column(db.String, nullable=False)
2017-09-07 15:41:23 +01:00
class ServiceEmailReplyTo(db.Model):
    """A reply-to email address configured for a service."""

    __tablename__ = "service_email_reply_to"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=False,
        index=True,
        nullable=False,
    )
    service = db.relationship(Service, backref=db.backref("reply_to_email_addresses"))

    email_address = db.Column(db.Text, nullable=False, index=False, unique=False)
    is_default = db.Column(db.Boolean, nullable=False, default=True)
    archived = db.Column(db.Boolean, nullable=False, default=False)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )

    def serialize(self):
        """Return a JSON-serializable dict for this reply-to address."""
        return {
            "id": str(self.id),
            "service_id": str(self.service_id),
            "email_address": self.email_address,
            "is_default": self.is_default,
            "archived": self.archived,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class FactBilling(db.Model):
    """Daily billing aggregate.

    One row per (date, template, service, type, provider, rate key)
    combination — the whole business key is the composite primary key.
    """

    __tablename__ = "ft_billing"

    local_date = db.Column(db.Date, nullable=False, primary_key=True, index=True)
    template_id = db.Column(
        UUID(as_uuid=True),
        nullable=False,
        primary_key=True,
        index=True,
    )
    service_id = db.Column(
        UUID(as_uuid=True),
        nullable=False,
        primary_key=True,
        index=True,
    )
    notification_type = db.Column(db.Text, nullable=False, primary_key=True)
    provider = db.Column(db.Text, nullable=False, primary_key=True)
    rate_multiplier = db.Column(db.Integer(), nullable=False, primary_key=True)
    international = db.Column(db.Boolean, nullable=False, primary_key=True)
    rate = db.Column(db.Numeric(), nullable=False, primary_key=True)
    billable_units = db.Column(db.Integer(), nullable=True)
    notifications_sent = db.Column(db.Integer(), nullable=True)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )
class FactNotificationStatus(db.Model):
    """Daily aggregate of notification counts by status.

    Keyed by (date, template, service, job, type, key type, status).
    """

    __tablename__ = "ft_notification_status"

    local_date = db.Column(db.Date, index=True, primary_key=True, nullable=False)
    template_id = db.Column(
        UUID(as_uuid=True),
        primary_key=True,
        index=True,
        nullable=False,
    )
    service_id = db.Column(
        UUID(as_uuid=True),
        primary_key=True,
        index=True,
        nullable=False,
    )
    job_id = db.Column(UUID(as_uuid=True), primary_key=True, index=True, nullable=False)
    notification_type = enum_column(NotificationType, primary_key=True, nullable=False)
    key_type = enum_column(KeyType, primary_key=True, nullable=False)
    notification_status = enum_column(
        NotificationStatus,
        primary_key=True,
        nullable=False,
    )
    notification_count = db.Column(db.Integer(), nullable=False)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )
class FactProcessingTime(db.Model):
    """Daily totals of messages processed and how many within 10 seconds."""

    __tablename__ = "ft_processing_time"

    local_date = db.Column(db.Date, index=True, primary_key=True, nullable=False)
    messages_total = db.Column(db.Integer(), nullable=False)
    messages_within_10_secs = db.Column(db.Integer(), nullable=False)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )
class Complaint(db.Model):
    """A complaint recorded against a notification (carries an SES feedback id)."""

    __tablename__ = "complaints"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    notification_id = db.Column(UUID(as_uuid=True), index=True, nullable=False)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=False,
        index=True,
        nullable=False,
    )
    service = db.relationship(Service, backref=db.backref("complaints"))
    ses_feedback_id = db.Column(db.Text, nullable=True)
    complaint_type = db.Column(db.Text, nullable=True)
    complaint_date = db.Column(db.DateTime, nullable=True)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )

    def serialize(self):
        """Return a JSON-serializable dict for this complaint.

        NOTE(review): ses_feedback_id is nullable, so str() renders a missing
        id as the string 'None' — preserved as-is; confirm callers expect it.
        """
        return {
            "id": str(self.id),
            "notification_id": str(self.notification_id),
            "service_id": str(self.service_id),
            "service_name": self.service.name,
            "ses_feedback_id": str(self.ses_feedback_id),
            "complaint_type": self.complaint_type,
            "complaint_date": get_dt_string_or_none(self.complaint_date),
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
        }
class ServiceDataRetention(db.Model):
    """Per-service, per-notification-type retention period in days.

    (service_id, notification_type) is unique; the rows are exposed on
    Service as the ``data_retention`` dict keyed by notification type.
    """

    __tablename__ = "service_data_retention"

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    service_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("services.id"),
        unique=False,
        index=True,
        nullable=False,
    )
    service = db.relationship(
        Service,
        backref=db.backref(
            "data_retention",
            collection_class=attribute_mapped_collection("notification_type"),
        ),
    )
    notification_type = enum_column(NotificationType, nullable=False)
    days_of_retention = db.Column(db.Integer, nullable=False)
    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )

    __table_args__ = (
        UniqueConstraint(
            "service_id", "notification_type", name="uix_service_data_retention"
        ),
    )

    def serialize(self):
        """Return a JSON-serializable dict for this retention setting."""
        return {
            "id": str(self.id),
            "service_id": str(self.service_id),
            "service_name": self.service.name,
            "notification_type": self.notification_type,
            "days_of_retention": self.days_of_retention,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class WebauthnCredential(db.Model):
    """
    A table that stores data for registered webauthn credentials.
    """

    __tablename__ = "webauthn_credential"

    id = db.Column(
        UUID(as_uuid=True),
        primary_key=True,
        nullable=False,
        default=uuid.uuid4,
    )

    user_id = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), nullable=False)
    user = db.relationship(User, backref=db.backref("webauthn_credentials"))

    name = db.Column(db.String, nullable=False)

    # base64 encoded CBOR. used for logging in. https://w3c.github.io/webauthn/#sctn-attested-credential-data
    credential_data = db.Column(db.String, nullable=False)

    # base64 encoded CBOR. used for auditing. https://www.w3.org/TR/webauthn-2/#authenticatorattestationresponse
    registration_response = db.Column(db.String, nullable=False)

    created_at = db.Column(
        db.DateTime,
        nullable=False,
        # Callable so the timestamp is taken per-INSERT, not at import time.
        default=utc_now,
    )
    updated_at = db.Column(
        db.DateTime,
        nullable=True,
        # Likewise: evaluated on each UPDATE.
        onupdate=utc_now,
    )

    def serialize(self):
        """Return a JSON-serializable dict (registration_response omitted)."""
        return {
            "id": str(self.id),
            "user_id": str(self.user_id),
            "name": self.name,
            "credential_data": self.credential_data,
            "created_at": self.created_at.strftime(DATETIME_FORMAT),
            "updated_at": get_dt_string_or_none(self.updated_at),
        }
class Agreement(db.Model):
    """An agreement with a partner, optionally tied to an organization."""

    __tablename__ = "agreements"

    id = db.Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=False,
    )
    type = enum_column(AgreementType, index=False, unique=False, nullable=False)
    partner_name = db.Column(db.String(255), nullable=False, unique=True, index=True)
    status = enum_column(AgreementStatus, index=False, unique=False, nullable=False)
    start_time = db.Column(db.DateTime, nullable=True)
    end_time = db.Column(db.DateTime, nullable=True)
    url = db.Column(db.String(255), nullable=False, unique=True, index=True)
    budget_amount = db.Column(db.Float, nullable=True)
    organization_id = db.Column(
        UUID(as_uuid=True),
        db.ForeignKey("organization.id"),
        nullable=True,
    )
    organization = db.relationship("Organization", backref="agreements")

    def serialize(self):
        """Return a JSON-serializable dict for this agreement.

        start_time/end_time are nullable, so they are rendered with
        get_dt_string_or_none (calling strftime directly would raise
        AttributeError on None).
        """
        return {
            "id": str(self.id),
            "type": self.type,
            "partner_name": self.partner_name,
            "status": self.status,
            "start_time": get_dt_string_or_none(self.start_time),
            "end_time": get_dt_string_or_none(self.end_time),
            "budget_amount": self.budget_amount,
            # NOTE(review): sibling serializers str() their ids; left as the
            # raw UUID here to preserve the existing payload — confirm callers.
            "organization_id": self.organization_id,
        }