2024-10-18 12:40:14 -07:00
|
|
|
from flask import current_app
|
2024-10-18 08:12:22 -07:00
|
|
|
from sqlalchemy import and_, delete, desc, func, select
|
2019-12-17 18:04:22 +00:00
|
|
|
from sqlalchemy.dialects.postgresql import insert
|
2024-10-17 12:27:17 -07:00
|
|
|
from sqlalchemy.orm import aliased
|
2017-06-02 12:21:12 +01:00
|
|
|
|
2017-05-22 11:26:47 +01:00
|
|
|
from app import db
|
2021-04-14 07:11:01 +01:00
|
|
|
from app.dao.dao_utils import autocommit
|
2024-01-10 12:32:25 -05:00
|
|
|
from app.enums import NotificationType
|
2024-04-24 16:27:20 -04:00
|
|
|
from app.models import InboundSms, InboundSmsHistory, ServiceDataRetention
|
2019-02-26 17:57:55 +00:00
|
|
|
from app.utils import midnight_n_days_ago
|
2017-05-22 11:26:47 +01:00
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
def dao_create_inbound_sms(inbound_sms):
    """Persist a new InboundSms row.

    The row is only added to the session here; the surrounding
    ``@autocommit`` decorator (app.dao.dao_utils) is presumably
    responsible for committing or rolling back — confirm there.
    """
    db.session.add(inbound_sms)
|
2017-05-31 14:49:14 +01:00
|
|
|
|
|
|
|
|
|
2023-08-29 14:54:30 -07:00
|
|
|
def dao_get_inbound_sms_for_service(
    service_id, user_number=None, *, limit_days=None, limit=None
):
    """Return inbound SMS rows for a service, newest first.

    Optional filters: ``user_number`` restricts rows to a single sender,
    ``limit_days`` keeps only rows created since midnight N days ago, and
    ``limit`` caps how many rows are returned.
    """
    conditions = [InboundSms.service_id == service_id]

    if limit_days is not None:
        conditions.append(InboundSms.created_at >= midnight_n_days_ago(limit_days))

    if user_number:
        conditions.append(InboundSms.user_number == user_number)

    stmt = (
        select(InboundSms)
        .where(*conditions)
        .order_by(InboundSms.created_at.desc())
    )

    if limit:
        stmt = stmt.limit(limit)

    return db.session.execute(stmt).scalars().all()
|
2017-05-31 14:49:14 +01:00
|
|
|
|
|
|
|
|
|
2018-03-22 12:41:17 +00:00
|
|
|
def dao_get_paginated_inbound_sms_for_service_for_public_api(
    service_id, older_than=None, page_size=None
):
    """Return one page of inbound SMS for a service, newest first.

    :param service_id: service whose messages are returned.
    :param older_than: optional InboundSms id; when given, only rows
        created strictly before that row's ``created_at`` are returned
        (keyset-style pagination used by the public API).
    :param page_size: rows per page; defaults to the app's ``PAGE_SIZE``
        config value.
    :return: a ``Pagination`` holding the page of rows and the total
        count of rows matching the filters.
    """
    if page_size is None:
        page_size = current_app.config["PAGE_SIZE"]

    filters = [InboundSms.service_id == service_id]

    if older_than:
        # Resolve the anchor row's created_at inside the query itself
        # (scalar subquery) instead of fetching it in a separate round trip.
        older_than_created_at = (
            db.session.query(InboundSms.created_at)
            .where(InboundSms.id == older_than)
            .scalar_subquery()
        )
        filters.append(InboundSms.created_at < older_than_created_at)

    # NOTE(review): page is fixed at 1 because callers advance through
    # results via `older_than`, not a page number — so the offset below is
    # always 0.  Kept for symmetry with the Pagination constructor.
    page = 1
    offset = (page - 1) * page_size
    # As part of the move to sqlalchemy 2.0, we do this manual pagination
    stmt = (
        select(InboundSms)
        .where(*filters)
        .order_by(desc(InboundSms.created_at))
        .limit(page_size)
        .offset(offset)
    )

    paginated_items = db.session.execute(stmt).scalars().all()
    # Total count over the same filters (FROM is inferred from the filter
    # columns); `or 0` guards against a None scalar.
    total_items = db.session.execute(select(func.count()).where(*filters)).scalar() or 0
    pagination = Pagination(paginated_items, page, page_size, total_items)
    return pagination
|
2017-11-06 18:19:02 +00:00
|
|
|
|
|
|
|
|
|
2019-03-27 17:44:53 +00:00
|
|
|
def dao_count_inbound_sms_for_service(service_id, limit_days):
    """Count inbound SMS received by a service since midnight ``limit_days`` days ago."""
    cutoff = midnight_n_days_ago(limit_days)
    count_stmt = (
        select(func.count())
        .select_from(InboundSms)
        .where(
            InboundSms.service_id == service_id,
            InboundSms.created_at >= cutoff,
        )
    )
    return db.session.execute(count_stmt).scalar()
|
2017-06-02 12:21:12 +01:00
|
|
|
|
|
|
|
|
|
2019-12-24 09:29:27 +00:00
|
|
|
def _insert_inbound_sms_history(subquery, query_limit=10000):
    """Copy the inbound_sms rows selected by *subquery* into inbound_sms_history.

    Rows are archived in batches of ``query_limit`` via
    ``INSERT ... SELECT`` with LIMIT/OFFSET pagination; rows already
    present in the history table are skipped through the table's
    primary-key constraint.

    :param subquery: subquery yielding the ids of the inbound_sms rows to
        archive.
    :param query_limit: batch size for each INSERT ... SELECT.
    """
    offset = 0
    subquery_select = select(subquery)
    inbound_sms_stmt = (
        select(
            InboundSms.id,
            InboundSms.created_at,
            InboundSms.service_id,
            InboundSms.notify_number,
            InboundSms.provider_date,
            InboundSms.provider_reference,
            InboundSms.provider,
        )
        .where(InboundSms.id.in_(subquery_select))
        # LIMIT/OFFSET pagination is only stable with a deterministic
        # ordering: without an ORDER BY, Postgres may return rows in a
        # different order on each page, skipping or duplicating rows
        # across batches.
        .order_by(InboundSms.id)
    )

    count_query = select(func.count()).select_from(inbound_sms_stmt.subquery())
    inbound_sms_count = db.session.execute(count_query).scalar() or 0

    while offset < inbound_sms_count:
        statement = insert(InboundSmsHistory).from_select(
            InboundSmsHistory.__table__.c,
            inbound_sms_stmt.limit(query_limit).offset(offset),
        )

        # Rows archived by a previous (possibly interrupted) run are
        # silently skipped rather than raising on the duplicate key.
        statement = statement.on_conflict_do_nothing(
            constraint="inbound_sms_history_pkey"
        )
        db.session.execute(statement)
        # Commit per batch so progress survives interruption.
        db.session.commit()

        offset += query_limit
|
|
|
|
|
|
|
|
|
|
|
2019-02-26 17:57:55 +00:00
|
|
|
def _delete_inbound_sms(datetime_to_delete_from, query_filter):
    """Archive then delete inbound_sms rows older than a cutoff, in batches.

    :param datetime_to_delete_from: rows with ``created_at`` strictly
        before this datetime are removed.
    :param query_filter: list of additional SQLAlchemy filter clauses
        (e.g. restricting to one service).
    :return: total number of rows deleted.
    """
    query_limit = 10000

    # Id-selecting subquery, capped at one batch.  It is re-evaluated by
    # the database on every loop iteration, so each pass picks up the
    # next batch of (not yet deleted) matching rows.
    subquery = (
        select(InboundSms.id)
        .where(InboundSms.created_at < datetime_to_delete_from, *query_filter)
        .limit(query_limit)
        .subquery()
    )

    deleted = 0
    # set to nonzero just to enter the loop
    number_deleted = 1
    while number_deleted > 0:
        # Archive the current batch to inbound_sms_history before
        # deleting it, so no data is lost if the delete fails.
        _insert_inbound_sms_history(subquery, query_limit=query_limit)

        stmt = delete(InboundSms).where(InboundSms.id.in_(select(subquery.c.id)))
        number_deleted = db.session.execute(stmt).rowcount
        # Commit per batch; the loop ends once a pass deletes nothing.
        db.session.commit()
        deleted += number_deleted

    return deleted
|
|
|
|
|
|
|
|
|
|
|
2021-04-14 07:11:01 +01:00
|
|
|
@autocommit
def delete_inbound_sms_older_than_retention():
    """Archive and delete inbound SMS past each service's retention window.

    Services with a flexible SMS data-retention setting use their
    configured number of days; every other service falls back to seven
    days.  Returns the total number of rows deleted.
    """
    current_app.logger.info(
        "Deleting inbound sms for services with flexible data retention"
    )

    retention_stmt = (
        select(ServiceDataRetention)
        .join(ServiceDataRetention.service)
        .where(ServiceDataRetention.notification_type == NotificationType.SMS)
    )
    flexible_data_retention = db.session.execute(retention_stmt).scalars().all()

    deleted = 0

    for retention in flexible_data_retention:
        current_app.logger.info(
            "Deleting inbound sms for service id: {}".format(retention.service_id)
        )
        deleted += _delete_inbound_sms(
            midnight_n_days_ago(retention.days_of_retention),
            query_filter=[InboundSms.service_id == retention.service_id],
        )

    current_app.logger.info(
        "Deleting inbound sms for services without flexible data retention"
    )

    # Everyone else gets the default seven-day window.
    flexible_service_ids = [x.service_id for x in flexible_data_retention]
    deleted += _delete_inbound_sms(
        midnight_n_days_ago(7),
        query_filter=[
            InboundSms.service_id.notin_(flexible_service_ids),
        ],
    )

    current_app.logger.info("Deleted {} inbound sms".format(deleted))

    return deleted
|
2019-12-17 18:04:22 +00:00
|
|
|
|
|
|
|
|
|
2017-06-07 14:23:31 +01:00
|
|
|
def dao_get_inbound_sms_by_id(service_id, inbound_id):
    """Fetch a single inbound SMS by id, scoped to the given service.

    Raises if no matching row (or more than one) exists, per
    ``Result.one()`` semantics.
    """
    query = (
        select(InboundSms)
        .where(InboundSms.id == inbound_id)
        .where(InboundSms.service_id == service_id)
    )
    return db.session.execute(query).scalars().one()
|
2018-04-04 16:59:48 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service(
    service_id, page, limit_days
):
    """
    Return one page of the most recent inbound SMS per user_number.

    This query starts from inbound_sms and joins on to itself to find the most recent row for each user_number.

    Equivalent sql:

    SELECT t1.*
    FROM inbound_sms t1
    LEFT OUTER JOIN inbound_sms AS t2 ON (
        -- identifying
        t1.user_number = t2.user_number AND
        t1.service_id = t2.service_id AND
        -- ordering
        t1.created_at < t2.created_at
    )
    WHERE t2.id IS NULL AND t1.service_id = :service_id
    ORDER BY t1.created_at DESC;
    LIMIT 50 OFFSET :page
    """
    t2 = aliased(InboundSms)
    q = (
        select(InboundSms)
        .outerjoin(
            t2,
            and_(
                InboundSms.user_number == t2.user_number,
                InboundSms.service_id == t2.service_id,
                # A row survives the anti-join only if no newer row
                # exists for the same (user_number, service_id).
                InboundSms.created_at < t2.created_at,
            ),
        )
        .where(
            t2.id.is_(None),  # noqa
            InboundSms.service_id == service_id,
            InboundSms.created_at >= midnight_n_days_ago(limit_days),
        )
        .order_by(InboundSms.created_at.desc())
    )
    # NOTE(review): unlike the SQL sketch above, this loads *every*
    # matching row and slices in Python; worth pushing LIMIT/OFFSET into
    # SQL if inbound volumes grow.
    result = db.session.execute(q).scalars().all()

    page_size = current_app.config["PAGE_SIZE"]
    offset = (page - 1) * page_size
    paginated_results = result[offset : offset + page_size]
    pagination = Pagination(paginated_results, page, page_size, len(result))
    return pagination
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# TODO remove this when billing dao PR is merged.
class Pagination:
    """Minimal stand-in for Flask-SQLAlchemy's Pagination object.

    Holds one page of ``items`` plus enough bookkeeping (page number,
    page size, total matching rows) to derive neighbouring page numbers.
    """

    def __init__(self, items, page, per_page, total):
        self.items = items
        self.page = page
        self.per_page = per_page
        self.total = total
        # Ceiling division: pages needed to hold `total` rows.
        self.pages = -(-total // per_page)
        self.prev_num = None if page <= 1 else page - 1
        self.next_num = None if page >= self.pages else page + 1

    def has_next(self):
        """Return True when a further page of results exists."""
        return self.page < self.pages

    def has_prev(self):
        """Return True when this is not the first page."""
        return self.page > 1
|