from flask import current_app
from sqlalchemy import and_, delete, desc, func, select
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import aliased

from app import db
from app.dao.dao_utils import autocommit
from app.enums import NotificationType
from app.models import InboundSms, InboundSmsHistory, ServiceDataRetention
from app.utils import midnight_n_days_ago


@autocommit
def dao_create_inbound_sms(inbound_sms):
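    """Add a new InboundSms row to the session (committed by @autocommit)."""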
    db.session.add(inbound_sms)


def dao_get_inbound_sms_for_service(
    service_id, user_number=None, *, limit_days=None, limit=None
):
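    """Return inbound sms for a service, most recent first.

    Optionally filter to a single sender's user_number, restrict to messages
    received since midnight limit_days days ago, and cap the number of rows.
    """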
|
2024-10-17 12:24:23 -07:00
|
|
|
q = (
|
|
|
|
|
select(InboundSms)
|
|
|
|
|
.filter(InboundSms.service_id == service_id)
|
|
|
|
|
.order_by(InboundSms.created_at.desc())
|
2017-05-31 14:49:14 +01:00
|
|
|
)
|
2019-02-28 13:59:28 +00:00
|
|
|
if limit_days is not None:
|
|
|
|
|
start_date = midnight_n_days_ago(limit_days)
|
2019-02-27 17:17:35 +00:00
|
|
|
q = q.filter(InboundSms.created_at >= start_date)
|
2017-05-31 14:49:14 +01:00
|
|
|
|
|
|
|
|
if user_number:
|
|
|
|
|
q = q.filter(InboundSms.user_number == user_number)
|
|
|
|
|
|
|
|
|
|
if limit:
|
|
|
|
|
q = q.limit(limit)
|
|
|
|
|
|
2024-10-17 12:24:23 -07:00
|
|
|
return db.session.execute(q).scalars().all()


def dao_get_paginated_inbound_sms_for_service_for_public_api(
    service_id, older_than=None, page_size=None
):
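    """Return a page of inbound sms for a service, most recent first.

    If older_than is the id of an existing inbound sms, only messages
    created before that one are returned, so API clients can page
    backwards through their received messages.
    """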
    if page_size is None:
        page_size = current_app.config["PAGE_SIZE"]

    filters = [InboundSms.service_id == service_id]

    if older_than:
        older_than_created_at = (
            db.session.query(InboundSms.created_at)
            .filter(InboundSms.id == older_than)
            .scalar_subquery()
        )
        filters.append(InboundSms.created_at < older_than_created_at)
    # As part of the move to sqlalchemy 2.0, we do this manual pagination.
    # 1.4 had a paginate() method which assumed 'page' was 1 if it was not
    # specified, so we set page to 1 here to mimic that.
    page = 1
    query = db.session.query(InboundSms).filter(*filters)
    paginated_items = (
        query.order_by(desc(InboundSms.created_at))
        .offset((page - 1) * page_size)
        .limit(page_size)
    )
    return paginated_items


def dao_count_inbound_sms_for_service(service_id, limit_days):
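    """Count inbound sms received by a service since midnight limit_days days ago."""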
    stmt = (
        select(func.count())
        .select_from(InboundSms)
        .filter(
            InboundSms.service_id == service_id,
            InboundSms.created_at >= midnight_n_days_ago(limit_days),
        )
    )
    result = db.session.execute(stmt).scalar()
    return result


def _insert_inbound_sms_history(subquery, query_limit=10000):
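    """Copy the InboundSms rows selected by subquery into InboundSmsHistory.

    Rows are copied in batches of query_limit via INSERT ... SELECT;
    on_conflict_do_nothing means rows that were already archived are
    skipped rather than raising on the primary key constraint.
    """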
    offset = 0
    inbound_sms_query = db.session.query(
        InboundSms.id,
        InboundSms.created_at,
        InboundSms.service_id,
        InboundSms.notify_number,
        InboundSms.provider_date,
        InboundSms.provider_reference,
        InboundSms.provider,
    ).filter(InboundSms.id.in_(subquery))
    inbound_sms_count = inbound_sms_query.count()

    while offset < inbound_sms_count:
        statement = insert(InboundSmsHistory).from_select(
            InboundSmsHistory.__table__.c,
            inbound_sms_query.limit(query_limit).offset(offset),
        )

        statement = statement.on_conflict_do_nothing(
            constraint="inbound_sms_history_pkey"
        )
        db.session.connection().execute(statement)

        offset += query_limit


def _delete_inbound_sms(datetime_to_delete_from, query_filter):
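    """Archive then delete inbound sms received before datetime_to_delete_from.

    Processes matching rows in batches of up to 10000 ids: each batch is
    copied into InboundSmsHistory and then deleted, looping until no rows
    match. Returns the total number of rows deleted.
    """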
    query_limit = 10000

    subquery = (
        select(InboundSms.id)
        .filter(InboundSms.created_at < datetime_to_delete_from, *query_filter)
        .limit(query_limit)
        .subquery()
    )

    deleted = 0
    # set to nonzero just to enter the loop
    number_deleted = 1
    while number_deleted > 0:
        _insert_inbound_sms_history(subquery, query_limit=query_limit)

        stmt = delete(InboundSms).filter(InboundSms.id.in_(subquery))
        number_deleted = db.session.execute(stmt).rowcount
        db.session.commit()
        deleted += number_deleted

    return deleted


@autocommit
def delete_inbound_sms_older_than_retention():
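    """Delete inbound sms that are past their retention period.

    Services with a flexible sms data retention setting keep messages for
    their configured days_of_retention; all other services default to
    seven days. Returns the total number of rows deleted.
    """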
    current_app.logger.info(
        "Deleting inbound sms for services with flexible data retention"
    )

    stmt = (
        select(ServiceDataRetention)
        .join(ServiceDataRetention.service)
        .filter(ServiceDataRetention.notification_type == NotificationType.SMS)
    )
    flexible_data_retention = db.session.execute(stmt).scalars().all()

    deleted = 0

    for f in flexible_data_retention:
        n_days_ago = midnight_n_days_ago(f.days_of_retention)

        current_app.logger.info(
            "Deleting inbound sms for service id: {}".format(f.service_id)
        )
        deleted += _delete_inbound_sms(
            n_days_ago, query_filter=[InboundSms.service_id == f.service_id]
        )

    current_app.logger.info(
        "Deleting inbound sms for services without flexible data retention"
    )

    seven_days_ago = midnight_n_days_ago(7)

    deleted += _delete_inbound_sms(
        seven_days_ago,
        query_filter=[
            InboundSms.service_id.notin_(
                [x.service_id for x in flexible_data_retention]
            ),
        ],
    )

    current_app.logger.info("Deleted {} inbound sms".format(deleted))

    return deleted


def dao_get_inbound_sms_by_id(service_id, inbound_id):
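    """Return the inbound sms with the given id for the given service.

    Raises NoResultFound if there is no matching row.
    """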
    stmt = select(InboundSms).filter_by(id=inbound_id, service_id=service_id)
    return db.session.execute(stmt).scalars().one()


def dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service(
    service_id, page, limit_days
):
    """
    This query starts from inbound_sms and joins on to itself to find the
    most recent row for each user_number.

    Equivalent sql:

    SELECT t1.*
    FROM inbound_sms t1
    LEFT OUTER JOIN inbound_sms AS t2 ON (
        -- identifying
        t1.user_number = t2.user_number AND
        t1.service_id = t2.service_id AND
        -- ordering
        t1.created_at < t2.created_at
    )
    WHERE t2.id IS NULL AND t1.service_id = :service_id
    ORDER BY t1.created_at DESC
    LIMIT :page_size OFFSET (:page - 1) * :page_size;
    """
    t2 = aliased(InboundSms)
    q = (
        db.session.query(InboundSms)
        .outerjoin(
            t2,
            and_(
                InboundSms.user_number == t2.user_number,
                InboundSms.service_id == t2.service_id,
                InboundSms.created_at < t2.created_at,
            ),
        )
        .filter(
            t2.id.is_(None),
            InboundSms.service_id == service_id,
            InboundSms.created_at >= midnight_n_days_ago(limit_days),
        )
        .order_by(InboundSms.created_at.desc())
    )

    return q.paginate(page=page, per_page=current_app.config["PAGE_SIZE"])