mirror of
https://github.com/GSA/notifications-api.git
synced 2026-02-03 09:51:11 -05:00
Updated backend to include stat endpoint that gives hourly results instead of daily
This commit is contained in:
5
Makefile
5
Makefile
@@ -148,3 +148,8 @@ clean:
|
|||||||
# cf unmap-route notify-api-failwhale ${DNS_NAME} --hostname api
# cf stop notify-api-failwhale
# @echo "Failwhale is disabled"

.PHONY: test-single
# NOTE(review): NEW_RELIC_ENVIRONMENT is exported so the agent runs in test
# mode for this target; invoke as `make test-single TEST_FILE=path/to/test.py`.
test-single: export NEW_RELIC_ENVIRONMENT=test
test-single: ## Run a single test file
	poetry run pytest $(TEST_FILE)
|||||||
@@ -93,3 +93,25 @@ def generate_date_range(start_date, end_date=None, days=0):
|
|||||||
current_date += timedelta(days=1)
|
current_date += timedelta(days=1)
|
||||||
else:
|
else:
|
||||||
return "An end_date or number of days must be specified"
|
return "An end_date or number of days must be specified"
|
||||||
|
|
||||||
|
|
||||||
|
def generate_hourly_range(start_date, end_date=None, hours=0):
    """Yield datetimes at one-hour intervals starting at ``start_date``.

    Exactly one of the optional arguments should be supplied:

    - ``end_date``: yields every hour from ``start_date`` up to and
      *including* ``end_date`` (inclusive bound, matching the sibling
      ``generate_date_range``).
    - ``hours``: yields ``hours`` consecutive hourly timestamps starting
      at ``start_date`` (exclusive upper bound).

    If neither is given (or ``hours`` is 0) the generator yields nothing.

    The original wrapped each ``yield`` in ``try/except ValueError: pass``;
    a bare yield of a datetime cannot raise ValueError, so that handler was
    dead code and has been removed — behavior is unchanged.
    """
    if end_date:
        current_time = start_date
        while current_time <= end_date:
            yield current_time
            current_time += timedelta(hours=1)
    elif hours > 0:
        end_time = start_date + timedelta(hours=hours)
        current_time = start_date
        while current_time < end_time:
            yield current_time
            current_time += timedelta(hours=1)
    else:
        # NOTE: mirrors generate_date_range. A `return` inside a generator
        # just terminates iteration, so callers iterating this generator see
        # an empty sequence; the message below is only reachable via
        # StopIteration.value and is effectively invisible.
        return "An end_date or number of hours must be specified"
|
||||||
|
|||||||
@@ -8,7 +8,11 @@ from sqlalchemy.sql.expression import and_, asc, case, func
|
|||||||
|
|
||||||
from app import db
|
from app import db
|
||||||
from app.dao.dao_utils import VersionOptions, autocommit, version_class
|
from app.dao.dao_utils import VersionOptions, autocommit, version_class
|
||||||
from app.dao.date_util import generate_date_range, get_current_calendar_year
|
from app.dao.date_util import (
|
||||||
|
generate_date_range,
|
||||||
|
generate_hourly_range,
|
||||||
|
get_current_calendar_year,
|
||||||
|
)
|
||||||
from app.dao.organization_dao import dao_get_organization_by_email_address
|
from app.dao.organization_dao import dao_get_organization_by_email_address
|
||||||
from app.dao.service_sms_sender_dao import insert_service_sms_sender
|
from app.dao.service_sms_sender_dao import insert_service_sms_sender
|
||||||
from app.dao.service_user_dao import dao_get_service_user
|
from app.dao.service_user_dao import dao_get_service_user
|
||||||
@@ -522,6 +526,68 @@ def dao_fetch_stats_for_service_from_days(service_id, start_date, end_date):
|
|||||||
|
|
||||||
return total_notifications, data
|
return total_notifications, data
|
||||||
|
|
||||||
|
def dao_fetch_stats_for_service_from_hours(service_id, start_date, end_date):
    """Fetch per-hour notification statistics for one service.

    Hourly analogue of ``dao_fetch_stats_for_service_from_days``: both
    queries group by ``date_trunc("hour", created_at)`` instead of by day.

    Returns a tuple ``(total_notifications, data)`` where
    ``total_notifications`` maps each hour (truncated datetime) to the
    summed job notification counts for that hour, and ``data`` is the raw
    result rows of (notification_type, status, hour, count).
    Test-key traffic is excluded from both queries.
    """
    # Snap the window to whole days in UTC; end_date is inclusive, so the
    # upper bound is midnight of the following day.
    start_date = get_midnight_in_utc(start_date)
    end_date = get_midnight_in_utc(end_date + timedelta(days=1))

    # Per-job notification counts, bucketed by the hour of creation.
    total_substmt = (
        select(
            func.date_trunc("hour", NotificationAllTimeView.created_at).label("hour"),
            Job.notification_count.label("notification_count"),
        )
        .join(Job, NotificationAllTimeView.job_id == Job.id)
        .where(
            NotificationAllTimeView.service_id == service_id,
            NotificationAllTimeView.key_type != KeyType.TEST,
            NotificationAllTimeView.created_at >= start_date,
            NotificationAllTimeView.created_at < end_date,
        )
        .group_by(
            Job.id,
            Job.notification_count,
            func.date_trunc("hour", NotificationAllTimeView.created_at),
        )
        .subquery()
    )

    # Collapse the per-job rows into a single total per hour.
    total_stmt = select(
        total_substmt.c.hour,
        func.sum(total_substmt.c.notification_count).label("total_notifications"),
    ).group_by(total_substmt.c.hour)

    # hour -> total notifications requested in that hour.
    total_notifications = {
        row.hour: row.total_notifications for row in db.session.execute(total_stmt).all()
    }

    # Per (notification_type, status, hour) counts over the same window.
    stmt = (
        select(
            NotificationAllTimeView.notification_type,
            NotificationAllTimeView.status,
            func.date_trunc("hour", NotificationAllTimeView.created_at).label("hour"),
            func.count(NotificationAllTimeView.id).label("count"),
        )
        .where(
            NotificationAllTimeView.service_id == service_id,
            NotificationAllTimeView.key_type != KeyType.TEST,
            NotificationAllTimeView.created_at >= start_date,
            NotificationAllTimeView.created_at < end_date,
        )
        .group_by(
            NotificationAllTimeView.notification_type,
            NotificationAllTimeView.status,
            func.date_trunc("hour", NotificationAllTimeView.created_at),
        )
    )

    data = db.session.execute(stmt).all()

    return total_notifications, data
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def dao_fetch_stats_for_service_from_days_for_user(
|
def dao_fetch_stats_for_service_from_days_for_user(
|
||||||
service_id, start_date, end_date, user_id
|
service_id, start_date, end_date, user_id
|
||||||
@@ -827,3 +893,37 @@ def get_specific_days_stats(
|
|||||||
for day, rows in grouped_data.items()
|
for day, rows in grouped_data.items()
|
||||||
}
|
}
|
||||||
return stats
|
return stats
|
||||||
|
|
||||||
|
|
||||||
|
def get_specific_hours_stats(data, start_date, hours=None, end_date=None, total_notifications=None):
    """Aggregate raw notification rows into per-hour formatted statistics.

    Exactly one of ``hours`` or ``end_date`` must be given; the hourly
    range starts at ``start_date``. The result maps an ISO-8601 hour
    string (``YYYY-MM-DDTHH:00:00Z``) to formatted statistics. Hours with
    no matching rows are omitted from the result.

    Raises:
        ValueError: if both, or neither, of ``hours``/``end_date`` are set.
    """
    if hours is not None and end_date is not None:
        raise ValueError("Only set hours OR set end_date, not both.")
    elif hours is not None:
        gen_range = [start_date + timedelta(hours=i) for i in range(hours)]
    elif end_date is not None:
        gen_range = generate_hourly_range(start_date, end_date=end_date)
    else:
        raise ValueError("Either hours or end_date must be set.")

    # Pre-seed a bucket for every hour in the range so grouping is a single
    # pass; buckets left empty are filtered out again when formatting below.
    grouped_data = {hour: [] for hour in gen_range}

    # Normalize each row's timestamp to the top of its hour and bucket it.
    # Rows whose hour falls outside the requested range are ignored.
    for row in data:
        row_hour = row.timestamp.replace(minute=0, second=0, microsecond=0)
        if row_hour in grouped_data:
            grouped_data[row_hour].append(row)

    # ``total_notifications`` maps hour -> requested count; default to {}.
    total_notifications = total_notifications or {}

    # Format statistics, keeping only hours that have at least one row.
    stats = {
        hour.strftime("%Y-%m-%dT%H:00:00Z"): statistics.format_statistics(
            rows,
            total_notifications.get(hour, 0)
        )
        for hour, rows in grouped_data.items() if rows
    }

    return stats
|
||||||
|
|||||||
@@ -67,6 +67,7 @@ from app.dao.services_dao import (
|
|||||||
dao_fetch_service_by_id,
|
dao_fetch_service_by_id,
|
||||||
dao_fetch_stats_for_service_from_days,
|
dao_fetch_stats_for_service_from_days,
|
||||||
dao_fetch_stats_for_service_from_days_for_user,
|
dao_fetch_stats_for_service_from_days_for_user,
|
||||||
|
dao_fetch_stats_for_service_from_hours,
|
||||||
dao_fetch_todays_stats_for_all_services,
|
dao_fetch_todays_stats_for_all_services,
|
||||||
dao_fetch_todays_stats_for_service,
|
dao_fetch_todays_stats_for_service,
|
||||||
dao_remove_user_from_service,
|
dao_remove_user_from_service,
|
||||||
@@ -76,6 +77,7 @@ from app.dao.services_dao import (
|
|||||||
fetch_notification_stats_for_service_by_month_by_user,
|
fetch_notification_stats_for_service_by_month_by_user,
|
||||||
get_services_by_partial_name,
|
get_services_by_partial_name,
|
||||||
get_specific_days_stats,
|
get_specific_days_stats,
|
||||||
|
get_specific_hours_stats,
|
||||||
)
|
)
|
||||||
from app.dao.templates_dao import dao_get_template_by_id
|
from app.dao.templates_dao import dao_get_template_by_id
|
||||||
from app.dao.users_dao import get_user_by_id
|
from app.dao.users_dao import get_user_by_id
|
||||||
@@ -227,26 +229,38 @@ def get_service_notification_statistics_by_day(service_id, start, days):
|
|||||||
|
|
||||||
|
|
||||||
def get_service_statistics_for_specific_days(service_id, start, days=1):
    """Return hourly notification statistics for a service over a day range.

    ``start`` (``YYYY-MM-DD``) is the *end* of the window: the range runs
    ``days - 1`` days back from it, so the supplied day itself is included.

    Despite the function's historical name, results are now bucketed by
    hour (``days`` is converted to ``days * 24`` hours).
    """
    # start and end dates are reversed because the end date is "today" and
    # the start is `days` in the past; one day is subtracted so that today
    # counts as part of the range.
    end_date = datetime.strptime(start, "%Y-%m-%d")
    start_date = end_date - timedelta(days=days - 1)

    # Fetch hourly-granularity stats from the DB.
    total_notifications, results = dao_fetch_stats_for_service_from_hours(
        service_id,
        start_date,
        end_date,
    )

    # Express the requested window in hours (1 day = 24 hours).
    hours = days * 24

    # Debug `print` statements (with emoji) were removed from this handler;
    # use current_app.logger if diagnostic output is needed.
    stats = get_specific_hours_stats(
        results,
        start_date,
        hours=hours,
        total_notifications=total_notifications,
    )

    return stats
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@service_blueprint.route(
|
@service_blueprint.route(
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from app.dao.date_util import (
|
|||||||
get_calendar_year_for_datetime,
|
get_calendar_year_for_datetime,
|
||||||
get_month_start_and_end_date_in_utc,
|
get_month_start_and_end_date_in_utc,
|
||||||
get_new_years,
|
get_new_years,
|
||||||
|
generate_hourly_range,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -75,3 +76,44 @@ def test_get_month_start_and_end_date_in_utc(month, year, expected_start, expect
|
|||||||
)
|
)
|
||||||
def test_get_calendar_year_for_datetime(dt, fy):
|
def test_get_calendar_year_for_datetime(dt, fy):
|
||||||
assert get_calendar_year_for_datetime(dt) == fy
|
assert get_calendar_year_for_datetime(dt) == fy
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_hourly_range_with_end_date():
    """An inclusive end_date yields every hour from start through end."""
    start = datetime(2025, 2, 18, 12, 0)
    end = datetime(2025, 2, 18, 15, 0)

    expected = [datetime(2025, 2, 18, hour, 0) for hour in range(12, 16)]
    actual = list(generate_hourly_range(start, end_date=end))

    assert actual == expected, f"Expected {expected}, but got {actual}"
|
||||||
|
|
||||||
|
def test_generate_hourly_range_with_hours():
    """hours=N yields exactly N consecutive hourly timestamps."""
    start = datetime(2025, 2, 18, 12, 0)

    actual = list(generate_hourly_range(start, hours=3))
    expected = [datetime(2025, 2, 18, 12 + i, 0) for i in range(3)]

    assert actual == expected, f"Expected {expected}, but got {actual}"
|
||||||
|
|
||||||
|
def test_generate_hourly_range_with_zero_hours():
    """hours=0 with no end_date produces no timestamps at all."""
    result = list(generate_hourly_range(datetime(2025, 2, 18, 12, 0), hours=0))

    assert result == [], f"Expected an empty list, but got {result}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_hourly_range_with_end_date_before_start():
    """A range whose end precedes its start is empty."""
    start = datetime(2025, 2, 18, 12, 0)
    earlier_end = datetime(2025, 2, 18, 10, 0)

    result = list(generate_hourly_range(start, end_date=earlier_end))

    assert result == [], f"Expected empty list, but got {result}"
|
||||||
|
|||||||
109
tests/app/dao/test_services_get_specific_hours.py
Normal file
109
tests/app/dao/test_services_get_specific_hours.py
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from unittest.mock import Mock
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from app.dao.services_dao import get_specific_hours_stats
|
||||||
|
from app.enums import StatisticsType
|
||||||
|
from app.models import TemplateType
|
||||||
|
|
||||||
|
|
||||||
|
def generate_expected_hourly_output(requested_sms_hours):
    """
    Build the expected stats dict for hours containing one requested SMS.

    Hours with no notifications are omitted entirely, matching the
    behavior of get_specific_hours_stats.
    """
    zero_counts = {
        StatisticsType.REQUESTED: 0,
        StatisticsType.DELIVERED: 0,
        StatisticsType.FAILURE: 0,
        StatisticsType.PENDING: 0,
    }
    return {
        hour: {
            TemplateType.SMS: {**zero_counts, StatisticsType.REQUESTED: 1},
            TemplateType.EMAIL: dict(zero_counts),
        }
        for hour in requested_sms_hours
    }
|
||||||
|
|
||||||
|
|
||||||
|
def create_mock_notification(notification_type, status, timestamp, count=1):
    """
    Build a Mock shaped like a notification row: type, status, a timestamp
    truncated to the top of its hour, and a count (default 1).
    """
    # Truncate to the containing hour, matching how real rows are grouped.
    normalized = timestamp.replace(minute=0, second=0, microsecond=0)
    return Mock(
        notification_type=notification_type,
        status=status,
        timestamp=normalized,
        count=count,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# (mocked_notifications, start_date, hours, expected_output) tuples
# consumed by the parametrized test below.
test_cases = [
    # Single notification at 14:15 -> only the 14:00 bucket appears.
    (
        [create_mock_notification(
            TemplateType.SMS,
            StatisticsType.REQUESTED,
            datetime(2025, 2, 18, 14, 15, 0),
        )],
        datetime(2025, 2, 18, 12, 0),
        6,
        generate_expected_hourly_output(
            ["2025-02-18T14:00:00Z"]
        ),
    ),
    # Notification at 17:59:59 -> bucketed into the 17:00 hour.
    (
        [create_mock_notification(
            TemplateType.SMS,
            StatisticsType.REQUESTED,
            datetime(2025, 2, 18, 17, 59, 59),
        )],
        datetime(2025, 2, 18, 15, 0),
        3,
        generate_expected_hourly_output(
            ["2025-02-18T17:00:00Z"]
        ),
    ),
    # No notifications at all -> expect an empty dict.
    (
        [],
        datetime(2025, 2, 18, 10, 0),
        4,
        {},
    ),
    # Two notifications at 09:30 and 11:45 -> only those hours expected.
    (
        [
            create_mock_notification(TemplateType.SMS, StatisticsType.REQUESTED, datetime(2025, 2, 18, 9, 30, 0)),
            create_mock_notification(TemplateType.SMS, StatisticsType.REQUESTED, datetime(2025, 2, 18, 11, 45, 0)),
        ],
        datetime(2025, 2, 18, 8, 0),
        5,
        generate_expected_hourly_output(
            ["2025-02-18T09:00:00Z", "2025-02-18T11:00:00Z"]
        ),
    ),
]
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
    "mocked_notifications, start_date, hours, expected_output",
    test_cases,
)
def test_get_specific_hours(mocked_notifications, start_date, hours, expected_output):
    """
    get_specific_hours_stats should bucket notifications by hour and omit
    hours that received none.
    """
    actual = get_specific_hours_stats(mocked_notifications, start_date, hours=hours)

    assert actual == expected_output, f"Expected {expected_output}, but got {actual}"
|
||||||
Reference in New Issue
Block a user