Merge branch 'master' of https://github.com/alphagov/notifications-api into vb-free-sms-limit-history
@@ -100,10 +100,10 @@ def _transform_billing_for_month(billing_for_month):
 @billing_blueprint.route('/free-sms-fragment-limit/current-year', methods=["GET"])
 def get_free_sms_fragment_limit(service_id):
 
-    financial_year_start = request.args.get('financial_year_start')
-
     if request.path.split('/')[-1] == 'current-year':
         financial_year_start = get_current_financial_year_start_year()
+    else:
+        financial_year_start = request.args.get('financial_year_start')
 
     if financial_year_start is None:
         results = dao_get_all_free_sms_fragment_limit(service_id)
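A minimal standalone sketch of the year-selection behaviour this hunk rearranges (the helper name choose_financial_year and the literal paths are illustrative only, not part of the codebase): the /current-year route ignores the query string, while the plain route reads financial_year_start from it.

def choose_financial_year(path, args, current_year):
    # Mirror of the branch above: the '/current-year' suffix wins,
    # otherwise fall back to the query-string value (possibly None).
    if path.split('/')[-1] == 'current-year':
        return current_year
    return args.get('financial_year_start')

assert choose_financial_year('/billing/free-sms-fragment-limit/current-year', {}, 2017) == 2017
assert choose_financial_year('/billing/free-sms-fragment-limit', {'financial_year_start': '2016'}, 2017) == '2016'
assert choose_financial_year('/billing/free-sms-fragment-limit', {}, 2017) is None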
@@ -122,7 +122,12 @@ def get_free_sms_fragment_limit(service_id):
 @billing_blueprint.route('/free-sms-fragment-limit', methods=["POST"])
 def create_or_update_free_sms_fragment_limit(service_id):
 
-    form = validate(request.get_json(), create_or_update_free_sms_fragment_limit_schema)
+    dict_arg = request.get_json()
+
+    if 'financial_year_start' not in dict_arg:
+        dict_arg['financial_year_start'] = get_current_financial_year_start_year()
+
+    form = validate(dict_arg, create_or_update_free_sms_fragment_limit_schema)
 
     financial_year_start = form.get('financial_year_start')
     free_sms_fragment_limit = form.get('free_sms_fragment_limit')
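A rough sketch of the defaulting the POST handler gains here. The helper below assumes the financial year rolls over in April; apply_default_financial_year and current_financial_year_start_year are illustrative stand-ins, not the app's get_current_financial_year_start_year.

from datetime import date

def current_financial_year_start_year(today=None):
    # Assumption for this sketch only: the financial year starts in April.
    today = today or date.today()
    return today.year if today.month >= 4 else today.year - 1

def apply_default_financial_year(payload):
    # Copy so the caller's dict is untouched, then fill in the default
    # before the payload is validated against the schema.
    payload = dict(payload)
    payload.setdefault('financial_year_start', current_financial_year_start_year())
    return payload

assert apply_default_financial_year({'free_sms_fragment_limit': 7777,
                                     'financial_year_start': 2016})['financial_year_start'] == 2016
assert 'financial_year_start' in apply_default_financial_year({'free_sms_fragment_limit': 7777})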
@@ -376,9 +376,9 @@ def dao_fetch_monthly_historical_stats_for_service(service_id, year):
 
 
 @statsd(namespace='dao')
-def dao_fetch_todays_stats_for_all_services(include_from_test_key=True):
+def dao_fetch_todays_stats_for_all_services(include_from_test_key=True, only_active=True):
 
-    query = db.session.query(
+    subquery = db.session.query(
         Notification.notification_type,
         Notification.status,
         Notification.service_id,
@@ -389,26 +389,43 @@ def dao_fetch_todays_stats_for_all_services(include_from_test_key=True):
         Notification.notification_type,
         Notification.status,
         Notification.service_id
-    ).order_by(
-        Notification.service_id
     )
 
     if not include_from_test_key:
-        query = query.filter(Notification.key_type != KEY_TYPE_TEST)
+        subquery = subquery.filter(Notification.key_type != KEY_TYPE_TEST)
+
+    subquery = subquery.subquery()
+
+    query = db.session.query(
+        Service.id.label('service_id'),
+        Service.name,
+        Service.restricted,
+        Service.research_mode,
+        Service.active,
+        Service.created_at,
+        subquery.c.notification_type,
+        subquery.c.status,
+        subquery.c.count
+    ).outerjoin(
+        subquery,
+        subquery.c.service_id == Service.id
+    ).order_by(Service.id)
+
+    if only_active:
+        query = query.filter(Service.active)
 
     return query.all()
 
 
 @statsd(namespace='dao')
-def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True):
+def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True, only_active=True):
     start_date = get_london_midnight_in_utc(start_date)
     end_date = get_london_midnight_in_utc(end_date + timedelta(days=1))
     table = NotificationHistory
 
     if start_date >= datetime.utcnow() - timedelta(days=7):
         table = Notification
 
-    query = db.session.query(
+    subquery = db.session.query(
         table.notification_type,
         table.status,
         table.service_id,
@@ -420,12 +437,27 @@ def fetch_stats_by_date_range_for_all_services(start_date, end_date, include_fro
         table.notification_type,
         table.status,
         table.service_id
-    ).order_by(
-        table.service_id
     )
 
     if not include_from_test_key:
-        query = query.filter(table.key_type != KEY_TYPE_TEST)
+        subquery = subquery.filter(table.key_type != KEY_TYPE_TEST)
+
+    subquery = subquery.subquery()
+
+    query = db.session.query(
+        Service.id.label('service_id'),
+        Service.name,
+        Service.restricted,
+        Service.research_mode,
+        Service.active,
+        Service.created_at,
+        subquery.c.notification_type,
+        subquery.c.status,
+        subquery.c.count
+    ).outerjoin(
+        subquery,
+        subquery.c.service_id == Service.id
+    ).order_by(Service.id)
+
+    if only_active:
+        query = query.filter(Service.active)
 
     return query.all()
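Below is a self-contained sketch of the subquery-plus-outer-join pattern these two hunks introduce, using an in-memory SQLite database and deliberately simplified Service and Notification models (SQLAlchemy 1.4+ assumed; this is not the app's real schema). The point is the shape of the result: notifications are aggregated first, then LEFT OUTER JOINed onto Service, so a service with no notifications still comes back, with the aggregated columns as None.

from sqlalchemy import Column, Integer, String, ForeignKey, create_engine, func
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Service(Base):
    __tablename__ = 'services'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Notification(Base):
    __tablename__ = 'notifications'
    id = Column(Integer, primary_key=True)
    service_id = Column(Integer, ForeignKey('services.id'))
    status = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Service(id=1, name='a'), Service(id=2, name='b'),
                     Notification(service_id=1, status='created')])
    session.commit()

    # Aggregate notifications per service and status first...
    counts = session.query(
        Notification.service_id,
        Notification.status,
        func.count(Notification.id).label('count')
    ).group_by(Notification.service_id, Notification.status).subquery()

    # ...then LEFT OUTER JOIN onto Service so services with no rows survive.
    rows = session.query(
        Service.id, Service.name, counts.c.status, counts.c.count
    ).outerjoin(counts, counts.c.service_id == Service.id).order_by(Service.id).all()

    # Service 2 sent nothing: its joined columns come back as None.
    assert [tuple(r) for r in rows] == [(1, 'a', 'created', 1), (2, 'b', None, None)]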
@@ -408,23 +408,33 @@ def get_detailed_service(service_id, today_only=False):
 
 
 def get_detailed_services(start_date, end_date, only_active=False, include_from_test_key=True):
-    services = {service.id: service for service in dao_fetch_all_services(only_active)}
     if start_date == datetime.utcnow().date():
-        stats = dao_fetch_todays_stats_for_all_services(include_from_test_key=include_from_test_key)
+        stats = dao_fetch_todays_stats_for_all_services(include_from_test_key=include_from_test_key,
+                                                        only_active=only_active)
     else:
 
         stats = fetch_stats_by_date_range_for_all_services(start_date=start_date,
                                                            end_date=end_date,
-                                                           include_from_test_key=include_from_test_key)
+                                                           include_from_test_key=include_from_test_key,
+                                                           only_active=only_active)
 
+    results = []
     for service_id, rows in itertools.groupby(stats, lambda x: x.service_id):
-        services[service_id].statistics = statistics.format_statistics(rows)
-
-    # if service has not sent anything, query will not have set statistics correctly
-    for service in services.values():
-        if not hasattr(service, 'statistics'):
-            service.statistics = statistics.create_zeroed_stats_dicts()
-    return detailed_service_schema.dump(services.values(), many=True).data
+        rows = list(rows)
+        if rows[0].count is None:
+            s = statistics.create_zeroed_stats_dicts()
+        else:
+            s = statistics.format_statistics(rows)
+        results.append({
+            'id': str(rows[0].service_id),
+            'name': rows[0].name,
+            'notification_type': rows[0].notification_type,
+            'research_mode': rows[0].research_mode,
+            'restricted': rows[0].restricted,
+            'active': rows[0].active,
+            'created_at': rows[0].created_at,
+            'statistics': s
+        })
+    return results
 
 
 @service_blueprint.route('/<uuid:service_id>/whitelist', methods=['GET'])
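The rewritten get_detailed_services consumes those joined rows by grouping on service_id and zero-filling when the outer join produced no notification data. A rough sketch of that consumption step, with plain namedtuples and a hypothetical zeroed_stats helper standing in for statistics.create_zeroed_stats_dicts:

import itertools
from collections import namedtuple

# Hypothetical, simplified stand-ins for the real query rows and helpers.
Row = namedtuple('Row', 'service_id name notification_type status count')

def zeroed_stats():
    return {'sms': {'requested': 0}, 'email': {'requested': 0}}

def summarise(rows):
    results = []
    for service_id, group in itertools.groupby(rows, key=lambda r: r.service_id):
        group = list(group)
        if group[0].count is None:
            # the outer join found no notifications for this service
            stats = zeroed_stats()
        else:
            stats = {r.notification_type: {'requested': r.count} for r in group}
        results.append({'id': service_id, 'name': group[0].name, 'statistics': stats})
    return results

rows = [Row(1, 'a', 'sms', 'created', 2), Row(2, 'b', None, None, None)]
assert summarise(rows)[1]['statistics'] == zeroed_stats()

Note that itertools.groupby only groups adjacent items, which is why both DAO queries above end with .order_by(Service.id).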
@@ -14,9 +14,9 @@ from tests.app.db import (
 )
 
 from tests import create_authorization_header
-from app.dao.annual_billing_dao import (dao_get_free_sms_fragment_limit_for_year,
-                                        dao_create_or_update_annual_billing_for_year)
-from app.models import AnnualBilling
+from app.service.utils import get_current_financial_year_start_year
 import uuid
 
 APR_2016_MONTH_START = datetime(2016, 3, 31, 23, 00, 00)
@@ -376,5 +376,24 @@ def test_get_free_sms_fragment_limit_current_year(client, sample_service):
         'service/{}/billing/free-sms-fragment-limit/current-year'.format(sample_service.id, True),
         headers=[('Content-Type', 'application/json'), create_authorization_header()])
     json_resp = json.loads(response.get_data(as_text=True))
 
     assert response.status_code == 200
     assert json_resp['data']['free_sms_fragment_limit'] == 250000
+
+
+def test_post_free_sms_fragment_limit_current_year(client, sample_service):
+
+    data_new = {'free_sms_fragment_limit': 7777}
+    response = client.post('service/{}/billing/free-sms-fragment-limit'.format(sample_service.id),
+                           data=json.dumps(data_new),
+                           headers=[('Content-Type', 'application/json'), create_authorization_header()])
+
+    response_get = client.get(
+        'service/{}/billing/free-sms-fragment-limit/current-year'.format(sample_service.id),
+        headers=[('Content-Type', 'application/json'), create_authorization_header()])
+    json_resp = json.loads(response_get.get_data(as_text=True))
+
+    assert response.status_code == 201
+    assert response_get.status_code == 200
+    assert json_resp['data']['financial_year_start'] == get_current_financial_year_start_year()
+    assert json_resp['data']['free_sms_fragment_limit'] == 7777
@@ -682,7 +682,7 @@ def test_dao_fetch_todays_stats_for_all_services_includes_all_services(notify_db
     assert stats == sorted(stats, key=lambda x: x.service_id)
 
 
-def test_dao_fetch_todays_stats_for_all_services_only_includes_today(notify_db):
+def test_dao_fetch_todays_stats_for_all_services_only_includes_today(notify_db, notify_db_session):
     with freeze_time('2001-01-01T23:59:00'):
         just_before_midnight_yesterday = create_notification(notify_db, None, to_field='1', status='delivered')
 
@@ -711,12 +711,15 @@ def test_dao_fetch_todays_stats_for_all_services_groups_correctly(notify_db, not
     create_notification(notify_db, notify_db_session, service=service2)
 
     stats = dao_fetch_todays_stats_for_all_services()
 
     assert len(stats) == 4
-    assert ('sms', 'created', service1.id, 2) in stats
-    assert ('sms', 'failed', service1.id, 1) in stats
-    assert ('email', 'created', service1.id, 1) in stats
-    assert ('sms', 'created', service2.id, 1) in stats
+    assert (service1.id, service1.name, service1.restricted, service1.research_mode, service1.active,
+            service1.created_at, 'sms', 'created', 2) in stats
+    assert (service1.id, service1.name, service1.restricted, service1.research_mode, service1.active,
+            service1.created_at, 'sms', 'failed', 1) in stats
+    assert (service1.id, service1.name, service1.restricted, service1.research_mode, service1.active,
+            service1.created_at, 'email', 'created', 1) in stats
+    assert (service2.id, service2.name, service2.restricted, service2.research_mode, service2.active,
+            service2.created_at, 'sms', 'created', 1) in stats
 
 
 def test_dao_fetch_todays_stats_for_all_services_includes_all_keys_by_default(notify_db, notify_db_session):
@@ -754,7 +757,9 @@ def test_fetch_stats_by_date_range_for_all_services(notify_db, notify_db_session
     results = fetch_stats_by_date_range_for_all_services(start_date, end_date)
 
     assert len(results) == 1
-    assert results[0] == ('sms', 'created', result_one.service_id, 2)
+    assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted,
+                          result_one.service.research_mode, result_one.service.active,
+                          result_one.service.created_at, 'sms', 'created', 2)
 
 
 @freeze_time('2001-01-01T23:59:00')
@@ -793,7 +798,9 @@ def test_fetch_stats_by_date_range_for_all_services_returns_test_notifications(n
     results = fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True)
 
     assert len(results) == 1
-    assert results[0] == ('sms', 'created', result_one.service_id, int(expected))
+    assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted,
+                          result_one.service.research_mode, result_one.service.active, result_one.service.created_at,
+                          'sms', 'created', int(expected))
 
 
 @pytest.mark.parametrize("start_delta, end_delta, expected",
@@ -820,7 +827,9 @@ def test_fetch_stats_by_date_range_during_bst_hour_for_all_services_returns_test
    results = fetch_stats_by_date_range_for_all_services(start_date, end_date, include_from_test_key=True)
 
     assert len(results) == 1
-    assert results[0] == ('sms', 'created', result_one.service_id, int(expected))
+    assert results[0] == (result_one.service.id, result_one.service.name, result_one.service.restricted,
+                          result_one.service.research_mode, result_one.service.active, result_one.service.created_at,
+                          'sms', 'created', int(expected))
 
 
 @freeze_time('2001-01-01T23:59:00')