@@ -270,13 +270,8 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(
     ]


-def test_send_daily_performance_stats_calls_does_not_send_if_inactive(
-    notify_db,
-    notify_db_session,
-    sample_template,
-    mocker
-):
-    send_mock = mocker.patch('app.celery.scheduled_tasks.performance_platform_client.send_performance_stats')
+def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker):
+    send_mock = mocker.patch('app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats')  # noqa

     with patch.object(
         PerformancePlatformClient,
@@ -296,7 +291,7 @@ def test_send_daily_performance_stats_calls_with_correct_totals(
     sample_template,
     mocker
 ):
-    perf_mock = mocker.patch('app.celery.scheduled_tasks.performance_platform_client.send_performance_stats')
+    perf_mock = mocker.patch('app.celery.scheduled_tasks.total_sent_notifications.send_total_notifications_sent_for_day_stats')  # noqa

     notification_history = partial(
         create_notification_history,
@@ -327,8 +322,8 @@ def test_send_daily_performance_stats_calls_with_correct_totals(
     send_daily_performance_platform_stats()

     perf_mock.assert_has_calls([
-        call(get_london_midnight_in_utc(yesterday), 'sms', 2, 'day'),
-        call(get_london_midnight_in_utc(yesterday), 'email', 3, 'day')
+        call(get_london_midnight_in_utc(yesterday), 'sms', 2),
+        call(get_london_midnight_in_utc(yesterday), 'email', 3)
     ])

@@ -1,119 +1,58 @@
import requests
import requests_mock
import pytest
from datetime import datetime
from freezegun import freeze_time
from functools import partial

from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient
from app.utils import (
    get_london_midnight_in_utc,
    get_midnight_for_day_before
)
from tests.app.conftest import sample_notification_history as create_notification_history


@pytest.fixture(scope='function')
def client(mocker):
    client = PerformancePlatformClient()
def perf_client(client, mocker):
    perf_client = PerformancePlatformClient()
    current_app = mocker.Mock(config={
        'PERFORMANCE_PLATFORM_ENABLED': True,
        'PERFORMANCE_PLATFORM_URL': 'https://performance-platform-url/',
        'PERFORMANCE_PLATFORM_TOKEN': 'token'
        'PERFORMANCE_PLATFORM_ENDPOINTS': {
            'foo': 'my_token',
            'bar': 'other_token'
        },
        'PERFORMANCE_PLATFORM_URL': 'https://performance-platform-url/'
    })
    client.init_app(current_app)
    return client
    perf_client.init_app(current_app)
    return perf_client


def test_should_not_call_if_not_enabled(notify_api, client, mocker):
    mocker.patch.object(client, '_send_stats_to_performance_platform')
    client.active = False
    client.send_performance_stats(
        date=datetime(2016, 10, 16, 0, 0, 0),
        channel='sms',
        count=142,
        period='day'
    )

    client._send_stats_to_performance_platform.assert_not_called()


def test_should_call_if_enabled(notify_api, client, mocker):
    mocker.patch.object(client, '_send_stats_to_performance_platform')
    client.send_performance_stats(
        date=datetime(2016, 10, 16, 0, 0, 0),
        channel='sms',
        count=142,
        period='day'
    )

    assert client._send_stats_to_performance_platform.call_count == 1


def test_send_platform_stats_creates_correct_call(notify_api, client):
def test_should_not_call_if_not_enabled(perf_client):
    with requests_mock.Mocker() as request_mock:
        request_mock.post(
            client.performance_platform_url,
            json={},
            status_code=200
        )
        client.send_performance_stats(
            date=datetime(2016, 10, 15, 23, 0, 0),
            channel='sms',
            count=142,
            period='day'
        )
        request_mock.post('https://performance-platform-url/foo', json={}, status_code=200)
        perf_client._active = False
        perf_client.send_stats_to_performance_platform({'dataType': 'foo'})

    assert request_mock.called is False


def test_should_call_datatype_endpoint_if_enabled(perf_client):
    with requests_mock.Mocker() as request_mock:
        request_mock.post('https://performance-platform-url/foo', json={}, status_code=200)
        perf_client.send_stats_to_performance_platform({'dataType': 'foo'})

    assert request_mock.call_count == 1

    assert request_mock.request_history[0].url == client.performance_platform_url
    assert request_mock.request_history[0].method == 'POST'

    request_args = request_mock.request_history[0].json()
    assert request_args['dataType'] == 'notifications'
    assert request_args['service'] == 'govuk-notify'
    assert request_args['period'] == 'day'
    assert request_args['channel'] == 'sms'
    assert request_args['_timestamp'] == '2016-10-16T00:00:00'
    assert request_args['count'] == 142
    expected_base64_id = 'MjAxNi0xMC0xNlQwMDowMDowMGdvdnVrLW5vdGlmeXNtc25vdGlmaWNhdGlvbnNkYXk='
    assert request_args['_id'] == expected_base64_id
    assert request_mock.last_request.method == 'POST'


@freeze_time("2016-01-11 12:30:00")
def test_get_total_sent_notifications_yesterday_returns_expected_totals_dict(
    notify_db,
    notify_db_session,
    client,
    sample_template
):
    notification_history = partial(
        create_notification_history,
        notify_db,
        notify_db_session,
        sample_template,
        status='delivered'
    )
@pytest.mark.parametrize('dataset, token', [
    ('foo', 'my_token'),
    ('bar', 'other_token')
])
def test_should_use_correct_token(perf_client, dataset, token):
    with requests_mock.Mocker() as request_mock:
        request_mock.post('https://performance-platform-url/foo', json={}, status_code=200)
        request_mock.post('https://performance-platform-url/bar', json={}, status_code=200)
        perf_client.send_stats_to_performance_platform({'dataType': dataset})

    notification_history(notification_type='email')
    notification_history(notification_type='sms')
    assert request_mock.call_count == 1
    assert request_mock.last_request.headers.get('authorization') == 'Bearer {}'.format(token)

    # Create some notifications for the day before
    yesterday = datetime(2016, 1, 10, 15, 30, 0, 0)
    with freeze_time(yesterday):
        notification_history(notification_type='sms')
        notification_history(notification_type='sms')
        notification_history(notification_type='email')
        notification_history(notification_type='email')
        notification_history(notification_type='email')

    total_count_dict = client.get_total_sent_notifications_yesterday()

    assert total_count_dict == {
        "start_date": get_midnight_for_day_before(datetime.utcnow()),
        "email": {
            "count": 3
        },
        "sms": {
            "count": 2
        }
    }
def test_should_raise_for_status(perf_client):
    with pytest.raises(requests.HTTPError), requests_mock.Mocker() as request_mock:
        request_mock.post('https://performance-platform-url/foo', json={}, status_code=403)
        perf_client.send_stats_to_performance_platform({'dataType': 'foo'})

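The rewritten tests above pin down the behaviour expected of the new client: a disabled client makes no HTTP request at all, each payload is POSTed to the endpoint named by its 'dataType', the bearer token for that dataset comes from PERFORMANCE_PLATFORM_ENDPOINTS, and error responses surface as requests.HTTPError. A minimal sketch consistent with those assertions (not the repository's actual implementation; the method bodies and the extra Content-Type header are assumptions) might look like this:

# Hypothetical sketch inferred from the tests above -- not the repo's actual code.
import requests


class PerformancePlatformClient:
    def init_app(self, app):
        self._active = app.config['PERFORMANCE_PLATFORM_ENABLED']
        if self._active:
            self.performance_platform_url = app.config['PERFORMANCE_PLATFORM_URL']
            self.performance_platform_endpoints = app.config['PERFORMANCE_PLATFORM_ENDPOINTS']

    def send_stats_to_performance_platform(self, payload):
        if not self._active:
            # A disabled client makes no HTTP call at all (request_mock.called is False).
            return

        bearer_token = self.performance_platform_endpoints[payload['dataType']]
        response = requests.post(
            # Each dataset is POSTed to <base url>/<dataType>, e.g. .../foo or .../bar.
            self.performance_platform_url + payload['dataType'],
            json=payload,
            headers={
                'Content-Type': 'application/json',
                'Authorization': 'Bearer {}'.format(bearer_token)
            }
        )
        # A 403 (or any error status) surfaces as requests.HTTPError.
        response.raise_for_status()
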
@@ -0,0 +1,75 @@
from datetime import datetime
from functools import partial

from freezegun import freeze_time

from app.utils import get_midnight_for_day_before
from app.performance_platform.total_sent_notifications import (
    send_total_notifications_sent_for_day_stats,
    get_total_sent_notifications_yesterday
)

from tests.app.conftest import (
    sample_notification_history as create_notification_history
)


def test_send_total_notifications_sent_for_day_stats_stats_creates_correct_call(mocker, client):
    send_stats = mocker.patch('app.performance_platform.total_sent_notifications.performance_platform_client.send_stats_to_performance_platform')  # noqa

    send_total_notifications_sent_for_day_stats(
        date=datetime(2016, 10, 15, 23, 0, 0),
        notification_type='sms',
        count=142
    )

    assert send_stats.call_count == 1

    request_args = send_stats.call_args[0][0]
    assert request_args['dataType'] == 'notifications'
    assert request_args['service'] == 'govuk-notify'
    assert request_args['period'] == 'day'
    assert request_args['channel'] == 'sms'
    assert request_args['_timestamp'] == '2016-10-16T00:00:00'
    assert request_args['count'] == 142
    expected_base64_id = 'MjAxNi0xMC0xNlQwMDowMDowMGdvdnVrLW5vdGlmeXNtc25vdGlmaWNhdGlvbnNkYXk='
    assert request_args['_id'] == expected_base64_id


@freeze_time("2016-01-11 12:30:00")
def test_get_total_sent_notifications_yesterday_returns_expected_totals_dict(
    notify_db,
    notify_db_session,
    sample_template
):
    notification_history = partial(
        create_notification_history,
        notify_db,
        notify_db_session,
        sample_template,
        status='delivered'
    )

    notification_history(notification_type='email')
    notification_history(notification_type='sms')

    # Create some notifications for the day before
    yesterday = datetime(2016, 1, 10, 15, 30, 0, 0)
    with freeze_time(yesterday):
        notification_history(notification_type='sms')
        notification_history(notification_type='sms')
        notification_history(notification_type='email')
        notification_history(notification_type='email')
        notification_history(notification_type='email')

    total_count_dict = get_total_sent_notifications_yesterday()

    assert total_count_dict == {
        "start_date": get_midnight_for_day_before(datetime.utcnow()),
        "email": {
            "count": 3
        },
        "sms": {
            "count": 2
        }
    }

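The payload assertions in this new test file fix the exact shape of what gets sent for each day's totals: dataType 'notifications', service 'govuk-notify', period 'day', the channel and count, a London-local '_timestamp' (2016-10-15 23:00 UTC is 2016-10-16 00:00 BST), and an '_id' that base64-decodes to '2016-10-16T00:00:00govuk-notifysmsnotificationsday', i.e. timestamp + service + channel + dataType + period. A rough sketch of a payload builder matching those assertions (the function name is hypothetical, not the module's real API):

# Hypothetical payload builder inferred from the test assertions above.
import base64


def build_notifications_payload(london_datetime_isoformat, channel, count):
    payload = {
        'service': 'govuk-notify',
        'dataType': 'notifications',
        'period': 'day',
        '_timestamp': london_datetime_isoformat,  # e.g. '2016-10-16T00:00:00'
        'channel': channel,                       # 'sms' or 'email'
        'count': count,
    }
    # The asserted _id decodes to '2016-10-16T00:00:00govuk-notifysmsnotificationsday',
    # i.e. base64 of timestamp + service + channel + dataType + period.
    id_string = '{}{}{}{}{}'.format(
        payload['_timestamp'], payload['service'], payload['channel'],
        payload['dataType'], payload['period']
    )
    payload['_id'] = base64.b64encode(id_string.encode('utf-8')).decode('utf-8')
    return payload
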
@@ -16,7 +16,6 @@ def notify_config():
             'admin_client_secret': 'admin client secret',
             'secret_key': 'secret key',
             'dangerous_salt': 'dangerous salt',
-            'performance_platform_token': 'performance platform token',
             'allow_ip_inbound_sms': ['111.111.111.111', '100.100.100.100']
         }
     }
@@ -88,6 +87,17 @@ def redis_config():
     }


+@pytest.fixture
+def performance_platform_config():
+    return {
+        'name': 'performance-platform',
+        'credentials': {
+            'foo': 'my_token',
+            'bar': 'other_token'
+        }
+    }
+
+
 @pytest.fixture
 def cloudfoundry_config(
     postgres_config,
@@ -96,7 +106,8 @@ def cloudfoundry_config(
     hosted_graphite_config,
     mmg_config,
     firetext_config,
-    redis_config
+    redis_config,
+    performance_platform_config
 ):
     return {
         'postgres': postgres_config,
@@ -106,7 +117,8 @@ def cloudfoundry_config(
             hosted_graphite_config,
             mmg_config,
             firetext_config,
-            redis_config
+            redis_config,
+            performance_platform_config
         ]
     }

@@ -148,16 +160,6 @@ def test_notify_config():
     assert os.environ['ADMIN_CLIENT_SECRET'] == 'admin client secret'
     assert os.environ['SECRET_KEY'] == 'secret key'
     assert os.environ['DANGEROUS_SALT'] == 'dangerous salt'
-    assert os.environ['PERFORMANCE_PLATFORM_TOKEN'] == 'performance platform token'
-
-
-@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
-def test_notify_config_if_perf_platform_not_set(cloudfoundry_config):
-    del cloudfoundry_config['user-provided'][0]['credentials']['performance_platform_token']
-
-    set_config_env_vars(cloudfoundry_config)
-
-    assert os.environ['PERFORMANCE_PLATFORM_TOKEN'] == ''


 @pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
@@ -205,3 +207,13 @@ def test_sms_inbound_config():
     extract_cloudfoundry_config()

     assert os.environ['SMS_INBOUND_WHITELIST'] == json.dumps(['111.111.111.111', '100.100.100.100'])
+
+
+@pytest.mark.usefixtures('os_environ', 'cloudfoundry_environ')
+def test_performance_platform_config():
+    extract_cloudfoundry_config()
+
+    assert os.environ['PERFORMANCE_PLATFORM_ENDPOINTS'] == json.dumps({
+        'foo': 'my_token',
+        'bar': 'other_token'
+    })
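These config tests imply a simple round trip: the credentials dict of the 'performance-platform' user-provided service (dataset name mapped to bearer token) is exported as a JSON-encoded PERFORMANCE_PLATFORM_ENDPOINTS environment variable, which the API config can then read back with json.loads. A sketch under those assumptions (function and variable names here are illustrative only, not the project's actual code):

# Hypothetical sketch of the env-var round trip implied by the tests above.
import json
import os


def export_performance_platform_endpoints(cloudfoundry_config):
    # Find the user-provided service named 'performance-platform' and export its
    # credentials dict (dataset name -> bearer token) as a JSON-encoded env var.
    for service in cloudfoundry_config['user-provided']:
        if service['name'] == 'performance-platform':
            os.environ['PERFORMANCE_PLATFORM_ENDPOINTS'] = json.dumps(service['credentials'])


# Reading it back in application config would then be something like:
# PERFORMANCE_PLATFORM_ENDPOINTS = json.loads(os.environ.get('PERFORMANCE_PLATFORM_ENDPOINTS', '{}'))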