Mirror of https://github.com/GSA/notifications-api.git, synced 2026-01-30 06:21:50 -05:00
notify-34 fix more skips
@@ -828,7 +828,7 @@ def test_dao_fetch_todays_stats_for_service_only_includes_today_during_bst(notif
    assert not stats.get('permanent-failure')


@pytest.mark.skip(reason="Need a better way to test variable DST date")
# @pytest.mark.skip(reason="Need a better way to test variable DST date")
def test_dao_fetch_todays_stats_for_service_only_includes_today_when_clocks_fall_back(notify_db_session):
    template = create_template(service=create_service())
    with freeze_time('2021-10-30T22:59:59'):
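The skip reason in the hunk above refers to the hard-coded clock-change date ('2021-10-30', the day before the UK transition; a US deployment would fall back on the first Sunday of November instead). Shown only as a hypothetical sketch, not part of this commit or the codebase, one way to avoid pinning the date is to compute the last Sunday of October for the year under test:

    from datetime import datetime, timedelta
    from freezegun import freeze_time

    def last_sunday_of_october(year):
        # Hypothetical helper: UK clocks fall back on the last Sunday of October.
        last_day = datetime(year, 10, 31)
        # weekday(): Monday == 0 ... Sunday == 6
        return last_day - timedelta(days=(last_day.weekday() - 6) % 7)

    # e.g. freeze time shortly before the change instead of hard-coding it:
    with freeze_time(last_sunday_of_october(2021) - timedelta(hours=1)):
        pass  # test body would go here
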
@@ -1,6 +1,5 @@
from datetime import datetime, timedelta

import pytest
from freezegun import freeze_time

from app.dao.uploads_dao import dao_get_uploads_by_service_id

@@ -98,120 +97,6 @@ def test_get_uploads_orders_by_processing_started_desc(sample_template):
    assert results[1].id == upload_2.id


@pytest.mark.skip(reason="Investigate what remains after removing letters")
@freeze_time("2020-10-27 16:15")  # GMT time
def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    days_ago = datetime.utcnow() - timedelta(days=4)
    create_uploaded_letter(letter_template, service=letter_template.service)
    upload_2 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    upload_3 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=2),
                          created_at=days_ago,
                          job_status=JOB_STATUS_IN_PROGRESS)
    create_uploaded_letter(letter_template, service=letter_template.service,
                           created_at=datetime.utcnow() - timedelta(days=3))

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert len(results) == 4
    assert results[0].id is None
    assert results[1].id == upload_2.id
    assert results[2].id == upload_3.id
    assert results[3].id is None


@pytest.mark.skip(reason="Investigate what remains after removing letters")
@freeze_time('2020-04-02 14:00')  # Few days after the clocks go forward
def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template):
    letter_template = create_uploaded_template(sample_template.service)
    create_service_data_retention(sample_template.service, 'sms', days_of_retention=3)

    days_ago = datetime.utcnow() - timedelta(days=4)
    upload_1 = create_uploaded_letter(letter_template, service=letter_template.service)
    upload_2 = create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(days=1), created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS
    )
    # older than custom retention for sms:
    create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(days=5), created_at=days_ago,
        job_status=JOB_STATUS_IN_PROGRESS
    )
    upload_3 = create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=3)
    )

    # older than retention for sms but within letter retention:
    upload_4 = create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=6)
    )

    # older than default retention for letters:
    create_uploaded_letter(
        letter_template, service=letter_template.service, created_at=datetime.utcnow() - timedelta(days=8)
    )

    results = dao_get_uploads_by_service_id(service_id=sample_template.service_id).items

    assert len(results) == 4

    # Uploaded letters get their `created_at` shifted to the time of printing
    # 21:30 EST == 16:30 UTC
    assert results[0].created_at == upload_1.created_at.replace(hour=21, minute=30, second=0, microsecond=0)

    # Jobs keep their original `created_at`
    assert results[1].created_at == upload_2.created_at.replace(hour=14, minute=00, second=0, microsecond=0)

    # Still in BST here…
    assert results[2].created_at == upload_3.created_at.replace(hour=21, minute=30, second=0, microsecond=0)

    # Now we’ve gone far enough back to be in GMT
    # 17:30 GMT == 17:30 UTC
    assert results[3].created_at == upload_4.created_at.replace(hour=21, minute=30, second=0, microsecond=0)


@pytest.mark.skip(reason="Investigate what remains after removing letters")
@freeze_time('2020-02-02 14:00')
def test_get_uploads_is_paginated(sample_template):
    letter_template = create_uploaded_template(sample_template.service)

    create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=3),
    )
    create_job(
        sample_template, processing_started=datetime.utcnow() - timedelta(minutes=2),
        job_status=JOB_STATUS_IN_PROGRESS,
    )
    create_uploaded_letter(
        letter_template, sample_template.service, status='delivered',
        created_at=datetime.utcnow() - timedelta(minutes=1),
    )
    create_job(
        sample_template, processing_started=datetime.utcnow(),
        job_status=JOB_STATUS_IN_PROGRESS,
    )

    results = dao_get_uploads_by_service_id(sample_template.service_id, page=1, page_size=1)

    assert results.per_page == 1
    assert results.total == 3
    assert len(results.items) == 1
    assert results.items[0].created_at == datetime.utcnow().replace(hour=22, minute=30, second=0, microsecond=0)
    assert results.items[0].notification_count == 2
    assert results.items[0].upload_type == 'letter_day'

    results = dao_get_uploads_by_service_id(sample_template.service_id, page=2, page_size=1)

    assert len(results.items) == 1
    assert results.items[0].created_at == datetime.utcnow().replace(hour=14, minute=0, second=0, microsecond=0)
    assert results.items[0].notification_count == 1
    assert results.items[0].upload_type == 'job'


def test_get_uploads_returns_empty_list(sample_service):
    items = dao_get_uploads_by_service_id(sample_service.id).items
    assert items == []

@@ -72,7 +72,6 @@ def test_create_invited_user(
    mocked.assert_called_once_with([(str(notification.id))], queue="notify-internal-tasks")


@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
def test_create_invited_user_without_auth_type(admin_request, sample_service, mocker, invitation_email_template):
    mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
    email_address = 'invited_user@service.gov.uk'

@@ -125,13 +124,12 @@ def test_create_invited_user_invalid_email(client, sample_service, mocker, fake_
    assert mocked.call_count == 0


@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
def test_get_all_invited_users_by_service(client, notify_db_session, sample_service):
    invites = []
    for i in range(0, 5):
        email = 'invited_user_{}@service.gov.uk'.format(i)
        invited_user = create_invited_user(sample_service, to_email_address=email)

        print(f"INVITED USER = {invited_user.auth_type}")
        invites.append(invited_user)

    url = '/service/{}/invite'.format(sample_service.id)

@@ -146,6 +144,7 @@ def test_get_all_invited_users_by_service(client, notify_db_session, sample_serv
    json_resp = json.loads(response.get_data(as_text=True))

    invite_from = sample_service.users[0]
    print(f"INVITE FROM {json_resp['data']}")

    for invite in json_resp['data']:
        assert invite['service'] == str(sample_service.id)

@@ -47,7 +47,6 @@ def test_get_user_list(admin_request, sample_service):
    assert sorted(expected_permissions) == sorted(fetched['permissions'][str(sample_service.id)])


@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
def test_get_user(admin_request, sample_service, sample_organisation):
    """
    Tests GET endpoint '/<user_id>' to retrieve a single service.

@@ -123,7 +122,6 @@ def test_post_user(admin_request, notify_db_session):
    assert user.auth_type == EMAIL_AUTH_TYPE


@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
def test_post_user_without_auth_type(admin_request, notify_db_session):
    User.query.delete()
    data = {

@@ -819,7 +817,6 @@ def test_activate_user_fails_if_already_active(admin_request, sample_user):
    assert sample_user.state == 'active'


@pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason")
def test_update_user_auth_type(admin_request, sample_user):
    assert sample_user.auth_type == 'sms_auth'
    resp = admin_request.post(