diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 457877ed0..636522f01 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -9,6 +9,7 @@ from sqlalchemy.exc import SQLAlchemyError from app.aws import s3 from app import notify_celery from app import performance_platform_client +from app.dao.inbound_sms_dao import delete_inbound_sms_created_more_than_a_week_ago from app.dao.invited_user_dao import delete_invitations_created_more_than_two_days_ago from app.dao.jobs_dao import dao_set_scheduled_jobs_to_pending, dao_get_jobs_older_than_limited_by from app.dao.notifications_dao import ( @@ -31,8 +32,8 @@ from app.config import QueueNames @notify_celery.task(name="remove_csv_files") @statsd(namespace="tasks") -def remove_csv_files(): - jobs = dao_get_jobs_older_than_limited_by() +def remove_csv_files(job_types): + jobs = dao_get_jobs_older_than_limited_by(job_types=job_types) for job in jobs: s3.remove_job_from_s3(job.service_id, job.id) current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) @@ -226,3 +227,21 @@ def timeout_job_statistics(): if updated: current_app.logger.info( "Timeout period reached for {} job statistics, failure count has been updated.".format(updated)) + + +@notify_celery.task(name="delete-inbound-sms") +@statsd(namespace="tasks") +def delete_inbound_sms_older_than_seven_days(): + try: + start = datetime.utcnow() + deleted = delete_inbound_sms_created_more_than_a_week_ago() + current_app.logger.info( + "Delete inbound sms job started {} finished {} deleted {} inbound sms notifications".format( + start, + datetime.utcnow(), + deleted + ) + ) + except SQLAlchemyError as e: + current_app.logger.exception("Failed to delete inbound sms notifications") + raise diff --git a/app/config.py b/app/config.py index 1cac131e2..63d1f18c3 100644 --- a/app/config.py +++ b/app/config.py @@ -2,7 +2,11 @@ from datetime import timedelta from celery.schedules import crontab from kombu import Exchange, Queue import os -from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST + +from app.models import ( + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, + KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST +) if os.environ.get('VCAP_SERVICES'): # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate @@ -168,6 +172,11 @@ class Config(object): 'schedule': crontab(minute=40, hour=0), 'options': {'queue': QueueNames.PERIODIC} }, + 'delete-inbound-sms': { + 'task': 'delete-inbound-sms', + 'schedule': crontab(minute=0, hour=1), + 'options': {'queue': QueueNames.PERIODIC} + }, 'send-daily-performance-platform-stats': { 'task': 'send-daily-performance-platform-stats', 'schedule': crontab(minute=0, hour=2), @@ -183,10 +192,17 @@ class Config(object): 'schedule': crontab(minute=0, hour=3), 'options': {'queue': QueueNames.PERIODIC} }, - 'remove_csv_files': { + 'remove_sms_email_jobs': { 'task': 'remove_csv_files', 'schedule': crontab(minute=0, hour=4), - 'options': {'queue': QueueNames.PERIODIC} + 'options': {'queue': QueueNames.PERIODIC}, + 'kwargs': {'job_types': [EMAIL_TYPE, SMS_TYPE]} + }, + 'remove_letter_jobs': { + 'task': 'remove_csv_files', + 'schedule': crontab(minute=20, hour=4), + 'options': {'queue': QueueNames.PERIODIC}, + 'kwargs': {'job_types': [LETTER_TYPE]} }, 'timeout-job-statistics': { 'task': 'timeout-job-statistics', @@ -233,6 +249,8 @@ class Config(object): } } + FREE_SMS_TIER_FRAGMENT_COUNT = 250000 + ###################### # Config overrides ### diff --git a/app/dao/inbound_sms_dao.py 
b/app/dao/inbound_sms_dao.py index 597748731..18060cced 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -1,6 +1,13 @@ +from datetime import ( + timedelta, + datetime +) + + from app import db from app.dao.dao_utils import transactional from app.models import InboundSms +from app.statsd_decorators import statsd @transactional @@ -28,3 +35,22 @@ def dao_count_inbound_sms_for_service(service_id): return InboundSms.query.filter( InboundSms.service_id == service_id ).count() + + +@statsd(namespace="dao") +@transactional +def delete_inbound_sms_created_more_than_a_week_ago(): + seven_days_ago = datetime.utcnow() - timedelta(days=7) + + deleted = db.session.query(InboundSms).filter( + InboundSms.created_at < seven_days_ago + ).delete(synchronize_session='fetch') + + return deleted + + +def dao_get_inbound_sms_by_id(service_id, inbound_id): + return InboundSms.query.filter_by( + id=inbound_id, + service_id=service_id + ).one() diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index d2ec8c367..86e565619 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,17 +1,15 @@ -from datetime import datetime +from datetime import datetime, timedelta from flask import current_app from sqlalchemy import func, desc, asc, cast, Date as sql_date from app import db from app.dao import days_ago -from app.models import (Job, - Notification, - NotificationHistory, - Template, - JOB_STATUS_SCHEDULED, - JOB_STATUS_PENDING, - LETTER_TYPE, JobStatistics) +from app.models import ( + Job, JobStatistics, Notification, NotificationHistory, Template, + JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE +) from app.statsd_decorators import statsd @@ -129,10 +127,14 @@ def dao_update_job_status(job_id, status): db.session.commit() -def dao_get_jobs_older_than_limited_by(older_than=7, limit_days=2): - return Job.query.filter( - cast(Job.created_at, sql_date) < days_ago(older_than), - cast(Job.created_at, sql_date) >= days_ago(older_than + limit_days) +def dao_get_jobs_older_than_limited_by(job_types, older_than=7, limit_days=2): + end_date = datetime.utcnow() - timedelta(days=older_than) + start_date = end_date - timedelta(days=limit_days) + + return Job.query.join(Template).filter( + Job.created_at < end_date, + Job.created_at >= start_date, + Template.template_type.in_(job_types) ).order_by(desc(Job.created_at)).all() diff --git a/app/dao/notification_usage_dao.py b/app/dao/notification_usage_dao.py index b3b582ed3..995462509 100644 --- a/app/dao/notification_usage_dao.py +++ b/app/dao/notification_usage_dao.py @@ -1,5 +1,6 @@ from datetime import datetime, timedelta +from flask import current_app from sqlalchemy import Float, Integer from sqlalchemy import func, case, cast from sqlalchemy import literal_column @@ -11,7 +12,7 @@ from app.models import (NotificationHistory, NOTIFICATION_STATUS_TYPES_BILLABLE, KEY_TYPE_TEST, SMS_TYPE, - EMAIL_TYPE) + EMAIL_TYPE, Service) from app.statsd_decorators import statsd from app.utils import get_london_month_from_utc_column @@ -158,6 +159,8 @@ def rate_multiplier(): @statsd(namespace="dao") def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date, end_date, service_id): + free_sms_limit = Service.free_sms_fragment_limit() + billable_units = 0 total_cost = 0.0 @@ -176,9 +179,15 @@ def get_total_billable_units_for_sent_sms_notifications_in_date_range(start_date ) billable_units_by_rate_boundry = result.scalar() if billable_units_by_rate_boundry: - billable_units += 
int(billable_units_by_rate_boundry) - total_cost += int(billable_units_by_rate_boundry) * rate_boundary['rate'] - + int_billable_units_by_rate_boundry = int(billable_units_by_rate_boundry) + if billable_units >= free_sms_limit: + total_cost += int_billable_units_by_rate_boundry * rate_boundary['rate'] + elif billable_units + int_billable_units_by_rate_boundry > free_sms_limit: + remaining_free_allowance = abs(free_sms_limit - billable_units) + total_cost += ((int_billable_units_by_rate_boundry - remaining_free_allowance) * rate_boundary['rate']) + else: + total_cost += 0 + billable_units += int_billable_units_by_rate_boundry return billable_units, total_cost diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index f4ff2a24f..22da11966 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -2,7 +2,8 @@ import functools from datetime import ( datetime, timedelta, - date) + date +) from flask import current_app @@ -26,6 +27,7 @@ from app.models import ( NotificationHistory, NotificationStatistics, Template, + ScheduledNotification, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, NOTIFICATION_SENDING, @@ -35,7 +37,8 @@ from app.models import ( NOTIFICATION_PERMANENT_FAILURE, KEY_TYPE_NORMAL, KEY_TYPE_TEST, LETTER_TYPE, - NOTIFICATION_SENT, ScheduledNotification) + NOTIFICATION_SENT, +) from app.dao.dao_utils import transactional from app.statsd_decorators import statsd diff --git a/app/dao/statistics_dao.py b/app/dao/statistics_dao.py index baff82ba4..48d62e71c 100644 --- a/app/dao/statistics_dao.py +++ b/app/dao/statistics_dao.py @@ -60,7 +60,10 @@ def timeout_job_counts(notifications_type, timeout_start): ).update({ sent: sent_count, failed: failed_count, - delivered: delivered_count + delivered: delivered_count, + 'sent': sent_count, + 'delivered': delivered_count, + 'failed': failed_count }, synchronize_session=False) return total_updated @@ -87,11 +90,13 @@ def create_or_update_job_sending_statistics(notification): @transactional def __update_job_stats_sent_count(notification): column = columns(notification.notification_type, 'sent') + new_column = 'sent' return db.session.query(JobStatistics).filter_by( job_id=notification.job_id, ).update({ - column: column + 1 + column: column + 1, + new_column: column + 1 }) @@ -102,7 +107,8 @@ def __insert_job_stats(notification): emails_sent=1 if notification.notification_type == EMAIL_TYPE else 0, sms_sent=1 if notification.notification_type == SMS_TYPE else 0, letters_sent=1 if notification.notification_type == LETTER_TYPE else 0, - updated_at=datetime.utcnow() + updated_at=datetime.utcnow(), + sent=1 ) db.session.add(stats) @@ -131,10 +137,12 @@ def columns(notification_type, status): def update_job_stats_outcome_count(notification): if notification.status in NOTIFICATION_STATUS_TYPES_FAILED: column = columns(notification.notification_type, 'failed') + new_column = 'failed' elif notification.status in [NOTIFICATION_DELIVERED, NOTIFICATION_SENT] and notification.notification_type != LETTER_TYPE: column = columns(notification.notification_type, 'delivered') + new_column = 'delivered' else: column = None @@ -143,7 +151,8 @@ def update_job_stats_outcome_count(notification): return db.session.query(JobStatistics).filter_by( job_id=notification.job_id, ).update({ - column: column + 1 + column: column + 1, + new_column: column + 1 }) else: return 0 diff --git a/app/inbound_sms/rest.py b/app/inbound_sms/rest.py index d4072dea3..1eca4b089 100644 --- a/app/inbound_sms/rest.py +++ b/app/inbound_sms/rest.py @@ 
-3,15 +3,20 @@ from flask import (
     jsonify,
     request
 )
+
 from notifications_utils.recipients import validate_and_format_phone_number
 
-from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service
+from app.dao.inbound_sms_dao import (
+    dao_get_inbound_sms_for_service,
+    dao_count_inbound_sms_for_service,
+    dao_get_inbound_sms_by_id
+)
 from app.errors import register_errors
 
 inbound_sms = Blueprint(
     'inbound_sms',
     __name__,
-    url_prefix='/service/<service_id>/inbound-sms'
+    url_prefix='/service/<uuid:service_id>/inbound-sms'
 )
 
 register_errors(inbound_sms)
@@ -40,3 +45,10 @@ def get_inbound_sms_summary_for_service(service_id):
         count=count,
         most_recent=most_recent[0].created_at.isoformat() if most_recent else None
     )
+
+
+@inbound_sms.route('/<uuid:inbound_sms_id>', methods=['GET'])
+def get_inbound_by_id(service_id, inbound_sms_id):
+    inbound_sms = dao_get_inbound_sms_by_id(service_id, inbound_sms_id)
+
+    return jsonify(inbound_sms.serialize()), 200
diff --git a/app/models.py b/app/models.py
index aefacfed6..4b8213a4a 100644
--- a/app/models.py
+++ b/app/models.py
@@ -217,6 +217,10 @@ class Service(db.Model, Versioned):
         self.can_send_letters = LETTER_TYPE in [p.permission for p in self.permissions]
         self.can_send_international_sms = INTERNATIONAL_SMS_TYPE in [p.permission for p in self.permissions]
 
+    @staticmethod
+    def free_sms_fragment_limit():
+        return current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT']
+
     @classmethod
     def from_json(cls, data):
         """
@@ -1122,6 +1126,9 @@ class JobStatistics(db.Model):
     sms_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0)
     letters_sent = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0)
     letters_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0)
+    sent = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0)
+    delivered = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0)
+    failed = db.Column(db.BigInteger, index=False, unique=False, nullable=True, default=0)
     created_at = db.Column(
         db.DateTime,
         index=False,
diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py
index 904e78711..fdd1f95ec 100644
--- a/app/notifications/receive_notifications.py
+++ b/app/notifications/receive_notifications.py
@@ -27,10 +27,13 @@ def receive_mmg_sms():
         }
     """
     post_data = request.get_json()
-    potential_services = dao_fetch_services_by_sms_sender(post_data['Number'])
+
+    inbound_number = strip_leading_forty_four(post_data['Number'])
+
+    potential_services = dao_fetch_services_by_sms_sender(inbound_number)
 
     if len(potential_services) != 1:
-        current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format(
+        current_app.logger.error('Inbound number "{}" from MMG not associated with exactly one service'.format(
             post_data['Number']
         ))
         statsd_client.incr('inbound.mmg.failed')
@@ -88,9 +91,11 @@ def create_inbound_mmg_sms_object(service, json):
 
 
 def receive_firetext_sms():
     post_data = request.form
 
-    potential_services = dao_fetch_services_by_sms_sender(post_data['destination'])
+    inbound_number = strip_leading_forty_four(post_data['destination'])
+
+    potential_services = dao_fetch_services_by_sms_sender(inbound_number)
 
     if len(potential_services) != 1:
-        current_app.logger.error('Inbound number "{}" not associated with exactly one service'.format(
+        current_app.logger.error('Inbound number "{}" from Firetext not associated with exactly one service'.format(
             post_data['destination']
         ))
         statsd_client.incr('inbound.firetext.failed')
@@ -120,3 +125,9 @@
     return jsonify({
         "status": "ok"
     }), 200
+
+
+def strip_leading_forty_four(number):
+    if number.startswith('44'):
+        return number.replace('44', '0', 1)
+    return number
diff --git a/app/schemas.py b/app/schemas.py
index b8d3ee7af..7971300b7 100644
--- a/app/schemas.py
+++ b/app/schemas.py
@@ -25,9 +25,8 @@ from notifications_utils.recipients import (
 
 from app import ma
 from app import models
-from app.models import ServicePermission, INTERNATIONAL_SMS_TYPE, SMS_TYPE, LETTER_TYPE, EMAIL_TYPE
+from app.models import ServicePermission, INTERNATIONAL_SMS_TYPE, LETTER_TYPE
 from app.dao.permissions_dao import permission_dao
-from app.dao.service_permissions_dao import dao_fetch_service_permissions
 from app.utils import get_template_instance
@@ -176,6 +175,7 @@ class ProviderDetailsHistorySchema(BaseSchema):
 
 
 class ServiceSchema(BaseSchema):
+    free_sms_fragment_limit = fields.Method(method_name='get_free_sms_fragment_limit')
     created_by = field_for(models.Service, 'created_by', required=True)
     organisation = field_for(models.Service, 'organisation')
     branding = field_for(models.Service, 'branding')
@@ -183,11 +183,15 @@ class ServiceSchema(BaseSchema):
     permissions = fields.Method("service_permissions")
     override_flag = False
 
+    def get_free_sms_fragment_limit(self, service):
+        return service.free_sms_fragment_limit()
+
     def service_permissions(self, service):
         return [p.permission for p in service.permissions]
 
     class Meta:
         model = models.Service
+        dump_only = ['free_sms_fragment_limit']
         exclude = (
             'updated_at',
             'created_at',
@@ -256,6 +260,11 @@ class ServiceSchema(BaseSchema):
 class DetailedServiceSchema(BaseSchema):
     statistics = fields.Dict()
+    free_sms_fragment_limit = fields.Method(method_name='get_free_sms_fragment_limit')
+
+    def get_free_sms_fragment_limit(self, service):
+        return service.free_sms_fragment_limit()
+
     class Meta:
         model = models.Service
         exclude = (
diff --git a/app/service/rest.py b/app/service/rest.py
index 24fdcd513..1e301d0a7 100644
--- a/app/service/rest.py
+++ b/app/service/rest.py
@@ -300,6 +300,19 @@ def get_all_notifications_for_service(service_id):
         ), 200
 
 
+@service_blueprint.route('/<uuid:service_id>/notifications/<uuid:notification_id>', methods=['GET'])
+def get_notification_for_service(service_id, notification_id):
+
+    notification = notifications_dao.get_notification_with_personalisation(
+        service_id,
+        notification_id,
+        key_type=None,
+    )
+    return jsonify(
+        notification_with_template_schema.dump(notification).data,
+    ), 200
+
+
 def search_for_notification_by_to_field(service_id, search_term, statuses):
     results = notifications_dao.dao_get_notifications_by_to_field(service_id, search_term, statuses)
     return jsonify(
diff --git a/app/user/rest.py b/app/user/rest.py
index cc0c6e41b..9a9cf3832 100644
--- a/app/user/rest.py
+++ b/app/user/rest.py
@@ -206,7 +206,7 @@ def send_user_confirm_new_email(user_id):
         personalisation={
             'name': user_to_send_to.name,
             'url': _create_confirmation_url(user=user_to_send_to, email_address=email['email']),
-            'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/feedback'
+            'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/support'
         },
         notification_type=EMAIL_TYPE,
         api_key_id=None,
@@ -259,7 +259,7 @@ def send_already_registered_email(user_id):
         personalisation={
             'signin_url': current_app.config['ADMIN_BASE_URL'] + '/sign-in',
             'forgot_password_url': current_app.config['ADMIN_BASE_URL'] + '/forgot-password',
-            'feedback_url': 
current_app.config['ADMIN_BASE_URL'] + '/feedback' + 'feedback_url': current_app.config['ADMIN_BASE_URL'] + '/support' }, notification_type=EMAIL_TYPE, api_key_id=None, diff --git a/migrations/README b/migrations/README index 6c36a3e0e..7c44eae44 100644 --- a/migrations/README +++ b/migrations/README @@ -1,7 +1,9 @@ Generic single-database configuration. -python application.py db migration to generate migration script. +python application.py db migrate to generate migration script. python application.py db upgrade to upgrade db with script. python application.py db downgrade to rollback db changes. + +python application.py db current to show current script. diff --git a/migrations/versions/0093_data_gov_uk.py b/migrations/versions/0093_data_gov_uk.py new file mode 100644 index 000000000..fbe22d38a --- /dev/null +++ b/migrations/versions/0093_data_gov_uk.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: 0093_data_gov_uk +Revises: 0092_add_inbound_provider +Create Date: 2017-06-05 16:15:17.744908 + +""" + +# revision identifiers, used by Alembic. +revision = '0093_data_gov_uk' +down_revision = '0092_add_inbound_provider' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +DATA_GOV_UK_ID = '123496d4-44cb-4324-8e0a-4187101f4bdc' + + +def upgrade(): + op.execute("""INSERT INTO organisation VALUES ( + '{}', + '', + 'data_gov_uk_x2.png', + '' + )""".format(DATA_GOV_UK_ID)) + + +def downgrade(): + op.execute(""" + DELETE FROM organisation WHERE "id" = '{}' + """.format(DATA_GOV_UK_ID)) diff --git a/migrations/versions/0093_populate_inbound_provider.py b/migrations/versions/0093_populate_inbound_provider.py deleted file mode 100644 index 688d3f7bf..000000000 --- a/migrations/versions/0093_populate_inbound_provider.py +++ /dev/null @@ -1,22 +0,0 @@ -"""empty message - -Revision ID: 0093_populate_inbound_provider -Revises: 0092_add_inbound_provider -Create Date: 2017-05-22 10:23:43.939050 - -""" - -# revision identifiers, used by Alembic. -revision = '0093_populate_inbound_provider' -down_revision = '0092_add_inbound_provider' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -def upgrade(): - op.execute("UPDATE inbound_sms SET provider='mmg' WHERE provider is null") - - -def downgrade(): - op.execute("UPDATE inbound_sms SET provider=null WHERE provider = 'mmg'") diff --git a/migrations/versions/0094_job_stats_update.py b/migrations/versions/0094_job_stats_update.py new file mode 100644 index 000000000..6a7f7db2a --- /dev/null +++ b/migrations/versions/0094_job_stats_update.py @@ -0,0 +1,25 @@ +"""empty message + +Revision ID: 0094_job_stats_update +Revises: 0093_data_gov_uk +Create Date: 2017-06-06 14:37:30.051647 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = '0094_job_stats_update' +down_revision = '0093_data_gov_uk' + + +def upgrade(): + op.add_column('job_statistics', sa.Column('sent', sa.BigInteger(), nullable=True)) + op.add_column('job_statistics', sa.Column('delivered', sa.BigInteger(), nullable=True)) + op.add_column('job_statistics', sa.Column('failed', sa.BigInteger(), nullable=True)) + + +def downgrade(): + op.drop_column('job_statistics', 'sent') + op.drop_column('job_statistics', 'failed') + op.drop_column('job_statistics', 'delivered') diff --git a/migrations/versions/0095_migrate_existing_svc_perms.py b/migrations/versions/0095_migrate_existing_svc_perms.py new file mode 100644 index 000000000..0211450f8 --- /dev/null +++ b/migrations/versions/0095_migrate_existing_svc_perms.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: 0095_migrate_existing_svc_perms +Revises: 0094_job_stats_update +Create Date: 2017-05-23 18:13:03.532095 + +""" + +# revision identifiers, used by Alembic. +revision = '0095_migrate_existing_svc_perms' +down_revision = '0094_job_stats_update' + +from alembic import op +import sqlalchemy as sa + +migration_date = '2017-05-26 17:30:00.000000' + + +def upgrade(): + def get_values(permission): + return "SELECT id, '{0}', '{1}' FROM services WHERE "\ + "id NOT IN (SELECT service_id FROM service_permissions "\ + "WHERE service_id=id AND permission='{0}')".format(permission, migration_date) + + def get_values_if_flag(permission, flag): + return "SELECT id, '{0}', '{1}' FROM services WHERE "\ + "{2} AND id NOT IN (SELECT service_id FROM service_permissions "\ + "WHERE service_id=id AND permission='{0}')".format(permission, migration_date, flag) + + op.execute("INSERT INTO service_permissions (service_id, permission, created_at) {}".format(get_values('sms'))) + op.execute("INSERT INTO service_permissions (service_id, permission, created_at) {}".format(get_values('email'))) + op.execute("INSERT INTO service_permissions (service_id, permission, created_at) {}".format( + get_values_if_flag('letter', 'can_send_letters'))) + op.execute("INSERT INTO service_permissions (service_id, permission, created_at) {}".format( + get_values_if_flag('international_sms', 'can_send_international_sms'))) + + +def downgrade(): + op.execute("DELETE FROM service_permissions WHERE created_at = '{}'::timestamp".format(migration_date)) diff --git a/migrations/versions/0096_update_job_stats.py b/migrations/versions/0096_update_job_stats.py new file mode 100644 index 000000000..75b345ead --- /dev/null +++ b/migrations/versions/0096_update_job_stats.py @@ -0,0 +1,35 @@ +"""empty message + +Revision ID: 0096_update_job_stats +Revises: 0095_migrate_existing_svc_perms +Create Date: 2017-06-08 15:46:49.637642 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0096_update_job_stats' +down_revision = '0095_migrate_existing_svc_perms' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +def upgrade(): + query = "UPDATE job_statistics " \ + "set sent = sms_sent + emails_sent + letters_sent, " \ + " delivered = sms_delivered + emails_delivered, " \ + " failed = sms_failed + emails_failed + letters_failed " + + conn = op.get_bind() + conn.execute(query) + + +def downgrade(): + query = "UPDATE job_statistics " \ + "set sent = 0, " \ + " delivered = 0, " \ + " failed = 0 " + + conn = op.get_bind() + conn.execute(query) diff --git a/migrations/versions/0094_notnull_inbound_provider.py b/migrations/versions/0097_notnull_inbound_provider.py similarity index 71% rename from migrations/versions/0094_notnull_inbound_provider.py rename to migrations/versions/0097_notnull_inbound_provider.py index ebf98e3a7..48f5e778d 100644 --- a/migrations/versions/0094_notnull_inbound_provider.py +++ b/migrations/versions/0097_notnull_inbound_provider.py @@ -1,14 +1,14 @@ """empty message -Revision ID: 0094_notnull_inbound_provider -Revises: 0093_populate_inbound_provider +Revision ID: 0097_notnull_inbound_provider +Revises: 0096_update_job_stats Create Date: 2017-06-02 16:50:11.698423 """ # revision identifiers, used by Alembic. -revision = '0094_notnull_inbound_provider' -down_revision = '0093_populate_inbound_provider' +revision = '0097_notnull_inbound_provider' +down_revision = '0096_update_job_stats' from alembic import op import sqlalchemy as sa diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 7a347e35c..b3ff5d478 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -5,19 +5,23 @@ from functools import partial from flask import current_app from freezegun import freeze_time -from app.celery.scheduled_tasks import s3, timeout_job_statistics, delete_sms_notifications_older_than_seven_days, \ - delete_letter_notifications_older_than_seven_days, delete_email_notifications_older_than_seven_days, \ - send_scheduled_notifications from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( + delete_email_notifications_older_than_seven_days, + delete_inbound_sms_older_than_seven_days, + delete_invitations, + delete_notifications_created_more_than_a_week_ago_by_type, + delete_letter_notifications_older_than_seven_days, + delete_sms_notifications_older_than_seven_days, delete_verify_codes, remove_csv_files, - delete_notifications_created_more_than_a_week_ago_by_type, - delete_invitations, - timeout_notifications, run_scheduled_jobs, + s3, send_daily_performance_platform_stats, - switch_current_sms_provider_on_slow_delivery + send_scheduled_notifications, + switch_current_sms_provider_on_slow_delivery, + timeout_job_statistics, + timeout_notifications ) from app.clients.performance_platform.performance_platform_client import PerformancePlatformClient from app.dao.jobs_dao import dao_get_job_by_id @@ -26,9 +30,12 @@ from app.dao.provider_details_dao import ( dao_update_provider_details, get_current_provider ) -from app.models import Service, Template +from app.models import ( + Service, Template, + SMS_TYPE, LETTER_TYPE +) from app.utils import get_london_midnight_in_utc -from tests.app.db import create_notification, create_service +from tests.app.db import create_notification, create_service, create_template, create_job from tests.app.conftest import ( sample_job as create_sample_job, 
sample_notification_history as create_notification_history, @@ -71,7 +78,8 @@ def prepare_current_provider(restore_provider_details): def test_should_have_decorated_tasks_functions(): assert delete_verify_codes.__wrapped__.__name__ == 'delete_verify_codes' - assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == 'delete_notifications_created_more_than_a_week_ago_by_type' # noqa + assert delete_notifications_created_more_than_a_week_ago_by_type.__wrapped__.__name__ == \ + 'delete_notifications_created_more_than_a_week_ago_by_type' assert timeout_notifications.__wrapped__.__name__ == 'timeout_notifications' assert delete_invitations.__wrapped__.__name__ == 'delete_invitations' assert run_scheduled_jobs.__wrapped__.__name__ == 'run_scheduled_jobs' @@ -79,6 +87,8 @@ def test_should_have_decorated_tasks_functions(): assert send_daily_performance_platform_stats.__wrapped__.__name__ == 'send_daily_performance_platform_stats' assert switch_current_sms_provider_on_slow_delivery.__wrapped__.__name__ == \ 'switch_current_sms_provider_on_slow_delivery' + assert delete_inbound_sms_older_than_seven_days.__wrapped__.__name__ == \ + 'delete_inbound_sms_older_than_seven_days' def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): @@ -207,22 +217,33 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ ]) -def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_db_session, mocker): +@freeze_time('2016-10-18T10:00:00') +def test_will_remove_csv_files_for_jobs_older_than_seven_days( + notify_db, notify_db_session, mocker, sample_template +): mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + seven_days_ago = datetime.utcnow() - timedelta(days=7) + just_under_seven_days = seven_days_ago + timedelta(seconds=1) + eight_days_ago = seven_days_ago - timedelta(days=1) + nine_days_ago = eight_days_ago - timedelta(days=1) + just_under_nine_days = nine_days_ago + timedelta(seconds=1) + nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - eligible_job_1 = datetime(2016, 10, 10, 23, 59, 59, 000) - eligible_job_2 = datetime(2016, 10, 9, 00, 00, 00, 000) - in_eligible_job_too_new = datetime(2016, 10, 11, 00, 00, 00, 000) - in_eligible_job_too_old = datetime(2016, 10, 8, 23, 59, 59, 999) + create_sample_job(notify_db, notify_db_session, created_at=nine_days_one_second_ago) + job1_to_delete = create_sample_job(notify_db, notify_db_session, created_at=eight_days_ago) + job2_to_delete = create_sample_job(notify_db, notify_db_session, created_at=just_under_nine_days) + create_sample_job(notify_db, notify_db_session, created_at=seven_days_ago) + create_sample_job(notify_db, notify_db_session, created_at=just_under_seven_days) - job_1 = create_sample_job(notify_db, notify_db_session, created_at=eligible_job_1) - job_2 = create_sample_job(notify_db, notify_db_session, created_at=eligible_job_2) - create_sample_job(notify_db, notify_db_session, created_at=in_eligible_job_too_new) - create_sample_job(notify_db, notify_db_session, created_at=in_eligible_job_too_old) + remove_csv_files(job_types=[sample_template.template_type]) - with freeze_time('2016-10-18T10:00:00'): - remove_csv_files() - assert s3.remove_job_from_s3.call_args_list == [call(job_1.service_id, job_1.id), call(job_2.service_id, job_2.id)] + assert s3.remove_job_from_s3.call_args_list == [ + call(job1_to_delete.service_id, 
job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id) + ] def test_send_daily_performance_stats_calls_does_not_send_if_inactive( @@ -440,3 +461,30 @@ def test_timeout_job_statistics_called_with_notification_timeout(notify_api, moc dao_mock = mocker.patch('app.celery.scheduled_tasks.dao_timeout_job_statistics') timeout_job_statistics() dao_mock.assert_called_once_with(999) + + +def test_should_call_delete_inbound_sms_older_than_seven_days(notify_api, mocker): + mocker.patch('app.celery.scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago') + delete_inbound_sms_older_than_seven_days() + assert scheduled_tasks.delete_inbound_sms_created_more_than_a_week_ago.call_count == 1 + + +@freeze_time('2017-01-01 10:00:00') +def test_remove_csv_files_filters_by_type(mocker, sample_service): + mocker.patch('app.celery.scheduled_tasks.s3.remove_job_from_s3') + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) + sms_template = create_template(service=sample_service, template_type=SMS_TYPE) + + eight_days_ago = datetime.utcnow() - timedelta(days=8) + + job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) + create_job(template=sms_template, created_at=eight_days_ago) + + remove_csv_files(job_types=[LETTER_TYPE]) + + assert s3.remove_job_from_s3.call_args_list == [ + call(job_to_delete.service_id, job_to_delete.id), + ] diff --git a/tests/app/conftest.py b/tests/app/conftest.py index d49c527aa..91eff00fc 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -280,16 +280,18 @@ def sample_team_api_key(notify_db, notify_db_session, service=None): @pytest.fixture(scope='function') -def sample_job(notify_db, - notify_db_session, - service=None, - template=None, - notification_count=1, - created_at=None, - job_status='pending', - scheduled_for=None, - processing_started=None, - original_file_name='some.csv'): +def sample_job( + notify_db, + notify_db_session, + service=None, + template=None, + notification_count=1, + created_at=None, + job_status='pending', + scheduled_for=None, + processing_started=None, + original_file_name='some.csv' +): if service is None: service = sample_service(notify_db, notify_db_session) if template is None: diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py index f0fc6d8b3..b26dc913e 100644 --- a/tests/app/dao/test_inbound_sms_dao.py +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -1,11 +1,17 @@ -from datetime import datetime +from datetime import datetime, timedelta from freezegun import freeze_time -from app.dao.inbound_sms_dao import dao_get_inbound_sms_for_service, dao_count_inbound_sms_for_service - +from app.dao.inbound_sms_dao import ( + dao_get_inbound_sms_for_service, + dao_count_inbound_sms_for_service, + delete_inbound_sms_created_more_than_a_week_ago, + dao_get_inbound_sms_by_id +) from tests.app.db import create_inbound_sms, create_service +from app.models import InboundSms + def test_get_all_inbound_sms(sample_service): inbound = create_inbound_sms(sample_service) @@ -57,3 +63,35 @@ def test_count_inbound_sms_for_service(notify_db_session): create_inbound_sms(service_two) assert dao_count_inbound_sms_for_service(service_one.id) == 2 + + +@freeze_time("2017-01-01 12:00:00") +def test_should_delete_inbound_sms_older_than_seven_days(sample_service): + older_than_seven_days = datetime.utcnow() - timedelta(days=7, seconds=1) + 
create_inbound_sms(sample_service, created_at=older_than_seven_days) + delete_inbound_sms_created_more_than_a_week_ago() + + assert len(InboundSms.query.all()) == 0 + + +@freeze_time("2017-01-01 12:00:00") +def test_should_not_delete_inbound_sms_before_seven_days(sample_service): + yesterday = datetime.utcnow() - timedelta(days=1) + just_before_seven_days = datetime.utcnow() - timedelta(days=6, hours=23, minutes=59, seconds=59) + older_than_seven_days = datetime.utcnow() - timedelta(days=7, seconds=1) + + create_inbound_sms(sample_service, created_at=yesterday) + create_inbound_sms(sample_service, created_at=just_before_seven_days) + create_inbound_sms(sample_service, created_at=older_than_seven_days) + + delete_inbound_sms_created_more_than_a_week_ago() + + assert len(InboundSms.query.all()) == 2 + + +def test_get_inbound_sms_by_id_returns(sample_service): + inbound = create_inbound_sms(sample_service) + + inbound_from_db = dao_get_inbound_sms_by_id(sample_service.id, inbound.id) + + assert inbound == inbound_from_db diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index a9af1afda..42f93d230 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -17,7 +17,10 @@ from app.dao.jobs_dao import ( dao_update_job_status, dao_get_all_notifications_for_job, dao_get_jobs_older_than_limited_by) -from app.models import Job, JobStatistics +from app.models import ( + Job, JobStatistics, + EMAIL_TYPE, SMS_TYPE, LETTER_TYPE +) from tests.app.conftest import sample_notification as create_notification from tests.app.conftest import sample_job as create_job @@ -285,33 +288,30 @@ def test_get_future_scheduled_job_gets_a_job_yet_to_send(sample_scheduled_job): assert result.id == sample_scheduled_job.id -def test_should_get_jobs_seven_days_old(notify_db, notify_db_session): - # job runs at some point on each day - # shouldn't matter when, we are deleting things 7 days ago - job_run_time = '2016-10-31T10:00:00' +@freeze_time('2016-10-31 10:00:00') +def test_should_get_jobs_seven_days_old(notify_db, notify_db_session, sample_template): + """ + Jobs older than seven days are deleted, but only two day's worth (two-day window) + """ + seven_days_ago = datetime.utcnow() - timedelta(days=7) + within_seven_days = seven_days_ago + timedelta(seconds=1) - # running on the 31st means the previous 7 days are ignored + eight_days_ago = seven_days_ago - timedelta(days=1) - # 2 day window for delete jobs - # 7 days of files to skip includes the 30,29,28,27,26,25,24th, so the.... 
- last_possible_time_for_eligible_job = '2016-10-23T23:59:59' - first_possible_time_for_eligible_job = '2016-10-22T00:00:00' + nine_days_ago = eight_days_ago - timedelta(days=2) + nine_days_one_second_ago = nine_days_ago - timedelta(seconds=1) - job_1 = create_job(notify_db, notify_db_session, created_at=last_possible_time_for_eligible_job) - job_2 = create_job(notify_db, notify_db_session, created_at=first_possible_time_for_eligible_job) + job = partial(create_job, notify_db, notify_db_session) + job(created_at=seven_days_ago) + job(created_at=within_seven_days) + job_to_delete = job(created_at=eight_days_ago) + job(created_at=nine_days_ago) + job(created_at=nine_days_one_second_ago) - # bookmarks for jobs that should be ignored - last_possible_time_for_ineligible_job = '2016-10-24T00:00:00' - create_job(notify_db, notify_db_session, created_at=last_possible_time_for_ineligible_job) + jobs = dao_get_jobs_older_than_limited_by(job_types=[sample_template.template_type]) - first_possible_time_for_ineligible_job = '2016-10-21T23:59:59' - create_job(notify_db, notify_db_session, created_at=first_possible_time_for_ineligible_job) - - with freeze_time(job_run_time): - jobs = dao_get_jobs_older_than_limited_by() - assert len(jobs) == 2 - assert jobs[0].id == job_1.id - assert jobs[1].id == job_2.id + assert len(jobs) == 1 + assert jobs[0].id == job_to_delete.id def test_get_jobs_for_service_is_paginated(notify_db, notify_db_session, sample_service, sample_template): @@ -391,3 +391,23 @@ def test_dao_update_job_status(sample_job): updated_job = Job.query.get(sample_job.id) assert updated_job.job_status == 'sent to dvla' assert updated_job.updated_at + + +@freeze_time('2016-10-31 10:00:00') +def test_should_get_jobs_seven_days_old_filters_type(notify_db, notify_db_session): + eight_days_ago = datetime.utcnow() - timedelta(days=8) + letter_template = create_template(notify_db, notify_db_session, template_type=LETTER_TYPE) + sms_template = create_template(notify_db, notify_db_session, template_type=SMS_TYPE) + email_template = create_template(notify_db, notify_db_session, template_type=EMAIL_TYPE) + + job = partial(create_job, notify_db, notify_db_session, created_at=eight_days_ago) + job_to_remain = job(template=letter_template) + job(template=sms_template) + job(template=email_template) + + jobs = dao_get_jobs_older_than_limited_by( + job_types=[EMAIL_TYPE, SMS_TYPE] + ) + + assert len(jobs) == 2 + assert job_to_remain.id not in [job.id for job in jobs] diff --git a/tests/app/dao/test_notification_usage_dao.py b/tests/app/dao/test_notification_usage_dao.py index d4aec8531..83bfa264c 100644 --- a/tests/app/dao/test_notification_usage_dao.py +++ b/tests/app/dao/test_notification_usage_dao.py @@ -2,6 +2,7 @@ import uuid from datetime import datetime, timedelta import pytest +from flask import current_app from app.dao.date_util import get_financial_year from app.dao.notification_usage_dao import ( @@ -15,12 +16,13 @@ from app.models import ( Rate, NOTIFICATION_DELIVERED, NOTIFICATION_STATUS_TYPES_BILLABLE, - NOTIFICATION_STATUS_TYPES_NON_BILLABLE, - Notification) + NOTIFICATION_STATUS_TYPES_NON_BILLABLE) from tests.app.conftest import sample_notification, sample_email_template, sample_letter_template, sample_service from tests.app.db import create_notification from freezegun import freeze_time +from tests.conftest import set_config + def test_get_rates_for_year(notify_db, notify_db_session): set_up_rate(notify_db, datetime(2016, 5, 18), 0.016) @@ -266,218 +268,235 @@ def set_up_rate(notify_db, 
start_date, value): @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications(notify_db, notify_db_session, sample_service): - set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=2, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=3, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, billable_units=4, status=NOTIFICATION_DELIVERED) + set_up_rate(notify_db, datetime(2016, 1, 1), 0.016) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=1, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=2, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=3, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, billable_units=4, status=NOTIFICATION_DELIVERED) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 10 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 0.16 + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 10 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 0.16 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications( notify_db, notify_db_session, sample_service ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=2.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=5.0, status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, notify_db_session, service=sample_service, rate_multiplier=10.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=2.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=5.0, status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, notify_db_session, service=sample_service, rate_multiplier=10.0, status=NOTIFICATION_DELIVERED) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = 
datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 18 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 45 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 18 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 45 def test_returns_total_billable_units_multiplied_by_multipler_for_sms_notifications_for_several_rates( notify_db, notify_db_session, sample_service ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2) - set_up_rate(notify_db, datetime(2016, 10, 1), 4) - set_up_rate(notify_db, datetime(2017, 1, 1), 6) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - eligble_rate_1 = datetime(2016, 2, 1) - eligble_rate_2 = datetime(2016, 11, 1) - eligble_rate_3 = datetime(2017, 2, 1) + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=1.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_1) + eligble_rate_1 = datetime(2016, 2, 1) + eligble_rate_2 = datetime(2016, 11, 1) + eligble_rate_3 = datetime(2017, 2, 1) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=2.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_2) - - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - rate_multiplier=5.0, - status=NOTIFICATION_DELIVERED, - created_at=eligble_rate_3) - - start = datetime(2016, 1, 1) - end = datetime(2018, 1, 1) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 8 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 40 - - -def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( - notify_db, notify_db_session, sample_service -): - set_up_rate(notify_db, datetime(2016, 1, 1), 2) - set_up_rate(notify_db, datetime(2016, 10, 1), 4) - set_up_rate(notify_db, datetime(2017, 1, 1), 6) - - eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) - eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) - eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) - eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) - eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) - eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) - - def make_notification(created_at): sample_notification( notify_db, notify_db_session, service=sample_service, rate_multiplier=1.0, status=NOTIFICATION_DELIVERED, - created_at=created_at) + created_at=eligble_rate_1) - make_notification(eligble_rate_1_start) - make_notification(eligble_rate_1_end) - make_notification(eligble_rate_2_start) - make_notification(eligble_rate_2_end) - make_notification(eligble_rate_3_start) - make_notification(eligble_rate_3_whenever) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=2.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_2) - start = datetime(2016, 1, 1) - end = datetime(2018, 1, 1) + sample_notification( + notify_db, + notify_db_session, + 
service=sample_service, + rate_multiplier=5.0, + status=NOTIFICATION_DELIVERED, + created_at=eligble_rate_3) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 24.0 + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 8 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 40 + + +def test_returns_total_billable_units_for_sms_notifications_for_several_rates_where_dates_match_rate_boundary( + notify_db, notify_db_session, sample_service +): + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) + + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 24.0 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_ignoring_letters_and_emails( notify_db, notify_db_session, sample_service ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) - letter_template = sample_letter_template(sample_service) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - template=email_template, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - template=letter_template, - service=sample_service, - billable_units=2, - status=NOTIFICATION_DELIVERED - ) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) + letter_template = sample_letter_template(sample_service) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + 
notify_db_session, + template=email_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + template=letter_template, + service=sample_service, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_for_only_requested_service( notify_db, notify_db_session ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - sample_notification( - notify_db, - notify_db_session, - service=service_1, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=service_2, - billable_units=2, - status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=service_3, - billable_units=2, - status=NOTIFICATION_DELIVERED - ) + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_2 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + service_3 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_2, + billable_units=2, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_3, + billable_units=2, + status=NOTIFICATION_DELIVERED + ) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 5 + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 5 @freeze_time("2016-01-10 12:00:00.000000") def test_returns_total_billable_units_for_sms_notifications_handling_null_values( notify_db, notify_db_session, sample_service ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - 
billable_units=2, - rate_multiplier=None, - status=NOTIFICATION_DELIVERED) + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=2, + rate_multiplier=None, + status=NOTIFICATION_DELIVERED) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 5 @pytest.mark.parametrize('billable_units, states', ([ @@ -488,57 +507,61 @@ def test_returns_total_billable_units_for_sms_notifications_handling_null_values def test_ignores_non_billable_states_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service, billable_units, states ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - for state in states: - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=None, - status=state) + for state in states: + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=None, + status=state) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range( - start, end, sample_service.id - )[0] == billable_units - assert get_total_billable_units_for_sent_sms_notifications_in_date_range( - start, end, sample_service.id - )[1] == billable_units * 2.5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[0] == billable_units + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == billable_units * 2.5 @freeze_time("2016-01-10 12:00:00.000000") def test_restricts_to_time_period_when_returning_billable_units_for_sms_notifications( notify_db, notify_db_session, sample_service ): - set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + with set_config(current_app, 'FREE_SMS_TIER_FRAGMENT_COUNT', 0): + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=1.0, - created_at=datetime.utcnow() - timedelta(minutes=100), - status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=100), + status=NOTIFICATION_DELIVERED) - sample_notification( - notify_db, - notify_db_session, - service=sample_service, - billable_units=1, - rate_multiplier=1.0, - created_at=datetime.utcnow() - timedelta(minutes=5), - status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + 
notify_db_session, + service=sample_service, + billable_units=1, + rate_multiplier=1.0, + created_at=datetime.utcnow() - timedelta(minutes=5), + status=NOTIFICATION_DELIVERED) - start = datetime.utcnow() - timedelta(minutes=10) - end = datetime.utcnow() + timedelta(minutes=10) + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 1 - assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[1] == 2.5 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[0] == 1 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id)[1] == 2.5 def test_returns_zero_if_no_matching_rows_when_returning_billable_units_for_sms_notifications( @@ -604,3 +627,86 @@ def test_should_calculate_rate_boundaries_for_billing_query_for_three_relevant_r assert rate_boundaries[2]['start_date'] == rate_3_valid_from assert rate_boundaries[2]['end_date'] == end_date assert rate_boundaries[2]['rate'] == 0.06 + + +@freeze_time("2016-01-10 12:00:00.000000") +def test_deducts_free_tier_from_bill( + notify_db, notify_db_session +): + start_value = current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] + try: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = 1 + + set_up_rate(notify_db, datetime(2016, 1, 1), 2.5) + + service_1 = sample_service(notify_db, notify_db_session, service_name=str(uuid.uuid4())) + + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) + sample_notification( + notify_db, + notify_db_session, + service=service_1, + billable_units=1, + status=NOTIFICATION_DELIVERED) + + start = datetime.utcnow() - timedelta(minutes=10) + end = datetime.utcnow() + timedelta(minutes=10) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[0] == 2 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, service_1.id)[1] == 2.5 + finally: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = start_value + + +@freeze_time("2016-01-10 12:00:00.000000") +@pytest.mark.parametrize( + 'free_tier, expected_cost', + [(0, 24.0), (1, 22.0), (2, 20.0), (3, 16.0), (4, 12.0), (5, 6.0), (6, 0.0)] +) +def test_deducts_free_tier_from_bill_across_rate_boundaries( + notify_db, notify_db_session, sample_service, free_tier, expected_cost +): + start_value = current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] + try: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = free_tier + set_up_rate(notify_db, datetime(2016, 1, 1), 2) + set_up_rate(notify_db, datetime(2016, 10, 1), 4) + set_up_rate(notify_db, datetime(2017, 1, 1), 6) + + eligble_rate_1_start = datetime(2016, 1, 1, 0, 0, 0, 0) + eligble_rate_1_end = datetime(2016, 9, 30, 23, 59, 59, 999) + eligble_rate_2_start = datetime(2016, 10, 1, 0, 0, 0, 0) + eligble_rate_2_end = datetime(2016, 12, 31, 23, 59, 59, 999) + eligble_rate_3_start = datetime(2017, 1, 1, 0, 0, 0, 0) + eligble_rate_3_whenever = datetime(2017, 12, 12, 0, 0, 0, 0) + + def make_notification(created_at): + sample_notification( + notify_db, + notify_db_session, + service=sample_service, + rate_multiplier=1.0, + status=NOTIFICATION_DELIVERED, + created_at=created_at) + + make_notification(eligble_rate_1_start) + make_notification(eligble_rate_1_end) + 
make_notification(eligble_rate_2_start) + make_notification(eligble_rate_2_end) + make_notification(eligble_rate_3_start) + make_notification(eligble_rate_3_whenever) + + start = datetime(2016, 1, 1) + end = datetime(2018, 1, 1) + + assert get_total_billable_units_for_sent_sms_notifications_in_date_range(start, end, sample_service.id)[0] == 6 + assert get_total_billable_units_for_sent_sms_notifications_in_date_range( + start, end, sample_service.id + )[1] == expected_cost + finally: + current_app.config['FREE_SMS_TIER_FRAGMENT_COUNT'] = start_value diff --git a/tests/app/dao/test_statistics_dao.py b/tests/app/dao/test_statistics_dao.py index 4425a3409..5a73ea672 100644 --- a/tests/app/dao/test_statistics_dao.py +++ b/tests/app/dao/test_statistics_dao.py @@ -199,6 +199,10 @@ def test_should_update_a_stats_entry_with_its_success_outcome_for_a_job( assert stat.sms_failed == 0 assert stat.letters_failed == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == email_count + sms_count + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_TECHNICAL_FAILURE), @@ -264,6 +268,10 @@ def test_should_update_a_stats_entry_with_its_error_outcomes_for_a_job( assert stat.emails_delivered == 0 assert stat.sms_delivered == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == 0 + assert stat.failed == email_count + sms_count + letter_count + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_DELIVERED), @@ -326,6 +334,10 @@ def test_should_update_a_stats_entry_with_its_success_outcomes_for_a_job( assert stat.emails_delivered == email_count assert stat.sms_delivered == sms_count + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == (0 if notification_type == LETTER_TYPE else 1) + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ (SMS_TYPE, 1, 0, 0, NOTIFICATION_PENDING), @@ -394,6 +406,10 @@ def test_should_not_update_job_stats_if_irrelevant_status( assert stat.emails_delivered == 0 assert stat.sms_delivered == 0 + assert stat.sent == email_count + sms_count + letter_count + assert stat.delivered == 0 + assert stat.failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ (SMS_TYPE, 2, 1, 1), @@ -480,41 +496,52 @@ def test_inserting_one_type_of_notification_maintains_other_counts( assert updated_stats[0].sms_sent == sms_count assert updated_stats[0].letters_sent == letter_count + if notification_type == EMAIL_TYPE: + assert updated_stats[0].sent == email_count + elif notification_type == SMS_TYPE: + assert updated_stats[0].sent == sms_count + elif notification_type == LETTER_TYPE: + assert updated_stats[0].sent == letter_count + def test_updating_one_type_of_notification_to_success_maintains_other_counts( notify_db, notify_db_session, - sample_job, + sample_service, sample_letter_template ): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + job_1 = sample_job(notify_db, notify_db_session, service=sample_service) + job_2 = sample_job(notify_db, notify_db_session, service=sample_service) + job_3 = sample_job(notify_db, notify_db_session, service=sample_service) + + sms_template = 
sample_template(notify_db, notify_db_session, service=sample_service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) letter_template = sample_letter_template letter = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=letter_template, - job=sample_job, + job=job_1, status=NOTIFICATION_CREATED ) email = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=email_template, - job=sample_job, + job=job_2, status=NOTIFICATION_CREATED ) sms = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=sms_template, - job=sample_job, + job=job_3, status=NOTIFICATION_CREATED ) @@ -530,49 +557,76 @@ def test_updating_one_type_of_notification_to_success_maintains_other_counts( update_job_stats_outcome_count(email) update_job_stats_outcome_count(sms) - stats = JobStatistics.query.all() - assert len(stats) == 1 - assert stats[0].emails_sent == 1 - assert stats[0].sms_sent == 1 + stats = JobStatistics.query.order_by(JobStatistics.created_at).all() + assert len(stats) == 3 assert stats[0].letters_sent == 1 - assert stats[0].emails_delivered == 1 - assert stats[0].sms_delivered == 1 + assert stats[0].emails_sent == 0 + assert stats[0].sms_sent == 0 + assert stats[0].emails_delivered == 0 + assert stats[0].sms_delivered == 0 + + assert stats[1].letters_sent == 0 + assert stats[1].emails_sent == 1 + assert stats[1].sms_sent == 0 + assert stats[1].emails_delivered == 1 + assert stats[1].sms_delivered == 0 + + assert stats[2].letters_sent == 0 + assert stats[2].emails_sent == 0 + assert stats[2].sms_sent == 1 + assert stats[2].emails_delivered == 0 + assert stats[2].sms_delivered == 1 + + assert stats[0].sent == 1 + assert stats[0].delivered == 0 + assert stats[0].failed == 0 + + assert stats[1].sent == 1 + assert stats[1].delivered == 1 + assert stats[1].failed == 0 + + assert stats[2].sent == 1 + assert stats[2].delivered == 1 + assert stats[2].failed == 0 def test_updating_one_type_of_notification_to_error_maintains_other_counts( notify_db, notify_db_session, - sample_job, + sample_service, sample_letter_template ): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + job_1 = sample_job(notify_db, notify_db_session, service=sample_service) + job_2 = sample_job(notify_db, notify_db_session, service=sample_service) + job_3 = sample_job(notify_db, notify_db_session, service=sample_service) + sms_template = sample_template(notify_db, notify_db_session, service=sample_service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_service) letter_template = sample_letter_template letter = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=letter_template, - job=sample_job, + job=job_1, status=NOTIFICATION_CREATED ) email = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=email_template, - job=sample_job, + job=job_2, status=NOTIFICATION_CREATED ) sms = sample_notification( notify_db, notify_db_session, - service=sample_job.service, + service=sample_service, template=sms_template, - job=sample_job, + job=job_3, status=NOTIFICATION_CREATED ) @@ -588,20 +642,50 @@ def 
test_updating_one_type_of_notification_to_error_maintains_other_counts( update_job_stats_outcome_count(email) update_job_stats_outcome_count(sms) - stats = JobStatistics.query.all() - assert len(stats) == 1 - assert stats[0].emails_sent == 1 - assert stats[0].sms_sent == 1 + stats = JobStatistics.query.order_by(JobStatistics.created_at).all() + assert len(stats) == 3 + assert stats[0].emails_sent == 0 + assert stats[0].sms_sent == 0 assert stats[0].letters_sent == 1 assert stats[0].emails_delivered == 0 assert stats[0].sms_delivered == 0 - assert stats[0].sms_failed == 1 - assert stats[0].emails_failed == 1 + assert stats[0].sms_failed == 0 + assert stats[0].emails_failed == 0 + assert stats[0].letters_failed == 1 + + assert stats[1].emails_sent == 1 + assert stats[1].sms_sent == 0 + assert stats[1].letters_sent == 0 + assert stats[1].emails_delivered == 0 + assert stats[1].sms_delivered == 0 + assert stats[1].sms_failed == 0 + assert stats[1].emails_failed == 1 + assert stats[1].letters_failed == 0 + + assert stats[2].emails_sent == 0 + assert stats[2].sms_sent == 1 + assert stats[2].letters_sent == 0 + assert stats[2].emails_delivered == 0 + assert stats[2].sms_delivered == 0 + assert stats[2].sms_failed == 1 + assert stats[2].emails_failed == 0 + assert stats[2].letters_failed == 0 + + assert stats[0].sent == 1 + assert stats[0].delivered == 0 + assert stats[0].failed == 1 + + assert stats[1].sent == 1 + assert stats[1].delivered == 0 + assert stats[1].failed == 1 + + assert stats[2].sent == 1 + assert stats[2].delivered == 0 + assert stats[2].failed == 1 -def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, notify_db_session, sample_job): - sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) - email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) +def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, notify_db_session, + sample_job, sample_template): one_minute_ago = datetime.utcnow() - timedelta(minutes=1) @@ -609,43 +693,51 @@ def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, not notify_db, notify_db_session, service=sample_job.service, - template=sms_template, + template=sample_template, job=sample_job, status=NOTIFICATION_CREATED ) - email = sample_notification( + sms_2 = sample_notification( notify_db, notify_db_session, service=sample_job.service, - template=email_template, + template=sample_template, job=sample_job, status=NOTIFICATION_CREATED ) - create_or_update_job_sending_statistics(email) create_or_update_job_sending_statistics(sms) + create_or_update_job_sending_statistics(sms_2) JobStatistics.query.update({JobStatistics.created_at: one_minute_ago}) - intial_stats = JobStatistics.query.all() + initial_stats = JobStatistics.query.all() - assert intial_stats[0].emails_sent == 1 - assert intial_stats[0].sms_sent == 1 - assert intial_stats[0].emails_delivered == 0 - assert intial_stats[0].sms_delivered == 0 - assert intial_stats[0].sms_failed == 0 - assert intial_stats[0].emails_failed == 0 + assert initial_stats[0].emails_sent == 0 + assert initial_stats[0].sms_sent == 2 + assert initial_stats[0].emails_delivered == 0 + assert initial_stats[0].sms_delivered == 0 + assert initial_stats[0].sms_failed == 0 + assert initial_stats[0].emails_failed == 0 + + assert initial_stats[0].sent == 2 + assert initial_stats[0].delivered == 0 + assert initial_stats[0].failed == 0 dao_timeout_job_statistics(61) updated_stats = 
JobStatistics.query.all() - assert updated_stats[0].emails_sent == 1 - assert updated_stats[0].sms_sent == 1 + assert updated_stats[0].emails_sent == 0 + assert updated_stats[0].sms_sent == 2 assert updated_stats[0].emails_delivered == 0 assert updated_stats[0].sms_delivered == 0 assert updated_stats[0].sms_failed == 0 assert updated_stats[0].emails_failed == 0 + assert updated_stats[0].sent == 2 + assert updated_stats[0].delivered == 0 + assert updated_stats[0].failed == 0 + @pytest.mark.parametrize('notification_type, sms_count, email_count', [ (SMS_TYPE, 3, 0), @@ -688,6 +780,9 @@ def test_timeout_job_counts_timesout_multiple_jobs( assert stats.sms_delivered == 0 assert stats.sms_failed == 0 assert stats.emails_failed == 0 + assert stats.sent == email_count + sms_count + assert stats.delivered == 0 + assert stats.failed == 0 dao_timeout_job_statistics(1) updated_stats = JobStatistics.query.all() @@ -698,6 +793,9 @@ assert stats.sms_delivered == 0 assert stats.sms_failed == sms_count assert stats.emails_failed == email_count + assert stats.sent == email_count + sms_count + assert stats.delivered == 0 + assert stats.failed == email_count + sms_count count_notifications = len(NOTIFICATION_STATUS_TYPES) @@ -754,17 +852,13 @@ def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert initial_stats[0].sms_failed == 0 assert initial_stats[0].emails_failed == 0 - all = JobStatistics.query.all() - for a in all: - print(a) + assert initial_stats[0].sent == count_notifications + assert initial_stats[0].delivered == 0 + assert initial_stats[0].failed == 0 # timeout the notifications dao_timeout_job_statistics(1) - all = JobStatistics.query.all() - for a in all: - print(a) - # after timeout all delivered states are success and ALL other states are failed updated_stats = JobStatistics.query.filter_by(job_id=email_job.id).all() assert updated_stats[0].emails_sent == count_notifications @@ -774,6 +868,10 @@ def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert updated_stats[0].sms_failed == 0 assert updated_stats[0].emails_failed == count_error_notifications + + assert updated_stats[0].sent == count_notifications + assert updated_stats[0].delivered == count_success_notifications + assert updated_stats[0].failed == count_error_notifications + sms_stats = JobStatistics.query.filter_by(job_id=sms_job.id).all() assert sms_stats[0].emails_sent == 0 assert sms_stats[0].sms_sent == 1 @@ -781,6 +879,9 @@ def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sm assert sms_stats[0].sms_delivered == 0 assert sms_stats[0].sms_failed == 1 assert sms_stats[0].emails_failed == 0 + assert sms_stats[0].sent == 1 + assert sms_stats[0].delivered == 0 + assert sms_stats[0].failed == 1 # this test is as above, but for SMS not email @@ -810,6 +911,10 @@ def test_timeout_job_sets_all_non_delivered_states_to_error( assert stats.sms_failed == 0 assert stats.emails_failed == 0 + assert stats.sent == count_notifications + assert stats.delivered == 0 + assert stats.failed == 0 + dao_timeout_job_statistics(1) updated_stats = JobStatistics.query.all() @@ -820,3 +925,7 @@ def test_timeout_job_sets_all_non_delivered_states_to_error( assert stats.sms_delivered == count_success_notifications assert stats.sms_failed == count_error_notifications assert stats.emails_failed == 0 + + assert stats.sent == count_notifications + assert stats.delivered == count_success_notifications + assert stats.failed == 
count_error_notifications diff --git a/tests/app/db.py b/tests/app/db.py index bfe292a97..6fff1612b 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -2,6 +2,7 @@ from datetime import datetime import uuid +from app.dao.inbound_sms_dao import dao_create_inbound_sms from app.dao.jobs_dao import dao_create_job from app.models import ( InboundSms, @@ -151,14 +152,15 @@ def create_notification( return notification -def create_job(template, - notification_count=1, - created_at=None, - job_status='pending', - scheduled_for=None, - processing_started=None, - original_file_name='some.csv'): - +def create_job( + template, + notification_count=1, + created_at=None, + job_status='pending', + scheduled_for=None, + processing_started=None, + original_file_name='some.csv' +): data = { 'id': uuid.uuid4(), 'service_id': template.service_id, @@ -194,11 +196,12 @@ def create_inbound_sms( provider_date=None, provider_reference=None, content='Hello', - provider="mmg" + provider="mmg", + created_at=None ): inbound = InboundSms( service=service, - created_at=datetime.utcnow(), + created_at=created_at or datetime.utcnow(), notify_number=notify_number or service.sms_sender, user_number=user_number, provider_date=provider_date or datetime.utcnow(), diff --git a/tests/app/inbound_sms/test_rest.py b/tests/app/inbound_sms/test_rest.py index da10ecb6b..b4c55cd83 100644 --- a/tests/app/inbound_sms/test_rest.py +++ b/tests/app/inbound_sms/test_rest.py @@ -112,3 +112,40 @@ def test_get_inbound_sms_summary_with_no_inbound(admin_request, sample_service): 'count': 0, 'most_recent': None } + + +def test_get_inbound_sms_by_id_returns_200(admin_request, sample_service): + inbound = create_inbound_sms(sample_service, user_number='447700900001') + + response = admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': sample_service.id, + 'inbound_sms_id': inbound.id + } + ) + + assert response['user_number'] == '447700900001' + assert response['service_id'] == str(sample_service.id) + + +def test_get_inbound_sms_by_id_invalid_id_returns_404(admin_request, sample_service): + assert admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': sample_service.id, + 'inbound_sms_id': 'bar' + }, + expected_status=404 + ) + + +def test_get_inbound_sms_by_id_with_invalid_service_id_returns_404(admin_request, sample_service): + assert admin_request.get( + 'inbound_sms.get_inbound_by_id', + endpoint_kwargs={ + 'service_id': 'foo', + 'inbound_sms_id': '2cfbd6a1-1575-4664-8969-f27be0ea40d9' + }, + expected_status=404 + ) diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 67949104a..66f743be4 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -7,11 +7,11 @@ from flask import json from app.notifications.receive_notifications import ( format_mmg_message, format_mmg_datetime, - create_inbound_mmg_sms_object + create_inbound_mmg_sms_object, + strip_leading_forty_four ) from app.models import InboundSms -from tests.app.conftest import sample_service from tests.app.db import create_service @@ -163,7 +163,6 @@ def test_receive_notification_from_firetext_persists_message_with_normalized_pho def test_returns_ok_to_firetext_if_mismatched_sms_sender(notify_db_session, client, mocker): - mock = 
mocker.patch('app.notifications.receive_notifications.statsd_client.incr') create_service(service_name='b', sms_sender='07111111199') @@ -181,3 +180,16 @@ def test_returns_ok_to_firetext_if_mismatched_sms_sender(notify_db_session, clie assert not InboundSms.query.all() assert result['status'] == 'ok' mock.assert_has_calls([call('inbound.firetext.failed')]) + + +@pytest.mark.parametrize( + 'number, expected', + [ + ('447123123123', '07123123123'), + ('447123123144', '07123123144'), + ('07123123123', '07123123123'), + ('447444444444', '07444444444') + ] +) +def test_strip_leading_country_code(number, expected): + assert strip_leading_forty_four(number) == expected diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index f2b0a8247..2eb7a1f48 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -27,6 +27,7 @@ from app.models import ( ) from tests.app.db import create_user +from tests.conftest import set_config_values def test_get_service_list(client, service_factory): @@ -147,7 +148,32 @@ def test_get_service_by_id(client, sample_service): assert json_resp['data']['sms_sender'] == current_app.config['FROM_NUMBER'] +def test_get_service_by_id_returns_free_sms_limit(client, sample_service): + + auth_header = create_authorization_header() + resp = client.get( + '/service/{}'.format(sample_service.id), + headers=[auth_header] + ) + assert resp.status_code == 200 + json_resp = json.loads(resp.get_data(as_text=True)) + assert json_resp['data']['free_sms_fragment_limit'] == 250000 + + +def test_get_detailed_service_by_id_returns_free_sms_limit(client, sample_service): + + auth_header = create_authorization_header() + resp = client.get( + '/service/{}?detailed=True'.format(sample_service.id), + headers=[auth_header] + ) + assert resp.status_code == 200 + json_resp = json.loads(resp.get_data(as_text=True)) + assert json_resp['data']['free_sms_fragment_limit'] == 250000 + + def test_get_service_list_has_default_permissions(client, service_factory): + service_factory.get('one') service_factory.get('one') service_factory.get('two') service_factory.get('three') @@ -1248,6 +1274,48 @@ def test_get_all_notifications_for_service_in_order(notify_api, notify_db, notif assert response.status_code == 200 +def test_get_notification_for_service_without_uuid(client, notify_db, notify_db_session): + service_1 = create_service(notify_db, notify_db_session, service_name="1", email_from='1') + response = client.get( + path='/service/{}/notifications/{}'.format(service_1.id, 'foo'), + headers=[create_authorization_header()] + ) + assert response.status_code == 404 + + +def test_get_notification_for_service(client, notify_db, notify_db_session): + + service_1 = create_service(notify_db, notify_db_session, service_name="1", email_from='1') + service_2 = create_service(notify_db, notify_db_session, service_name="2", email_from='2') + + service_1_notifications = [ + create_sample_notification(notify_db, notify_db_session, service=service_1), + create_sample_notification(notify_db, notify_db_session, service=service_1), + create_sample_notification(notify_db, notify_db_session, service=service_1), + ] + + service_2_notifications = [ + create_sample_notification(notify_db, notify_db_session, service=service_2) + ] + + for notification in service_1_notifications: + response = client.get( + path='/service/{}/notifications/{}'.format(service_1.id, notification.id), + headers=[create_authorization_header()] + ) + resp = json.loads(response.get_data(as_text=True)) + assert 
str(resp['id']) == str(notification.id) + assert response.status_code == 200 + + service_2_response = client.get( + path='/service/{}/notifications/{}'.format(service_2.id, notification.id), + headers=[create_authorization_header()] + ) + assert service_2_response.status_code == 404 + service_2_response = json.loads(service_2_response.get_data(as_text=True)) + assert service_2_response == {'message': 'No result found', 'result': 'error'} + + @pytest.mark.parametrize( 'include_from_test_key, expected_count_of_notifications', [ @@ -1990,7 +2058,6 @@ def test_get_yearly_billing_usage_count_returns_from_cache_if_present(client, sa '/service/{}/yearly-sms-billable-units?year=2016'.format(sample_service.id), headers=[create_authorization_header()] ) - print(response.get_data(as_text=True)) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == { 'billable_sms_units': 50,