diff --git a/app/__init__.py b/app/__init__.py index 8d8e18270..c5b509f30 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -40,6 +40,7 @@ performance_platform_client = PerformancePlatformClient() clients = Clients() api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user) +authenticated_service = LocalProxy(lambda: _request_ctx_stack.top.authenticated_service) def create_app(app_name=None): diff --git a/app/authentication/auth.py b/app/authentication/auth.py index 1e9fbece8..77fba3d95 100644 --- a/app/authentication/auth.py +++ b/app/authentication/auth.py @@ -5,7 +5,7 @@ from sqlalchemy.orm.exc import NoResultFound from notifications_python_client.authentication import decode_jwt_token, get_token_issuer from notifications_python_client.errors import TokenDecodeError, TokenExpiredError, TokenIssuerError -from app.dao.services_dao import dao_fetch_service_by_id +from app.dao.services_dao import dao_fetch_service_by_id_with_api_keys class AuthError(Exception): @@ -59,7 +59,7 @@ def requires_auth(): client = __get_token_issuer(auth_token) try: - service = dao_fetch_service_by_id(client) + service = dao_fetch_service_by_id_with_api_keys(client) except DataError: raise AuthError("Invalid token: service id is not the right data type", 403) except NoResultFound: @@ -81,7 +81,9 @@ def requires_auth(): raise AuthError("Invalid token: API key revoked", 403) g.service_id = api_key.service_id + _request_ctx_stack.top.authenticated_service = service _request_ctx_stack.top.api_user = api_key + return else: # service has API keys, but none matching the one the user provided diff --git a/app/aws/s3.py b/app/aws/s3.py index 2aa7aac39..ed206b3a4 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -4,7 +4,12 @@ from flask import current_app FILE_LOCATION_STRUCTURE = 'service-{}-notify/{}.csv' -def get_s3_job_object(bucket_name, file_location): +def get_s3_file(bucket_name, file_location): + s3_file = get_s3_object(bucket_name, file_location) + return 
s3_file.get()['Body'].read().decode('utf-8') + + +def get_s3_object(bucket_name, file_location): s3 = resource('s3') return s3.Object(bucket_name, file_location) @@ -12,12 +17,12 @@ def get_s3_job_object(bucket_name, file_location): def get_job_from_s3(service_id, job_id): bucket_name = current_app.config['CSV_UPLOAD_BUCKET_NAME'] file_location = FILE_LOCATION_STRUCTURE.format(service_id, job_id) - obj = get_s3_job_object(bucket_name, file_location) + obj = get_s3_object(bucket_name, file_location) return obj.get()['Body'].read().decode('utf-8') def remove_job_from_s3(service_id, job_id): bucket_name = current_app.config['CSV_UPLOAD_BUCKET_NAME'] file_location = FILE_LOCATION_STRUCTURE.format(service_id, job_id) - obj = get_s3_job_object(bucket_name, file_location) + obj = get_s3_object(bucket_name, file_location) return obj.delete() diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index b76122819..fbea3bdb0 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -16,6 +16,7 @@ from app.dao.notifications_dao import ( dao_timeout_notifications, is_delivery_slow_for_provider ) +from app.dao.statistics_dao import dao_timeout_job_statistics from app.dao.provider_details_dao import ( get_current_provider, dao_toggle_sms_provider @@ -180,3 +181,12 @@ def switch_current_sms_provider_on_slow_delivery(): ) dao_toggle_sms_provider(current_provider.identifier) + + +@notify_celery.task(name='timeout-job-statistics') +@statsd(namespace="tasks") +def timeout_job_statistics(): + updated = dao_timeout_job_statistics(current_app.config.get('SENDING_NOTIFICATIONS_TIMEOUT_PERIOD')) + if updated: + current_app.logger.info( + "Timeout period reached for {} job statistics, failure count has been updated.".format(updated)) diff --git a/app/celery/statistics_tasks.py b/app/celery/statistics_tasks.py new file mode 100644 index 000000000..a82a3791f --- /dev/null +++ b/app/celery/statistics_tasks.py @@ -0,0 +1,67 @@ +from sqlalchemy.exc 
import SQLAlchemyError + +from app import notify_celery +from flask import current_app + +from app.models import JobStatistics +from app.statsd_decorators import statsd +from app.dao.statistics_dao import ( + create_or_update_job_sending_statistics, + update_job_stats_outcome_count +) +from app.dao.notifications_dao import get_notification_by_id +from app.models import NOTIFICATION_STATUS_TYPES_COMPLETED + + +def create_initial_notification_statistic_tasks(notification): + if notification.job_id and notification.status: + record_initial_job_statistics.apply_async((str(notification.id),), queue="statistics") + + +def create_outcome_notification_statistic_tasks(notification): + if notification.job_id and notification.status in NOTIFICATION_STATUS_TYPES_COMPLETED: + record_outcome_job_statistics.apply_async((str(notification.id),), queue="statistics") + + +@notify_celery.task(bind=True, name='record_initial_job_statistics', max_retries=20, default_retry_delay=10) +@statsd(namespace="tasks") +def record_initial_job_statistics(self, notification_id): + notification = None + try: + notification = get_notification_by_id(notification_id) + if notification: + create_or_update_job_sending_statistics(notification) + else: + raise SQLAlchemyError("Failed to find notification with id {}".format(notification_id)) + except SQLAlchemyError as e: + current_app.logger.exception(e) + self.retry(queue="retry") + except self.MaxRetriesExceededError: + current_app.logger.error( + "RETRY FAILED: task record_initial_job_statistics failed for notification {}".format( + notification.id if notification else "missing ID" + ) + ) + + +@notify_celery.task(bind=True, name='record_outcome_job_statistics', max_retries=20, default_retry_delay=10) +@statsd(namespace="tasks") +def record_outcome_job_statistics(self, notification_id): + notification = None + try: + notification = get_notification_by_id(notification_id) + if notification: + updated_count = update_job_stats_outcome_count(notification) + 
if updated_count == 0: + self.retry(queue="retry") + else: + raise SQLAlchemyError("Failed to find notification with id {}".format(notification_id)) + except SQLAlchemyError as e: + current_app.logger.exception(e) + self.retry(queue="retry") + except self.MaxRetriesExceededError: + current_app.logger.error( + "RETRY FAILED: task update_job_stats_outcome_count failed for notification {}".format( + notification.id if notification else "missing ID" + ) + ) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 464e3e1be..48923d228 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -1,6 +1,7 @@ import random from datetime import (datetime) +from collections import namedtuple from flask import current_app from notifications_utils.recipients import ( @@ -354,3 +355,27 @@ def get_template_class(template_type): # since we don't need rendering capabilities (we only need to extract placeholders) both email and letter can # use the same base template return WithSubjectTemplate + + +@notify_celery.task(bind=True, name='update-letter-notifications-statuses') +@statsd(namespace="tasks") +def update_letter_notifications_statuses(self, filename): + bucket_location = '{}-ftp'.format(current_app.config['NOTIFY_EMAIL_DOMAIN']) + response_file = s3.get_s3_file(bucket_location, filename) + + try: + NotificationUpdate = namedtuple('NotificationUpdate', ['reference', 'status', 'page_count', 'cost_threshold']) + notification_updates = [NotificationUpdate(*line.split('|')) for line in response_file.splitlines()] + + except TypeError: + current_app.logger.exception('DVLA response file: {} has an invalid format'.format(filename)) + raise + + else: + if notification_updates: + for update in notification_updates: + current_app.logger.info('DVLA update: {}'.format(str(update))) + # TODO: Update notifications with desired status + return notification_updates + else: + current_app.logger.exception('DVLA response file contained no updates') diff --git a/app/config.py 
b/app/config.py index 4b0f170fb..b3528cb49 100644 --- a/app/config.py +++ b/app/config.py @@ -89,6 +89,7 @@ class Config(object): PASSWORD_RESET_TEMPLATE_ID = '474e9242-823b-4f99-813d-ed392e7f1201' ALREADY_REGISTERED_EMAIL_TEMPLATE_ID = '0880fbb1-a0c6-46f0-9a8e-36c986381ceb' CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID = 'eb4d9930-87ab-4aef-9bce-786762687884' + SERVICE_NOW_LIVE_TEMPLATE_ID = '618185c6-3636-49cd-b7d2-6f6f5eb3bdde' BROKER_URL = 'sqs://' BROKER_TRANSPORT_OPTIONS = { @@ -147,6 +148,11 @@ class Config(object): 'task': 'remove_csv_files', 'schedule': crontab(minute=0, hour=4), 'options': {'queue': 'periodic'} + }, + 'timeout-job-statistics': { + 'task': 'timeout-job-statistics', + 'schedule': crontab(minute=0, hour=5), + 'options': {'queue': 'periodic'} } } CELERY_QUEUES = [ @@ -198,6 +204,7 @@ class Config(object): ###################### class Development(Config): + SQLALCHEMY_ECHO = False NOTIFY_EMAIL_DOMAIN = 'notify.tools' CSV_UPLOAD_BUCKET_NAME = 'development-notifications-csv-upload' NOTIFY_ENVIRONMENT = 'development' @@ -211,7 +218,8 @@ class Development(Config): Queue('db-letter', Exchange('default'), routing_key='db-letter'), Queue('send-sms', Exchange('default'), routing_key='send-sms'), Queue('send-email', Exchange('default'), routing_key='send-email'), - Queue('research-mode', Exchange('default'), routing_key='research-mode') + Queue('research-mode', Exchange('default'), routing_key='research-mode'), + Queue('statistics', Exchange('default'), routing_key='statistics') ] API_HOST_NAME = "http://localhost:6011" API_RATE_LIMIT_ENABLED = True @@ -234,9 +242,10 @@ class Test(Config): Queue('db-letter', Exchange('default'), routing_key='db-letter'), Queue('send-sms', Exchange('default'), routing_key='send-sms'), Queue('send-email', Exchange('default'), routing_key='send-email'), - Queue('research-mode', Exchange('default'), routing_key='research-mode') + Queue('research-mode', Exchange('default'), routing_key='research-mode'), + Queue('statistics', 
Exchange('default'), routing_key='statistics') ] - REDIS_ENABLED = True + API_RATE_LIMIT_ENABLED = True API_HOST_NAME = "http://localhost:6011" @@ -295,6 +304,7 @@ class Sandbox(CloudFoundryConfig): NOTIFY_ENVIRONMENT = 'sandbox' CSV_UPLOAD_BUCKET_NAME = 'cf-sandbox-notifications-csv-upload' FROM_NUMBER = 'sandbox' + REDIS_ENABLED = False configs = { diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 6d4a8d6cb..b712b0e70 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -11,7 +11,7 @@ from app.models import (Job, Template, JOB_STATUS_SCHEDULED, JOB_STATUS_PENDING, - LETTER_TYPE) + LETTER_TYPE, JobStatistics) from app.statsd_decorators import statsd @@ -109,6 +109,11 @@ def dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id): def dao_create_job(job): + job_stats = JobStatistics( + job_id=job.id, + updated_at=datetime.utcnow() + ) + db.session.add(job_stats) db.session.add(job) db.session.commit() diff --git a/app/dao/service_permissions_dao.py b/app/dao/service_permissions_dao.py new file mode 100644 index 000000000..3bba3cc0d --- /dev/null +++ b/app/dao/service_permissions_dao.py @@ -0,0 +1,22 @@ +from app import db +from app.dao.dao_utils import transactional +from app.models import ServicePermission, SERVICE_PERMISSION_TYPES + + +def dao_fetch_service_permissions(service_id): + return ServicePermission.query.filter( + ServicePermission.service_id == service_id).all() + + +@transactional +def dao_add_service_permission(service_id, permission): + service_permission = ServicePermission(service_id=service_id, permission=permission) + db.session.add(service_permission) + + +def dao_remove_service_permission(service_id, permission): + deleted = ServicePermission.query.filter( + ServicePermission.service_id == service_id, + ServicePermission.permission == permission).delete() + db.session.commit() + return deleted diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index c5679289c..0e0701560 100644 --- 
a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -25,9 +25,13 @@ from app.models import ( User, InvitedUser, Service, + ServicePermission, KEY_TYPE_TEST, NOTIFICATION_STATUS_TYPES, TEMPLATE_TYPES, + JobStatistics, + SMS_TYPE, + EMAIL_TYPE ) from app.service.statistics import format_monthly_template_notification_stats from app.statsd_decorators import statsd @@ -60,6 +64,19 @@ def dao_fetch_service_by_id(service_id, only_active=False): return query.one() +def dao_fetch_service_by_id_with_api_keys(service_id, only_active=False): + query = Service.query.filter_by( + id=service_id + ).options( + joinedload('api_keys') + ) + + if only_active: + query = query.filter(Service.active) + + return query.one() + + def dao_fetch_all_services_by_user(user_id, only_active=False): query = Service.query.filter( Service.users.any(id=user_id) @@ -111,13 +128,18 @@ def dao_fetch_service_by_id_and_user(service_id, user_id): @transactional @version_class(Service) -def dao_create_service(service, user, service_id=None): +def dao_create_service(service, user, service_id=None, service_permissions=[SMS_TYPE, EMAIL_TYPE]): from app.dao.permissions_dao import permission_dao service.users.append(user) permission_dao.add_default_service_permissions_for_user(user, service) service.id = service_id or uuid.uuid4() # must be set now so version history model can use same id service.active = True service.research_mode = False + + for permission in service_permissions: + service_permission = ServicePermission(service_id=service.id, permission=permission) + db.session.add(service_permission) + db.session.add(service) @@ -160,6 +182,10 @@ def delete_service_and_all_associated_db_objects(service): query.delete() db.session.commit() + job_stats = JobStatistics.query.join(Job).filter(Job.service_id == service.id) + list(map(db.session.delete, job_stats)) + db.session.commit() + _delete_commit(NotificationStatistics.query.filter_by(service=service)) 
_delete_commit(TemplateStatistics.query.filter_by(service=service)) _delete_commit(ProviderStatistics.query.filter_by(service=service)) @@ -167,11 +193,12 @@ def delete_service_and_all_associated_db_objects(service): _delete_commit(Permission.query.filter_by(service=service)) _delete_commit(ApiKey.query.filter_by(service=service)) _delete_commit(ApiKey.get_history_model().query.filter_by(service_id=service.id)) + _delete_commit(Job.query.filter_by(service=service)) _delete_commit(NotificationHistory.query.filter_by(service=service)) _delete_commit(Notification.query.filter_by(service=service)) - _delete_commit(Job.query.filter_by(service=service)) _delete_commit(Template.query.filter_by(service=service)) _delete_commit(TemplateHistory.query.filter_by(service_id=service.id)) + _delete_commit(ServicePermission.query.filter_by(service_id=service.id)) verify_codes = VerifyCode.query.join(User).filter(User.id.in_([x.id for x in service.users])) list(map(db.session.delete, verify_codes)) @@ -384,3 +411,12 @@ def dao_suspend_service(service_id): def dao_resume_service(service_id): service = Service.query.get(service_id) service.active = True + + +def dao_fetch_active_users_for_service(service_id): + query = User.query.filter( + User.user_to_service.any(id=service_id), + User.state == 'active' + ) + + return query.all() diff --git a/app/dao/statistics_dao.py b/app/dao/statistics_dao.py new file mode 100644 index 000000000..baff82ba4 --- /dev/null +++ b/app/dao/statistics_dao.py @@ -0,0 +1,149 @@ +from datetime import datetime, timedelta +from itertools import groupby + +from flask import current_app +from sqlalchemy import func +from sqlalchemy.exc import IntegrityError, SQLAlchemyError + +from app import db +from app.dao.dao_utils import transactional +from app.models import ( + JobStatistics, + Notification, + EMAIL_TYPE, + SMS_TYPE, + LETTER_TYPE, + NOTIFICATION_STATUS_TYPES_FAILED, + NOTIFICATION_STATUS_SUCCESS, + NOTIFICATION_DELIVERED, + NOTIFICATION_SENT) +from 
app.statsd_decorators import statsd + + +@transactional +def timeout_job_counts(notifications_type, timeout_start): + total_updated = 0 + + sent = columns(notifications_type, 'sent') + delivered = columns(notifications_type, 'delivered') + failed = columns(notifications_type, 'failed') + + results = db.session.query( + JobStatistics.job_id.label('job_id'), + func.count(Notification.status).label('count'), + Notification.status.label('status') + ).filter( + Notification.notification_type == notifications_type, + JobStatistics.job_id == Notification.job_id, + JobStatistics.created_at < timeout_start, + sent != failed + delivered + ).group_by(Notification.status, JobStatistics.job_id).order_by(JobStatistics.job_id).all() + + sort = sorted(results, key=lambda result: result.job_id) + groups = [] + for k, g in groupby(sort, key=lambda result: result.job_id): + groups.append(list(g)) + + for job in groups: + sent_count = 0 + delivered_count = 0 + failed_count = 0 + for notification_status in job: + if notification_status.status in NOTIFICATION_STATUS_SUCCESS: + delivered_count += notification_status.count + else: + failed_count += notification_status.count + sent_count += notification_status.count + + total_updated += JobStatistics.query.filter_by( + job_id=notification_status.job_id + ).update({ + sent: sent_count, + failed: failed_count, + delivered: delivered_count + }, synchronize_session=False) + return total_updated + + +@statsd(namespace="dao") +def dao_timeout_job_statistics(timeout_period): + timeout_start = datetime.utcnow() - timedelta(seconds=timeout_period) + sms_count = timeout_job_counts(SMS_TYPE, timeout_start) + email_count = timeout_job_counts(EMAIL_TYPE, timeout_start) + return sms_count + email_count + + +@statsd(namespace="dao") +def create_or_update_job_sending_statistics(notification): + if __update_job_stats_sent_count(notification) == 0: + try: + __insert_job_stats(notification) + except IntegrityError as e: + current_app.logger.exception(e) + if 
__update_job_stats_sent_count(notification) == 0: + raise SQLAlchemyError("Failed to create job statistics for {}".format(notification.job_id)) + + +@transactional +def __update_job_stats_sent_count(notification): + column = columns(notification.notification_type, 'sent') + + return db.session.query(JobStatistics).filter_by( + job_id=notification.job_id, + ).update({ + column: column + 1 + }) + + +@transactional +def __insert_job_stats(notification): + stats = JobStatistics( + job_id=notification.job_id, + emails_sent=1 if notification.notification_type == EMAIL_TYPE else 0, + sms_sent=1 if notification.notification_type == SMS_TYPE else 0, + letters_sent=1 if notification.notification_type == LETTER_TYPE else 0, + updated_at=datetime.utcnow() + ) + db.session.add(stats) + + +def columns(notification_type, status): + keys = { + EMAIL_TYPE: { + 'failed': JobStatistics.emails_failed, + 'delivered': JobStatistics.emails_delivered, + 'sent': JobStatistics.emails_sent + }, + SMS_TYPE: { + 'failed': JobStatistics.sms_failed, + 'delivered': JobStatistics.sms_delivered, + 'sent': JobStatistics.sms_sent + }, + LETTER_TYPE: { + 'failed': JobStatistics.letters_failed, + 'sent': JobStatistics.letters_sent + } + } + return keys.get(notification_type).get(status) + + +@transactional +def update_job_stats_outcome_count(notification): + if notification.status in NOTIFICATION_STATUS_TYPES_FAILED: + column = columns(notification.notification_type, 'failed') + + elif notification.status in [NOTIFICATION_DELIVERED, + NOTIFICATION_SENT] and notification.notification_type != LETTER_TYPE: + column = columns(notification.notification_type, 'delivered') + + else: + column = None + + if column: + return db.session.query(JobStatistics).filter_by( + job_id=notification.job_id, + ).update({ + column: column + 1 + }) + else: + return 0 diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index f9ca7861f..0a9aa22f0 100644 --- a/app/delivery/send_to_providers.py +++ 
b/app/delivery/send_to_providers.py @@ -18,6 +18,8 @@ from app.dao.templates_dao import dao_get_template_by_id from app.models import SMS_TYPE, KEY_TYPE_TEST, BRANDING_ORG, EMAIL_TYPE, NOTIFICATION_TECHNICAL_FAILURE, \ NOTIFICATION_SENT, NOTIFICATION_SENDING +from app.celery.statistics_tasks import record_initial_job_statistics, create_initial_notification_statistic_tasks + def send_sms_to_provider(notification): service = notification.service @@ -57,6 +59,8 @@ def send_sms_to_provider(notification): notification.billable_units = template.fragment_count update_notification(notification, provider, notification.international) + create_initial_notification_statistic_tasks(notification) + current_app.logger.info( "SMS {} sent to provider {} at {}".format(notification.id, provider.get_name(), notification.sent_at) ) @@ -107,6 +111,8 @@ def send_email_to_provider(notification): notification.reference = reference update_notification(notification, provider) + create_initial_notification_statistic_tasks(notification) + current_app.logger.info( "Email {} sent to provider at {}".format(notification.id, notification.sent_at) ) diff --git a/app/models.py b/app/models.py index 1b07a6fc5..5fff42027 100644 --- a/app/models.py +++ b/app/models.py @@ -31,6 +31,18 @@ from app import ( from app.history_meta import Versioned from app.utils import get_utc_time_in_bst +SMS_TYPE = 'sms' +EMAIL_TYPE = 'email' +LETTER_TYPE = 'letter' + +TEMPLATE_TYPES = [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE] + +template_types = db.Enum(*TEMPLATE_TYPES, name='template_type') + +NORMAL = 'normal' +PRIORITY = 'priority' +TEMPLATE_PROCESS_TYPE = [NORMAL, PRIORITY] + def filter_null_value_fields(obj): return dict( @@ -131,6 +143,30 @@ class DVLAOrganisation(db.Model): name = db.Column(db.String(255), nullable=True) +INTERNATIONAL_SMS_TYPE = 'international_sms' +INCOMING_SMS_TYPE = 'incoming_sms' + +SERVICE_PERMISSION_TYPES = [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INCOMING_SMS_TYPE] + + +class 
ServicePermissionTypes(db.Model): + __tablename__ = 'service_permission_types' + + name = db.Column(db.String(255), primary_key=True) + + +class ServicePermission(db.Model): + __tablename__ = "service_permissions" + + service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), + primary_key=True, index=True, nullable=False) + service = db.relationship('Service') + permission = db.Column(db.String(255), db.ForeignKey('service_permission_types.name'), + index=True, primary_key=True, nullable=False) + created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False) + updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) + + class Service(db.Model, Versioned): __tablename__ = 'services' @@ -181,6 +217,13 @@ class Service(db.Model, Versioned): nullable=False, default=BRANDING_GOVUK ) + permissions = db.relationship('ServicePermission') + + # This is only for backward compatibility and will be dropped when the columns are removed from the data model + def set_permissions(self): + if self.permissions: + self.can_send_letters = LETTER_TYPE in [p.permission for p in self.permissions] + self.can_send_international_sms = INTERNATIONAL_SMS_TYPE in [p.permission for p in self.permissions] MOBILE_TYPE = 'mobile' @@ -293,19 +336,6 @@ class TemplateProcessTypes(db.Model): name = db.Column(db.String(255), primary_key=True) -SMS_TYPE = 'sms' -EMAIL_TYPE = 'email' -LETTER_TYPE = 'letter' - -TEMPLATE_TYPES = [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE] - -template_types = db.Enum(*TEMPLATE_TYPES, name='template_type') - -NORMAL = 'normal' -PRIORITY = 'priority' -TEMPLATE_PROCESS_TYPE = [NORMAL, PRIORITY] - - class Template(db.Model): __tablename__ = 'templates' @@ -385,7 +415,6 @@ class TemplateHistory(db.Model): default=NORMAL) def serialize(self): - serialized = { "id": self.id, "type": self.template_type, @@ -603,6 +632,11 @@ NOTIFICATION_STATUS_TYPES_COMPLETED = [ NOTIFICATION_PERMANENT_FAILURE, ] 
+NOTIFICATION_STATUS_SUCCESS = [ + NOTIFICATION_SENT, + NOTIFICATION_DELIVERED +] + NOTIFICATION_STATUS_TYPES_BILLABLE = [ NOTIFICATION_SENDING, NOTIFICATION_SENT, @@ -624,6 +658,7 @@ NOTIFICATION_STATUS_TYPES = [ NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_PERMANENT_FAILURE, ] + NOTIFICATION_STATUS_TYPES_ENUM = db.Enum(*NOTIFICATION_STATUS_TYPES, name='notify_status_type') @@ -1024,3 +1059,48 @@ class Rate(db.Model): valid_from = db.Column(db.DateTime, nullable=False) rate = db.Column(db.Float(asdecimal=False), nullable=False) notification_type = db.Column(notification_types, index=True, nullable=False) + + +class JobStatistics(db.Model): + __tablename__ = 'job_statistics' + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + job_id = db.Column(UUID(as_uuid=True), db.ForeignKey('jobs.id'), index=True, unique=True, nullable=False) + job = db.relationship('Job', backref=db.backref('job_statistics', lazy='dynamic')) + emails_sent = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + emails_delivered = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + emails_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + sms_sent = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + sms_delivered = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + sms_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + letters_sent = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + letters_failed = db.Column(db.BigInteger, index=False, unique=False, nullable=False, default=0) + created_at = db.Column( + db.DateTime, + index=False, + unique=False, + nullable=True, + default=datetime.datetime.utcnow) + updated_at = db.Column( + db.DateTime, + index=False, + unique=False, + nullable=True, + onupdate=datetime.datetime.utcnow) + + def __str__(self): + 
the_string = "" + the_string += "email sent {} email delivered {} email failed {} ".format( + self.emails_sent, self.emails_delivered, self.emails_failed + ) + the_string += "sms sent {} sms delivered {} sms failed {} ".format( + self.sms_sent, self.sms_delivered, self.sms_failed + ) + the_string += "letter sent {} letter failed {} ".format( + self.letters_sent, self.letters_failed + ) + the_string += "job_id {} ".format( + self.job_id + ) + the_string += "created at {}".format(self.created_at) + return the_string diff --git a/app/notifications/notifications_letter_callback.py b/app/notifications/notifications_letter_callback.py index f7c357055..1de6c0009 100644 --- a/app/notifications/notifications_letter_callback.py +++ b/app/notifications/notifications_letter_callback.py @@ -1,39 +1,58 @@ -from datetime import datetime +from functools import wraps from flask import ( Blueprint, jsonify, request, - current_app, - json + current_app ) -from app import statsd_client -from app.clients.email.aws_ses import get_aws_responses -from app.dao import ( - notifications_dao -) +from app.celery.tasks import update_letter_notifications_statuses +from app.v2.errors import register_errors +from app.notifications.utils import autoconfirm_subscription +from app.schema_validation import validate -from app.notifications.process_client_response import validate_callback_data letter_callback_blueprint = Blueprint('notifications_letter_callback', __name__) - -from app.errors import ( - register_errors, - InvalidRequest -) - register_errors(letter_callback_blueprint) +dvla_sns_callback_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "sns callback received on s3 update", + "type": "object", + "title": "dvla internal sns callback", + "properties": { + "Type": {"enum": ["Notification", "SubscriptionConfirmation"]}, + "MessageId": {"type": "string"}, + "Message": {"type": ["string", "object"]} + }, + "required": ["Type", "MessageId", "Message"] +} + + +def 
validate_schema(schema): + def decorator(f): + @wraps(f) + def wrapper(*args, **kw): + validate(request.get_json(force=True), schema) + return f(*args, **kw) + return wrapper + return decorator + + @letter_callback_blueprint.route('/notifications/letter/dvla', methods=['POST']) +@validate_schema(dvla_sns_callback_schema) def process_letter_response(): - try: - dvla_request = json.loads(request.data) - current_app.logger.info(dvla_request) - return jsonify( - result="success", message="DVLA callback succeeded" - ), 200 - except ValueError: - error = "DVLA callback failed: invalid json" - raise InvalidRequest(error, status_code=400) + req_json = request.get_json(force=True) + current_app.logger.info('Received SNS callback: {}'.format(req_json)) + if not autoconfirm_subscription(req_json): + # The callback should have one record for an S3 Put Event. + filename = req_json['Message']['Records'][0]['s3']['object']['key'] + current_app.logger.info('Received file from DVLA: {}'.format(filename)) + current_app.logger.info('DVLA callback: Calling task to update letter notifications') + update_letter_notifications_statuses.apply_async([filename], queue='notify') + + return jsonify( + result="success", message="DVLA callback succeeded" + ), 200 diff --git a/app/notifications/notifications_ses_callback.py b/app/notifications/notifications_ses_callback.py index ce8dec9ba..b1d0ba589 100644 --- a/app/notifications/notifications_ses_callback.py +++ b/app/notifications/notifications_ses_callback.py @@ -13,9 +13,9 @@ from app.clients.email.aws_ses import get_aws_responses from app.dao import ( notifications_dao ) - +from app.celery.statistics_tasks import create_outcome_notification_statistic_tasks from app.notifications.process_client_response import validate_callback_data -from app.notifications.utils import confirm_subscription +from app.notifications.utils import autoconfirm_subscription ses_callback_blueprint = Blueprint('notifications_ses_callback', __name__) @@ -32,14 +32,12 
@@ def process_ses_response(): try: ses_request = json.loads(request.data) - if ses_request.get('Type') == 'SubscriptionConfirmation': - current_app.logger.info("SNS subscription confirmation url: {}".format(ses_request['SubscribeURL'])) - subscribed_topic = confirm_subscription(ses_request) - if subscribed_topic: - current_app.logger.info("Automatically subscribed to topic: {}".format(subscribed_topic)) - return jsonify( - result="success", message="SES callback succeeded" - ), 200 + subscribed_topic = autoconfirm_subscription(ses_request) + if subscribed_topic: + current_app.logger.info("Automatically subscribed to topic: {}".format(subscribed_topic)) + return jsonify( + result="success", message="SES callback succeeded" + ), 200 errors = validate_callback_data(data=ses_request, fields=['Message'], client_name=client_name) if errors: @@ -95,6 +93,9 @@ def process_ses_response(): datetime.utcnow(), notification.sent_at ) + + create_outcome_notification_statistic_tasks(notification) + return jsonify( result="success", message="SES callback succeeded" ), 200 diff --git a/app/notifications/process_client_response.py b/app/notifications/process_client_response.py index f07366422..534408ce6 100644 --- a/app/notifications/process_client_response.py +++ b/app/notifications/process_client_response.py @@ -7,6 +7,8 @@ from app import statsd_client from app.dao import notifications_dao from app.clients.sms.firetext import get_firetext_responses from app.clients.sms.mmg import get_mmg_responses +from app.celery.statistics_tasks import create_outcome_notification_statistic_tasks + sms_response_mapper = { 'MMG': get_mmg_responses, @@ -45,8 +47,9 @@ def process_sms_client_response(status, reference, client_name): # validate status try: response_dict = response_parser(status) - current_app.logger.info('{} callback return status of {} for reference: {}'.format(client_name, - status, reference)) + current_app.logger.info('{} callback return status of {} for reference: {}'.format( + 
client_name, status, reference) + ) except KeyError: msg = "{} callback failed: status {} not found.".format(client_name, status) return success, msg @@ -77,5 +80,8 @@ def process_sms_client_response(status, reference, client_name): datetime.utcnow(), notification.sent_at ) + + create_outcome_notification_statistic_tasks(notification) + success = "{} callback succeeded. reference {} updated".format(client_name, reference) return success, errors diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index e8510ba63..1d03efe8b 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -46,6 +46,9 @@ def persist_notification( notification_id=None, simulated=False ): + + notification_created_at = created_at or datetime.utcnow() + notification = Notification( id=notification_id, template_id=template_id, @@ -57,7 +60,7 @@ def persist_notification( notification_type=notification_type, api_key_id=api_key_id, key_type=key_type, - created_at=created_at or datetime.utcnow(), + created_at=notification_created_at, job_id=job_id, job_row_number=job_row_number, client_reference=client_reference, @@ -80,7 +83,7 @@ def persist_notification( if redis_store.get_all_from_hash(cache_key_for_service_template_counter(service.id)): redis_store.increment_hash_value(cache_key_for_service_template_counter(service.id), template_id) current_app.logger.info( - "{} {} created at {}".format(notification.notification_type, notification.id, notification.created_at) + "{} {} created at {}".format(notification_type, notification_id, notification_created_at) ) return notification diff --git a/app/notifications/rest.py b/app/notifications/rest.py index eb395736b..faa086e32 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -5,20 +5,23 @@ from flask import ( current_app ) -from app import api_user +from app import api_user, authenticated_service from app.dao import ( templates_dao, - 
services_dao, notifications_dao ) from app.models import KEY_TYPE_TEAM, PRIORITY from app.models import SMS_TYPE -from app.notifications.process_notifications import (persist_notification, - send_notification_to_queue, - simulated_recipient) -from app.notifications.validators import (check_service_over_daily_message_limit, - check_template_is_for_notification_type, - check_template_is_active, check_rate_limiting) +from app.notifications.process_notifications import ( + persist_notification, + send_notification_to_queue, + simulated_recipient +) +from app.notifications.validators import ( + check_template_is_for_notification_type, + check_template_is_active, + check_rate_limiting +) from app.schemas import ( email_notification_schema, sms_template_notification_schema, @@ -45,9 +48,10 @@ register_errors(notifications) @notifications.route('/notifications/', methods=['GET']) def get_notification_by_id(notification_id): - notification = notifications_dao.get_notification_with_personalisation(str(api_user.service_id), - notification_id, - key_type=None) + notification = notifications_dao.get_notification_with_personalisation( + str(authenticated_service.id), + notification_id, + key_type=None) return jsonify(data={"notification": notification_with_personalisation_schema.dump(notification).data}), 200 @@ -60,7 +64,7 @@ def get_all_notifications(): limit_days = data.get('limit_days') pagination = notifications_dao.get_notifications_for_service( - str(api_user.service_id), + str(authenticated_service.id), personalisation=True, filter_dict=data, page=page, @@ -96,8 +100,6 @@ def send_notification(notification_type): if notification_type not in ['sms', 'email']: assert False - service = services_dao.dao_fetch_service_by_id(api_user.service_id) - notification_form, errors = ( sms_template_notification_schema if notification_type == SMS_TYPE else email_notification_schema ).load(request.get_json()) @@ -105,27 +107,27 @@ def send_notification(notification_type): if errors: 
raise InvalidRequest(errors, status_code=400) - check_rate_limiting(service, api_user) + check_rate_limiting(authenticated_service, api_user) template = templates_dao.dao_get_template_by_id_and_service_id( template_id=notification_form['template'], - service_id=service.id) + service_id=authenticated_service.id) check_template_is_for_notification_type(notification_type, template.template_type) check_template_is_active(template) template_object = create_template_object_for_notification(template, notification_form.get('personalisation', {})) - _service_allowed_to_send_to(notification_form, service) + _service_allowed_to_send_to(notification_form, authenticated_service) if notification_type == SMS_TYPE: - _service_can_send_internationally(service, notification_form['to']) + _service_can_send_internationally(authenticated_service, notification_form['to']) # Do not persist or send notification to the queue if it is a simulated recipient simulated = simulated_recipient(notification_form['to'], notification_type) notification_model = persist_notification(template_id=template.id, template_version=template.version, recipient=request.get_json()['to'], - service=service, + service=authenticated_service, personalisation=notification_form.get('personalisation', None), notification_type=notification_type, api_key_id=api_user.id, @@ -134,7 +136,7 @@ def send_notification(notification_type): if not simulated: queue_name = 'priority' if template.process_type == PRIORITY else None send_notification_to_queue(notification=notification_model, - research_mode=service.research_mode, + research_mode=authenticated_service.research_mode, queue=queue_name) else: current_app.logger.info("POST simulated notification for id: {}".format(notification_model.id)) diff --git a/app/notifications/utils.py b/app/notifications/utils.py index 80346c475..5f1443f09 100644 --- a/app/notifications/utils.py +++ b/app/notifications/utils.py @@ -16,3 +16,10 @@ def confirm_subscription(confirmation_request): 
raise e return confirmation_request['TopicArn'] + + +def autoconfirm_subscription(req_json): + if req_json.get('Type') == 'SubscriptionConfirmation': + current_app.logger.info("SNS subscription confirmation url: {}".format(req_json['SubscribeURL'])) + subscribed_topic = confirm_subscription(req_json) + return subscribed_topic diff --git a/app/notifications/validators.py b/app/notifications/validators.py index 5d43e7341..a680f446e 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -10,12 +10,12 @@ from app.models import KEY_TYPE_TEST, KEY_TYPE_TEAM, SMS_TYPE from app.service.utils import service_allowed_to_send_to from app.v2.errors import TooManyRequestsError, BadRequestError, RateLimitError from app import redis_store -from notifications_utils.clients import redis +from notifications_utils.clients.redis import rate_limit_cache_key, daily_limit_cache_key def check_service_over_api_rate_limit(service, api_key): if current_app.config['API_RATE_LIMIT_ENABLED']: - cache_key = redis.rate_limit_cache_key(service.id, api_key.key_type) + cache_key = rate_limit_cache_key(service.id, api_key.key_type) rate_limit = current_app.config['API_KEY_LIMITS'][api_key.key_type]['limit'] interval = current_app.config['API_KEY_LIMITS'][api_key.key_type]['interval'] if redis_store.exceeded_rate_limit(cache_key, rate_limit, interval): @@ -25,7 +25,7 @@ def check_service_over_api_rate_limit(service, api_key): def check_service_over_daily_message_limit(key_type, service): if key_type != KEY_TYPE_TEST: - cache_key = redis.daily_limit_cache_key(service.id) + cache_key = daily_limit_cache_key(service.id) service_stats = redis_store.get(cache_key) if not service_stats: service_stats = services_dao.fetch_todays_total_message_count(service.id) diff --git a/app/service/rest.py b/app/service/rest.py index 4122db82a..db8c1aefa 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -46,6 +46,7 @@ from app.errors import ( InvalidRequest, register_errors) from 
app.service import statistics from app.service.utils import get_whitelist_objects +from app.service.sender import send_notification_to_service_users from app.schemas import ( service_schema, api_key_schema, @@ -117,10 +118,25 @@ def create_service(): @service_blueprint.route('/', methods=['POST']) def update_service(service_id): fetched_service = dao_fetch_service_by_id(service_id) + # Capture the status change here as Marshmallow changes this later + service_going_live = fetched_service.restricted and not request.get_json().get('restricted', True) + current_data = dict(service_schema.dump(fetched_service).data.items()) current_data.update(request.get_json()) update_dict = service_schema.load(current_data).data dao_update_service(update_dict) + + if service_going_live: + send_notification_to_service_users( + service_id=service_id, + template_id=current_app.config['SERVICE_NOW_LIVE_TEMPLATE_ID'], + personalisation={ + 'service_name': current_data['name'], + 'message_limit': current_data['message_limit'] + }, + include_user_fields=['name'] + ) + return jsonify(data=service_schema.dump(fetched_service).data), 200 diff --git a/app/service/sender.py b/app/service/sender.py new file mode 100644 index 000000000..3c6a6a03e --- /dev/null +++ b/app/service/sender.py @@ -0,0 +1,33 @@ +from flask import current_app + +from app.dao.services_dao import dao_fetch_service_by_id, dao_fetch_active_users_for_service +from app.dao.templates_dao import dao_get_template_by_id +from app.models import EMAIL_TYPE, KEY_TYPE_NORMAL +from app.notifications.process_notifications import persist_notification, send_notification_to_queue + + +def send_notification_to_service_users(service_id, template_id, personalisation={}, include_user_fields=[]): + template = dao_get_template_by_id(template_id) + service = dao_fetch_service_by_id(service_id) + active_users = dao_fetch_active_users_for_service(service.id) + notify_service = dao_fetch_service_by_id(current_app.config['NOTIFY_SERVICE_ID']) + + for 
user in active_users: + personalisation = _add_user_fields(user, personalisation, include_user_fields) + notification = persist_notification( + template_id=template.id, + template_version=template.version, + recipient=user.email_address if template.template_type == EMAIL_TYPE else user.mobile_number, + service=notify_service, + personalisation=personalisation, + notification_type=template.template_type, + api_key_id=None, + key_type=KEY_TYPE_NORMAL + ) + send_notification_to_queue(notification, False, queue='notify') + + +def _add_user_fields(user, personalisation, fields): + for field in fields: + personalisation[field] = getattr(user, field) + return personalisation diff --git a/app/v2/notifications/get_notifications.py b/app/v2/notifications/get_notifications.py index 112d2e51e..08a0d3c5e 100644 --- a/app/v2/notifications/get_notifications.py +++ b/app/v2/notifications/get_notifications.py @@ -3,7 +3,7 @@ import uuid from flask import jsonify, request, url_for, current_app from werkzeug.exceptions import abort -from app import api_user +from app import api_user, authenticated_service from app.dao import notifications_dao from app.schema_validation import validate from app.v2.notifications import v2_notification_blueprint @@ -17,7 +17,7 @@ def get_notification_by_id(id): except ValueError or AttributeError: abort(404) notification = notifications_dao.get_notification_with_personalisation( - api_user.service_id, casted_id, key_type=None + authenticated_service.id, casted_id, key_type=None ) return jsonify(notification.serialize()), 200 @@ -38,7 +38,7 @@ def get_notifications(): data = validate(_data, get_notifications_request) paginated_notifications = notifications_dao.get_notifications_for_service( - str(api_user.service_id), + str(authenticated_service.id), filter_dict=data, key_type=api_user.key_type, personalisation=True, diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 347884ed8..a5a3a1ec4 100644 --- 
a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -1,7 +1,7 @@ from flask import request, jsonify, current_app from sqlalchemy.orm.exc import NoResultFound -from app import api_user +from app import api_user, authenticated_service from app.dao import services_dao, templates_dao from app.models import SMS_TYPE, EMAIL_TYPE, PRIORITY from app.notifications.process_notifications import ( @@ -32,17 +32,15 @@ def post_notification(notification_type): else: form = validate(request.get_json(), post_sms_request) - service = services_dao.dao_fetch_service_by_id(api_user.service_id) - - check_rate_limiting(service, api_user) + check_rate_limiting(authenticated_service, api_user) form_send_to = form['phone_number'] if notification_type == SMS_TYPE else form['email_address'] send_to = validate_and_format_recipient(send_to=form_send_to, key_type=api_user.key_type, - service=service, + service=authenticated_service, notification_type=notification_type) - template, template_with_content = __validate_template(form, service, notification_type) + template, template_with_content = __validate_template(form, authenticated_service, notification_type) # Do not persist or send notification to the queue if it is a simulated recipient simulated = simulated_recipient(send_to, notification_type) @@ -50,7 +48,7 @@ def post_notification(notification_type): notification = persist_notification(template_id=template.id, template_version=template.version, recipient=form_send_to, - service=service, + service=authenticated_service, personalisation=form.get('personalisation', None), notification_type=notification_type, api_key_id=api_user.id, @@ -60,23 +58,29 @@ def post_notification(notification_type): if not simulated: queue_name = 'priority' if template.process_type == PRIORITY else None - send_notification_to_queue(notification=notification, research_mode=service.research_mode, queue=queue_name) + send_notification_to_queue( + notification=notification, 
+ research_mode=authenticated_service.research_mode, + queue=queue_name + ) + else: current_app.logger.info("POST simulated notification for id: {}".format(notification.id)) if notification_type == SMS_TYPE: - sms_sender = service.sms_sender if service.sms_sender else current_app.config.get('FROM_NUMBER') + sms_sender = authenticated_service.sms_sender or current_app.config.get('FROM_NUMBER') resp = create_post_sms_response_from_notification(notification=notification, body=str(template_with_content), from_number=sms_sender, url_root=request.url_root, - service_id=service.id) + service_id=authenticated_service.id) else: resp = create_post_email_response_from_notification(notification=notification, content=str(template_with_content), subject=template_with_content.subject, - email_from=service.email_from, + email_from=authenticated_service.email_from, url_root=request.url_root, - service_id=service.id) + service_id=authenticated_service.id) + return jsonify(resp), 201 diff --git a/app/v2/template/get_template.py b/app/v2/template/get_template.py index fef0e1b2e..4054e161a 100644 --- a/app/v2/template/get_template.py +++ b/app/v2/template/get_template.py @@ -1,7 +1,6 @@ from flask import jsonify -from jsonschema.exceptions import ValidationError -from app import api_user +from app import authenticated_service from app.dao import templates_dao from app.schema_validation import validate from app.v2.template import v2_template_blueprint @@ -18,6 +17,6 @@ def get_template_by_id(template_id, version=None): data = validate(_data, get_template_by_id_request) template = templates_dao.dao_get_template_by_id_and_service_id( - template_id, api_user.service_id, data.get('version')) + template_id, authenticated_service.id, data.get('version')) return jsonify(template.serialize()), 200 diff --git a/app/v2/template/post_template.py b/app/v2/template/post_template.py index 3da768ef1..4de01084a 100644 --- a/app/v2/template/post_template.py +++ b/app/v2/template/post_template.py @@ -1,9 
+1,7 @@ from flask import jsonify, request -from jsonschema.exceptions import ValidationError -from app import api_user +from app import authenticated_service from app.dao import templates_dao -from app.models import SMS_TYPE from app.schema_validation import validate from app.utils import get_template_instance from app.v2.errors import BadRequestError @@ -22,7 +20,7 @@ def post_template_preview(template_id): data = validate(_data, post_template_preview_request) template = templates_dao.dao_get_template_by_id_and_service_id( - template_id, api_user.service_id) + template_id, authenticated_service.id) template_object = get_template_instance( template.__dict__, values=data.get('personalisation')) diff --git a/app/v2/templates/get_templates.py b/app/v2/templates/get_templates.py index 92c79fd63..cd34e46aa 100644 --- a/app/v2/templates/get_templates.py +++ b/app/v2/templates/get_templates.py @@ -1,7 +1,6 @@ from flask import jsonify, request -from jsonschema.exceptions import ValidationError -from app import api_user +from app import authenticated_service from app.dao import templates_dao from app.schema_validation import validate from app.v2.templates import v2_templates_blueprint @@ -12,7 +11,7 @@ from app.v2.templates.templates_schemas import get_all_template_request def get_templates(): data = validate(request.args.to_dict(), get_all_template_request) - templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id, data.get('type')) + templates = templates_dao.dao_get_all_templates_for_service(authenticated_service.id, data.get('type')) return jsonify( templates=[template.serialize() for template in templates] diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml index 2b85b9d15..73457b3c9 100644 --- a/manifest-delivery-base.yml +++ b/manifest-delivery-base.yml @@ -38,7 +38,7 @@ applications: NOTIFY_APP_NAME: delivery-worker-sender - name: notify-delivery-worker-periodic - command: scripts/run_app_paas.sh celery -A 
aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic + command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics instances: 1 memory: 2G env: diff --git a/migrations/README b/migrations/README index 98e4f9c44..6c36a3e0e 100644 --- a/migrations/README +++ b/migrations/README @@ -1 +1,7 @@ -Generic single-database configuration. \ No newline at end of file +Generic single-database configuration. + +python application.py db migration to generate migration script. + +python application.py db upgrade to upgrade db with script. + +python application.py db downgrade to rollback db changes. diff --git a/migrations/versions/0083_add_perm_types_and_svc_perm.py b/migrations/versions/0083_add_perm_types_and_svc_perm.py new file mode 100644 index 000000000..2bebb273e --- /dev/null +++ b/migrations/versions/0083_add_perm_types_and_svc_perm.py @@ -0,0 +1,53 @@ +"""empty message + +Revision ID: 0083_add_perm_types_and_svc_perm +Revises: 0082_add_go_live_template +Create Date: 2017-05-12 11:29:32.664811 + +""" + +# revision identifiers, used by Alembic. +revision = '0083_add_perm_types_and_svc_perm' +down_revision = '0082_add_go_live_template' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +def upgrade(): + ### commands auto generated by Alembic - please adjust! 
### + service_permission_types=op.create_table('service_permission_types', + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('name')) + + op.bulk_insert(service_permission_types, + [ + {'name': x} for x in { + 'letter', + 'email', + 'sms', + 'international_sms', + 'incoming_sms' + } + ]) + + op.create_table('service_permissions', + sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('permission', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['permission'], ['service_permission_types.name'], ), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], ), + sa.PrimaryKeyConstraint('service_id', 'permission')) + op.create_index(op.f('ix_service_permissions_permission'), 'service_permissions', ['permission'], unique=False) + op.create_index(op.f('ix_service_permissions_service_id'), 'service_permissions', ['service_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_service_permissions_service_id'), table_name='service_permissions') + op.drop_index(op.f('ix_service_permissions_permission'), table_name='service_permissions') + op.drop_table('service_permissions') + op.drop_table('service_permission_types') + # ### end Alembic commands ### diff --git a/migrations/versions/0084_add_job_stats.py b/migrations/versions/0084_add_job_stats.py new file mode 100644 index 000000000..0961f06b6 --- /dev/null +++ b/migrations/versions/0084_add_job_stats.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: 0084_add_job_stats +Revises: 0083_add_perm_types_and_svc_perm +Create Date: 2017-05-12 13:16:14.147368 + +""" + +# revision identifiers, used by Alembic. 
+revision = '0084_add_job_stats' +down_revision = '0083_add_perm_types_and_svc_perm' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +def upgrade(): + op.create_table('job_statistics', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('job_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('emails_sent', sa.BigInteger(), nullable=False), + sa.Column('emails_delivered', sa.BigInteger(), nullable=False), + sa.Column('emails_failed', sa.BigInteger(), nullable=False), + sa.Column('sms_sent', sa.BigInteger(), nullable=False), + sa.Column('sms_delivered', sa.BigInteger(), nullable=False), + sa.Column('sms_failed', sa.BigInteger(), nullable=False), + sa.Column('letters_sent', sa.BigInteger(), nullable=False), + sa.Column('letters_failed', sa.BigInteger(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['job_id'], ['jobs.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_job_statistics_job_id'), 'job_statistics', ['job_id'], unique=True) + + +def downgrade(): + op.drop_index(op.f('ix_job_statistics_job_id'), table_name='job_statistics') + op.drop_table('job_statistics') diff --git a/requirements.txt b/requirements.txt index 3f688a907..fe25533c1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,6 +28,6 @@ notifications-python-client>=3.1,<3.2 awscli>=1.11,<1.12 awscli-cwlogs>=1.4,<1.5 -git+https://github.com/alphagov/notifications-utils.git@16.1.3#egg=notifications-utils==16.1.3 +git+https://github.com/alphagov/notifications-utils.git@17.1.0#egg=notifications-utils==17.1.0 git+https://github.com/alphagov/boto.git@2.43.0-patch3#egg=boto==2.43.0-patch3 diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py new file mode 100644 index 000000000..38c9966dd --- /dev/null +++ b/tests/app/aws/test_s3.py @@ -0,0 +1,11 @@ +from app.aws.s3 import 
get_s3_file + + +def test_get_s3_file_makes_correct_call(sample_service, sample_job, mocker): + get_s3_mock = mocker.patch('app.aws.s3.get_s3_object') + get_s3_file('foo-bucket', 'bar-file.txt') + + get_s3_mock.assert_called_with( + 'foo-bucket', + 'bar-file.txt' + ) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 038d2ca1c..50bf5f982 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -5,7 +5,7 @@ from functools import partial from flask import current_app from freezegun import freeze_time -from app.celery.scheduled_tasks import s3 +from app.celery.scheduled_tasks import s3, timeout_job_statistics from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( delete_verify_codes, @@ -409,3 +409,10 @@ def test_switch_providers_on_slow_delivery_does_not_switch_based_on_older_notifi switch_current_sms_provider_on_slow_delivery() current_provider = get_current_provider('sms') assert starting_provider.identifier == current_provider.identifier + + +def test_timeout_job_statistics_called_with_notification_timeout(notify_api, mocker): + notify_api.config['SENDING_NOTIFICATIONS_TIMEOUT_PERIOD'] = 999 + dao_mock = mocker.patch('app.celery.scheduled_tasks.dao_timeout_job_statistics') + timeout_job_statistics() + dao_mock.assert_called_once_with(999) diff --git a/tests/app/celery/test_statistics_tasks.py b/tests/app/celery/test_statistics_tasks.py new file mode 100644 index 000000000..24aaba97d --- /dev/null +++ b/tests/app/celery/test_statistics_tasks.py @@ -0,0 +1,164 @@ +import pytest +from app.celery.statistics_tasks import ( + record_initial_job_statistics, + record_outcome_job_statistics, + create_initial_notification_statistic_tasks, + create_outcome_notification_statistic_tasks) +from sqlalchemy.exc import SQLAlchemyError +from app import create_uuid +from tests.app.conftest import sample_notification +from app.models import 
NOTIFICATION_STATUS_TYPES_COMPLETED, NOTIFICATION_SENT, NOTIFICATION_SENDING, \ + NOTIFICATION_PENDING, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED + + +def test_should_create_initial_job_task_if_notification_is_related_to_a_job( + notify_db, notify_db_session, sample_job, mocker +): + mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") + notification = sample_notification(notify_db, notify_db_session, job=sample_job) + create_initial_notification_statistic_tasks(notification) + mock.assert_called_once_with((str(notification.id), ), queue="statistics") + + +@pytest.mark.parametrize('status', [ + NOTIFICATION_SENDING, NOTIFICATION_CREATED, NOTIFICATION_PENDING +]) +def test_should_create_intial_job_task_if_notification_is_not_in_completed_state( + notify_db, notify_db_session, sample_job, mocker, status +): + mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") + notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=status) + create_initial_notification_statistic_tasks(notification) + mock.assert_called_once_with((str(notification.id), ), queue="statistics") + + +def test_should_not_create_initial_job_task_if_notification_is_not_related_to_a_job( + notify_db, notify_db_session, mocker +): + notification = sample_notification(notify_db, notify_db_session, status=NOTIFICATION_CREATED) + mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") + create_initial_notification_statistic_tasks(notification) + mock.assert_not_called() + + +def test_should_create_outcome_job_task_if_notification_is_related_to_a_job( + notify_db, notify_db_session, sample_job, mocker +): + mock = mocker.patch("app.celery.statistics_tasks.record_outcome_job_statistics.apply_async") + notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=NOTIFICATION_DELIVERED) + 
create_outcome_notification_statistic_tasks(notification) + mock.assert_called_once_with((str(notification.id), ), queue="statistics") + + +@pytest.mark.parametrize('status', NOTIFICATION_STATUS_TYPES_COMPLETED) +def test_should_create_outcome_job_task_if_notification_is_in_completed_state( + notify_db, notify_db_session, sample_job, mocker, status +): + mock = mocker.patch("app.celery.statistics_tasks.record_outcome_job_statistics.apply_async") + notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=status) + create_outcome_notification_statistic_tasks(notification) + mock.assert_called_once_with((str(notification.id), ), queue='statistics') + + +@pytest.mark.parametrize('status', [ + NOTIFICATION_SENDING, NOTIFICATION_CREATED, NOTIFICATION_PENDING +]) +def test_should_not_create_outcome_job_task_if_notification_is_not_in_completed_state_already( + notify_db, notify_db_session, sample_job, mocker, status +): + mock = mocker.patch("app.celery.statistics_tasks.record_initial_job_statistics.apply_async") + notification = sample_notification(notify_db, notify_db_session, job=sample_job, status=status) + create_outcome_notification_statistic_tasks(notification) + mock.assert_not_called() + + +def test_should_not_create_outcome_job_task_if_notification_is_not_related_to_a_job( + notify_db, notify_db_session, sample_notification, mocker +): + mock = mocker.patch("app.celery.statistics_tasks.record_outcome_job_statistics.apply_async") + create_outcome_notification_statistic_tasks(sample_notification) + mock.assert_not_called() + + +def test_should_call_create_job_stats_dao_methods(notify_db, notify_db_session, sample_notification, mocker): + dao_mock = mocker.patch("app.celery.statistics_tasks.create_or_update_job_sending_statistics") + record_initial_job_statistics(str(sample_notification.id)) + + dao_mock.assert_called_once_with(sample_notification) + + +def test_should_retry_if_persisting_the_job_stats_has_a_sql_alchemy_exception( + 
notify_db, + notify_db_session, + sample_notification, + mocker): + dao_mock = mocker.patch( + "app.celery.statistics_tasks.create_or_update_job_sending_statistics", + side_effect=SQLAlchemyError() + ) + retry_mock = mocker.patch('app.celery.statistics_tasks.record_initial_job_statistics.retry') + + record_initial_job_statistics(str(sample_notification.id)) + dao_mock.assert_called_once_with(sample_notification) + retry_mock.assert_called_with(queue="retry") + + +def test_should_call_update_job_stats_dao_outcome_methods(notify_db, notify_db_session, sample_notification, mocker): + dao_mock = mocker.patch("app.celery.statistics_tasks.update_job_stats_outcome_count") + record_outcome_job_statistics(str(sample_notification.id)) + + dao_mock.assert_called_once_with(sample_notification) + + +def test_should_retry_if_persisting_the_job_outcome_stats_has_a_sql_alchemy_exception( + notify_db, + notify_db_session, + sample_notification, + mocker): + dao_mock = mocker.patch( + "app.celery.statistics_tasks.update_job_stats_outcome_count", + side_effect=SQLAlchemyError() + ) + retry_mock = mocker.patch('app.celery.statistics_tasks.record_outcome_job_statistics.retry') + + record_outcome_job_statistics(str(sample_notification.id)) + dao_mock.assert_called_once_with(sample_notification) + retry_mock.assert_called_with(queue="retry") + + +def test_should_retry_if_persisting_the_job_outcome_stats_updates_zero_rows( + notify_db, + notify_db_session, + sample_notification, + mocker): + dao_mock = mocker.patch("app.celery.statistics_tasks.update_job_stats_outcome_count", return_value=0) + retry_mock = mocker.patch('app.celery.statistics_tasks.record_outcome_job_statistics.retry') + + record_outcome_job_statistics(str(sample_notification.id)) + dao_mock.assert_called_once_with(sample_notification) + retry_mock.assert_called_with(queue="retry") + + +def test_should_retry_if_persisting_the_job_stats_creation_cant_find_notification_by_id( + notify_db, + notify_db_session, + mocker): + 
dao_mock = mocker.patch("app.celery.statistics_tasks.create_or_update_job_sending_statistics") + retry_mock = mocker.patch('app.celery.statistics_tasks.record_initial_job_statistics.retry') + + record_initial_job_statistics(str(create_uuid())) + dao_mock.assert_not_called() + retry_mock.assert_called_with(queue="retry") + + +def test_should_retry_if_persisting_the_job_stats_outcome_cant_find_notification_by_id( + notify_db, + notify_db_session, + mocker): + + dao_mock = mocker.patch("app.celery.statistics_tasks.update_job_stats_outcome_count") + retry_mock = mocker.patch('app.celery.statistics_tasks.record_outcome_job_statistics.retry') + + record_outcome_job_statistics(str(create_uuid())) + dao_mock.assert_not_called() + retry_mock.assert_called_with(queue="retry") diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 8b9631b4f..560f68c27 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -12,15 +12,19 @@ from celery.exceptions import Retry from app import (encryption, DATETIME_FORMAT) from app.celery import provider_tasks from app.celery import tasks -from app.celery.tasks import s3, build_dvla_file, create_dvla_file_contents, update_dvla_job_to_error from app.celery.tasks import ( + s3, + build_dvla_file, + create_dvla_file_contents, + update_dvla_job_to_error, process_job, process_row, send_sms, send_email, persist_letter, get_template_class, - update_job_to_sent_to_dvla + update_job_to_sent_to_dvla, + update_letter_notifications_statuses ) from app.dao import jobs_dao, services_dao from app.models import ( @@ -34,6 +38,7 @@ from app.models import ( Job) from tests.app import load_example_csv +from tests.conftest import set_config from tests.app.conftest import ( sample_service, sample_template, @@ -1071,3 +1076,37 @@ def test_update_dvla_job_to_error(sample_letter_template, sample_letter_job): assert not n.sent_by assert 'error' == Job.query.filter_by(id=sample_letter_job.id).one().job_status + + 
+def test_update_letter_notifications_statuses_raises_for_invalid_format(notify_api, mocker): + invalid_file = 'ref-foo|Sent|1|Unsorted\nref-bar|Sent|2' + mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=invalid_file) + + with pytest.raises(TypeError): + update_letter_notifications_statuses(filename='foo.txt') + + +def test_update_letter_notifications_statuses_calls_with_correct_bucket_location(notify_api, mocker): + s3_mock = mocker.patch('app.celery.tasks.s3.get_s3_object') + + with set_config(notify_api, 'NOTIFY_EMAIL_DOMAIN', 'foo.bar'): + update_letter_notifications_statuses(filename='foo.txt') + s3_mock.assert_called_with('{}-ftp'.format(current_app.config['NOTIFY_EMAIL_DOMAIN']), 'foo.txt') + + +def test_update_letter_notifications_statuses_builds_updates_list(notify_api, mocker): + valid_file = 'ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted' + mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) + updates = update_letter_notifications_statuses(filename='foo.txt') + + assert len(updates) == 2 + + assert updates[0].reference == 'ref-foo' + assert updates[0].status == 'Sent' + assert updates[0].page_count == '1' + assert updates[0].cost_threshold == 'Unsorted' + + assert updates[1].reference == 'ref-bar' + assert updates[1].status == 'Sent' + assert updates[1].page_count == '2' + assert updates[1].cost_threshold == 'Sorted' diff --git a/tests/app/conftest.py b/tests/app/conftest.py index fc9748870..c10c053cb 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -666,11 +666,9 @@ def sample_permission(notify_db, @pytest.fixture(scope='function') -def sample_service_permission(notify_db, - notify_db_session, - service=None, - user=None, - permission="manage_settings"): +def sample_user_service_permission( + notify_db, notify_db_session, service=None, user=None, permission="manage_settings" +): if user is None: user = create_user() if service is None: diff --git a/tests/app/dao/test_jobs_dao.py 
b/tests/app/dao/test_jobs_dao.py index f01b631ba..034048a32 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -16,7 +16,7 @@ from app.dao.jobs_dao import ( dao_update_job_status, dao_get_all_notifications_for_job, dao_get_jobs_older_than_limited_by) -from app.models import Job +from app.models import Job, JobStatistics from tests.app.conftest import sample_notification as create_notification from tests.app.conftest import sample_job as create_job @@ -130,10 +130,23 @@ def test_create_job(sample_template): dao_create_job(job) assert Job.query.count() == 1 + assert JobStatistics.query.count() == 1 job_from_db = Job.query.get(job_id) assert job == job_from_db assert job_from_db.notifications_delivered == 0 assert job_from_db.notifications_failed == 0 + job_stats_from_db = JobStatistics.query.filter_by(job_id=job_id).all() + assert len(job_stats_from_db) == 1 + assert job_stats_from_db[0].sms_sent == 0 + assert job_stats_from_db[0].emails_sent == 0 + assert job_stats_from_db[0].letters_sent == 0 + + assert job_stats_from_db[0].sms_failed == 0 + assert job_stats_from_db[0].emails_failed == 0 + assert job_stats_from_db[0].letters_failed == 0 + + assert job_stats_from_db[0].sms_delivered == 0 + assert job_stats_from_db[0].emails_delivered == 0 def test_get_job_by_id(sample_job): diff --git a/tests/app/dao/test_service_permissions_dao.py b/tests/app/dao/test_service_permissions_dao.py new file mode 100644 index 000000000..c41c891f4 --- /dev/null +++ b/tests/app/dao/test_service_permissions_dao.py @@ -0,0 +1,44 @@ +import pytest + +from app.dao.service_permissions_dao import dao_fetch_service_permissions, dao_remove_service_permission +from app.models import EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INCOMING_SMS_TYPE + +from tests.app.db import create_service_permission, create_service + + +@pytest.fixture(scope='function') +def service_without_permissions(notify_db, notify_db_session): + return 
create_service(service_permissions=[]) + + +def test_create_service_permission(service_without_permissions): + service_permissions = create_service_permission( + service_id=service_without_permissions.id, permission=SMS_TYPE) + + assert len(service_permissions) == 1 + assert service_permissions[0].service_id == service_without_permissions.id + assert service_permissions[0].permission == SMS_TYPE + + +def test_fetch_service_permissions_gets_service_permissions(service_without_permissions): + create_service_permission(service_id=service_without_permissions.id, permission=LETTER_TYPE) + create_service_permission(service_id=service_without_permissions.id, permission=INTERNATIONAL_SMS_TYPE) + create_service_permission(service_id=service_without_permissions.id, permission=SMS_TYPE) + + service_permissions = dao_fetch_service_permissions(service_without_permissions.id) + + assert len(service_permissions) == 3 + assert all(sp.service_id == service_without_permissions.id for sp in service_permissions) + assert all(sp.permission in [LETTER_TYPE, INTERNATIONAL_SMS_TYPE, SMS_TYPE] for sp in service_permissions) + + +def test_remove_service_permission(service_without_permissions): + create_service_permission(service_id=service_without_permissions.id, permission=EMAIL_TYPE) + create_service_permission(service_id=service_without_permissions.id, permission=INCOMING_SMS_TYPE) + + dao_remove_service_permission(service_without_permissions.id, EMAIL_TYPE) + + permissions = dao_fetch_service_permissions(service_without_permissions.id) + assert len(permissions) == 1 + assert permissions[0].permission == INCOMING_SMS_TYPE + assert permissions[0].service_id == service_without_permissions.id diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index d37f8341b..2dc6f199f 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -24,8 +24,10 @@ from app.dao.services_dao import ( dao_fetch_todays_stats_for_all_services, 
fetch_stats_by_date_range_for_all_services, dao_suspend_service, - dao_resume_service + dao_resume_service, + dao_fetch_active_users_for_service ) +from app.dao.service_permissions_dao import dao_add_service_permission, dao_remove_service_permission from app.dao.users_dao import save_model_user from app.models import ( NotificationStatistics, @@ -46,9 +48,14 @@ from app.models import ( DVLA_ORG_HM_GOVERNMENT, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, - KEY_TYPE_TEST + KEY_TYPE_TEST, + EMAIL_TYPE, + SMS_TYPE, + LETTER_TYPE, + INTERNATIONAL_SMS_TYPE ) +from tests.app.db import create_user, create_service from tests.app.conftest import ( sample_notification as create_notification, sample_notification_history as create_notification_history, @@ -243,6 +250,62 @@ def test_get_service_by_id_returns_service(service_factory): assert dao_fetch_service_by_id(service.id).name == 'testing' +def test_create_service_returns_service_with_default_permissions(service_factory): + service = service_factory.get('testing', email_from='testing') + + service = dao_fetch_service_by_id(service.id) + assert len(service.permissions) == 2 + assert all(p.permission in [SMS_TYPE, EMAIL_TYPE] for p in service.permissions) + + +# This test is only for backward compatibility and will be removed +# when the 'can_use' columns are dropped from the Service data model +@pytest.mark.parametrize("permission_to_add, can_send_letters, can_send_international_sms", + [(LETTER_TYPE, True, False), + (INTERNATIONAL_SMS_TYPE, False, True)]) +def test_create_service_by_id_adding_service_permission_returns_service_with_permissions_set( + service_factory, permission_to_add, can_send_letters, can_send_international_sms): + service = service_factory.get('testing', email_from='testing') + + dao_add_service_permission(service_id=service.id, permission=permission_to_add) + service.set_permissions() + + service = dao_fetch_service_by_id(service.id) + assert len(service.permissions) == 3 + assert all(p.permission in [SMS_TYPE, 
EMAIL_TYPE, permission_to_add] for p in service.permissions) + assert service.can_send_letters == can_send_letters + assert service.can_send_international_sms == can_send_international_sms + + +def test_remove_permission_from_service_by_id_returns_service_with_correct_permissions(service_factory): + service = service_factory.get('testing', email_from='testing') + dao_remove_service_permission(service_id=service.id, permission=SMS_TYPE) + + service = dao_fetch_service_by_id(service.id) + assert len(service.permissions) == 1 + assert service.permissions[0].permission == EMAIL_TYPE + + +def test_create_service_by_id_adding_and_removing_letter_returns_service_without_letter(service_factory): + service = service_factory.get('testing', email_from='testing') + + dao_add_service_permission(service_id=service.id, permission=LETTER_TYPE) + service.set_permissions() + + service = dao_fetch_service_by_id(service.id) + assert len(service.permissions) == 3 + assert all(p.permission in [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE] for p in service.permissions) + assert service.can_send_letters + + dao_remove_service_permission(service_id=service.id, permission=LETTER_TYPE) + service.set_permissions() + service = dao_fetch_service_by_id(service.id) + + assert len(service.permissions) == 2 + assert all(p.permission in [SMS_TYPE, EMAIL_TYPE] for p in service.permissions) + assert not service.can_send_letters + + def test_create_service_creates_a_history_record_with_current_data(sample_user): assert Service.query.count() == 0 assert Service.get_history_model().query.count() == 0 @@ -783,3 +846,13 @@ def test_fetch_monthly_historical_template_stats_for_service_separates_templates assert len(result.get('2016-04').keys()) == 2 assert str(template_one.id) in result.get('2016-04').keys() assert str(template_two.id) in result.get('2016-04').keys() + + +def test_dao_fetch_active_users_for_service_returns_active_only(notify_db, notify_db_session): + active_user = create_user(email='active@foo.com', 
state='active') + pending_user = create_user(email='pending@foo.com', state='pending') + service = create_service(user=active_user) + dao_add_user_to_service(service, pending_user) + users = dao_fetch_active_users_for_service(service.id) + + assert len(users) == 1 diff --git a/tests/app/dao/test_statistics_dao.py b/tests/app/dao/test_statistics_dao.py new file mode 100644 index 000000000..4425a3409 --- /dev/null +++ b/tests/app/dao/test_statistics_dao.py @@ -0,0 +1,822 @@ +from datetime import datetime, timedelta +from unittest.mock import call + +import pytest +from sqlalchemy.exc import IntegrityError, SQLAlchemyError + +from app.dao.statistics_dao import ( + create_or_update_job_sending_statistics, + update_job_stats_outcome_count, + dao_timeout_job_statistics) +from app.models import ( + JobStatistics, + SMS_TYPE, + EMAIL_TYPE, + LETTER_TYPE, + NOTIFICATION_DELIVERED, + NOTIFICATION_TECHNICAL_FAILURE, + NOTIFICATION_TEMPORARY_FAILURE, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_PENDING, NOTIFICATION_CREATED, NOTIFICATION_FAILED, NOTIFICATION_SENT, NOTIFICATION_SENDING, + NOTIFICATION_STATUS_TYPES_COMPLETED, Notification, NOTIFICATION_STATUS_TYPES, NOTIFICATION_STATUS_SUCCESS) +from tests.app.conftest import sample_notification, sample_email_template, sample_template, sample_job, sample_service + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ + (SMS_TYPE, 1, 0, 0), + (EMAIL_TYPE, 0, 1, 0), + (LETTER_TYPE, 0, 0, 1) +]) +def test_should_create_a_stats_entry_for_a_job( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: 
+ template = sample_letter_template + + notification = sample_notification( + notify_db, notify_db_session, service=sample_job.service, template=template, job=sample_job + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert stat.letters_sent == letter_count + + assert stat.emails_delivered == 0 + assert stat.emails_failed == 0 + assert stat.sms_delivered == 0 + assert stat.sms_failed == 0 + assert stat.letters_failed == 0 + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ + (SMS_TYPE, 2, 0, 0), + (EMAIL_TYPE, 0, 2, 0), + (LETTER_TYPE, 0, 0, 2) +]) +def test_should_update_a_stats_entry_for_a_job( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: + template = sample_letter_template + + notification = sample_notification( + notify_db, notify_db_session, service=sample_job.service, template=template, job=sample_job + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + create_or_update_job_sending_statistics(notification) + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert stat.letters_sent == letter_count + + assert stat.emails_delivered == 0 + assert stat.emails_failed == 0 + assert stat.sms_delivered == 0 + assert stat.sms_failed == 0 + assert 
stat.letters_failed == 0 + + +def test_should_handle_error_conditions( + notify_db, + notify_db_session, + sample_job, + mocker): + create_mock = mocker.patch("app.dao.statistics_dao.__insert_job_stats", side_effect=IntegrityError("1", "2", "3")) + update_mock = mocker.patch("app.dao.statistics_dao.__update_job_stats_sent_count", return_value=0) + + notification = sample_notification(notify_db, notify_db_session, job=sample_job) + + with pytest.raises(SQLAlchemyError) as e: + create_or_update_job_sending_statistics(notification) + assert 'Failed to create job statistics for {}'.format(sample_job.id) in str(e.value) + + update_mock.assert_has_calls([call(notification), call(notification)]) + create_mock.assert_called_once_with(notification) + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ + (SMS_TYPE, 1, 0, 0), + (EMAIL_TYPE, 0, 1, 0), + (LETTER_TYPE, 0, 0, 1) +]) +def test_should_update_a_stats_entry_with_its_success_outcome_for_a_job( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: + template = sample_letter_template + + notification = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=template, + job=sample_job, + status=NOTIFICATION_DELIVERED + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + update_job_stats_outcome_count(notification) + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert 
stat.letters_sent == letter_count + + assert stat.emails_delivered == email_count + assert stat.sms_delivered == sms_count + + assert stat.emails_failed == 0 + assert stat.sms_failed == 0 + assert stat.letters_failed == 0 + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ + (SMS_TYPE, 1, 0, 0, NOTIFICATION_TECHNICAL_FAILURE), + (SMS_TYPE, 1, 0, 0, NOTIFICATION_TEMPORARY_FAILURE), + (SMS_TYPE, 1, 0, 0, NOTIFICATION_PERMANENT_FAILURE), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_TECHNICAL_FAILURE), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_PERMANENT_FAILURE), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_TEMPORARY_FAILURE), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_PERMANENT_FAILURE), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_TEMPORARY_FAILURE), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_TECHNICAL_FAILURE) +]) +def test_should_update_a_stats_entry_with_its_error_outcomes_for_a_job( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count, + status +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: + template = sample_letter_template + + notification = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=template, + job=sample_job, + status=status + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + update_job_stats_outcome_count(notification) + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert stat.letters_sent == letter_count + + assert stat.emails_failed == email_count + assert 
stat.letters_failed == letter_count + assert stat.sms_failed == sms_count + + assert stat.emails_delivered == 0 + assert stat.sms_delivered == 0 + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ + (SMS_TYPE, 1, 0, 0, NOTIFICATION_DELIVERED), + (SMS_TYPE, 1, 0, 0, NOTIFICATION_SENT), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_DELIVERED), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_SENT), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_SENT), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_DELIVERED), +]) +def test_should_update_a_stats_entry_with_its_success_outcomes_for_a_job( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count, + status +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: + template = sample_letter_template + + notification = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=template, + job=sample_job, + status=status + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + update_job_stats_outcome_count(notification) + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert stat.letters_sent == letter_count + + assert stat.emails_failed == 0 + assert stat.letters_failed == 0 + assert stat.sms_failed == 0 + + assert stat.emails_delivered == email_count + assert stat.sms_delivered == sms_count + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count, status', [ + (SMS_TYPE, 1, 0, 0, NOTIFICATION_PENDING), + (SMS_TYPE, 1, 0, 0, NOTIFICATION_CREATED), + 
(SMS_TYPE, 1, 0, 0, NOTIFICATION_FAILED), + (SMS_TYPE, 1, 0, 0, NOTIFICATION_SENDING), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_PENDING), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_CREATED), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_FAILED), + (EMAIL_TYPE, 0, 1, 0, NOTIFICATION_SENDING), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_PENDING), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_CREATED), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_FAILED), + (LETTER_TYPE, 0, 0, 1, NOTIFICATION_SENDING) +]) +def test_should_not_update_job_stats_if_irrelevant_status( + notify_db, + notify_db_session, + sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count, + status +): + template = None + + if notification_type == SMS_TYPE: + template = sample_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + if notification_type == LETTER_TYPE: + template = sample_letter_template + + notification = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=template, + job=sample_job, + status=status + ) + + create_or_update_job_sending_statistics(notification) + + stats = JobStatistics.query.all() + + assert len(stats) == 1 + + update_job_stats_outcome_count(notification) + + stat = stats[0] + assert stat.job_id == sample_job.id + + assert stat.emails_sent == email_count + assert stat.sms_sent == sms_count + assert stat.letters_sent == letter_count + + assert stat.emails_failed == 0 + assert stat.letters_failed == 0 + assert stat.sms_failed == 0 + + assert stat.emails_delivered == 0 + assert stat.sms_delivered == 0 + + +@pytest.mark.parametrize('notification_type, sms_count, email_count, letter_count', [ + (SMS_TYPE, 2, 1, 1), + (EMAIL_TYPE, 1, 2, 1), + (LETTER_TYPE, 1, 1, 2) +]) +def test_inserting_one_type_of_notification_maintains_other_counts( + notify_db, + notify_db_session, + 
sample_job, + sample_letter_template, + notification_type, + sms_count, + email_count, + letter_count +): + sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + letter_template = sample_letter_template + + template = None + + if notification_type == SMS_TYPE: + template = sms_template + + if notification_type == EMAIL_TYPE: + template = email_template + + if notification_type == LETTER_TYPE: + template = letter_template + + notification = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + letter = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=letter_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + email = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=email_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + sms = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=sms_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + create_or_update_job_sending_statistics(email) + create_or_update_job_sending_statistics(sms) + create_or_update_job_sending_statistics(letter) + + intitial_stats = JobStatistics.query.all() + assert len(intitial_stats) == 1 + assert intitial_stats[0].emails_sent == 1 + assert intitial_stats[0].sms_sent == 1 + assert intitial_stats[0].letters_sent == 1 + + create_or_update_job_sending_statistics(notification) + + updated_stats = JobStatistics.query.all() + assert updated_stats[0].job_id == sample_job.id + + assert updated_stats[0].emails_sent == email_count + assert updated_stats[0].sms_sent == sms_count + assert updated_stats[0].letters_sent == letter_count + + +def 
test_updating_one_type_of_notification_to_success_maintains_other_counts( + notify_db, + notify_db_session, + sample_job, + sample_letter_template +): + sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + letter_template = sample_letter_template + + letter = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=letter_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + email = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=email_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + sms = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=sms_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + create_or_update_job_sending_statistics(email) + create_or_update_job_sending_statistics(sms) + create_or_update_job_sending_statistics(letter) + + sms.status = NOTIFICATION_DELIVERED + email.status = NOTIFICATION_DELIVERED + letter.status = NOTIFICATION_DELIVERED + + update_job_stats_outcome_count(letter) + update_job_stats_outcome_count(email) + update_job_stats_outcome_count(sms) + + stats = JobStatistics.query.all() + assert len(stats) == 1 + assert stats[0].emails_sent == 1 + assert stats[0].sms_sent == 1 + assert stats[0].letters_sent == 1 + assert stats[0].emails_delivered == 1 + assert stats[0].sms_delivered == 1 + + +def test_updating_one_type_of_notification_to_error_maintains_other_counts( + notify_db, + notify_db_session, + sample_job, + sample_letter_template +): + sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + letter_template = sample_letter_template + + letter = sample_notification( + notify_db, + 
notify_db_session, + service=sample_job.service, + template=letter_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + email = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=email_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + sms = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=sms_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + create_or_update_job_sending_statistics(email) + create_or_update_job_sending_statistics(sms) + create_or_update_job_sending_statistics(letter) + + sms.status = NOTIFICATION_TECHNICAL_FAILURE + email.status = NOTIFICATION_TECHNICAL_FAILURE + letter.status = NOTIFICATION_TECHNICAL_FAILURE + + update_job_stats_outcome_count(letter) + update_job_stats_outcome_count(email) + update_job_stats_outcome_count(sms) + + stats = JobStatistics.query.all() + assert len(stats) == 1 + assert stats[0].emails_sent == 1 + assert stats[0].sms_sent == 1 + assert stats[0].letters_sent == 1 + assert stats[0].emails_delivered == 0 + assert stats[0].sms_delivered == 0 + assert stats[0].sms_failed == 1 + assert stats[0].emails_failed == 1 + + +def test_will_not_timeout_job_counts_before_notification_timeouts(notify_db, notify_db_session, sample_job): + sms_template = sample_template(notify_db, notify_db_session, service=sample_job.service) + email_template = sample_email_template(notify_db, notify_db_session, service=sample_job.service) + + one_minute_ago = datetime.utcnow() - timedelta(minutes=1) + + sms = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=sms_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + email = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=email_template, + job=sample_job, + status=NOTIFICATION_CREATED + ) + + create_or_update_job_sending_statistics(email) + 
create_or_update_job_sending_statistics(sms) + + JobStatistics.query.update({JobStatistics.created_at: one_minute_ago}) + + intial_stats = JobStatistics.query.all() + + assert intial_stats[0].emails_sent == 1 + assert intial_stats[0].sms_sent == 1 + assert intial_stats[0].emails_delivered == 0 + assert intial_stats[0].sms_delivered == 0 + assert intial_stats[0].sms_failed == 0 + assert intial_stats[0].emails_failed == 0 + + dao_timeout_job_statistics(61) + updated_stats = JobStatistics.query.all() + assert updated_stats[0].emails_sent == 1 + assert updated_stats[0].sms_sent == 1 + assert updated_stats[0].emails_delivered == 0 + assert updated_stats[0].sms_delivered == 0 + assert updated_stats[0].sms_failed == 0 + assert updated_stats[0].emails_failed == 0 + + +@pytest.mark.parametrize('notification_type, sms_count, email_count', [ + (SMS_TYPE, 3, 0), + (EMAIL_TYPE, 0, 3), +]) +def test_timeout_job_counts_timesout_multiple_jobs( + notify_db, notify_db_session, notification_type, sms_count, email_count +): + one_minute_ago = datetime.utcnow() - timedelta(minutes=1) + + job_1 = sample_job(notify_db, notify_db_session) + job_2 = sample_job(notify_db, notify_db_session) + job_3 = sample_job(notify_db, notify_db_session) + + jobs = [job_1, job_2, job_3] + + for job in jobs: + if notification_type == EMAIL_TYPE: + template = sample_email_template(notify_db, notify_db_session, service=job.service) + else: + template = sample_template(notify_db, notify_db_session, service=job.service) + + for i in range(3): + n = sample_notification( + notify_db, + notify_db_session, + service=job.service, + template=template, + job=job, + status=NOTIFICATION_CREATED + ) + create_or_update_job_sending_statistics(n) + + JobStatistics.query.update({JobStatistics.created_at: one_minute_ago}) + initial_stats = JobStatistics.query.all() + for stats in initial_stats: + assert stats.emails_sent == email_count + assert stats.sms_sent == sms_count + assert stats.emails_delivered == 0 + assert 
stats.sms_delivered == 0 + assert stats.sms_failed == 0 + assert stats.emails_failed == 0 + + dao_timeout_job_statistics(1) + updated_stats = JobStatistics.query.all() + for stats in updated_stats: + assert stats.emails_sent == email_count + assert stats.sms_sent == sms_count + assert stats.emails_delivered == 0 + assert stats.sms_delivered == 0 + assert stats.sms_failed == sms_count + assert stats.emails_failed == email_count + + +count_notifications = len(NOTIFICATION_STATUS_TYPES) +count_success_notifications = len(NOTIFICATION_STATUS_SUCCESS) +count_error_notifications = len(NOTIFICATION_STATUS_TYPES) - len(NOTIFICATION_STATUS_SUCCESS) + + +def test_timeout_job_sets_all_non_delivered_emails_to_error_and_doesnt_affect_sms( + notify_db, + notify_db_session +): + service = sample_service(notify_db, notify_db_session) + + sms_template = sample_template(notify_db, notify_db_session, service=service) + email_template = sample_email_template(notify_db, notify_db_session, service=service) + + email_job = sample_job( + notify_db, notify_db_session, template=email_template, service=service + ) + sms_job = sample_job( + notify_db, notify_db_session, template=sms_template, service=service + ) + + # Make an email notification in every state + for i in range(len(NOTIFICATION_STATUS_TYPES)): + n = sample_notification( + notify_db, + notify_db_session, + service=email_job.service, + template=email_template, + job=email_job, + status=NOTIFICATION_STATUS_TYPES[i] + ) + create_or_update_job_sending_statistics(n) + + # single sms notification + sms_notification = sample_notification( + notify_db, notify_db_session, service=service, template=sms_template, job=sms_job + ) + create_or_update_job_sending_statistics(sms_notification) + + # fudge the created at time on the job stats table to make the eligible for timeout query + JobStatistics.query.update({ + JobStatistics.created_at: datetime.utcnow() - timedelta(minutes=1) + }) + + # should have sent an email for every state 
(len(NOTIFICATION_STATUS_TYPES)) + initial_stats = JobStatistics.query.filter_by(job_id=email_job.id).all() + assert len(initial_stats) == 1 + assert initial_stats[0].emails_sent == count_notifications + assert initial_stats[0].sms_sent == 0 + assert initial_stats[0].emails_delivered == 0 + assert initial_stats[0].sms_delivered == 0 + assert initial_stats[0].sms_failed == 0 + assert initial_stats[0].emails_failed == 0 + + all = JobStatistics.query.all() + for a in all: + print(a) + + # timeout the notifications + dao_timeout_job_statistics(1) + + all = JobStatistics.query.all() + for a in all: + print(a) + + # after timeout all delivered states are success and ALL other states are failed + updated_stats = JobStatistics.query.filter_by(job_id=email_job.id).all() + assert updated_stats[0].emails_sent == count_notifications + assert updated_stats[0].sms_sent == 0 + assert updated_stats[0].emails_delivered == count_success_notifications + assert updated_stats[0].sms_delivered == 0 + assert updated_stats[0].sms_failed == 0 + assert updated_stats[0].emails_failed == count_error_notifications + + sms_stats = JobStatistics.query.filter_by(job_id=sms_job.id).all() + assert sms_stats[0].emails_sent == 0 + assert sms_stats[0].sms_sent == 1 + assert sms_stats[0].emails_delivered == 0 + assert sms_stats[0].sms_delivered == 0 + assert sms_stats[0].sms_failed == 1 + assert sms_stats[0].emails_failed == 0 + + +# this test is as above, but for SMS not email +def test_timeout_job_sets_all_non_delivered_states_to_error( + notify_db, + notify_db_session, + sample_job +): + for i in range(len(NOTIFICATION_STATUS_TYPES)): + n = sample_notification( + notify_db, + notify_db_session, + service=sample_job.service, + template=sample_template(notify_db, notify_db_session, service=sample_job.service), + job=sample_job, + status=NOTIFICATION_STATUS_TYPES[i] + ) + create_or_update_job_sending_statistics(n) + + JobStatistics.query.update({JobStatistics.created_at: datetime.utcnow() - 
timedelta(minutes=1)}) + initial_stats = JobStatistics.query.all() + for stats in initial_stats: + assert stats.emails_sent == 0 + assert stats.sms_sent == count_notifications + assert stats.emails_delivered == 0 + assert stats.sms_delivered == 0 + assert stats.sms_failed == 0 + assert stats.emails_failed == 0 + + dao_timeout_job_statistics(1) + updated_stats = JobStatistics.query.all() + + for stats in updated_stats: + assert stats.emails_sent == 0 + assert stats.sms_sent == count_notifications + assert stats.emails_delivered == 0 + assert stats.sms_delivered == count_success_notifications + assert stats.sms_failed == count_error_notifications + assert stats.emails_failed == 0 diff --git a/tests/app/db.py b/tests/app/db.py index b5838d693..d418b1c3b 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -2,20 +2,23 @@ from datetime import datetime import uuid from app.dao.jobs_dao import dao_create_job -from app.models import Service, User, Template, Notification, SMS_TYPE, KEY_TYPE_NORMAL, Job +from app.models import (Service, User, Template, Notification, EMAIL_TYPE, LETTER_TYPE, + SMS_TYPE, KEY_TYPE_NORMAL, Job, ServicePermission) from app.dao.users_dao import save_model_user from app.dao.notifications_dao import dao_create_notification from app.dao.templates_dao import dao_create_template from app.dao.services_dao import dao_create_service +from app.dao.service_permissions_dao import dao_add_service_permission -def create_user(mobile_number="+447700900986", email="notify@digital.cabinet-office.gov.uk"): +def create_user(mobile_number="+447700900986", email="notify@digital.cabinet-office.gov.uk", state='active'): data = { + 'id': uuid.uuid4(), 'name': 'Test User', 'email_address': email, 'password': 'password', 'mobile_number': mobile_number, - 'state': 'active' + 'state': state } user = User.query.filter_by(email_address=email).first() if not user: @@ -24,15 +27,17 @@ def create_user(mobile_number="+447700900986", email="notify@digital.cabinet-off return user 
-def create_service(user=None, service_name="Sample service", service_id=None): +def create_service( + user=None, service_name="Sample service", service_id=None, restricted=False, + service_permissions=(EMAIL_TYPE, SMS_TYPE)): service = Service( name=service_name, message_limit=1000, - restricted=False, + restricted=restricted, email_from=service_name.lower().replace(' ', '.'), created_by=user or create_user() ) - dao_create_service(service, service.created_by, service_id) + dao_create_service(service, service.created_by, service_id, service_permissions=service_permissions) return service @@ -141,3 +146,12 @@ def create_job(template, job = Job(**data) dao_create_job(job) return job + + +def create_service_permission(service_id, permission=EMAIL_TYPE): + dao_add_service_permission( + service_id if service_id else create_service().id, permission) + + service_permissions = ServicePermission.query.all() + + return service_permissions diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 0e35ad413..1d243b3e5 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -1,7 +1,7 @@ import uuid from datetime import datetime from collections import namedtuple -from unittest.mock import ANY +from unittest.mock import ANY, call import pytest from notifications_utils.recipients import validate_and_format_phone_number @@ -18,8 +18,7 @@ from app.models import ( KEY_TYPE_TEST, KEY_TYPE_TEAM, BRANDING_ORG, - BRANDING_BOTH, - ProviderDetails) + BRANDING_BOTH) from tests.app.db import create_service, create_template, create_notification @@ -64,6 +63,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist( status='created') mocker.patch('app.mmg_client.send_sms') + stats_mock = mocker.patch('app.delivery.send_to_providers.create_initial_notification_statistic_tasks') send_to_providers.send_sms_to_provider( db_notification ) @@ -75,6 +75,9 @@ def
test_should_send_personalised_template_to_correct_sms_provider_and_persist( reference=str(db_notification.id), sender=None ) + + stats_mock.assert_called_once_with(db_notification) + notification = Notification.query.filter_by(id=db_notification.id).one() assert notification.status == 'sending' @@ -95,6 +98,7 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist ) mocker.patch('app.aws_ses_client.send_email', return_value='reference') + stats_mock = mocker.patch('app.delivery.send_to_providers.create_initial_notification_statistic_tasks') send_to_providers.send_email_to_provider( db_notification @@ -108,6 +112,8 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist html_body=ANY, reply_to_address=None ) + stats_mock.assert_called_once_with(db_notification) + assert '