diff --git a/app/aws/s3.py b/app/aws/s3.py
index 9466e6cce..8435b4938 100644
--- a/app/aws/s3.py
+++ b/app/aws/s3.py
@@ -1,4 +1,5 @@
 import re
+import uuid
 
 import botocore
 from boto3 import Session
@@ -7,6 +8,7 @@ from flask import current_app
 
 from app import redis_store
 from app.clients import AWS_CLIENT_CONFIG
+from notifications_utils.s3 import s3upload as utils_s3upload
 
 FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
 
@@ -19,11 +21,31 @@ JOBS_CACHE_HITS = "JOBS_CACHE_HITS"
 JOBS_CACHE_MISSES = "JOBS_CACHE_MISSES"
 
 
+def get_csv_location(service_id, upload_id):
+    return (
+        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
+        FILE_LOCATION_STRUCTURE.format(service_id, upload_id),
+        current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["region"],
+    )
+
+
 def get_s3_file(bucket_name, file_location, access_key, secret_key, region):
     s3_file = get_s3_object(bucket_name, file_location, access_key, secret_key, region)
     return s3_file.get()["Body"].read().decode("utf-8")
 
 
+def get_file_from_s3(file_location):
+    return get_s3_file(
+        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
+        file_location,
+        current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["region"],
+    )
+
+
 def get_s3_object(bucket_name, file_location, access_key, secret_key, region):
     session = Session(
         aws_access_key_id=access_key,
@@ -253,3 +275,21 @@ def remove_csv_object(object_key):
         current_app.config["CSV_UPLOAD_BUCKET"]["region"],
     )
     return obj.delete()
+
+
+def s3upload(service_id, filedata, upload_id=None):
+
+    if upload_id is None:
+        upload_id = str(uuid.uuid4())
+    bucket_name, file_location, access_key, secret_key, region = get_csv_location(
+        service_id, upload_id
+    )
+    utils_s3upload(
+        filedata=filedata["data"],
+        region=region,
+        bucket_name=bucket_name,
+        file_location=file_location,
+        access_key=access_key,
+        secret_key=secret_key,
+    )
+    return upload_id
diff --git a/app/celery/tasks.py b/app/celery/tasks.py
index 1950712af..342f21e4c 100644
--- a/app/celery/tasks.py
+++ b/app/celery/tasks.py
@@ -1,4 +1,5 @@
 import json
+import os
 
 from flask import current_app
 from requests import HTTPError, RequestException, request
@@ -18,6 +19,7 @@ from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
 from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service
 from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
 from app.dao.templates_dao import dao_get_template_by_id
+from app.dao.users_dao import dao_report_users
 from app.enums import JobStatus, KeyType, NotificationType
 from app.notifications.process_notifications import persist_notification
 from app.notifications.validators import check_service_over_total_message_limit
@@ -189,7 +191,11 @@ def save_sms(self, service_id, notification_id, encrypted_notification, sender_i
     # Return False when trial mode services try sending notifications
     # to non-team and non-simulated recipients.
     if not service_allowed_to_send_to(notification["to"], service, KeyType.NORMAL):
-        current_app.logger.info(hilite(scrub(f"service not allowed to send to {notification['to']}, aborting")))
+        current_app.logger.info(
+            hilite(
+                scrub(f"service not allowed to send to {notification['to']}, aborting")
+            )
+        )
         current_app.logger.debug(
             "SMS {} failed as restricted service".format(notification_id)
         )
@@ -220,7 +226,9 @@
         )
 
     # Kick off sns process in provider_tasks.py
-    current_app.logger.info(hilite(scrub(f"Going to deliver sms for recipient: {notification['to']}")))
+    current_app.logger.info(
+        hilite(scrub(f"Going to deliver sms for recipient: {notification['to']}"))
+    )
     provider_tasks.deliver_sms.apply_async(
         [str(saved_notification.id)], queue=QueueNames.SEND_SMS
     )
@@ -470,3 +478,30 @@ def process_incomplete_job(job_id):
         process_row(row, template, job, job.service, sender_id=sender_id)
 
     job_complete(job, resumed=True)
+
+
+@notify_celery.task(name="report-all-users")
+def report_all_users():
+    """
+    This is to support the platform admin's ability to view all user data.
+    It runs once per night and is stored in
+    bucket/service-all-users-report-{env}-notify/all-users-report-{env}.csv
+
+    When the front end is ready, it can just download from there.
+    """
+    users = dao_report_users()
+    csv_text = "NAME,EMAIL_ADDRESS,MOBILE_NUMBER,SERVICE\n"
+    for user in users:
+        row = f"{user[0]},{user[1]},{user[2]},{user[3]}\n"
+        csv_text = f"{csv_text}{row}"
+    my_env = os.getenv("NOTIFY_ENVIRONMENT")
+    report_name = f"all-users-report-{my_env}"
+    file_data = {}
+    file_data["data"] = csv_text
+    object_key = s3.FILE_LOCATION_STRUCTURE.format(report_name, report_name)
+    s3.remove_csv_object(object_key)
+    s3.s3upload(report_name, file_data, report_name)
+
+    # prove that it works
+    x = s3.get_file_from_s3(object_key)
+    print(f"!!!!!!!DOWNLOADED {x}")
diff --git a/app/commands.py b/app/commands.py
index 3246f31fc..826c2013b 100644
--- a/app/commands.py
+++ b/app/commands.py
@@ -1008,23 +1008,3 @@ def add_test_users_to_db(generate, state, admin):
             platform_admin=admin,
         )
         print(f"{num} {user.email_address} created")
-
-
-@notify_command(name="show-users")
-def show_users():
-
-    sql = """
-
-    select users.name, users.email_address, users.mobile_number, services.name as service_name
-    from users
-    inner join user_to_service on users.id=user_to_service.user_id
-    inner join services on services.id=user_to_service.service_id
-    order by services.name asc, users.name asc
-    """
-    users = db.session.execute(sql)
-    report = "Name,Email address,Mobile number,Service name"
-    print(report)
-    for row in users:
-        print(f"{row.name},{row.email_address},{row.mobile_number},{row.service_name}")
-        report = f"{report}\n{row.name},{row.email_address},{row.mobile_number},{row.service_name}"
-    return report
diff --git a/app/config.py b/app/config.py
index 8d913bdd8..637ece32f 100644
--- a/app/config.py
+++ b/app/config.py
@@ -199,6 +199,11 @@ class Config(object):
             "schedule": timedelta(minutes=66),
             "options": {"queue": QueueNames.PERIODIC},
         },
+        "report-all-users": {
+            "task": "report-all-users",
+            "schedule": timedelta(minutes=2),
+            "options": {"queue": QueueNames.PERIODIC},
+        },
         "check-job-status": {
             "task": "check-job-status",
             "schedule": crontab(),
diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py
index d7291b35c..e541f4052 100644
--- a/app/dao/users_dao.py
+++ b/app/dao/users_dao.py
@@ -4,7 +4,7 @@ from secrets import randbelow
 
 import sqlalchemy
 from flask import current_app
-from sqlalchemy import func
+from sqlalchemy import func, text
 from sqlalchemy.orm import joinedload
 
 from app import db
@@ -244,3 +244,15 @@
         return False
 
     return True
+
+
+def dao_report_users():
+    sql = """
+    select users.name, users.email_address, users.mobile_number, services.name as service_name
+    from users
+    inner join user_to_service on users.id=user_to_service.user_id
+    inner join services on services.id=user_to_service.service_id
+    where services.name not like '_archived%'
+    order by services.name asc, users.name asc
+    """
+    return db.session.execute(text(sql))
diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py
index d899a8146..a91c27158 100644
--- a/app/notifications/process_notifications.py
+++ b/app/notifications/process_notifications.py
@@ -110,7 +110,11 @@ def persist_notification(
         formatted_recipient = validate_and_format_phone_number(
             recipient, international=True
         )
-        current_app.logger.info(hilite(scrub(f"Persisting notification with recipient {formatted_recipient}")))
+        current_app.logger.info(
+            hilite(
+                scrub(f"Persisting notification with recipient {formatted_recipient}")
+            )
+        )
         recipient_info = get_international_phone_info(formatted_recipient)
         notification.normalised_to = formatted_recipient
         notification.international = recipient_info.international
diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py
index f1d5fd77a..7eee00bbf 100644
--- a/tests/app/test_commands.py
+++ b/tests/app/test_commands.py
@@ -16,7 +16,6 @@ from app.commands import (
     populate_organizations_from_file,
     promote_user_to_platform_admin,
     purge_functional_test_data,
-    show_users,
     update_jobs_archived_flag,
 )
 from app.dao.inbound_numbers_dao import dao_get_available_inbound_numbers
@@ -442,11 +441,3 @@ def test_promote_user_to_platform_admin_no_result_found(
     )
     assert "NoResultFound" in str(result)
     assert sample_user.platform_admin is False
-
-
-def test_show_users(notify_db_session, notify_api, sample_user):
-    result = notify_api.test_cli_runner().invoke(
-        show_users,
-        [],
-    )
-    assert "Name,Email address,Mobile number,Service name" in str(result)
diff --git a/tests/app/test_utils.py b/tests/app/test_utils.py
index bbe37256a..20675aec5 100644
--- a/tests/app/test_utils.py
+++ b/tests/app/test_utils.py
@@ -99,7 +99,10 @@ def test_scrub():
     result = scrub(
         "This is a message with 17775554324, and also 18884449323 and also 17775554324"
    )
-    assert result == "This is a message with 1XXXXX54324, and also 1XXXXX49323 and also 1XXXXX54324"
+    assert (
+        result
+        == "This is a message with 1XXXXX54324, and also 1XXXXX49323 and also 1XXXXX54324"
+    )
 
 
 # This method is used for simulating bulk sends. We use localstack and run on a developer's machine to do the
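To illustrate the intended consumption path: `report_all_users` writes the CSV under the key `FILE_LOCATION_STRUCTURE.format(report_name, report_name)`, so any code running inside the Flask app context (where `current_app.config["CSV_UPLOAD_BUCKET"]` resolves) can pull it back with the new `get_file_from_s3` helper. A minimal sketch, assuming a valid app context; the `"development"` fallback for `NOTIFY_ENVIRONMENT` is a placeholder, not part of the diff:

```python
import os

from app.aws import s3

# NOTIFY_ENVIRONMENT drives the report name, mirroring report_all_users();
# the "development" default here is only for illustration.
env = os.getenv("NOTIFY_ENVIRONMENT", "development")
report_name = f"all-users-report-{env}"

# Rebuild the same object key the task used, i.e.
# service-all-users-report-{env}-notify/all-users-report-{env}.csv
object_key = s3.FILE_LOCATION_STRUCTURE.format(report_name, report_name)

# Download and inspect the nightly report.
csv_text = s3.get_file_from_s3(object_key)
for line in csv_text.splitlines():
    print(line)
```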