change it to a task
@@ -1,4 +1,5 @@
 import re
+import uuid
 
 import botocore
 from boto3 import Session
@@ -7,6 +8,7 @@ from flask import current_app
 
 from app import redis_store
 from app.clients import AWS_CLIENT_CONFIG
+from notifications_utils.s3 import s3upload as utils_s3upload
 
 FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
 
@@ -19,11 +21,31 @@ JOBS_CACHE_HITS = "JOBS_CACHE_HITS"
 JOBS_CACHE_MISSES = "JOBS_CACHE_MISSES"
 
 
+def get_csv_location(service_id, upload_id):
+    return (
+        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
+        FILE_LOCATION_STRUCTURE.format(service_id, upload_id),
+        current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["region"],
+    )
+
+
+def get_s3_file(bucket_name, file_location, access_key, secret_key, region):
+    s3_file = get_s3_object(bucket_name, file_location, access_key, secret_key, region)
+    return s3_file.get()["Body"].read().decode("utf-8")
+
+
+def get_file_from_s3(file_location):
+    return get_s3_file(
+        current_app.config["CSV_UPLOAD_BUCKET"]["bucket"],
+        file_location,
+        current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"],
+        current_app.config["CSV_UPLOAD_BUCKET"]["region"],
+    )
+
+
 def get_s3_object(bucket_name, file_location, access_key, secret_key, region):
     session = Session(
         aws_access_key_id=access_key,
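
The hunk cuts off inside get_s3_object's trailing context. For orientation only, here is a minimal sketch of how a helper with this signature typically finishes under the boto3 Session API; everything below the Session(...) call is an assumption, not code from this commit:

    # Sketch only: the body after Session(...) is assumed, not from this diff.
    def get_s3_object(bucket_name, file_location, access_key, secret_key, region):
        session = Session(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            region_name=region,
        )
        s3 = session.resource("s3", config=AWS_CLIENT_CONFIG)
        # get_s3_file() above calls .get()["Body"].read() on the returned object
        return s3.Object(bucket_name, file_location)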
@@ -253,3 +275,21 @@ def remove_csv_object(object_key):
         current_app.config["CSV_UPLOAD_BUCKET"]["region"],
     )
     return obj.delete()
+
+
+def s3upload(service_id, filedata, upload_id=None):
+
+    if upload_id is None:
+        upload_id = str(uuid.uuid4())
+    bucket_name, file_location, access_key, secret_key, region = get_csv_location(
+        service_id, upload_id
+    )
+    utils_s3upload(
+        filedata=filedata["data"],
+        region=region,
+        bucket_name=bucket_name,
+        file_location=file_location,
+        access_key=access_key,
+        secret_key=secret_key,
+    )
+    return upload_id
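
Together these helpers give the s3 module a self-contained upload/download round trip keyed by service id and upload id. A usage sketch, with a made-up service id for illustration:

    # Hypothetical call sequence; "1234" is an invented service id.
    upload_id = s3upload("1234", {"data": "phone number\n+15555550100\n"})
    key = FILE_LOCATION_STRUCTURE.format("1234", upload_id)  # service-1234-notify/<uuid>.csv
    csv_text = get_file_from_s3(key)

The next hunks appear to move to the Celery task module, judging by the imports and the save_sms/process_incomplete_job context below.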
@@ -1,4 +1,5 @@
 import json
+import os
 
 from flask import current_app
 from requests import HTTPError, RequestException, request
@@ -18,6 +19,7 @@ from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
 from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service
 from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
 from app.dao.templates_dao import dao_get_template_by_id
+from app.dao.users_dao import dao_report_users
 from app.enums import JobStatus, KeyType, NotificationType
 from app.notifications.process_notifications import persist_notification
 from app.notifications.validators import check_service_over_total_message_limit
@@ -189,7 +191,11 @@ def save_sms(self, service_id, notification_id, encrypted_notification, sender_i
     # Return False when trial mode services try sending notifications
     # to non-team and non-simulated recipients.
     if not service_allowed_to_send_to(notification["to"], service, KeyType.NORMAL):
-        current_app.logger.info(hilite(scrub(f"service not allowed to send to {notification['to']}, aborting")))
+        current_app.logger.info(
+            hilite(
+                scrub(f"service not allowed to send to {notification['to']}, aborting")
+            )
+        )
         current_app.logger.debug(
             "SMS {} failed as restricted service".format(notification_id)
         )
@@ -220,7 +226,9 @@ def save_sms(self, service_id, notification_id, encrypted_notification, sender_i
     )
 
     # Kick off sns process in provider_tasks.py
-    current_app.logger.info(hilite(scrub(f"Going to deliver sms for recipient: {notification['to']}")))
+    current_app.logger.info(
+        hilite(scrub(f"Going to deliver sms for recipient: {notification['to']}"))
+    )
     provider_tasks.deliver_sms.apply_async(
         [str(saved_notification.id)], queue=QueueNames.SEND_SMS
     )
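
Both save_sms hunks re-wrap over-long logger calls without changing behavior. For readers outside the repo: hilite and scrub are the app's own log helpers. The sketch below shows roughly what helpers with these names would do, inferred from how they are used here; it is an assumption, not this repo's actual implementation:

    import re

    # Hypothetical stand-ins for the app's hilite/scrub log helpers (assumptions).
    def hilite(message: str) -> str:
        return f"\x1b[35m{message}\x1b[0m"  # ANSI color so the line stands out in logs

    def scrub(message: str) -> str:
        # mask anything that looks like a phone number before it reaches the logs
        return re.sub(r"\+?\d[\d() .-]{6,}\d", "[REDACTED]", message)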
@@ -470,3 +478,30 @@ def process_incomplete_job(job_id):
         process_row(row, template, job, job.service, sender_id=sender_id)
 
     job_complete(job, resumed=True)
+
+
+@notify_celery.task(name="report-all-users")
+def report_all_users():
+    """
+    This is to support the platform admin's ability to view all user data.
+    It runs once per night and is stored in
+    bucket/service-all-users-report-{env}-notify/all-users-report-{env}.csv
+
+    When the front end is ready, it can just download from there.
+    """
+    users = dao_report_users()
+    csv_text = "NAME,EMAIL_ADDRESS,MOBILE_NUMBER,SERVICE\n"
+    for user in users:
+        row = f"{user[0]},{user[1]},{user[2]},{user[3]}\n"
+        csv_text = f"{csv_text}{row}"
+    my_env = os.getenv("NOTIFY_ENVIRONMENT")
+    report_name = f"all-users-report-{my_env}"
+    file_data = {}
+    file_data["data"] = csv_text
+    object_key = s3.FILE_LOCATION_STRUCTURE.format(report_name, report_name)
+    s3.remove_csv_object(object_key)
+    s3.s3upload(report_name, file_data, report_name)
+
+    # prove that it works
+    x = s3.get_file_from_s3(object_key)
+    print(f"!!!!!!!DOWNLOADED {x}")
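
The report's S3 key comes straight from FILE_LOCATION_STRUCTURE, with the report name standing in for both the service id and the upload id. Worked through for one environment (the environment name is illustrative):

    # Assuming NOTIFY_ENVIRONMENT=staging:
    report_name = "all-users-report-staging"
    object_key = "service-{}-notify/{}.csv".format(report_name, report_name)
    # -> "service-all-users-report-staging-notify/all-users-report-staging.csv"

which matches the bucket path promised in the docstring. The next hunk, from the CLI commands module, removes the ad-hoc command this task replaces.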
@@ -1008,23 +1008,3 @@ def add_test_users_to_db(generate, state, admin):
             platform_admin=admin,
         )
         print(f"{num} {user.email_address} created")
-
-
-@notify_command(name="show-users")
-def show_users():
-
-    sql = """
-
-    select users.name, users.email_address, users.mobile_number, services.name as service_name
-    from users
-    inner join user_to_service on users.id=user_to_service.user_id
-    inner join services on services.id=user_to_service.service_id
-    order by services.name asc, users.name asc
-    """
-    users = db.session.execute(sql)
-    report = "Name,Email address,Mobile number,Service name"
-    print(report)
-    for row in users:
-        print(f"{row.name},{row.email_address},{row.mobile_number},{row.service_name}")
-        report = f"{report}\n{row.name},{row.email_address},{row.mobile_number},{row.service_name}"
-    return report
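
Matching the commit title, the deleted show-users command is superseded by the task: its query moves, with an added filter on archived services, into dao_report_users further down. Note also that the removed code passed a raw SQL string to db.session.execute(sql); modern SQLAlchemy requires textual SQL to be wrapped in text(), which is exactly what the dao version does:

    from sqlalchemy import text

    db.session.execute("select 1")        # raises in SQLAlchemy 2.x
    db.session.execute(text("select 1"))  # supported form

The next hunk schedules the new task in the Config class's Celery beat table.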
@@ -199,6 +199,11 @@ class Config(object):
             "schedule": timedelta(minutes=66),
             "options": {"queue": QueueNames.PERIODIC},
         },
+        "report-all-users": {
+            "task": "report-all-users",
+            "schedule": timedelta(minutes=2),
+            "options": {"queue": QueueNames.PERIODIC},
+        },
         "check-job-status": {
             "task": "check-job-status",
             "schedule": crontab(),
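
As written, the beat entry runs the task every two minutes, which does not yet match the docstring's "once per night". For reference, a nightly schedule in the same style would use celery's crontab, as the adjacent check-job-status entry does; the hour below is an arbitrary example, not part of this diff:

    "report-all-users": {
        "task": "report-all-users",
        "schedule": crontab(hour=3, minute=0),  # hypothetical: nightly at 03:00
        "options": {"queue": QueueNames.PERIODIC},
    },

The two hunks that follow are in the users DAO.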
@@ -4,7 +4,7 @@ from secrets import randbelow
 
 import sqlalchemy
 from flask import current_app
-from sqlalchemy import func
+from sqlalchemy import func, text
 from sqlalchemy.orm import joinedload
 
 from app import db
@@ -244,3 +244,15 @@ def user_can_be_archived(user):
         return False
 
     return True
+
+
+def dao_report_users():
+    sql = """
+    select users.name, users.email_address, users.mobile_number, services.name as service_name
+    from users
+    inner join user_to_service on users.id=user_to_service.user_id
+    inner join services on services.id=user_to_service.service_id
+    where services.name not like '_archived%'
+    order by services.name asc, users.name asc
+    """
+    return db.session.execute(text(sql))
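
dao_report_users returns a SQLAlchemy Result whose rows are tuple-like, which is how report_all_users builds its CSV lines: user[0] through user[3] map to the four selected columns in select-list order. A minimal consumption sketch:

    # Rows unpack in select-list order: name, email_address, mobile_number, service_name.
    for name, email_address, mobile_number, service_name in dao_report_users():
        print(f"{name},{email_address},{mobile_number},{service_name}")

The final hunk is one more logger re-wrap, this time in persist_notification.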
@@ -110,7 +110,11 @@ def persist_notification(
         formatted_recipient = validate_and_format_phone_number(
             recipient, international=True
         )
-        current_app.logger.info(hilite(scrub(f"Persisting notification with recipient {formatted_recipient}")))
+        current_app.logger.info(
+            hilite(
+                scrub(f"Persisting notification with recipient {formatted_recipient}")
+            )
+        )
         recipient_info = get_international_phone_info(formatted_recipient)
         notification.normalised_to = formatted_recipient
         notification.international = recipient_info.international