mirror of
https://github.com/GSA/notifications-api.git
synced 2026-01-26 12:31:52 -05:00
remove task
This commit is contained in:
@@ -1,5 +1,4 @@
|
||||
import re
|
||||
import uuid
|
||||
|
||||
import botocore
|
||||
from boto3 import Session
|
||||
@@ -8,7 +7,6 @@ from flask import current_app
|
||||
|
||||
from app import redis_store
|
||||
from app.clients import AWS_CLIENT_CONFIG
|
||||
from notifications_utils.s3 import s3upload as utils_s3upload
|
||||
|
||||
FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"
|
||||
|
||||
@@ -21,31 +19,11 @@ JOBS_CACHE_HITS = "JOBS_CACHE_HITS"
|
||||
JOBS_CACHE_MISSES = "JOBS_CACHE_MISSES"
|
||||
|
||||
|
||||
def get_csv_location(service_id, upload_id):
    """Resolve where a service's CSV upload lives.

    Returns a 5-tuple of
    (bucket_name, object_key, access_key_id, secret_access_key, region)
    taken from the CSV_UPLOAD_BUCKET app config, with the object key built
    from FILE_LOCATION_STRUCTURE.
    """
    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]
    object_key = FILE_LOCATION_STRUCTURE.format(service_id, upload_id)
    return (
        bucket_config["bucket"],
        object_key,
        bucket_config["access_key_id"],
        bucket_config["secret_access_key"],
        bucket_config["region"],
    )
|
||||
|
||||
|
||||
def get_s3_file(bucket_name, file_location, access_key, secret_key, region):
    """Download an S3 object and return its contents as UTF-8 text."""
    s3_object = get_s3_object(bucket_name, file_location, access_key, secret_key, region)
    body = s3_object.get()["Body"]
    return body.read().decode("utf-8")
|
||||
|
||||
|
||||
def get_file_from_s3(file_location):
    """Fetch ``file_location`` from the configured CSV upload bucket as UTF-8 text."""
    bucket_config = current_app.config["CSV_UPLOAD_BUCKET"]
    return get_s3_file(
        bucket_config["bucket"],
        file_location,
        bucket_config["access_key_id"],
        bucket_config["secret_access_key"],
        bucket_config["region"],
    )
|
||||
|
||||
|
||||
def get_s3_object(bucket_name, file_location, access_key, secret_key, region):
|
||||
session = Session(
|
||||
aws_access_key_id=access_key,
|
||||
@@ -275,21 +253,3 @@ def remove_csv_object(object_key):
|
||||
current_app.config["CSV_UPLOAD_BUCKET"]["region"],
|
||||
)
|
||||
return obj.delete()
|
||||
|
||||
|
||||
def s3upload(service_id, filedata, upload_id=None):
    """Upload the CSV payload in ``filedata["data"]`` to the service's bucket.

    When no ``upload_id`` is supplied a fresh UUID4 string is generated.
    Returns the upload id that was used, so callers can locate the object later.
    """
    if upload_id is None:
        upload_id = str(uuid.uuid4())

    location = get_csv_location(service_id, upload_id)
    bucket_name, file_location, access_key, secret_key, region = location

    utils_s3upload(
        bucket_name=bucket_name,
        file_location=file_location,
        filedata=filedata["data"],
        access_key=access_key,
        secret_key=secret_key,
        region=region,
    )
    return upload_id
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from flask import current_app
|
||||
from requests import HTTPError, RequestException, request
|
||||
@@ -19,7 +18,6 @@ from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
|
||||
from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service
|
||||
from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id
|
||||
from app.dao.templates_dao import dao_get_template_by_id
|
||||
from app.dao.users_dao import dao_report_users
|
||||
from app.enums import JobStatus, KeyType, NotificationType
|
||||
from app.errors import TotalRequestsError
|
||||
from app.notifications.process_notifications import persist_notification
|
||||
@@ -481,30 +479,3 @@ def process_incomplete_job(job_id):
|
||||
process_row(row, template, job, job.service, sender_id=sender_id)
|
||||
|
||||
job_complete(job, resumed=True)
|
||||
|
||||
|
||||
@notify_celery.task(name="report-all-users")
def report_all_users():
    """
    This is to support the platform admin's ability to view all user data.
    It runs once per night and is stored in
    bucket/service-all-users-report-{env}-notify/all-users-report-{env}.csv

    When the front end is ready, it can just download from there.
    """
    # Local imports keep the stdlib CSV machinery out of the module namespace;
    # csv handles quoting so names/services containing commas or quotes no
    # longer corrupt rows (the previous f-string join did no escaping).
    import csv
    import io

    users = dao_report_users()

    buffer = io.StringIO()
    # lineterminator="\n" matches the previous report format (csv defaults to \r\n).
    writer = csv.writer(buffer, lineterminator="\n")
    writer.writerow(["NAME", "EMAIL_ADDRESS", "MOBILE_NUMBER", "SERVICE"])
    for user in users:
        # Rows are indexable as (name, email_address, mobile_number, service).
        writer.writerow([user[0], user[1], user[2], user[3]])

    my_env = os.getenv("NOTIFY_ENVIRONMENT")
    report_name = f"all-users-report-{my_env}"
    file_data = {"data": buffer.getvalue()}
    object_key = s3.FILE_LOCATION_STRUCTURE.format(report_name, report_name)
    # Delete last night's report before uploading the fresh one.
    s3.remove_csv_object(object_key)
    s3.s3upload(report_name, file_data, report_name)
    # NOTE(review): removed the leftover debug step that re-downloaded the
    # report and print()-ed its full contents into the worker logs.
|
||||
|
||||
@@ -18,6 +18,7 @@ from app.dao.users_dao import (
|
||||
create_secret_code,
|
||||
create_user_code,
|
||||
dao_archive_user,
|
||||
dao_report_users,
|
||||
get_login_gov_user,
|
||||
get_user_and_accounts,
|
||||
get_user_by_email,
|
||||
@@ -667,6 +668,12 @@ def update_password(user_id):
|
||||
return jsonify(data=user.serialize()), 200
|
||||
|
||||
|
||||
@user_blueprint.route("/report-all-users", methods=["GET"])
def report_all_users():
    """Return the all-users report data for platform admins.

    ``dao_report_users()`` yields plain indexable rows (the nightly report
    task reads them as user[0..3] = name/email/mobile/service), so the rows
    are mapped to dicts here. The previous ``users.serialize()`` call was a
    bug: a list of rows has no ``serialize`` method and raised AttributeError.
    """
    users = dao_report_users()
    # Column names mirror the nightly CSV header:
    # NAME, EMAIL_ADDRESS, MOBILE_NUMBER, SERVICE — TODO confirm against dao_report_users.
    data = [
        {
            "name": user[0],
            "email_address": user[1],
            "mobile_number": user[2],
            "service": user[3],
        }
        for user in users
    ]
    return jsonify(data=data), 200
|
||||
|
||||
|
||||
@user_blueprint.route("/<uuid:user_id>/organizations-and-services", methods=["GET"])
|
||||
def get_organizations_and_services_for_user(user_id):
|
||||
user = get_user_and_accounts(user_id)
|
||||
|
||||
Reference in New Issue
Block a user