Merge branch 'main' of https://github.com/GSA/notifications-api into notify-34

This commit is contained in:
Kenneth Kehl
2023-05-25 07:35:36 -07:00
9 changed files with 131 additions and 43 deletions

84
Pipfile.lock generated
View File

@@ -500,6 +500,7 @@
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
],
"markers": "python_full_version >= '3.5.0'",
"version": "==3.4"
},
"importlib-metadata": {
@@ -847,7 +848,7 @@
"sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b",
"sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"
],
"markers": "python_full_version >= '3.7.0'",
"markers": "python_version >= '3.7'",
"version": "==3.0.38"
},
"psycopg2-binary": {
@@ -1062,11 +1063,11 @@
},
"requests": {
"hashes": [
"sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294",
"sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f",
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
],
"markers": "python_version >= '3.7'",
"version": "==2.30.0"
"index": "pypi",
"version": "==2.31.0"
},
"rfc3339-validator": {
"hashes": [
@@ -1087,7 +1088,7 @@
"sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2",
"sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9"
],
"markers": "python_version >= '3.5' and python_version < '4'",
"markers": "python_version < '4' and python_full_version >= '3.5.0'",
"version": "==4.7.2"
},
"s3transfer": {
@@ -1100,11 +1101,11 @@
},
"setuptools": {
"hashes": [
"sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b",
"sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"
"sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f",
"sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"
],
"markers": "python_version >= '3.7'",
"version": "==67.7.2"
"version": "==67.8.0"
},
"shapely": {
"hashes": [
@@ -1216,11 +1217,11 @@
},
"typing-extensions": {
"hashes": [
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
"sha256:6ad00b63f849b7dcc313b70b6b304ed67b2b2963b3098a33efe18056b1a9a223",
"sha256:ff6b238610c747e44c268aa4bb23c8c735d665a63726df3f9431ce707f2aa768"
],
"markers": "python_version < '3.10'",
"version": "==4.5.0"
"version": "==4.6.0"
},
"uri-template": {
"hashes": [
@@ -1847,6 +1848,7 @@
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
],
"markers": "python_full_version >= '3.5.0'",
"version": "==3.4"
},
"iniconfig": {
@@ -2194,7 +2196,7 @@
"sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526",
"sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3"
],
"markers": "python_full_version >= '3.6.0'",
"markers": "python_version >= '3.6'",
"version": "==32.0.1"
},
"pluggy": {
@@ -2214,22 +2216,22 @@
},
"protobuf": {
"hashes": [
"sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf",
"sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21",
"sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598",
"sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5",
"sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd",
"sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159",
"sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227",
"sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688",
"sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1",
"sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711",
"sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b",
"sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b",
"sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2"
"sha256:2036a3a1e7fc27f973fa0a7888dce712393af644f4695385f117886abc792e39",
"sha256:32e78beda26d7a101fecf15d7a4a792278a0d26a31bc327ff05564a9d68ab8ee",
"sha256:346990f634272caac1f09efbcfbbacb23098b1f606d172534c6fa2d9758bb436",
"sha256:3b8905eafe4439076e1f58e9d1fa327025fd2777cf90f14083092ae47f77b0aa",
"sha256:3ce113b3f3362493bddc9069c2163a38f240a9ed685ff83e7bcb756b05e1deb0",
"sha256:410bcc0a5b279f634d3e16082ce221dfef7c3392fac723500e2e64d1806dd2be",
"sha256:5b9cd6097e6acae48a68cb29b56bc79339be84eca65b486910bb1e7a30e2b7c1",
"sha256:65f0ac96ef67d7dd09b19a46aad81a851b6f85f89725577f16de38f2d68ad477",
"sha256:91fac0753c3c4951fbb98a93271c43cc7cf3b93cf67747b3e600bb1e5cc14d61",
"sha256:95789b569418a3e32a53f43d7763be3d490a831e9c08042539462b6d972c2d7e",
"sha256:ac50be82491369a9ec3710565777e4da87c6d2e20404e0abb1f3a8f10ffd20f0",
"sha256:decf119d54e820f298ee6d89c72d6b289ea240c32c521f00433f9dc420595f38",
"sha256:f9510cac91e764e86acd74e2b7f7bc5e6127a7f3fb646d7c8033cfb84fd1176a"
],
"markers": "python_version >= '3.7'",
"version": "==4.23.0"
"version": "==4.23.1"
},
"py": {
"hashes": [
@@ -2385,11 +2387,11 @@
},
"requests": {
"hashes": [
"sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294",
"sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f",
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"
],
"markers": "python_version >= '3.7'",
"version": "==2.30.0"
"index": "pypi",
"version": "==2.31.0"
},
"requests-mock": {
"hashes": [
@@ -2419,7 +2421,7 @@
"sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c",
"sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704"
],
"markers": "python_full_version >= '3.7.0'",
"markers": "python_version >= '3.7'",
"version": "==13.3.5"
},
"s3transfer": {
@@ -2455,11 +2457,11 @@
},
"stevedore": {
"hashes": [
"sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021",
"sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"
"sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d",
"sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"
],
"markers": "python_version >= '3.8'",
"version": "==5.0.0"
"version": "==5.1.0"
},
"toml": {
"hashes": [
@@ -2479,10 +2481,10 @@
},
"types-pyyaml": {
"hashes": [
"sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8",
"sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"
"sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f",
"sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"
],
"version": "==6.0.12.9"
"version": "==6.0.12.10"
},
"urllib3": {
"hashes": [
@@ -2501,11 +2503,11 @@
},
"websocket-client": {
"hashes": [
"sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40",
"sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"
"sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b",
"sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1"
],
"markers": "python_version >= '3.7'",
"version": "==1.5.1"
"version": "==1.5.2"
},
"werkzeug": {
"hashes": [

View File

@@ -65,3 +65,14 @@ def remove_job_from_s3(service_id, job_id):
def remove_s3_object(bucket_name, object_key, access_key, secret_key, region):
    """Delete a single object from S3 and return the result of the delete call."""
    return get_s3_object(bucket_name, object_key, access_key, secret_key, region).delete()
def remove_csv_object(object_key):
    """Delete ``object_key`` from the configured CSV upload bucket.

    Reads bucket name, credentials, and region from
    ``current_app.config['CSV_UPLOAD_BUCKET']`` and delegates to
    remove_s3_object so the fetch-then-delete logic lives in one place.
    Returns the result of the S3 delete call.
    """
    bucket = current_app.config['CSV_UPLOAD_BUCKET']
    return remove_s3_object(
        bucket['bucket'],
        object_key,
        bucket['access_key_id'],
        bucket['secret_access_key'],
        bucket['region'],
    )

View File

@@ -6,6 +6,7 @@ from sqlalchemy.exc import SQLAlchemyError
from app import notify_celery
from app.aws import s3
from app.aws.s3 import remove_csv_object
from app.celery.process_ses_receipts_tasks import check_and_queue_callback_task
from app.config import QueueNames
from app.cronitor import cronitor
@@ -14,6 +15,7 @@ from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention
from app.dao.jobs_dao import (
dao_archive_job,
dao_get_jobs_older_than_data_retention,
dao_get_unfinished_jobs,
)
from app.dao.notifications_dao import (
dao_get_notifications_processing_time_stats,
@@ -42,6 +44,19 @@ def _remove_csv_files(job_types):
current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
@notify_celery.task(name="cleanup-unfinished-jobs")
def cleanup_unfinished_jobs():
    """Archive jobs that started processing but never finished.

    dao_get_unfinished_jobs() only filters on processing_finished being
    null, so each job's start time is checked here. For every job still
    unfinished past the grace period, its CSV is removed from S3 and the
    job is archived.
    """
    now = datetime.utcnow()
    jobs = dao_get_unfinished_jobs()
    for job in jobs:
        # A job that never began processing has no start time to measure
        # against; skip it rather than raise a TypeError on the addition.
        if job.processing_started is None:
            continue
        # The query already checks that processing_finished is null, so a
        # job still running 5 minutes after it started is considered stuck.
        # (An earlier comment said "4 hours", which did not match the code.)
        acceptable_finish_time = job.processing_started + timedelta(minutes=5)
        if now > acceptable_finish_time:
            remove_csv_object(job.original_file_name)
            dao_archive_job(job)
@notify_celery.task(name="delete-notifications-older-than-retention")
def delete_notifications_older_than_retention():
delete_email_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING)
@@ -161,6 +176,7 @@ def delete_inbound_sms():
@notify_celery.task(name='save-daily-notification-processing-time')
@cronitor("save-daily-notification-processing-time")
def save_daily_notification_processing_time(local_date=None):
# local_date is a string in the format of "YYYY-MM-DD"
if local_date is None:
# if a date is not provided, we run against yesterday's data

View File

@@ -18,6 +18,7 @@ from sqlalchemy.orm.exc import NoResultFound
from app import db
from app.aws import s3
from app.celery.nightly_tasks import cleanup_unfinished_jobs
from app.celery.tasks import process_row
from app.dao.annual_billing_dao import (
dao_create_or_update_annual_billing_for_year,
@@ -464,6 +465,12 @@ def fix_billable_units():
print("End fix_billable_units")
@notify_command(name='delete-unfinished-jobs')
def delete_unfinished_jobs():
    """CLI entry point that synchronously runs the cleanup_unfinished_jobs task logic."""
    cleanup_unfinished_jobs()
    print("End cleanup_unfinished_jobs")
@notify_command(name='process-row-from-job')
@click.option('-j', '--job_id', required=True, help='Job id')
@click.option('-n', '--job_row_number', type=int, required=True, help='Job id')

View File

@@ -240,6 +240,11 @@ class Config(object):
'schedule': crontab(hour=2, minute=0),
'options': {'queue': QueueNames.PERIODIC}
},
'cleanup-unfinished-jobs': {
'task': 'cleanup-unfinished-jobs',
'schedule': crontab(hour=0, minute=5),
'options': {'queue': QueueNames.PERIODIC}
},
'remove_sms_email_jobs': {
'task': 'remove_sms_email_jobs',
'schedule': crontab(hour=4, minute=0),

View File

@@ -43,6 +43,10 @@ def dao_get_job_by_service_id_and_job_id(service_id, job_id):
return Job.query.filter_by(service_id=service_id, id=job_id).one()
def dao_get_unfinished_jobs():
    """Return every Job whose processing_finished timestamp is still null."""
    unfinished = Job.processing_finished.is_(None)
    return Job.query.filter(unfinished).all()
def dao_get_jobs_by_service_id(
service_id,
*,

View File

@@ -117,8 +117,8 @@ Steps for deploying production from scratch. These can be updated for a new clou
1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch.
1. Ensure that the `api_network_route` and `domain` modules are commented out in `terraform/production/main.tf`
1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production`
1. Uncomment the `api_network_route` and `domain` modules and re-trigger a deploy
1. Create DNS records for `domain` module within https://github.com/18f/dns
1. Uncomment the `api_network_route` and `domain` modules and re-trigger a deploy
### Steps to prepare SES

View File

@@ -0,0 +1,28 @@
"""
Revision ID: 0395_remove_international_letters_permission
Revises: 0394_remove_contact_list
Create Date: 2023-05-23 10:03:10.485368
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0395_remove_intl_letters_perm'
down_revision = '0394_remove_contact_list'
def upgrade():
    """Delete all 'international_letters' rows from service_permissions."""
    sql = """
    DELETE
    FROM service_permissions
    WHERE permission = 'international_letters'
    """
    op.get_bind().execute(sql)
def downgrade():
    """Intentional no-op: the deleted permission rows cannot be restored."""
    pass

View File

@@ -7,6 +7,7 @@ from freezegun import freeze_time
from app.celery import nightly_tasks
from app.celery.nightly_tasks import (
_delete_notifications_older_than_retention_by_type,
cleanup_unfinished_jobs,
delete_email_notifications_older_than_retention,
delete_inbound_sms,
delete_sms_notifications_older_than_retention,
@@ -15,7 +16,7 @@ from app.celery.nightly_tasks import (
save_daily_notification_processing_time,
timeout_notifications,
)
from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime
from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime, Job
from tests.app.db import (
create_job,
create_notification,
@@ -313,3 +314,17 @@ def test_delete_notifications_task_calls_task_for_services_that_have_sent_notifi
'datetime_to_delete_before': datetime(2021, 3, 26, 4, 0)
}),
])
def test_cleanup_unfinished_jobs(mocker):
    """Stuck jobs are cleaned up; recently started jobs are left alone.

    The original test only covered the positive (stuck-job) path; a job
    that started moments ago is added to verify the grace period is honored.
    """
    mock_s3 = mocker.patch('app.celery.nightly_tasks.remove_csv_object')
    mock_dao_archive = mocker.patch('app.celery.nightly_tasks.dao_archive_job')
    mock_dao = mocker.patch('app.celery.nightly_tasks.dao_get_unfinished_jobs')

    # Started long ago and never finished: should be removed and archived.
    mock_job_unfinished = Job()
    mock_job_unfinished.processing_started = datetime(2023, 1, 1, 0, 0, 0)
    mock_job_unfinished.original_file_name = "blah"

    # Started just now: still inside the grace period, must be untouched.
    mock_job_recent = Job()
    mock_job_recent.processing_started = datetime.utcnow()
    mock_job_recent.original_file_name = "fresh"

    mock_dao.return_value = [mock_job_unfinished, mock_job_recent]
    cleanup_unfinished_jobs()

    mock_s3.assert_called_once_with('blah')
    mock_dao_archive.assert_called_once_with(mock_job_unfinished)