Mirror of https://github.com/GSA/notifications-api.git, synced 2026-02-03 18:01:08 -05:00
Merge branch 'master' into letters-billing-table

2  .gitignore (vendored)
@@ -1,3 +1,5 @@
+queues.csv
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
@@ -9,7 +9,8 @@ from flask import current_app
 from notifications_utils.recipients import (
     validate_and_format_phone_number,
     validate_and_format_email_address,
-    InvalidPhoneError
+    InvalidPhoneError,
+    InvalidEmailError,
 )
 from werkzeug.datastructures import MultiDict
 from sqlalchemy import (desc, func, or_, and_, asc)
@@ -477,7 +478,10 @@ def dao_get_notifications_by_to_field(service_id, search_term, statuses=None):
     try:
         normalised = validate_and_format_phone_number(search_term)
     except InvalidPhoneError:
-        normalised = validate_and_format_email_address(search_term)
+        try:
+            normalised = validate_and_format_email_address(search_term)
+        except InvalidEmailError:
+            normalised = search_term

     filters = [
         Notification.service_id == service_id,
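Taken together, the two hunks above change how dao_get_notifications_by_to_field normalises its search term: a term that is neither a valid phone number nor a valid email address now falls back to the raw string instead of raising. A minimal illustrative sketch of that fallback order (the helper name normalise_search_term is invented for illustration and is not in the commit; the validators and exceptions come from notifications_utils.recipients as imported above):

# Illustrative sketch only, not part of the diff: the fallback order introduced above.
from notifications_utils.recipients import (
    validate_and_format_phone_number,
    validate_and_format_email_address,
    InvalidPhoneError,
    InvalidEmailError,
)


def normalise_search_term(search_term):
    # Try phone normalisation first, then email, then give up and use the raw term.
    try:
        return validate_and_format_phone_number(search_term)
    except InvalidPhoneError:
        try:
            return validate_and_format_email_address(search_term)
        except InvalidEmailError:
            return search_term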
@@ -6,3 +6,6 @@ routes:
 - route: notify-api-preview.cloudapps.digital
 - route: api-paas.notify.works
 - route: api.notify.works
+
+instances: 1
+memory: 256M
@@ -24,6 +24,7 @@ applications:

 - name: notify-delivery-worker-database
   command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks
+  memory: 1G
   env:
     NOTIFY_APP_NAME: delivery-worker-database

@@ -34,13 +35,13 @@ applications:

 - name: notify-delivery-worker-sender
   command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks
+  memory: 1G
   env:
     NOTIFY_APP_NAME: delivery-worker-sender

 - name: notify-delivery-worker-periodic
   command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics,periodic-tasks,statistics-tasks
   instances: 1
-  memory: 2G
   env:
     NOTIFY_APP_NAME: delivery-worker-periodic

@@ -1,3 +1,4 @@
 ---

 inherit: manifest-delivery-base.yml
+memory: 256M
@@ -3,4 +3,4 @@
 inherit: manifest-delivery-base.yml

 instances: 2
-memory: 1G
+memory: 768M
@@ -3,4 +3,4 @@
 inherit: manifest-delivery-base.yml

 instances: 2
-memory: 1G
+memory: 768M
71  scripts/delete_sqs_queues.py (Normal file → Executable file)
@@ -1,10 +1,33 @@
+"""
+
+Script to manage SQS queues. Can list or delete queues.
+
+Uses boto, so relies on correctly set up AWS access keys and tokens.
+
+In principle use this script to dump details of all queues in a gievn environment, and then
+manipulate the resultant CSV file so that it contains the queues you want to delete.
+
+Very hands on. Starter for a more automagic process.
+
+Usage:
+    scripts/delete_sqs_queues.py <action>
+
+options are:
+- list: dumps queue details to local file queues.csv in current directory.
+- delete: delete queues from local file queues.csv in current directory.
+
+Example:
+    scripts/delete_sqs_queues.py list delete
+"""
+
+from docopt import docopt
 import boto3
 import csv
 from datetime import datetime
-from pprint import pprint
-import os

-client = boto3.client('sqs', region_name=os.getenv('AWS_REGION'))
+FILE_NAME = "/tmp/queues.csv"
+
+client = boto3.client('sqs', region_name='eu-west-1')


 def _formatted_date_from_timestamp(timestamp):
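For context, a minimal assumed sketch of how docopt consumes the Usage section added above (docopt is the only new dependency in this hunk): docopt(__doc__) matches the command line against the usage pattern and returns a dict keyed by the pattern's placeholders, so the single positional argument ends up under '<action>'.

# Assumed illustration, not part of the diff.
from docopt import docopt

USAGE = """Usage:
    delete_sqs_queues.py <action>
"""

args = docopt(USAGE, argv=['list'])
print(args['<action>'])  # prints: list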
@@ -27,15 +50,18 @@ def get_queue_attributes(queue_name):
         ]
     )
     queue_attributes = response['Attributes']
+    queue_attributes.update({'QueueUrl': queue_name})
     return queue_attributes


-def delete_queue(queue_name):
+def delete_queue(queue_url):
+    # Note that deleting a queue returns 200 OK if it doesn't exist
+    print("DELETEING {}".format(queue_url))
     response = client.delete_queue(
-        QueueUrl=queue_name
+        QueueUrl=queue_url
     )
     if response['ResponseMetadata']['HTTPStatusCode'] == 200:
-        print('Deleted queue successfully')
+        print('Deleted queue successfully {}'.format(response['ResponseMetadata']))
     else:
         print('Error occured when attempting to delete queue')
         pprint(response)
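The hunk above only shows the tail of get_queue_attributes, so the following is a hedged reconstruction of the whole helper for context; the AttributeNames list is inferred from the columns written out in output_to_csv further down and may not match the file word for word. The substantive change is that the queue URL passed in is stashed on the returned attributes dict, which is what lets output_to_csv drop its extra get_queue_url lookup.

# Hedged reconstruction: only the last few lines of this function appear in the diff.
import boto3

client = boto3.client('sqs', region_name='eu-west-1')


def get_queue_attributes(queue_name):
    # Despite the parameter name, this is a queue URL (as returned by list_queues).
    response = client.get_queue_attributes(
        QueueUrl=queue_name,
        AttributeNames=[
            'QueueArn',
            'ApproximateNumberOfMessages',
            'ApproximateNumberOfMessagesDelayed',
            'ApproximateNumberOfMessagesNotVisible',
            'CreatedTimestamp',
        ]
    )
    queue_attributes = response['Attributes']
    queue_attributes.update({'QueueUrl': queue_name})
    return queue_attributes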
@@ -43,10 +69,10 @@ def delete_queue(queue_name):


 def output_to_csv(queue_attributes):
-    csv_name = 'queues.csv'
-    with open(csv_name, 'w') as csvfile:
+    with open(FILE_NAME, 'w') as csvfile:
         fieldnames = [
             'Queue Name',
+            'Queue URL',
             'Number of Messages',
             'Number of Messages Delayed',
             'Number of Messages Not Visible',
@@ -55,23 +81,19 @@ def output_to_csv(queue_attributes):
         writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
         writer.writeheader()
         for queue_attr in queue_attributes:
-            queue_url = client.get_queue_url(
-                QueueName=queue_attr['QueueArn']
-            )['QueueUrl']
             writer.writerow({
                 'Queue Name': queue_attr['QueueArn'],
-                'Queue URL': queue_url,
+                'Queue URL': queue_attr['QueueUrl'],
                 'Number of Messages': queue_attr['ApproximateNumberOfMessages'],
                 'Number of Messages Delayed': queue_attr['ApproximateNumberOfMessagesDelayed'],
                 'Number of Messages Not Visible': queue_attr['ApproximateNumberOfMessagesNotVisible'],
                 'Created': _formatted_date_from_timestamp(queue_attr['CreatedTimestamp'])
             })
-    return csv_name


-def read_from_csv(csv_name):
+def read_from_csv():
     queue_urls = []
-    with open(csv_name, 'r') as csvfile:
+    with open(FILE_NAME, 'r') as csvfile:
         next(csvfile)
         rows = csv.reader(csvfile, delimiter=',')
         for row in rows:
@@ -79,6 +101,19 @@ def read_from_csv(csv_name):
     return queue_urls


-queues = get_queues()
-for queue in queues:
-    delete_queue(queue)
+if __name__ == "__main__":
+    arguments = docopt(__doc__)
+
+    if arguments['<action>'] == 'list':
+        queues = get_queues()
+        queue_attributes = []
+        for queue in queues:
+            queue_attributes.append(get_queue_attributes(queue))
+        output_to_csv(queue_attributes)
+    elif arguments['<action>'] == 'delete':
+        queues_to_delete = read_from_csv()
+        for queue in queues_to_delete:
+            delete_queue(queue)
+    else:
+        print("UNKNOWN COMMAND")
+        exit(1)
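With the new entry point in place, the intended workflow (per the module docstring, noting that FILE_NAME actually points at /tmp/queues.csv rather than the current directory) is:

    scripts/delete_sqs_queues.py list      # dump every queue's attributes to /tmp/queues.csv
    # hand-edit /tmp/queues.csv down to the queues that should be removed
    scripts/delete_sqs_queues.py delete    # delete each queue URL left in the file

Any other action prints UNKNOWN COMMAND and exits with a non-zero status.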
@@ -1772,6 +1772,20 @@ def test_dao_get_notifications_by_to_field_search_is_not_case_sensitive(sample_t
     assert notification.id in notification_ids


+@pytest.mark.parametrize('to', [
+    'not@email', '123'
+])
+def test_dao_get_notifications_by_to_field_accepts_invalid_phone_numbers_and_email_addresses(
+    sample_template,
+    to,
+):
+    notification = create_notification(
+        template=sample_template, to_field='test@example.com', normalised_to='test@example.com'
+    )
+    results = dao_get_notifications_by_to_field(notification.service_id, to)
+    assert len(results) == 0
+
+
 def test_dao_get_notifications_by_to_field_search_ignores_spaces(sample_template):
     notification1 = create_notification(
         template=sample_template, to_field='+447700900855', normalised_to='447700900855'