diff --git a/.gitignore b/.gitignore
index d19df2e94..12f77b15c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+queues.csv
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py
index 7ebf9170c..f4ff2a24f 100644
--- a/app/dao/notifications_dao.py
+++ b/app/dao/notifications_dao.py
@@ -9,7 +9,8 @@ from flask import current_app
 from notifications_utils.recipients import (
     validate_and_format_phone_number,
     validate_and_format_email_address,
-    InvalidPhoneError
+    InvalidPhoneError,
+    InvalidEmailError,
 )
 from werkzeug.datastructures import MultiDict
 from sqlalchemy import (desc, func, or_, and_, asc)
@@ -477,7 +478,10 @@ def dao_get_notifications_by_to_field(service_id, search_term, statuses=None):
     try:
         normalised = validate_and_format_phone_number(search_term)
     except InvalidPhoneError:
-        normalised = validate_and_format_email_address(search_term)
+        try:
+            normalised = validate_and_format_email_address(search_term)
+        except InvalidEmailError:
+            normalised = search_term
 
     filters = [
         Notification.service_id == service_id,
diff --git a/manifest-api-preview.yml b/manifest-api-preview.yml
index 04b396388..a40990194 100644
--- a/manifest-api-preview.yml
+++ b/manifest-api-preview.yml
@@ -6,3 +6,6 @@ routes:
   - route: notify-api-preview.cloudapps.digital
   - route: api-paas.notify.works
   - route: api.notify.works
+
+instances: 1
+memory: 256M
diff --git a/manifest-delivery-base.yml b/manifest-delivery-base.yml
index c68d36965..2eaf380a6 100644
--- a/manifest-delivery-base.yml
+++ b/manifest-delivery-base.yml
@@ -24,6 +24,7 @@ applications:
 
   - name: notify-delivery-worker-database
     command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q db-sms,db-email,db-letter,database-tasks
+    memory: 1G
     env:
       NOTIFY_APP_NAME: delivery-worker-database
 
@@ -34,13 +35,13 @@ applications:
 
   - name: notify-delivery-worker-sender
     command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=11 -Q send-sms,send-email,send-tasks
+    memory: 1G
     env:
       NOTIFY_APP_NAME: delivery-worker-sender
 
   - name: notify-delivery-worker-periodic
     command: scripts/run_app_paas.sh celery -A aws_run_celery.notify_celery worker --loglevel=INFO --concurrency=2 -Q periodic,statistics,periodic-tasks,statistics-tasks
     instances: 1
-    memory: 2G
     env:
       NOTIFY_APP_NAME: delivery-worker-periodic
 
diff --git a/manifest-delivery-preview.yml b/manifest-delivery-preview.yml
index d628e5fc9..2bbb3c0dc 100644
--- a/manifest-delivery-preview.yml
+++ b/manifest-delivery-preview.yml
@@ -1,3 +1,4 @@
 ---
 inherit: manifest-delivery-base.yml
 
+memory: 256M
diff --git a/manifest-delivery-production.yml b/manifest-delivery-production.yml
index 53c8d2f12..d2c2ba647 100644
--- a/manifest-delivery-production.yml
+++ b/manifest-delivery-production.yml
@@ -3,4 +3,4 @@ inherit: manifest-delivery-base.yml
 
 instances: 2
 
-memory: 1G
+memory: 768M
diff --git a/manifest-delivery-staging.yml b/manifest-delivery-staging.yml
index 53c8d2f12..d2c2ba647 100644
--- a/manifest-delivery-staging.yml
+++ b/manifest-delivery-staging.yml
@@ -3,4 +3,4 @@ inherit: manifest-delivery-base.yml
 
 instances: 2
 
-memory: 1G
+memory: 768M
diff --git a/scripts/delete_sqs_queues.py b/scripts/delete_sqs_queues.py
old mode 100644
new mode 100755
index bdbf6ff65..b167ce392
--- a/scripts/delete_sqs_queues.py
+++ b/scripts/delete_sqs_queues.py
@@ -1,10 +1,33 @@
+"""
+
+Script to manage SQS queues. Can list or delete queues.
+
+Uses boto, so relies on correctly set up AWS access keys and tokens.
+
+In principle use this script to dump details of all queues in a given environment, and then
+manipulate the resultant CSV file so that it contains only the queues you want to delete.
+
+Very hands on. Starter for a more automagic process.
+
+Usage:
+    scripts/delete_sqs_queues.py <command>
+
+    options are:
+    - list: dumps queue details to the local file /tmp/queues.csv.
+    - delete: deletes the queues listed in /tmp/queues.csv.
+
+Example:
+    scripts/delete_sqs_queues.py list
+"""
+
+from docopt import docopt
 import boto3
 import csv
 from datetime import datetime
-from pprint import pprint
-import os
 
-client = boto3.client('sqs', region_name=os.getenv('AWS_REGION'))
+FILE_NAME = "/tmp/queues.csv"
+
+client = boto3.client('sqs', region_name='eu-west-1')
 
 
 def _formatted_date_from_timestamp(timestamp):
@@ -27,15 +50,18 @@
         ]
     )
     queue_attributes = response['Attributes']
+    queue_attributes.update({'QueueUrl': queue_name})
     return queue_attributes
 
 
-def delete_queue(queue_name):
+def delete_queue(queue_url):
+    # Note that deleting a queue returns 200 OK if it doesn't exist
+    print("DELETING {}".format(queue_url))
     response = client.delete_queue(
-        QueueUrl=queue_name
+        QueueUrl=queue_url
     )
     if response['ResponseMetadata']['HTTPStatusCode'] == 200:
-        print('Deleted queue successfully')
+        print('Deleted queue successfully {}'.format(response['ResponseMetadata']))
     else:
         print('Error occured when attempting to delete queue')
-        pprint(response)
+        print(response)
@@ -43,10 +69,10 @@
 
 
 def output_to_csv(queue_attributes):
-    csv_name = 'queues.csv'
-    with open(csv_name, 'w') as csvfile:
+    with open(FILE_NAME, 'w') as csvfile:
         fieldnames = [
             'Queue Name',
+            'Queue URL',
             'Number of Messages',
             'Number of Messages Delayed',
             'Number of Messages Not Visible',
@@ -55,23 +81,19 @@
         writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
         writer.writeheader()
         for queue_attr in queue_attributes:
-            queue_url = client.get_queue_url(
-                QueueName=queue_attr['QueueArn']
-            )['QueueUrl']
             writer.writerow({
                 'Queue Name': queue_attr['QueueArn'],
-                'Queue URL': queue_url,
+                'Queue URL': queue_attr['QueueUrl'],
                 'Number of Messages': queue_attr['ApproximateNumberOfMessages'],
                 'Number of Messages Delayed': queue_attr['ApproximateNumberOfMessagesDelayed'],
                 'Number of Messages Not Visible': queue_attr['ApproximateNumberOfMessagesNotVisible'],
                 'Created': _formatted_date_from_timestamp(queue_attr['CreatedTimestamp'])
             })
-    return csv_name
 
 
-def read_from_csv(csv_name):
+def read_from_csv():
     queue_urls = []
-    with open(csv_name, 'r') as csvfile:
+    with open(FILE_NAME, 'r') as csvfile:
         next(csvfile)
         rows = csv.reader(csvfile, delimiter=',')
         for row in rows:
@@ -79,6 +101,19 @@
     return queue_urls
 
 
-queues = get_queues()
-for queue in queues:
-    delete_queue(queue)
+if __name__ == "__main__":
+    arguments = docopt(__doc__)
+
+    if arguments['<command>'] == 'list':
+        queues = get_queues()
+        queue_attributes = []
+        for queue in queues:
+            queue_attributes.append(get_queue_attributes(queue))
+        output_to_csv(queue_attributes)
+    elif arguments['<command>'] == 'delete':
+        queues_to_delete = read_from_csv()
+        for queue in queues_to_delete:
+            delete_queue(queue)
+    else:
+        print("UNKNOWN COMMAND")
+        exit(1)
diff --git a/tests/app/dao/test_notification_dao.py b/tests/app/dao/test_notification_dao.py
index a6b69cd24..dc0f3fc34
--- a/tests/app/dao/test_notification_dao.py
+++ b/tests/app/dao/test_notification_dao.py
@@ -1772,6 +1772,20 @@ def test_dao_get_notifications_by_to_field_search_is_not_case_sensitive(sample_t
     assert notification.id in notification_ids
 
 
+@pytest.mark.parametrize('to', [
+    'not@email', '123'
+])
+def test_dao_get_notifications_by_to_field_accepts_invalid_phone_numbers_and_email_addresses(
+    sample_template,
+    to,
+):
+    notification = create_notification(
+        template=sample_template, to_field='test@example.com', normalised_to='test@example.com'
+    )
+    results = dao_get_notifications_by_to_field(notification.service_id, to)
+    assert len(results) == 0
+
+
 def test_dao_get_notifications_by_to_field_search_ignores_spaces(sample_template):
     notification1 = create_notification(
         template=sample_template, to_field='+447700900855', normalised_to='447700900855'