Command to load the processing time data into the new ft_processing_time table.
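Commands registered with @notify_command are normally run through the Flask CLI's command group; assuming that convention, and with processing_time.json standing in for whatever file was downloaded, loading the data would look something like:

flask command load-processing-time-data -f processing_time.json

The file is the flattened JSON export from the performance platform URL quoted in the code comments below.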
@@ -30,6 +30,7 @@ from app.dao.fact_billing_dao import (
     get_service_ids_that_need_billing_populated,
     update_fact_billing,
 )
+from app.dao.fact_processing_time_dao import insert_update_processing_time
 from app.dao.jobs_dao import dao_get_job_by_id
 from app.dao.organisation_dao import dao_get_organisation_by_email_address, dao_add_service_to_organisation
 
@@ -57,7 +58,7 @@ from app.models import (
     Domain,
     Service,
     EmailBranding,
-    LetterBranding,
+    LetterBranding, FactProcessingTime,
 )
 from app.performance_platform.processing_time import send_processing_time_for_start_and_end
 from app.utils import DATETIME_FORMAT, get_london_midnight_in_utc, get_midnight_for_day_before
@@ -927,3 +928,76 @@ def process_row_from_job(job_id, job_row_number):
     notification_id = process_row(row, template, job, job.service)
     current_app.logger.info("Process row {} for job {} created notification_id: {}".format(
         job_row_number, job_id, notification_id))
+
+
+@notify_command(name='load-processing-time-data')
+@click.option('-f', '--file_name', required=True, help='Text file containing JSON data for processing time')
+def load_processing_time_data(file_name):
+    # This command loads data from a text file that was downloaded from
+    # https://www.performance.service.gov.uk/data/govuk-notify/processing-time?flatten=true&duration=30&group_by=status&period=day&collect=count%3Asum&format=json  # noqa
+    # The data is formatted as JSON:
+    # {"data": [
+    #     {
+    #         "_count": 1.0,
+    #         "_end_at": "2021-01-27T00:00:00+00:00",
+    #         "_start_at": "2021-01-26T00:00:00+00:00",
+    #         "count:sum": 4024207.0,
+    #         "status": "messages-within-10-secs"
+    #     },
+    #     {
+    #         "_count": 1.0,
+    #         "_end_at": "2021-01-27T00:00:00+00:00",
+    #         "_start_at": "2021-01-26T00:00:00+00:00",
+    #         "count:sum": 4243204.0,
+    #         "status": "messages-total"
+    #     },
+    # ]}
+    #
+    # Using fact_processing_time_dao.insert_update_processing_time means the command can be run
+    # more than once without throwing an exception.
+
+    with open(file_name) as f:
+        data = json.load(f)
+    normalised = []
+
+    class ProcessingTimeData:
+        bst_date = datetime(1990, 1, 1).date()
+        messages_total = 0
+        messages_within_10_secs = 0
+
+        def __eq__(self, obj):
+            return isinstance(obj, ProcessingTimeData) and obj.bst_date == self.bst_date
+
+        def set_bst_date(self, value):
+            self.bst_date = value
+
+        def set_m(self, status, value):
+            if status == 'messages-total':
+                self.messages_total = value
+            elif status == 'messages-within-10-secs':
+                self.messages_within_10_secs = value
+
+    # Each date appears once per status in the source data, so fold the rows
+    # into one object per bst_date.
+    for entry in data['data']:
+        bst_date = datetime.strptime(entry['_start_at'][0:10], "%Y-%m-%d").date()
+        status = entry['status']
+        value = entry['count:sum']
+        obj = ProcessingTimeData()
+        obj.set_bst_date(bst_date)
+        if obj in normalised:
+            normalised[normalised.index(obj)].set_m(status, value)
+        else:
+            obj.set_m(status, value)
+            normalised.append(obj)
+
+    for n in normalised:
+        print(n.bst_date, n.messages_total, n.messages_within_10_secs)
+        fact_processing_time = FactProcessingTime(
+            bst_date=n.bst_date,
+            messages_total=n.messages_total,
+            messages_within_10_secs=n.messages_within_10_secs
+        )
+        insert_update_processing_time(fact_processing_time)
+
+    print("Done loading processing time data.")
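The idempotency noted in the comments depends on insert_update_processing_time behaving as an upsert. The real implementation is in app/dao/fact_processing_time_dao.py and may differ in detail; a minimal sketch of the pattern, assuming Flask-SQLAlchemy and bst_date as the primary key of ft_processing_time:

from app import db
from app.models import FactProcessingTime


def insert_update_processing_time(processing_time):
    # Update the existing row for this date if there is one, otherwise
    # insert a new one, so re-running the command never trips the
    # primary-key constraint.
    existing = FactProcessingTime.query.get(processing_time.bst_date)
    if existing:
        existing.messages_total = processing_time.messages_total
        existing.messages_within_10_secs = processing_time.messages_within_10_secs
    else:
        db.session.add(processing_time)
    db.session.commit()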