Mirror of https://github.com/GSA/notifications-api.git, synced 2025-12-09 14:42:24 -05:00
The performance platform is going away soon. The only statistic we do not already have in our database, in a form we can query efficiently, is processing time: any query against notification_history is too slow to run on a web page. Processing time is recorded per whole day as two counts: the total number of normal/team emails and text messages, and the number of those messages that went from created to sending within 10 seconds. From these two counts we can easily calculate the percentage of messages that were marked as sending within 10 seconds.
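For illustration, here is a minimal sketch of the kind of daily rollup that could feed this table. It assumes a hypothetical source table notifications with created_at, sent_at and key_type columns; those names are assumptions for the example, not necessarily the real Notify schema.

from datetime import timedelta

import sqlalchemy as sa


def processing_time_rollup(connection, day):
    # Count the day's messages and how many went from created to
    # sending within 10 seconds. Table and column names are assumed;
    # key_type != 'test' keeps only normal/team messages.
    row = connection.execute(
        sa.text(
            """
            SELECT
                count(*) AS messages_total,
                count(*) FILTER (
                    WHERE sent_at - created_at <= interval '10 seconds'
                ) AS messages_within_10_secs
            FROM notifications
            WHERE created_at >= :start
              AND created_at < :end
              AND key_type != 'test'
            """
        ),
        {"start": day, "end": day + timedelta(days=1)},
    ).fetchone()
    return row.messages_total, row.messages_within_10_secs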
29 lines
944 B
Python
"""
|
|
|
|
Revision ID: 0347_add_ft_processing_time
|
|
Revises: 0346_notify_number_sms_sender
|
|
Create Date: 2021-02-22 14:05:24.775338
|
|
|
|
"""
|
|
from alembic import op
|
|
import sqlalchemy as sa
|
|
from sqlalchemy.dialects import postgresql
|
|
|
|
revision = '0347_add_ft_processing_time'
|
|
down_revision = '0346_notify_number_sms_sender'
|
|
|
|
|
|
def upgrade():
|
|
op.create_table('ft_processing_time',
|
|
sa.Column('bst_date', sa.Date(), nullable=False),
|
|
sa.Column('messages_total', sa.Integer(), nullable=False),
|
|
sa.Column('messages_within_10_secs', sa.Integer(), nullable=False),
|
|
sa.PrimaryKeyConstraint('bst_date')
|
|
)
|
|
op.create_index(op.f('ix_ft_processing_time_bst_date'), 'ft_processing_time', ['bst_date'], unique=False)
|
|
|
|
|
|
def downgrade():
|
|
op.drop_index(op.f('ix_ft_processing_time_bst_date'), table_name='ft_processing_time')
|
|
op.drop_table('ft_processing_time')
|
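With the pre-aggregated table in place, the percentage of messages marked as sending within 10 seconds becomes a cheap primary-key lookup instead of a scan of notification_history. A hedged sketch of such a read, where the helper name is illustrative:

import sqlalchemy as sa


def percentage_under_10_secs(connection, day):
    # Read the pre-aggregated row for the day and compute the share of
    # messages that reached sending within 10 seconds.
    row = connection.execute(
        sa.text(
            "SELECT messages_total, messages_within_10_secs "
            "FROM ft_processing_time WHERE bst_date = :day"
        ),
        {"day": day},
    ).fetchone()
    if row is None or row.messages_total == 0:
        return None  # no data recorded for that day
    return 100.0 * row.messages_within_10_secs / row.messages_total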