Merge pull request #1308 from GSA/main
8/30/2024 Production Deploy
ccostino authored Aug 30, 2024
2 parents abddd7a + 0914cd9 commit 9625cd4
Showing 23 changed files with 995 additions and 1,173 deletions.
6 changes: 3 additions & 3 deletions .ds.baseline
@@ -133,15 +133,15 @@
"filename": ".github/workflows/checks.yml",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 27,
"line_number": 28,
"is_secret": false
},
{
"type": "Basic Auth Credentials",
"filename": ".github/workflows/checks.yml",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 44,
"line_number": 45,
"is_secret": false
}
],
@@ -384,5 +384,5 @@
}
]
},
"generated_at": "2024-08-13T22:32:28Z"
"generated_at": "2024-08-22T18:00:24Z"
}
1 change: 1 addition & 0 deletions .github/workflows/checks.yml
@@ -16,6 +16,7 @@ env:
AWS_US_TOLL_FREE_NUMBER: "+18556438890"

jobs:

build:
runs-on: ubuntu-latest

1 change: 1 addition & 0 deletions .github/workflows/deploy.yml
@@ -11,6 +11,7 @@ permissions:
contents: read

jobs:

deploy:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' }}
8 changes: 7 additions & 1 deletion app/aws/s3.py
@@ -1,3 +1,4 @@
import datetime
import re
import time

@@ -8,6 +9,7 @@

from app import redis_store
from app.clients import AWS_CLIENT_CONFIG
from notifications_utils import aware_utcnow

FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv"

@@ -58,11 +60,15 @@ def list_s3_objects():

bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"]
s3_client = get_s3_client()
# Our reports only support 7 days, but pull 8 days to avoid
# any edge cases
time_limit = aware_utcnow() - datetime.timedelta(days=8)
try:
response = s3_client.list_objects_v2(Bucket=bucket_name)
while True:
for obj in response.get("Contents", []):
yield obj["Key"]
if obj["LastModified"] >= time_limit:
yield obj["Key"]
if "NextContinuationToken" in response:
response = s3_client.list_objects_v2(
Bucket=bucket_name,
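The filtering added above skips objects older than the reporting window while still walking every page of results. As a standalone illustration of the same pattern, here is a minimal sketch that assumes a plain boto3 client rather than the app's get_s3_client() and AWS_CLIENT_CONFIG setup; iter_recent_keys is a hypothetical helper name.

import datetime

import boto3


def iter_recent_keys(bucket_name, days=8):
    """Yield keys modified within the last `days` days, across all result pages."""
    s3_client = boto3.client("s3")
    time_limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=days)
    response = s3_client.list_objects_v2(Bucket=bucket_name)
    while True:
        for obj in response.get("Contents", []):
            # LastModified is a timezone-aware UTC datetime, so it compares
            # directly against an aware cutoff.
            if obj["LastModified"] >= time_limit:
                yield obj["Key"]
        token = response.get("NextContinuationToken")
        if not token:
            break
        response = s3_client.list_objects_v2(
            Bucket=bucket_name, ContinuationToken=token
        )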
1 change: 0 additions & 1 deletion app/clients/sms/aws_sns.py
@@ -48,7 +48,6 @@ def _valid_sender_number(self, sender):

def send_sms(self, to, content, reference, sender=None, international=False):
matched = False

for match in phonenumbers.PhoneNumberMatcher(to, "US"):
matched = True
to = phonenumbers.format_number(
10 changes: 10 additions & 0 deletions app/commands.py
@@ -1053,3 +1053,13 @@ def fake_email(name):
platform_admin=admin,
)
print(f"{num} {user.email_address} created")


# generate a new salt value
@notify_command(name="generate-salt")
def generate_salt():
if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test"]:
current_app.logger.error("Can only be run in development")
return
salt = secrets.token_hex(16)
print(salt)
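The new generate-salt command reduces to a single standard-library call; the notify_command decorator and the NOTIFY_ENVIRONMENT guard above belong to the app and are not reproduced in this bare sketch.

import secrets

# 16 random bytes rendered as 32 hex characters, printed for use as a salt.
print(secrets.token_hex(16))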
2 changes: 1 addition & 1 deletion app/config.py
@@ -84,7 +84,7 @@ class Config(object):
SQLALCHEMY_POOL_TIMEOUT = 30
SQLALCHEMY_POOL_RECYCLE = 300
SQLALCHEMY_STATEMENT_TIMEOUT = 1200
PAGE_SIZE = 50
PAGE_SIZE = 20
API_PAGE_SIZE = 250
REDIS_URL = cloud_config.redis_url
REDIS_ENABLED = getenv("REDIS_ENABLED", "1") == "1"
113 changes: 82 additions & 31 deletions app/dao/fact_notification_status_dao.py
@@ -1,9 +1,10 @@
from datetime import timedelta

from sqlalchemy import Date, case, func
from sqlalchemy import Date, case, cast, func, select, union_all
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import extract, literal
from sqlalchemy.types import DateTime, Integer
from sqlalchemy.types import DateTime, Integer, Text

from app import db
from app.dao.dao_utils import autocommit
@@ -14,6 +15,9 @@
NotificationAllTimeView,
Service,
Template,
TemplateFolder,
User,
template_folder_map,
)
from app.utils import (
get_midnight_in_utc,
@@ -126,36 +130,47 @@ def fetch_notification_status_for_service_for_day(fetch_day, service_id):


def fetch_notification_status_for_service_for_today_and_7_previous_days(
service_id, by_template=False, limit_days=7
):
service_id: str, by_template: bool = False, limit_days: int = 7
) -> list[dict | None]:
start_date = midnight_n_days_ago(limit_days)
now = utc_now()
stats_for_7_days = db.session.query(
FactNotificationStatus.notification_type.cast(db.Text).label(
"notification_type"
),
FactNotificationStatus.notification_status.cast(db.Text).label("status"),
now = get_midnight_in_utc(utc_now())

# Query for the last 7 days
stats_for_7_days = select(
cast(FactNotificationStatus.notification_type, Text).label("notification_type"),
cast(FactNotificationStatus.notification_status, Text).label("status"),
*(
[FactNotificationStatus.template_id.label("template_id")]
[
FactNotificationStatus.template_id.label("template_id"),
FactNotificationStatus.local_date.label("date_used"),
]
if by_template
else []
),
FactNotificationStatus.notification_count.label("count"),
).filter(
).where(
FactNotificationStatus.service_id == service_id,
FactNotificationStatus.local_date >= start_date,
FactNotificationStatus.key_type != KeyType.TEST,
)

# Query for today's stats
stats_for_today = (
db.session.query(
Notification.notification_type.cast(db.Text),
Notification.status.cast(db.Text),
*([Notification.template_id] if by_template else []),
select(
cast(Notification.notification_type, Text),
cast(Notification.status, Text),
*(
[
Notification.template_id,
literal(now).label("date_used"),
]
if by_template
else []
),
func.count().label("count"),
)
.filter(
Notification.created_at >= get_midnight_in_utc(now),
.where(
Notification.created_at >= now,
Notification.service_id == service_id,
Notification.key_type != KeyType.TEST,
)
@@ -166,31 +181,67 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(
)
)

all_stats_table = stats_for_7_days.union_all(stats_for_today).subquery()
# Combine the queries using union_all
all_stats_union = union_all(stats_for_7_days, stats_for_today).subquery()
all_stats_alias = aliased(all_stats_union, name="all_stats")

query = db.session.query(
# Final query with optional template joins
query = select(
*(
[
TemplateFolder.name.label("folder"),
Template.name.label("template_name"),
False, # TODO: this is related to is_precompiled_letter
all_stats_table.c.template_id,
False, # TODO: Handle `is_precompiled_letter`
template_folder_map.c.template_folder_id,
all_stats_alias.c.template_id,
User.name.label("created_by"),
Template.created_by_id,
func.max(all_stats_alias.c.date_used).label(
"last_used"
), # Get the most recent date
]
if by_template
else []
),
all_stats_table.c.notification_type,
all_stats_table.c.status,
func.cast(func.sum(all_stats_table.c.count), Integer).label("count"),
all_stats_alias.c.notification_type,
all_stats_alias.c.status,
cast(func.sum(all_stats_alias.c.count), Integer).label("count"),
)

if by_template:
query = query.filter(all_stats_table.c.template_id == Template.id)
query = (
query.join(Template, all_stats_alias.c.template_id == Template.id)
.join(User, Template.created_by_id == User.id)
.outerjoin(
template_folder_map, Template.id == template_folder_map.c.template_id
)
.outerjoin(
TemplateFolder,
TemplateFolder.id == template_folder_map.c.template_folder_id,
)
)

# Group by all necessary fields except date_used
query = query.group_by(
*(
[
TemplateFolder.name,
Template.name,
all_stats_alias.c.template_id,
User.name,
template_folder_map.c.template_folder_id,
Template.created_by_id,
]
if by_template
else []
),
all_stats_alias.c.notification_type,
all_stats_alias.c.status,
)

return query.group_by(
*([Template.name, all_stats_table.c.template_id] if by_template else []),
all_stats_table.c.notification_type,
all_stats_table.c.status,
).all()
# Execute the query using Flask-SQLAlchemy's session
result = db.session.execute(query)
return result.mappings().all()


def fetch_notification_status_totals_for_all_services(start_date, end_date):
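The rewrite above replaces the legacy db.session.query(...).union_all(...) chain with 2.0-style select() statements combined via union_all(), wrapped in aliased() so the combined rows can be re-selected, grouped, and returned as dict-like mappings. A minimal, self-contained sketch of that shape against two toy tables (the models and column names here are illustrative stand-ins, not the app's):

from sqlalchemy import (
    Column, Integer, String, create_engine, func, select, union_all,
)
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()


class Archived(Base):
    __tablename__ = "archived_counts"
    id = Column(Integer, primary_key=True)
    status = Column(String)
    count = Column(Integer)


class Recent(Base):
    __tablename__ = "recent_counts"
    id = Column(Integer, primary_key=True)
    status = Column(String)
    count = Column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [Archived(status="delivered", count=5), Recent(status="delivered", count=2)]
    )
    session.commit()

    # Two 2.0-style SELECTs with matching column shapes...
    archived_stats = select(Archived.status, Archived.count.label("count"))
    recent_stats = select(Recent.status, Recent.count.label("count"))

    # ...combined with union_all and aliased so the result can be re-queried.
    all_stats = aliased(
        union_all(archived_stats, recent_stats).subquery(), name="all_stats"
    )

    totals = select(
        all_stats.c.status,
        func.sum(all_stats.c.count).label("count"),
    ).group_by(all_stats.c.status)

    # execute() + mappings() returns dict-like rows, as in the refactored DAO.
    for row in session.execute(totals).mappings():
        print(row["status"], row["count"])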
23 changes: 22 additions & 1 deletion app/dao/jobs_dao.py
@@ -1,3 +1,4 @@
import os
import uuid
from datetime import timedelta

@@ -140,6 +141,25 @@ def dao_create_job(job):
job.id = uuid.uuid4()
db.session.add(job)
db.session.commit()
# We are seeing weird time anomalies where a job can be created on
# 8/19 yet show a created_at time of 8/16. This seems to be the only
# place the created_at value is set so do some double-checking and debugging
orig_time = job.created_at
now_time = utc_now()
diff_time = now_time - orig_time
current_app.logger.info(
f"#notify-admin-1859 dao_create_job orig created at {orig_time} and now {now_time}"
)
if diff_time.total_seconds() > 300: # It should be only a few seconds diff at most
current_app.logger.error(
"#notify-admin-1859 Something is wrong with job.created_at!"
)
if os.getenv("NOTIFY_ENVIRONMENT") not in ["test"]:
job.created_at = now_time
dao_update_job(job)
current_app.logger.error(
f"#notify-admin-1859 Job created_at reset to {job.created_at}"
)


def dao_update_job(job):
@@ -168,7 +188,8 @@ def dao_get_jobs_older_than_data_retention(notification_types):
.all()
)

end_date = today - timedelta(days=7)
# notify-api-1287, make default data retention 7 days, 23 hours
end_date = today - timedelta(days=7, hours=23)
for notification_type in notification_types:
services_with_data_retention = [
x.service_id
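The dao_create_job change above compares the stored created_at against the current time and treats a gap of more than 300 seconds as an anomaly. A standalone sketch of that comparison, with an aware UTC clock standing in for the app's utc_now() and the logging replaced by a return value:

from datetime import datetime, timedelta, timezone


def created_at_looks_wrong(created_at, max_drift_seconds=300):
    """Return True when a freshly created record's timestamp drifts too far from now."""
    drift = datetime.now(timezone.utc) - created_at
    # A just-created row should be at most a few seconds old; anything beyond
    # the threshold suggests the timestamp was set incorrectly.
    return drift.total_seconds() > max_drift_seconds


# A timestamp three days in the past is flagged; a fresh one is not.
stale = datetime.now(timezone.utc) - timedelta(days=3)
print(created_at_looks_wrong(stale))                       # True
print(created_at_looks_wrong(datetime.now(timezone.utc)))  # False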
29 changes: 13 additions & 16 deletions app/delivery/send_to_providers.py
@@ -1,4 +1,5 @@
import json
from contextlib import suppress
from urllib import parse

from cachetools import TTLCache, cached
@@ -81,27 +82,15 @@ def send_sms_to_provider(notification):
# We start by trying to get the phone number from a job in s3. If we fail, we assume
# the phone number is for the verification code on login, which is not a job.
recipient = None
try:
# It is our 2facode, maybe
recipient = _get_verify_code(notification)

if recipient is None:
recipient = get_phone_number_from_s3(
notification.service_id,
notification.job_id,
notification.job_row_number,
)
except Exception:
# It is our 2facode, maybe
key = f"2facode-{notification.id}".replace(" ", "")
recipient = redis_store.get(key)

if recipient:
recipient = recipient.decode("utf-8")

if recipient is None:
si = notification.service_id
ji = notification.job_id
jrn = notification.job_row_number
raise Exception(
f"The recipient for (Service ID: {si}; Job ID: {ji}; Job Row Number {jrn} was not found."
)

sender_numbers = get_sender_numbers(notification)
if notification.reply_to_text not in sender_numbers:
@@ -138,6 +127,14 @@ def send_sms_to_provider(notification):
return message_id


def _get_verify_code(notification):
key = f"2facode-{notification.id}".replace(" ", "")
recipient = redis_store.get(key)
with suppress(AttributeError):
recipient = recipient.decode("utf-8")
return recipient


def get_sender_numbers(notification):
possible_senders = dao_get_sms_senders_by_service_id(notification.service_id)
sender_numbers = []
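The new _get_verify_code helper relies on redis_store.get returning either bytes (which need decoding) or None, with contextlib.suppress(AttributeError) skipping the decode when nothing was cached. A minimal sketch of the same pattern, using a plain dict as a stand-in for the Redis client (the key format mirrors the diff; the fake store and its values are assumptions):

from contextlib import suppress

# Stand-in for redis_store: hits come back as bytes, misses as None.
fake_redis = {"2facode-1234": b"+15555550123"}


def get_verify_code(notification_id):
    key = f"2facode-{notification_id}".replace(" ", "")
    recipient = fake_redis.get(key)
    # When the key is missing, recipient is None and .decode() would raise
    # AttributeError; suppress() turns that into "leave it as None".
    with suppress(AttributeError):
        recipient = recipient.decode("utf-8")
    return recipient


print(get_verify_code("1234"))     # '+15555550123'
print(get_verify_code("missing"))  # None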
7 changes: 6 additions & 1 deletion app/template_statistics/rest.py
@@ -23,7 +23,7 @@ def get_template_statistics_for_service_by_day(service_id):
try:
whole_days = int(whole_days)
except ValueError:
error = "{} is not an integer".format(whole_days)
error = f"{whole_days} is not an integer"
message = {"whole_days": [error]}
raise InvalidRequest(message, status_code=400)

@@ -41,6 +41,11 @@
"count": row.count,
"template_id": str(row.template_id),
"template_name": row.template_name,
"template_folder_id": row.template_folder_id,
"template_folder": row.folder,
"created_by_id": row.created_by_id,
"created_by": row.created_by,
"last_used": row.last_used,
"template_type": row.notification_type,
"status": row.status,
}
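With the DAO now returning folder, creator, and last-used information, each serialized statistics entry carries the extra keys added above. For shape only, a hypothetical entry might look like the following (every value is illustrative, not real data):

# Hypothetical shape of one serialized entry after this change.
example_entry = {
    "count": 12,
    "template_id": "a1b2c3d4-0000-0000-0000-000000000000",
    "template_name": "Appointment reminder",
    "template_folder_id": "f1e2d3c4-0000-0000-0000-000000000000",
    "template_folder": "Reminders",
    "created_by_id": "0a1b2c3d-0000-0000-0000-000000000000",
    "created_by": "Example User",
    "last_used": "2024-08-22",
    "template_type": "sms",
    "status": "delivered",
}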