Commit
Merge pull request #1273 from GSA/main
8/15/2024 Production Deploy
ccostino authored Aug 15, 2024
2 parents 2217010 + 714f6f1 commit 7f71249
Showing 29 changed files with 548 additions and 667 deletions.
8 changes: 4 additions & 4 deletions .ds.baseline
@@ -209,7 +209,7 @@
"filename": "tests/app/aws/test_s3.py",
"hashed_secret": "67a74306b06d0c01624fe0d0249a570f4d093747",
"is_verified": false,
"line_number": 24,
"line_number": 25,
"is_secret": false
}
],
@@ -305,7 +305,7 @@
"filename": "tests/app/service/test_rest.py",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 1274,
"line_number": 1275,
"is_secret": false
}
],
@@ -349,7 +349,7 @@
"filename": "tests/app/user/test_rest.py",
"hashed_secret": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33",
"is_verified": false,
"line_number": 962,
"line_number": 810,
"is_secret": false
}
],
@@ -384,5 +384,5 @@
}
]
},
"generated_at": "2024-07-10T20:12:22Z"
"generated_at": "2024-08-13T22:32:28Z"
}
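These .ds.baseline changes are routine detect-secrets bookkeeping: the flagged test fixtures simply moved to new line numbers within their files, and the generated_at timestamp was refreshed when the baseline was regenerated.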
2 changes: 1 addition & 1 deletion Makefile
@@ -54,7 +54,7 @@ run-celery: ## Run celery, TODO remove purge for staging/prod


.PHONY: dead-code
dead-code:
dead-code: ## Use 60 to look for suspected dead code
poetry run vulture ./app --min-confidence=100

.PHONY: run-celery-beat
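A note on the dead-code target: --min-confidence=100 restricts vulture to findings it is certain about. Per the new comment, dropping the value to 60 (vulture's default) widens the report to suspected dead code, at the cost of more false positives.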
53 changes: 51 additions & 2 deletions app/aws/s3.py
@@ -1,4 +1,5 @@
import re
import time

import botocore
from boto3 import Session
@@ -95,6 +96,32 @@ def get_s3_file(bucket_name, file_location, access_key, secret_key, region):
return s3_file.get()["Body"].read().decode("utf-8")


def download_from_s3(
bucket_name, s3_key, local_filename, access_key, secret_key, region
):
session = Session(
aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
region_name=region,
)
s3 = session.client("s3", config=AWS_CLIENT_CONFIG)
result = None
try:
result = s3.download_file(bucket_name, s3_key, local_filename)
current_app.logger.info(f"File downloaded successfully to {local_filename}")
except botocore.exceptions.NoCredentialsError as nce:
current_app.logger.error("Credentials not found")
raise Exception(nce)
except botocore.exceptions.PartialCredentialsError as pce:
current_app.logger.error("Incomplete credentials provided")
raise Exception(pce)
except Exception as e:
current_app.logger.error(f"An error occurred {e}")
text = f"EXCEPTION {e} local_filename {local_filename}"
raise Exception(text)
return result


def get_s3_object(bucket_name, file_location, access_key, secret_key, region):
session = Session(
aws_access_key_id=access_key,
@@ -145,8 +172,29 @@ def get_job_and_metadata_from_s3(service_id, job_id):


def get_job_from_s3(service_id, job_id):
obj = get_s3_object(*get_job_location(service_id, job_id))
return obj.get()["Body"].read().decode("utf-8")
retries = 0
max_retries = 5
backoff_factor = 1
while retries < max_retries:

try:
obj = get_s3_object(*get_job_location(service_id, job_id))
return obj.get()["Body"].read().decode("utf-8")
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] in [
"Throttling",
"RequestTimeout",
"SlowDown",
]:
retries += 1
sleep_time = backoff_factor * (2**retries) # Exponential backoff
time.sleep(sleep_time)
continue
except Exception as e:
current_app.logger.error(f"Failed to get object from bucket {e}")
raise

raise Exception("Failed to get object after 5 attempts")


def incr_jobs_cache_misses():
@@ -215,6 +263,7 @@ def get_phone_number_from_s3(service_id, job_id, job_row_number):
# So this is a little recycling mechanism to reduce the number of downloads.
job = JOBS.get(job_id)
if job is None:
current_app.logger.info(f"job {job_id} was not in the cache")
job = get_job_from_s3(service_id, job_id)
JOBS[job_id] = job
incr_jobs_cache_misses()
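The change to get_job_from_s3 swaps a single S3 read for a retry loop with exponential backoff on throttling-type errors. A minimal standalone sketch of the same pattern follows; get_with_backoff and fetch_object are hypothetical names, with fetch_object standing in for the S3 read, and unlike the committed code this sketch re-raises non-retryable client errors immediately:

    import time

    import botocore.exceptions

    RETRYABLE_CODES = ("Throttling", "RequestTimeout", "SlowDown")

    def get_with_backoff(fetch_object, max_retries=5, backoff_factor=1):
        """Call fetch_object, retrying with exponential backoff on throttling errors."""
        retries = 0
        while retries < max_retries:
            try:
                return fetch_object()
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] not in RETRYABLE_CODES:
                    raise  # not a transient error; give up immediately
                retries += 1
                time.sleep(backoff_factor * (2**retries))  # sleeps 2s, 4s, 8s, 16s, 32s
        raise Exception(f"Failed to get object after {max_retries} attempts")

With the defaults above, a persistently throttled read is attempted five times over roughly a minute before failing, matching the schedule in the committed loop.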
41 changes: 38 additions & 3 deletions app/commands.py
@@ -36,12 +36,14 @@
dao_get_organization_by_email_address,
dao_get_organization_by_id,
)
from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id
from app.dao.services_dao import (
dao_fetch_all_services_by_user,
dao_fetch_all_services_created_by_user,
dao_fetch_service_by_id,
dao_update_service,
delete_service_and_all_associated_db_objects,
get_services_by_partial_name,
)
from app.dao.templates_dao import dao_get_template_by_id
from app.dao.users_dao import (
@@ -590,14 +592,36 @@ def process_row_from_job(job_id, job_row_number):


@notify_command(name="download-csv-file-by-name")
@click.option("-f", "--csv_filename", required=True, help="csv file name")
@click.option("-f", "--csv_filename", required=True, help="S3 file location")
def download_csv_file_by_name(csv_filename):

# poetry run flask command download-csv-file-by-name -f <s3 file location>
# cf run-task notify-api-production --command "flask command download-csv-file-by-name -f <s3 location>"
bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"]
access_key = current_app.config["CSV_UPLOAD_BUCKET"]["access_key_id"]
secret = current_app.config["CSV_UPLOAD_BUCKET"]["secret_access_key"]
region = current_app.config["CSV_UPLOAD_BUCKET"]["region"]
print(s3.get_s3_file(bucket_name, csv_filename, access_key, secret, region))

s3.download_from_s3(
bucket_name, csv_filename, "download.csv", access_key, secret, region
)


@notify_command(name="dump-sms-senders")
@click.argument("service_name")
def dump_sms_senders(service_name):

# poetry run flask command dump-sms-senders MyServiceName
# cf run-task notify-api-production --command "flask command dump-sms-senders <MyServiceName>"
services = get_services_by_partial_name(service_name)
if len(services) > 1:
raise ValueError(
f"Please use a unique and complete service name instead of {service_name}"
)

senders = dao_get_sms_senders_by_service_id(services[0].id)
for sender in senders:
# Not PII, okay to put in logs
click.echo(sender.serialize())


@notify_command(name="populate-annual-billing-with-the-previous-years-allowance")
@@ -639,6 +663,17 @@ def populate_annual_billing_with_the_previous_years_allowance(year):
)


@notify_command(name="dump-user-info")
@click.argument("user_email_address")
def dump_user_info(user_email_address):
user = get_user_by_email(user_email_address)
content = user.serialize()
with open("user_download.json", "wb") as f:
f.write(json.dumps(content).encode("utf8"))
f.close()
print("Successfully downloaded user info to user_download.json")


@notify_command(name="populate-annual-billing-with-defaults")
@click.option(
"-y",
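For comparison, the file write in dump-user-info can be expressed with a text-mode handle and json.dump, which avoids the manual encode and the redundant close inside the with block. A sketch under the assumption that user.serialize() returns a JSON-serializable dict (write_user_info is a hypothetical name):

    import json

    def write_user_info(user, path="user_download.json"):
        # user.serialize() is assumed to return a plain dict of user fields
        with open(path, "w", encoding="utf8") as f:
            json.dump(user.serialize(), f)  # the with block closes the file for us
        print(f"Successfully downloaded user info to {path}")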
14 changes: 13 additions & 1 deletion app/config.py
@@ -1,5 +1,5 @@
import json
from datetime import timedelta
from datetime import datetime, timedelta
from os import getenv, path

from celery.schedules import crontab
@@ -165,6 +165,8 @@ class Config(object):
# we only need real email in Live environment (production)
DVLA_EMAIL_ADDRESSES = json.loads(getenv("DVLA_EMAIL_ADDRESSES", "[]"))

current_minute = (datetime.now().minute + 1) % 60

CELERY = {
"broker_url": REDIS_URL,
"broker_transport_options": {
@@ -254,6 +256,16 @@ class Config(object):
"schedule": crontab(minute="*/30"),
"options": {"queue": QueueNames.PERIODIC},
},
"regenerate-job-cache-on-startup": {
"task": "regenerate-job-cache",
"schedule": crontab(
minute=current_minute
), # Runs once at the next minute
"options": {
"queue": QueueNames.PERIODIC,
"expires": 60,
}, # Ensure it doesn't run if missed
},
"cleanup-unfinished-jobs": {
"task": "cleanup-unfinished-jobs",
"schedule": crontab(hour=4, minute=5),
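The regenerate-job-cache-on-startup entry is a run-once-at-boot device: current_minute is computed when config.py is imported, so the schedule targets the next wall-clock minute after startup, and expires: 60 discards the task if that window is missed. One caveat worth noting: a crontab with only minute set matches that minute of every hour, so the task also recurs hourly rather than strictly once. A minimal illustration of the schedule construction (the queue name here is a stand-in for QueueNames.PERIODIC):

    from datetime import datetime

    from celery.schedules import crontab

    # Evaluated once at import time: the next wall-clock minute after startup.
    current_minute = (datetime.now().minute + 1) % 60

    beat_entry = {
        "regenerate-job-cache-on-startup": {
            "task": "regenerate-job-cache",
            "schedule": crontab(minute=current_minute),  # fires at that minute of each hour
            "options": {
                "queue": "periodic-tasks",  # stand-in for QueueNames.PERIODIC
                "expires": 60,  # a run that misses its minute is dropped, not queued
            },
        }
    }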
2 changes: 1 addition & 1 deletion app/dao/users_dao.py
@@ -253,6 +253,6 @@ def dao_report_users():
inner join user_to_service on users.id=user_to_service.user_id
inner join services on services.id=user_to_service.service_id
where services.name not like '_archived%'
order by services.name asc, users.name asc
order by users.name asc
"""
return db.session.execute(text(sql))
16 changes: 16 additions & 0 deletions app/delivery/send_to_providers.py
@@ -10,6 +10,7 @@
from app.dao.email_branding_dao import dao_get_email_branding_by_id
from app.dao.notifications_dao import dao_update_notification
from app.dao.provider_details_dao import get_provider_details_by_notification_type
from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id
from app.enums import BrandType, KeyType, NotificationStatus, NotificationType
from app.exceptions import NotificationTechnicalFailureException
from app.serialised_models import SerialisedService, SerialisedTemplate
@@ -101,6 +102,13 @@ def send_sms_to_provider(notification):
raise Exception(
f"The recipient for (Service ID: {si}; Job ID: {ji}; Job Row Number {jrn} was not found."
)

sender_numbers = get_sender_numbers(notification)
if notification.reply_to_text not in sender_numbers:
raise ValueError(
f"{notification.reply_to_text} not in {sender_numbers} #notify-admin-1701"
)

send_sms_kwargs = {
"to": recipient,
"content": str(template),
@@ -130,6 +138,14 @@ def send_sms_to_provider(notification):
return message_id


def get_sender_numbers(notification):
possible_senders = dao_get_sms_senders_by_service_id(notification.service_id)
sender_numbers = []
for possible_sender in possible_senders:
sender_numbers.append(possible_sender.sms_sender)
return sender_numbers


def send_email_to_provider(notification):
# Someone needs an email, possibly new registration
recipient = redis_store.get(f"email-address-{notification.id}")
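The new guard cross-checks notification.reply_to_text against the SMS senders configured for the service before dispatch. Condensed, the check amounts to the following sketch (assert_known_sender is a hypothetical name; the notification and sender objects are stand-ins for the app's own):

    def assert_known_sender(notification, possible_senders):
        # possible_senders would come from dao_get_sms_senders_by_service_id(notification.service_id)
        sender_numbers = [sender.sms_sender for sender in possible_senders]
        if notification.reply_to_text not in sender_numbers:
            raise ValueError(
                f"{notification.reply_to_text} not in {sender_numbers} #notify-admin-1701"
            )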
5 changes: 0 additions & 5 deletions app/errors.py
@@ -10,11 +10,6 @@
from notifications_utils.recipients import InvalidEmailError


class VirusScanError(Exception):
def __init__(self, message):
super().__init__(message)


class InvalidRequest(Exception):
code = None
fields = []
3 changes: 0 additions & 3 deletions app/notifications/notifications_ses_callback.py
@@ -1,5 +1,3 @@
from datetime import timedelta

from flask import Blueprint, jsonify, request

from app.celery.process_ses_receipts_tasks import process_ses_results
@@ -8,7 +6,6 @@
from app.notifications.sns_handlers import sns_notification_handler

ses_callback_blueprint = Blueprint("notifications_ses_callback", __name__)
DEFAULT_MAX_AGE = timedelta(days=10000)


# 400 counts as a permanent failure so SNS will not retry.
3 changes: 0 additions & 3 deletions app/notifications/sns_handlers.py
@@ -1,5 +1,4 @@
import enum
from datetime import timedelta
from json import decoder

import requests
@@ -8,8 +7,6 @@
from app.errors import InvalidRequest
from app.notifications.sns_cert_validator import validate_sns_cert

DEFAULT_MAX_AGE = timedelta(days=10000)


class SNSMessageType(enum.Enum):
SubscriptionConfirmation = "SubscriptionConfirmation"
10 changes: 0 additions & 10 deletions app/service/rest.py
@@ -511,11 +511,6 @@ def get_all_notifications_for_service(service_id):
)
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
s = notification.service_id
j = notification.job_id
current_app.logger.warning(
f"No personalisation found for s3 file location service: service-{s}-notify/{j}.csv"
)
notification.personalisation = ""
else:
raise ex
@@ -531,11 +526,6 @@ def get_all_notifications_for_service(service_id):
notification.normalised_to = recipient
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
s = notification.service_id
j = notification.job_id
current_app.logger.warning(
f"No phone number found for s3 file location service: service-{s}-notify/{j}.csv"
)
notification.to = ""
notification.normalised_to = ""
else:
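The two deletions above drop per-notification warning logs for S3 objects that no longer exist (NoSuchKey); the fallback behavior of blanking the personalisation and recipient fields is unchanged.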
(Diffs for the remaining changed files are not shown.)
