Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Commit

Permalink
[Issue HHS#2079] Add GET /opportunity/:opportunityId/versions (#82)
Browse files Browse the repository at this point in the history
Fixes HHS#2079

Adds an endpoint to fetch opportunity versions
* Only includes some of the filters that we'll need to include

Adds a lot of utilities for setting up opportunities for local
development and testing with versions

https://docs.google.com/document/d/18oWmjQJKunMKy6cfnfUnyGEX33uu5UDPnRktD_wRFlE/edit#heading=h.4xmkylqq7mnx
provides a lot of context for how opportunity versioning works in the
existing system - which is to say it's very, very complex. I'm sure we'll
alter that behavior as we go forward, for now I kept the endpoint simple
in terms of its filters, just removing obvious cases (e.g. the summary
record is marked as deleted).

I'm also not certain what we want to do with naming. I really don't like
my current approach of "forecast" and "non-forecast", but we can address
that later as well.

--
Beyond understanding what versioning logic we needed to support, the
most complex component by far is setting up the data in the first place
in an easy manner. I originally tried some ideas using the factory
classes themselves, but due to the order of operations necessary to do
that, that wasn't possible (in short, to create prior history records, I
first need the current record, but that doesn't exist until after
everything else in a factory runs). So, I went with a builder process
that wraps the factories and sets up some reasonable scenarios for you.
It's clunky, but it seems to work well enough.

---------

Co-authored-by: nava-platform-bot <[email protected]>
  • Loading branch information
2 people authored and acouch committed Sep 18, 2024
1 parent e8b3f44 commit 76d4523
Show file tree
Hide file tree
Showing 12 changed files with 656 additions and 52 deletions.
16 changes: 15 additions & 1 deletion api/src/api/opportunities_v1/opportunity_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from src.api.opportunities_v1.opportunity_blueprint import opportunity_blueprint
from src.auth.api_key_auth import api_key_auth
from src.logging.flask_logger import add_extra_data_to_current_request_logs
from src.services.opportunities_v1.get_opportunity import get_opportunity
from src.services.opportunities_v1.get_opportunity import get_opportunity, get_opportunity_versions
from src.services.opportunities_v1.search_opportunities import search_opportunities
from src.util.dict_util import flatten_dict

Expand Down Expand Up @@ -64,3 +64,17 @@ def opportunity_get(db_session: db.Session, opportunity_id: int) -> response.Api
opportunity = get_opportunity(db_session, opportunity_id)

return response.ApiResponse(message="Success", data=opportunity)


@opportunity_blueprint.get("/opportunities/<int:opportunity_id>/versions")
@opportunity_blueprint.output(opportunity_schemas.OpportunityVersionsGetResponseV1Schema)
@opportunity_blueprint.auth_required(api_key_auth)
@opportunity_blueprint.doc(description=SHARED_ALPHA_DESCRIPTION)
@flask_db.with_db_session()
def opportunity_versions_get(db_session: db.Session, opportunity_id: int) -> response.ApiResponse:
    """Fetch the version history of a single non-draft opportunity."""
    add_extra_data_to_current_request_logs({"opportunity.opportunity_id": opportunity_id})
    logger.info("GET /v1/opportunities/:opportunity_id/versions")

    # All DB work happens inside a single transaction scope
    with db_session.begin():
        versions = get_opportunity_versions(db_session, opportunity_id)

    return response.ApiResponse(message="Success", data=versions)
14 changes: 14 additions & 0 deletions api/src/api/opportunities_v1/opportunity_schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,10 @@ class OpportunitySummaryV1Schema(Schema):
}
)

version_number = fields.Integer(
metadata={"description": "The version number of the opportunity summary", "example": 1}
)

funding_instruments = fields.List(fields.Enum(FundingInstrument))
funding_categories = fields.List(fields.Enum(FundingCategory))
applicant_types = fields.List(fields.Enum(ApplicantType))
Expand Down Expand Up @@ -303,5 +307,15 @@ class OpportunityGetResponseV1Schema(AbstractResponseSchema):
data = fields.Nested(OpportunityV1Schema())


class OpportunityVersionV1Schema(Schema):
    """An opportunity together with the public versions of its summary records."""

    opportunity = fields.Nested(OpportunityV1Schema())
    # Summary versions split by summary type; the "forecast"/"non-forecast"
    # naming may be revisited later (see commit discussion).
    forecasts = fields.Nested(OpportunitySummaryV1Schema(many=True))
    non_forecasts = fields.Nested(OpportunitySummaryV1Schema(many=True))


class OpportunityVersionsGetResponseV1Schema(AbstractResponseSchema):
    """Response schema for GET /opportunities/:opportunity_id/versions."""

    data = fields.Nested(OpportunityVersionV1Schema())


class OpportunitySearchResponseV1Schema(AbstractResponseSchema, PaginationMixinSchema):
    """Paginated response schema for the opportunity search endpoint."""

    data = fields.Nested(OpportunityV1Schema(many=True))
35 changes: 35 additions & 0 deletions api/src/db/models/opportunity_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,18 @@ def opportunity_status(self) -> OpportunityStatus | None:

return self.current_opportunity_summary.opportunity_status

@property
def all_forecasts(self) -> list["OpportunitySummary"]:
    """Every forecast summary record attached to the opportunity.

    Note this will include historical and deleted records.
    """
    forecast_summaries = []
    for summary in self.all_opportunity_summaries:
        if summary.is_forecast:
            forecast_summaries.append(summary)
    return forecast_summaries

@property
def all_non_forecasts(self) -> list["OpportunitySummary"]:
    # Utility method for getting all non-forecast summary records attached to the opportunity
    # Note this will include historical and deleted records.
    return [summary for summary in self.all_opportunity_summaries if not summary.is_forecast]


class OpportunitySummary(ApiSchemaTable, TimestampMixin):
__tablename__ = "opportunity_summary"
Expand Down Expand Up @@ -191,6 +203,29 @@ class OpportunitySummary(ApiSchemaTable, TimestampMixin):
creator=lambda obj: LinkOpportunitySummaryApplicantType(applicant_type=obj),
)

def for_json(self) -> dict:
    """Build a JSON-serializable dict, including the association-proxy fields."""
    data = super().for_json()

    # The proxy values aren't mapped columns, so the base serialization
    # skips them - add them explicitly here.
    data.update(
        funding_instruments=self.funding_instruments,
        funding_categories=self.funding_categories,
        applicant_types=self.applicant_types,
    )

    return data

def can_summary_be_public(self, current_date: date) -> bool:
    """
    Utility method to check whether a summary object can be displayed publicly.

    A summary can be public only if it is not deleted and it has a post date
    that is on or before the given current date.
    """
    if self.is_deleted:
        return False

    if self.post_date is None or self.post_date > current_date:
        return False

    return True


class OpportunityAssistanceListing(ApiSchemaTable, TimestampMixin):
__tablename__ = "opportunity_assistance_listing"
Expand Down
97 changes: 86 additions & 11 deletions api/src/services/opportunities_v1/get_opportunity.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,99 @@
from datetime import date

from sqlalchemy import select
from sqlalchemy.orm import noload, selectinload

import src.adapters.db as db
import src.util.datetime_util as datetime_util
from src.api.route_utils import raise_flask_error
from src.db.models.opportunity_models import Opportunity
from src.db.models.opportunity_models import Opportunity, OpportunitySummary


def get_opportunity(db_session: db.Session, opportunity_id: int) -> Opportunity:
opportunity: Opportunity | None = (
db_session.execute(
select(Opportunity)
.where(Opportunity.opportunity_id == opportunity_id)
.where(Opportunity.is_draft.is_(False))
.options(selectinload("*"), noload(Opportunity.all_opportunity_summaries))
)
.unique()
.scalar_one_or_none()
def _fetch_opportunity(
    db_session: db.Session, opportunity_id: int, load_all_opportunity_summaries: bool
) -> Opportunity:
    """Fetch a non-draft opportunity by ID, raising a 404 error if it does not exist.

    When load_all_opportunity_summaries is False, the full summary history
    relationship is explicitly not loaded.
    """
    query = (
        select(Opportunity)
        .where(Opportunity.opportunity_id == opportunity_id)
        .where(Opportunity.is_draft.is_(False))
        .options(selectinload("*"))
    )

    # Only pull in the (potentially large) summary history when asked to.
    if not load_all_opportunity_summaries:
        query = query.options(noload(Opportunity.all_opportunity_summaries))

    fetched_opportunity = db_session.execute(query).unique().scalar_one_or_none()

    if fetched_opportunity is None:
        raise_flask_error(404, message=f"Could not find Opportunity with ID {opportunity_id}")

    return fetched_opportunity


def get_opportunity(db_session: db.Session, opportunity_id: int) -> Opportunity:
    """Fetch a single non-draft opportunity without its summary history."""
    opportunity = _fetch_opportunity(
        db_session, opportunity_id, load_all_opportunity_summaries=False
    )
    return opportunity


def get_opportunity_versions(db_session: db.Session, opportunity_id: int) -> dict:
    """Fetch an opportunity and the publicly-displayable versions of its summaries.

    Returns a dict with the opportunity itself plus its filtered forecast and
    non-forecast summary versions.
    """
    opportunity = _fetch_opportunity(
        db_session, opportunity_id, load_all_opportunity_summaries=True
    )

    # Visibility depends on post dates, evaluated against today's US-Eastern date.
    current_date = datetime_util.get_now_us_eastern_date()

    return {
        "opportunity": opportunity,
        "forecasts": _filter_summaries(opportunity.all_forecasts, current_date),
        "non_forecasts": _filter_summaries(opportunity.all_non_forecasts, current_date),
    }


def _filter_summaries(
summaries: list[OpportunitySummary], current_date: date
) -> list[OpportunitySummary]:
# Find the most recent summary
most_recent_summary: OpportunitySummary | None = None
for summary in summaries:
if summary.revision_number is None:
most_recent_summary = summary
summaries.remove(summary)
break

# If there is no most recent summary, even if there is any history records
# we have to filter all of the summaries. Effectively this would mean the most recent
# was deleted, and we never show deleted summaries (or anything that comes before them).
if most_recent_summary is None:
return []

# If the most recent summary isn't able to be public itself, we can't display any history
# for this type of summary object.
if not most_recent_summary.can_summary_be_public(current_date):
return []

summaries_to_keep = [most_recent_summary]

# We want to process these in reverse order (most recent first)
# as soon as we hit one that we need to filter, we stop adding records.
#
# For example, if a summary is marked as deleted, we won't add that, and
# we also filter out all summaries that came before it (by just breaking the loop)
summaries = sorted(summaries, key=lambda s: s.version_number, reverse=True) # type: ignore

for summary in summaries:
if summary.is_deleted:
break

if summary.post_date is None:
break

# If a historical record was updated (or initially created) before
# its own post date (ie. would have been visible when created) regardless
# of what the current date may be
# TODO - leaving this out of the implementation for the moment
# as we need to investigate why this is being done and if there is a better
# way as this ends up filtering out records we don't want removed
# if summary.updated_at.date() < summary.post_date:
# break

summaries_to_keep.append(summary)

return summaries_to_keep
22 changes: 8 additions & 14 deletions api/src/task/opportunities/set_current_opportunities_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,10 +160,16 @@ def determine_current_and_status(
# Note that if it cannot, we do not want to use an earlier revision
# even if that revision doesn't have the same issue. Only the latest
# revisions of forecast/non-forecast records are ever an option
if not self.can_summary_be_public(latest_forecasted_summary):
if (
latest_forecasted_summary is not None
and not latest_forecasted_summary.can_summary_be_public(self.current_date)
):
latest_forecasted_summary = None

if not self.can_summary_be_public(latest_non_forecasted_summary):
if (
latest_non_forecasted_summary is not None
and not latest_non_forecasted_summary.can_summary_be_public(self.current_date)
):
latest_non_forecasted_summary = None

if latest_forecasted_summary is None and latest_non_forecasted_summary is None:
Expand All @@ -180,18 +186,6 @@ def determine_current_and_status(
cast(OpportunitySummary, latest_forecasted_summary)
)

def can_summary_be_public(self, summary: OpportunitySummary | None) -> bool:
if summary is None:
return False

if summary.is_deleted:
return False

if summary.post_date is None or summary.post_date > self.current_date:
return False

return True

def determine_opportunity_status(
self, opportunity_summary: OpportunitySummary
) -> OpportunityStatus:
Expand Down
90 changes: 80 additions & 10 deletions api/tests/lib/seed_local_db.py
Original file line number Diff line number Diff line change
@@ -1,38 +1,102 @@
import logging
import random

import click
from sqlalchemy import func

import src.adapters.db as db
import src.logging
import src.util.datetime_util as datetime_util
import tests.src.db.models.factories as factories
from src.adapters.db import PostgresDBClient
from src.db.models.opportunity_models import Opportunity
from src.db.models.transfer.topportunity_models import TransferTopportunity
from src.util.local import error_if_not_local

logger = logging.getLogger(__name__)


def _build_opportunities(db_session: db.Session, iterations: int) -> None:
def _add_history(
    opps: list[Opportunity],
    add_forecast: bool = False,
    add_non_forecast: bool = False,
    add_forecast_hist: bool = False,
    add_non_forecast_hist: bool = False,
    is_history_deleted: bool = False,
):
    """Attach current and/or historical summary records to each given opportunity.

    Each history flag adds 1-3 randomly-generated historical versions of the
    corresponding summary type; is_history_deleted marks those history records
    as deleted.
    """
    for opportunity in opps:
        history_builder = factories.OpportunitySummaryHistoryBuilder(opportunity=opportunity)

        if add_forecast:
            history_builder.add_forecast()
        if add_non_forecast:
            history_builder.add_non_forecast()

        if add_forecast_hist:
            history_count = random.randint(1, 3)
            for _ in range(history_count):
                history_builder.add_forecast_history(
                    is_deleted=is_history_deleted, modification_comments="Modified forecast"
                )

        if add_non_forecast_hist:
            history_count = random.randint(1, 3)
            for _ in range(history_count):
                history_builder.add_non_forecast_history(
                    is_deleted=is_history_deleted, modification_comments="Modified non-forecast"
                )

        history_builder.build()


def _build_opportunities(db_session: db.Session, iterations: int, include_history: bool) -> None:
# Just create a variety of opportunities for local testing
# we can eventually look into creating more specific scenarios
for i in range(iterations):
logger.info(f"Creating opportunity batch number {i}")
# Create a few opportunities in various scenarios
factories.OpportunityFactory.create_batch(size=5, is_forecasted_summary=True)
factories.OpportunityFactory.create_batch(size=5, is_posted_summary=True)
factories.OpportunityFactory.create_batch(size=5, is_closed_summary=True)
factories.OpportunityFactory.create_batch(size=5, is_archived_non_forecast_summary=True)
factories.OpportunityFactory.create_batch(size=5, is_archived_forecast_summary=True)
factories.OpportunityFactory.create_batch(size=5, no_current_summary=True)
forecasted_opps = factories.OpportunityFactory.create_batch(
size=5, is_forecasted_summary=True
)
posted_opps = factories.OpportunityFactory.create_batch(size=5, is_posted_summary=True)
closed_opps = factories.OpportunityFactory.create_batch(size=5, is_closed_summary=True)
archived_non_forecast_opps = factories.OpportunityFactory.create_batch(
size=5, is_archived_non_forecast_summary=True
)
archived_forecast_opps = factories.OpportunityFactory.create_batch(
size=5, is_archived_forecast_summary=True
)
no_current_summary_opps = factories.OpportunityFactory.create_batch(
size=5, no_current_summary=True
)

if include_history:
_add_history(forecasted_opps, add_forecast_hist=True)
_add_history(
posted_opps, add_non_forecast_hist=True, add_forecast=True, add_forecast_hist=True
)
_add_history(
closed_opps, add_non_forecast_hist=True, add_forecast=True, add_forecast_hist=True
)
_add_history(
archived_non_forecast_opps,
add_non_forecast_hist=True,
add_forecast=True,
add_forecast_hist=True,
)
_add_history(archived_forecast_opps, add_forecast_hist=True)
_add_history(no_current_summary_opps, is_history_deleted=True)

# generate a few opportunities with mostly null values
all_null_opportunities = factories.OpportunityFactory.create_batch(
size=5, all_fields_null=True
)
for all_null_opportunity in all_null_opportunities:
summary = factories.OpportunitySummaryFactory.create(
all_fields_null=True, opportunity=all_null_opportunity
# We set post_date to something so that running the set-current-opportunities logic
# won't get rid of it for having a null post date
all_fields_null=True,
opportunity=all_null_opportunity,
post_date=datetime_util.get_now_us_eastern_date(),
)
factories.CurrentOpportunitySummaryFactory.create(
opportunity=all_null_opportunity, opportunity_summary=summary
Expand All @@ -57,7 +121,13 @@ def _build_opportunities(db_session: db.Session, iterations: int) -> None:
default=1,
help="Number of sets of opportunities to create, note that several are created per iteration",
)
def seed_local_db(iterations: int) -> None:
@click.option(
"--include-history",
is_flag=True,
default=False,
help="Whether to add historical records to the opportunities generated - much slower as this requires a lot more data to be generated",
)
def seed_local_db(iterations: int, include_history: bool) -> None:
with src.logging.init("seed_local_db"):
logger.info("Running seed script for local DB")
error_if_not_local()
Expand All @@ -67,7 +137,7 @@ def seed_local_db(iterations: int) -> None:
with db_client.get_session() as db_session:
factories._db_session = db_session

_build_opportunities(db_session, iterations)
_build_opportunities(db_session, iterations, include_history)
# Need to commit to force any updates made
# after factories created objects
db_session.commit()
Loading

0 comments on commit 76d4523

Please sign in to comment.