diff --git a/api/bin/create_erds.py b/api/bin/create_erds.py index 11e12b1b7..117c2f822 100755 --- a/api/bin/create_erds.py +++ b/api/bin/create_erds.py @@ -12,7 +12,7 @@ import src.db.models.staging.opportunity as staging_opportunity_models import src.db.models.staging.synopsis as staging_synopsis_models import src.logging -from src.db.models import opportunity_models +from src.db.models import agency_models, opportunity_models from src.db.models.transfer import topportunity_models logger = logging.getLogger(__name__) @@ -23,7 +23,10 @@ ERD_FOLDER = pathlib.Path(__file__).parent.resolve() # If we want to generate separate files for more specific groups, we can set that up here -API_MODULES = (opportunity_models,) +API_MODULES = ( + opportunity_models, + agency_models, +) STAGING_TABLE_MODULES = ( staging_opportunity_models, staging_forecast_models, diff --git a/api/src/constants/lookup_constants.py b/api/src/constants/lookup_constants.py index a33135705..f686f95f2 100644 --- a/api/src/constants/lookup_constants.py +++ b/api/src/constants/lookup_constants.py @@ -105,3 +105,14 @@ class FundingInstrument(StrEnum): GRANT = "grant" # G PROCUREMENT_CONTRACT = "procurement_contract" # PC OTHER = "other" # O + + +class AgencyDownloadFileType(StrEnum): + XML = "xml" + PDF = "pdf" + + +class AgencySubmissionNotificationSetting(StrEnum): + NEVER = "never" + FIRST_APPLICATION_ONLY = "first_application_only" + ALWAYS = "always" diff --git a/api/src/db/foreign/__init__.py b/api/src/db/foreign/__init__.py index 9d55582c3..7406213dc 100644 --- a/api/src/db/foreign/__init__.py +++ b/api/src/db/foreign/__init__.py @@ -2,8 +2,8 @@ # SQLAlchemy models for foreign tables. # -from . import forecast, foreignbase, opportunity, synopsis +from . import forecast, foreignbase, opportunity, synopsis, tgroups metadata = foreignbase.metadata -__all__ = ["metadata", "forecast", "opportunity", "synopsis"] +__all__ = ["metadata", "forecast", "opportunity", "synopsis", "tgroups"] diff --git a/api/src/db/foreign/tgroups.py b/api/src/db/foreign/tgroups.py new file mode 100644 index 000000000..029f90724 --- /dev/null +++ b/api/src/db/foreign/tgroups.py @@ -0,0 +1,14 @@ +# +# SQLAlchemy models for foreign tables. +# +# The order of the columns must match the remote Oracle database. The names are not required to +# match by oracle_fdw, but we are matching them for maintainability. +# + +from src.db.legacy_mixin import tgroups_mixin + +from . import foreignbase + + +class Tgroups(foreignbase.ForeignBase, tgroups_mixin.TGroupsMixin): + __tablename__ = "tgroups" diff --git a/api/src/db/legacy_mixin/tgroups_mixin.py b/api/src/db/legacy_mixin/tgroups_mixin.py new file mode 100644 index 000000000..026cd4029 --- /dev/null +++ b/api/src/db/legacy_mixin/tgroups_mixin.py @@ -0,0 +1,20 @@ +# +# SQLAlchemy models for foreign tables. +# +# The order of the columns must match the remote Oracle database. The names are not required to +# match by oracle_fdw, but we are matching them for maintainability. 
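+# In the legacy system this table stores agency configuration as key/value rows, with keyfields
+# that look like "Agency-<agency code>-<field name>" (see create_tgroups_agency in the test factories).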
+# + +import datetime + +from sqlalchemy.orm import Mapped, declarative_mixin, mapped_column + + +@declarative_mixin +class TGroupsMixin: + keyfield: Mapped[str] = mapped_column(primary_key=True) + value: Mapped[str | None] + created_date: Mapped[datetime.datetime | None] + last_upd_date: Mapped[datetime.datetime | None] + creator_id: Mapped[str | None] + last_upd_id: Mapped[str | None] diff --git a/api/src/db/migrations/versions/2024_07_08_add_agency_related_tables.py b/api/src/db/migrations/versions/2024_07_08_add_agency_related_tables.py new file mode 100644 index 000000000..404c5ced1 --- /dev/null +++ b/api/src/db/migrations/versions/2024_07_08_add_agency_related_tables.py @@ -0,0 +1,223 @@ +"""Add agency related tables + +Revision ID: 4f7acbb61548 +Revises: 61c58638e56b +Create Date: 2024-07-08 12:43:45.240782 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "4f7acbb61548" +down_revision = "61c58638e56b" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "agency_contact_info", + sa.Column("agency_contact_info_id", sa.BigInteger(), nullable=False), + sa.Column("contact_name", sa.Text(), nullable=False), + sa.Column("address_line_1", sa.Text(), nullable=False), + sa.Column("address_line_2", sa.Text(), nullable=True), + sa.Column("city", sa.Text(), nullable=False), + sa.Column("state", sa.Text(), nullable=False), + sa.Column("zip_code", sa.Text(), nullable=False), + sa.Column("phone_number", sa.Text(), nullable=False), + sa.Column("primary_email", sa.Text(), nullable=False), + sa.Column("secondary_email", sa.Text(), nullable=True), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("agency_contact_info_id", name=op.f("agency_contact_info_pkey")), + schema="api", + ) + op.create_table( + "lk_agency_download_file_type", + sa.Column("agency_download_file_type_id", sa.Integer(), nullable=False), + sa.Column("description", sa.Text(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint( + "agency_download_file_type_id", name=op.f("lk_agency_download_file_type_pkey") + ), + schema="api", + ) + op.create_table( + "lk_agency_submission_notification_setting", + sa.Column("agency_submission_notification_setting_id", sa.Integer(), nullable=False), + sa.Column("description", sa.Text(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint( + "agency_submission_notification_setting_id", + name=op.f("lk_agency_submission_notification_setting_pkey"), + ), + schema="api", + ) + op.create_table( + "agency", + sa.Column("agency_id", sa.BigInteger(), nullable=False), + sa.Column("agency_name", sa.Text(), nullable=False), + sa.Column("agency_code", sa.Text(), nullable=False), + sa.Column("sub_agency_code", sa.Text(), nullable=True), + sa.Column("assistance_listing_number", sa.Text(), nullable=False), + 
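+        # Integer FK into lk_agency_submission_notification_setting; the constraint is defined below in this create_table call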
sa.Column("agency_submission_notification_setting_id", sa.Integer(), nullable=False), + sa.Column("agency_contact_info_id", sa.BigInteger(), nullable=True), + sa.Column("is_test_agency", sa.Boolean(), nullable=False), + sa.Column("ldap_group", sa.Text(), nullable=False), + sa.Column("description", sa.Text(), nullable=False), + sa.Column("label", sa.Text(), nullable=False), + sa.Column("is_multilevel_agency", sa.Boolean(), nullable=False), + sa.Column("is_multiproject", sa.Boolean(), nullable=False), + sa.Column("has_system_to_system_certificate", sa.Boolean(), nullable=False), + sa.Column("can_view_packages_in_grace_period", sa.Boolean(), nullable=False), + sa.Column("is_image_workspace_enabled", sa.Boolean(), nullable=False), + sa.Column("is_validation_workspace_enabled", sa.Boolean(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["agency_contact_info_id"], + ["api.agency_contact_info.agency_contact_info_id"], + name=op.f("agency_agency_contact_info_id_agency_contact_info_fkey"), + ), + sa.ForeignKeyConstraint( + ["agency_submission_notification_setting_id"], + [ + "api.lk_agency_submission_notification_setting.agency_submission_notification_setting_id" + ], + name=op.f( + "agency_agency_submission_notification_setting_id_lk_agency_submission_notification_setting_fkey" + ), + ), + sa.PrimaryKeyConstraint("agency_id", name=op.f("agency_pkey")), + schema="api", + ) + op.create_index( + op.f("agency_agency_code_idx"), "agency", ["agency_code"], unique=True, schema="api" + ) + op.create_table( + "link_agency_download_file_type", + sa.Column("agency_id", sa.BigInteger(), nullable=False), + sa.Column("agency_download_file_type_id", sa.Integer(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["agency_download_file_type_id"], + ["api.lk_agency_download_file_type.agency_download_file_type_id"], + name=op.f( + "link_agency_download_file_type_agency_download_file_type_id_lk_agency_download_file_type_fkey" + ), + ), + sa.ForeignKeyConstraint( + ["agency_id"], + ["api.agency.agency_id"], + name=op.f("link_agency_download_file_type_agency_id_agency_fkey"), + ), + sa.PrimaryKeyConstraint( + "agency_id", + "agency_download_file_type_id", + name=op.f("link_agency_download_file_type_pkey"), + ), + schema="api", + ) + op.create_table( + "tgroups", + sa.Column("keyfield", sa.Text(), nullable=False), + sa.Column("value", sa.Text(), nullable=True), + sa.Column("created_date", sa.TIMESTAMP(timezone=True), nullable=True), + sa.Column("last_upd_date", sa.TIMESTAMP(timezone=True), nullable=True), + sa.Column("creator_id", sa.Text(), nullable=True), + sa.Column("last_upd_id", sa.Text(), nullable=True), + sa.Column("is_deleted", sa.Boolean(), nullable=False), + sa.Column("transformed_at", sa.TIMESTAMP(timezone=True), nullable=True), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column("deleted_at", sa.TIMESTAMP(timezone=True), nullable=True), + 
sa.Column("transformation_notes", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("keyfield", name=op.f("tgroups_pkey")), + schema="staging", + ) + op.create_index( + op.f("tgroups_transformed_at_idx"), + "tgroups", + ["transformed_at"], + unique=False, + schema="staging", + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("tgroups_transformed_at_idx"), table_name="tgroups", schema="staging") + op.drop_table("tgroups", schema="staging") + op.drop_table("link_agency_download_file_type", schema="api") + op.drop_index(op.f("agency_agency_code_idx"), table_name="agency", schema="api") + op.drop_table("agency", schema="api") + op.drop_table("lk_agency_submission_notification_setting", schema="api") + op.drop_table("lk_agency_download_file_type", schema="api") + op.drop_table("agency_contact_info", schema="api") + # ### end Alembic commands ### diff --git a/api/src/db/models/__init__.py b/api/src/db/models/__init__.py index 2a82158ff..5ed62061e 100644 --- a/api/src/db/models/__init__.py +++ b/api/src/db/models/__init__.py @@ -1,6 +1,6 @@ import logging -from . import base, lookup_models, opportunity_models +from . import agency_models, base, lookup_models, opportunity_models from .transfer import topportunity_models logger = logging.getLogger(__name__) @@ -9,4 +9,10 @@ # This is used by tests to create the test database. metadata = base.metadata -__all__ = ["metadata", "opportunity_models", "lookup_models", "topportunity_models"] +__all__ = [ + "metadata", + "opportunity_models", + "lookup_models", + "topportunity_models", + "agency_models", +] diff --git a/api/src/db/models/agency_models.py b/api/src/db/models/agency_models.py new file mode 100644 index 000000000..6759b9fb2 --- /dev/null +++ b/api/src/db/models/agency_models.py @@ -0,0 +1,109 @@ +from sqlalchemy import BigInteger, ForeignKey +from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from src.adapters.db.type_decorators.postgres_type_decorators import LookupColumn +from src.constants.lookup_constants import ( + AgencyDownloadFileType, + AgencySubmissionNotificationSetting, +) +from src.db.models.base import ApiSchemaTable, TimestampMixin +from src.db.models.lookup_models import ( + LkAgencyDownloadFileType, + LkAgencySubmissionNotificationSetting, +) + + +class AgencyContactInfo(ApiSchemaTable, TimestampMixin): + __tablename__ = "agency_contact_info" + + agency_contact_info_id: Mapped[int] = mapped_column(BigInteger, primary_key=True) + + contact_name: Mapped[str] + + address_line_1: Mapped[str] + address_line_2: Mapped[str | None] + city: Mapped[str] + + # Note that while it would make sense to do an enum for state + # it doesn't look to be limited to US states and includes some foreign states + # as well as numbers(?) 
in the existing system + state: Mapped[str] + zip_code: Mapped[str] + phone_number: Mapped[str] + primary_email: Mapped[str] + secondary_email: Mapped[str | None] + + +class Agency(ApiSchemaTable, TimestampMixin): + __tablename__ = "agency" + + agency_id: Mapped[int] = mapped_column(BigInteger, primary_key=True) + + agency_name: Mapped[str] + + agency_code: Mapped[str] = mapped_column(index=True, unique=True) + sub_agency_code: Mapped[str | None] + + assistance_listing_number: Mapped[str] + + agency_submission_notification_setting: Mapped[ + AgencySubmissionNotificationSetting + ] = mapped_column( + "agency_submission_notification_setting_id", + LookupColumn(LkAgencySubmissionNotificationSetting), + ForeignKey(LkAgencySubmissionNotificationSetting.agency_submission_notification_setting_id), + ) + + agency_contact_info_id: Mapped[BigInteger | None] = mapped_column( + BigInteger, ForeignKey(AgencyContactInfo.agency_contact_info_id) + ) + agency_contact_info: Mapped[AgencyContactInfo | None] = relationship(AgencyContactInfo) + + # There are several agencies in the data we're ingesting that + # are clearly meant for testing, I'm not certain we want to flag + # them in this way, but adding it for now - can revisit later + # From the legacy system configurations, this should be the following agencies + # GDIT,IVV,IVPDF,0001,FGLT,NGMS,NGMS-Sub1,SECSCAN + # including any subagencies + is_test_agency: Mapped[bool] + + # These values come from the legacy system, but their exact usage isn't entirely + # clear at this point in time. + ldap_group: Mapped[str] + description: Mapped[str] + label: Mapped[str] + is_multilevel_agency: Mapped[bool] + is_multiproject: Mapped[bool] + has_system_to_system_certificate: Mapped[bool] + can_view_packages_in_grace_period: Mapped[bool] + is_image_workspace_enabled: Mapped[bool] + is_validation_workspace_enabled: Mapped[bool] + + link_agency_download_file_types: Mapped[list["LinkAgencyDownloadFileType"]] = relationship( + back_populates="agency", uselist=True, cascade="all, delete-orphan" + ) + + agency_download_file_types: AssociationProxy[set[AgencyDownloadFileType]] = association_proxy( + "link_agency_download_file_types", + "agency_download_file_type", + creator=lambda obj: LinkAgencyDownloadFileType(agency_download_file_type=obj), + ) + + +class LinkAgencyDownloadFileType(ApiSchemaTable, TimestampMixin): + __tablename__ = "link_agency_download_file_type" + + agency_id: Mapped[int] = mapped_column( + BigInteger, + ForeignKey(Agency.agency_id), + primary_key=True, + ) + agency: Mapped[Agency] = relationship(Agency) + + agency_download_file_type: Mapped[AgencyDownloadFileType] = mapped_column( + "agency_download_file_type_id", + LookupColumn(LkAgencyDownloadFileType), + ForeignKey(LkAgencyDownloadFileType.agency_download_file_type_id), + primary_key=True, + ) diff --git a/api/src/db/models/lookup_models.py b/api/src/db/models/lookup_models.py index 91b71e7cd..d954d6422 100644 --- a/api/src/db/models/lookup_models.py +++ b/api/src/db/models/lookup_models.py @@ -1,6 +1,8 @@ from sqlalchemy.orm import Mapped, mapped_column from src.constants.lookup_constants import ( + AgencyDownloadFileType, + AgencySubmissionNotificationSetting, ApplicantType, FundingCategory, FundingInstrument, @@ -93,6 +95,18 @@ ] ) +AGENCY_DOWNLOAD_FILE_TYPE_CONFIG = LookupConfig( + [LookupStr(AgencyDownloadFileType.XML, 1), LookupStr(AgencyDownloadFileType.PDF, 2)] +) + +AGENCY_SUBMISSION_NOTIFICATION_SETTING_CONFIG = LookupConfig( + [ + LookupStr(AgencySubmissionNotificationSetting.NEVER, 1), + 
LookupStr(AgencySubmissionNotificationSetting.FIRST_APPLICATION_ONLY, 2), + LookupStr(AgencySubmissionNotificationSetting.ALWAYS, 3), + ] +) + @LookupRegistry.register_lookup(OPPORTUNITY_CATEGORY_CONFIG) class LkOpportunityCategory(LookupTable, TimestampMixin): @@ -162,3 +176,32 @@ def from_lookup(cls, lookup: Lookup) -> "LkOpportunityStatus": return LkOpportunityStatus( opportunity_status_id=lookup.lookup_val, description=lookup.get_description() ) + + +@LookupRegistry.register_lookup(AGENCY_DOWNLOAD_FILE_TYPE_CONFIG) +class LkAgencyDownloadFileType(LookupTable, TimestampMixin): + __tablename__ = "lk_agency_download_file_type" + + agency_download_file_type_id: Mapped[int] = mapped_column(primary_key=True) + description: Mapped[str] + + @classmethod + def from_lookup(cls, lookup: Lookup) -> "LkAgencyDownloadFileType": + return LkAgencyDownloadFileType( + agency_download_file_type_id=lookup.lookup_val, description=lookup.get_description() + ) + + +@LookupRegistry.register_lookup(AGENCY_SUBMISSION_NOTIFICATION_SETTING_CONFIG) +class LkAgencySubmissionNotificationSetting(LookupTable, TimestampMixin): + __tablename__ = "lk_agency_submission_notification_setting" + + agency_submission_notification_setting_id: Mapped[int] = mapped_column(primary_key=True) + description: Mapped[str] + + @classmethod + def from_lookup(cls, lookup: Lookup) -> "LkAgencySubmissionNotificationSetting": + return LkAgencySubmissionNotificationSetting( + agency_submission_notification_setting_id=lookup.lookup_val, + description=lookup.get_description(), + ) diff --git a/api/src/db/models/staging/__init__.py b/api/src/db/models/staging/__init__.py index d89da9dc9..6a2de9c45 100644 --- a/api/src/db/models/staging/__init__.py +++ b/api/src/db/models/staging/__init__.py @@ -1,5 +1,5 @@ -from . import forecast, opportunity, staging_base, synopsis +from . import forecast, opportunity, staging_base, synopsis, tgroups metadata = staging_base.metadata -__all__ = ["metadata", "opportunity", "forecast", "synopsis"] +__all__ = ["metadata", "opportunity", "forecast", "synopsis", "tgroups"] diff --git a/api/src/db/models/staging/tgroups.py b/api/src/db/models/staging/tgroups.py new file mode 100644 index 000000000..fda264834 --- /dev/null +++ b/api/src/db/models/staging/tgroups.py @@ -0,0 +1,6 @@ +from src.db.legacy_mixin import tgroups_mixin +from src.db.models.staging.staging_base import StagingBase, StagingParamMixin + + +class Tgroups(StagingBase, tgroups_mixin.TGroupsMixin, StagingParamMixin): + __tablename__ = "tgroups" diff --git a/api/tests/lib/seed_local_db.py b/api/tests/lib/seed_local_db.py index ad87525ab..4562d2fe1 100644 --- a/api/tests/lib/seed_local_db.py +++ b/api/tests/lib/seed_local_db.py @@ -9,6 +9,7 @@ import src.util.datetime_util as datetime_util import tests.src.db.models.factories as factories from src.adapters.db import PostgresDBClient +from src.db.models.agency_models import Agency from src.db.models.opportunity_models import Opportunity from src.db.models.transfer.topportunity_models import TransferTopportunity from src.util.local import error_if_not_local @@ -115,6 +116,41 @@ def _build_opportunities(db_session: db.Session, iterations: int, include_histor logger.info("Finished creating records in the transfer_topportunity table") +# Agencies we want to create locally - if we want to create significantly more +# we can consider shoving this into a CSV that we load instead. 
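+# Each entry is passed directly to AgencyFactory.create(), so any other Agency column can also be overridden here if needed.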
+AGENCIES_TO_CREATE = [
+    {
+        "agency_code": "USAID",
+        "agency_name": "Agency for International Development",
+    },
+    {
+        "agency_code": "ARPAH",
+        "agency_name": "Advanced Research Projects Agency for Health",
+    },
+    {
+        "agency_code": "DOC",
+        "agency_name": "Department of Commerce",
+    },
+    {
+        "agency_code": "DOC-EDA",
+        "agency_name": "Economic Development Administration",
+    },
+]
+
+
+def _build_agencies(db_session: db.Session) -> None:
+    # Create a static set of agencies, only if they don't already exist
+    agencies = db_session.query(Agency).all()
+    agency_codes = set([a.agency_code for a in agencies])
+
+    for agency_to_create in AGENCIES_TO_CREATE:
+        if agency_to_create["agency_code"] in agency_codes:
+            continue
+
+        logger.info("Creating agency %s in agency table", agency_to_create["agency_code"])
+        factories.AgencyFactory.create(**agency_to_create)
+
+
 @click.command()
 @click.option(
     "--iterations",
@@ -141,3 +177,5 @@ def seed_local_db(iterations: int, include_history: bool) -> None:
         # Need to commit to force any updates made
         # after factories created objects
         db_session.commit()
+
+        _build_agencies(db_session)
diff --git a/api/tests/src/db/models/factories.py b/api/tests/src/db/models/factories.py
index 9aa87663f..3fafacdce 100644
--- a/api/tests/src/db/models/factories.py
+++ b/api/tests/src/db/models/factories.py
@@ -24,6 +24,8 @@ import src.db.models.transfer.topportunity_models as transfer_topportunity_models
 import src.util.datetime_util as datetime_util
 from src.constants.lookup_constants import (
+    AgencyDownloadFileType,
+    AgencySubmissionNotificationSetting,
     ApplicantType,
     FundingCategory,
     FundingInstrument,
@@ -31,6 +33,7 @@
     OpportunityCategoryLegacy,
     OpportunityStatus,
 )
+from src.db.models import agency_models
 
 
 def sometimes_none(factory_value, none_chance: float = 0.5):
@@ -137,7 +140,9 @@ class CustomProvider(BaseProvider):
         "{{word}}-###-##",
     ]
 
-    YN_BOOLEAN_VALUES = ["Y", "N", "Yes", "No"]
+    YN_BOOLEAN_VALUES = ["Y", "N"]
+
+    YN_YESNO_BOOLEAN_VALUES = ["Y", "N", "Yes", "No"]
 
     def agency(self) -> str:
         return self.random_element(self.AGENCIES)
@@ -177,6 +182,9 @@ def summary_description(self) -> str:
     def yn_boolean(self) -> str:
         return self.random_element(self.YN_BOOLEAN_VALUES)
 
+    def yn_yesno_boolean(self) -> str:
+        return self.random_element(self.YN_YESNO_BOOLEAN_VALUES)
+
 
 fake = faker.Faker()
 fake.add_provider(CustomProvider)
@@ -638,6 +646,61 @@ class Meta:
     applicant_type = factory.Iterator(ApplicantType)
 
 
+class AgencyContactInfoFactory(BaseFactory):
+    class Meta:
+        model = agency_models.AgencyContactInfo
+
+    contact_name = factory.Faker("name")
+    address_line_1 = factory.Faker("street_address")
+    address_line_2 = sometimes_none(factory.Sequence(lambda n: f"Room {n}"))
+    city = factory.Faker("city")
+    state = factory.Faker("state_abbr")
+    zip_code = factory.Faker("postcode")
+    phone_number = factory.Faker("basic_phone_number")
+    primary_email = factory.Faker("email")
+    secondary_email = sometimes_none(factory.Faker("email"))
+
+
+class AgencyFactory(BaseFactory):
+    class Meta:
+        model = agency_models.Agency
+
+    agency_name = factory.Faker("agency_name")
+
+    agency_code = factory.Faker("agency")
+    sub_agency_code = factory.LazyAttribute(lambda a: a.agency_code.split("-")[0])
+
+    # Stored as Text on the Agency model, so generate a string value
+    assistance_listing_number = factory.LazyFunction(lambda: str(fake.random_int(1, 999)))
+
+    agency_submission_notification_setting = factory.fuzzy.FuzzyChoice(
+        AgencySubmissionNotificationSetting
+    )
+
+    agency_contact_info = factory.SubFactory(AgencyContactInfoFactory)
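+    # Keep the plain FK column in sync with the contact info record created by the SubFactory above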
agency_contact_info_id = factory.LazyAttribute( + lambda a: a.agency_contact_info.agency_contact_info_id if a.agency_contact_info else None + ) + + is_test_agency = False + + ldap_group = factory.LazyAttribute(lambda a: a.agency_code) + description = factory.LazyAttribute(lambda a: a.agency_name) + label = factory.LazyAttribute(lambda a: a.agency_name) + is_multilevel_agency = factory.Faker("boolean") + is_multiproject = factory.Faker("boolean") + has_system_to_system_certificate = factory.Faker("boolean") + can_view_packages_in_grace_period = factory.Faker("boolean") + is_image_workspace_enabled = factory.Faker("boolean") + is_validation_workspace_enabled = factory.Faker("boolean") + + agency_download_file_types = factory.Faker( + "random_elements", + length=random.randint(1, 2), + elements=[a for a in AgencyDownloadFileType], + unique=True, + ) + + #################################### # Staging Table Factories #################################### @@ -797,7 +860,7 @@ class Meta: syn_desc = factory.Faker("summary_description") oth_cat_fa_desc = sometimes_none(factory.Faker("paragraph", nb_sentences=1)) - cost_sharing = sometimes_none(factory.Faker("yn_boolean"), none_chance=0.1) + cost_sharing = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.1) # These int values are stored as strings number_of_awards = sometimes_none( factory.LazyFunction(lambda: str(fake.random_int(1, 25))), none_chance=0.1 @@ -828,7 +891,7 @@ class Meta: factory.Faker("date_time_between", start_date="-5y", end_date="now") ) create_ts = factory.Faker("date_time_between", start_date="-10y", end_date="-5y") - sendmail = sometimes_none(factory.Faker("yn_boolean")) + sendmail = sometimes_none(factory.Faker("yn_yesno_boolean")) response_date_desc = sometimes_none(factory.Faker("paragraph", nb_sentences=2)) applicant_elig_desc = sometimes_none(factory.Faker("paragraph", nb_sentences=5)) version_nbr = factory.Faker("random_int", min=0, max=10) @@ -871,7 +934,7 @@ class Meta: forecast_desc = factory.Faker("summary_description") oth_cat_fa_desc = sometimes_none(factory.Faker("paragraph", nb_sentences=1)) - cost_sharing = sometimes_none(factory.Faker("yn_boolean"), none_chance=0.1) + cost_sharing = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.1) # These int values are stored as strings number_of_awards = sometimes_none( factory.LazyFunction(lambda: str(fake.random_int(1, 25))), none_chance=0.1 @@ -901,7 +964,7 @@ class Meta: factory.Faker("date_time_between", start_date="-5y", end_date="now") ) create_ts = factory.Faker("date_time_between", start_date="-10y", end_date="-5y") - sendmail = sometimes_none(factory.Faker("yn_boolean")) + sendmail = sometimes_none(factory.Faker("yn_yesno_boolean")) applicant_elig_desc = sometimes_none(factory.Faker("paragraph", nb_sentences=5)) version_nbr = factory.Faker("random_int", min=0, max=10) modification_comments = sometimes_none(factory.Faker("paragraph", nb_sentences=1)) @@ -1230,6 +1293,24 @@ class Params: ) +class StagingTgroupsFactory(BaseFactory): + class Meta: + model = staging.tgroups.Tgroups + + keyfield = "" + value = "" + + is_deleted = False + + created_date = factory.Faker("date_time_between", start_date="-10y", end_date="-5y") + last_upd_date = sometimes_none( + factory.Faker("date_time_between", start_date="-5y", end_date="now") + ) + + last_upd_id = factory.Faker("first_name") + creator_id = factory.Faker("first_name") + + #################################### # Transfer Table Factories #################################### @@ -1532,3 +1613,73 
@@ def build(self) -> opportunity_models.Opportunity: revision_number -= 1 return self.opportunity + + +class StagingTgroupsAgencyFactory(factory.DictFactory): + """ + This does not need to be called directly, and instead you should use + create_tgroups_agency (defined below) in order to call this. + + We use this to help organize factories / the ability to override and set + values for the tgroups agency data which is spread across many rows. + + Note: Any value that is "None" will not be included in the created + tgroups records (empty strings, or strings of values like "null" will be) + """ + + AgencyName = factory.Faker("agency_name") + AgencyCode = "" # see: create_tgroups_agency for how this gets set + AgencyCFDA = factory.Faker("random_int", min=1, max=99) + AgencyDownload = factory.Faker("random_int", min=1, max=3) + AgencyNotify = factory.Faker("random_int", min=1, max=3) + AgencyEnroll = "" # see: create_tgroups_agency for how this gets set + + AgencyContactName = factory.Faker("name") + AgencyContactAddress1 = factory.Faker("street_address") + AgencyContactAddress2 = factory.Maybe( + decider=factory.LazyAttribute(lambda s: random.random() > 0.5), + yes_declaration=factory.Sequence(lambda n: f"Room {n}"), + no_declaration="NULL", + ) + AgencyContactCity = factory.Faker("city") + AgencyContactState = factory.Faker("state_abbr") + AgencyContactZipCode = factory.Faker("postcode") + AgencyContactTelephone = Generators.PhoneNumber + AgencyContactEMail = factory.Faker("email") + AgencyContactEMail2 = sometimes_none(factory.Faker("email")) + + ldapGp = "" # see: create_tgroups_agency for how this gets set + description = factory.LazyAttribute(lambda g: g.AgencyName) + label = factory.LazyAttribute(lambda g: g.AgencyName) + multilevel = sometimes_none("TRUE", none_chance=0.8) + HasS2SCert = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.8) + ViewPkgsInGracePeriod = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.8) + multiproject = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.8) + ImageWS = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.8) + ValidationWS = sometimes_none(factory.Faker("yn_yesno_boolean"), none_chance=0.8) + + +def create_tgroups_agency( + agency_code: str, is_deleted: bool = False, **kwargs +) -> list[staging.tgroups.Tgroups]: + # The agency_code value is actually just the first bit (the top-level agency) + kwargs["AgencyCode"] = agency_code.split("-")[0] + kwargs["AgencyEnroll"] = agency_code + kwargs["ldapGp"] = agency_code + + field_values = StagingTgroupsAgencyFactory.build(**kwargs) + + groups = [] + + field_prefix = f"Agency-{agency_code}-" + + for field_name, value in field_values.items(): + if value is None: + continue + tgroup = StagingTgroupsFactory.create( + keyfield=field_prefix + field_name, value=value, is_deleted=is_deleted + ) + + groups.append(tgroup) + + return groups diff --git a/documentation/api/database/erds/api-schema.png b/documentation/api/database/erds/api-schema.png index f97bd993c..3ea9cb144 100644 Binary files a/documentation/api/database/erds/api-schema.png and b/documentation/api/database/erds/api-schema.png differ diff --git a/documentation/api/database/erds/full-schema.png b/documentation/api/database/erds/full-schema.png index 9923cdfef..66d3af5bc 100644 Binary files a/documentation/api/database/erds/full-schema.png and b/documentation/api/database/erds/full-schema.png differ
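A minimal sketch of how the new factories might be exercised in a test (the fixture names enable_factory_create and db_session, and the test itself, are illustrative assumptions, not part of this diff):

    from tests.src.db.models.factories import AgencyFactory, create_tgroups_agency

    def test_agency_factories(enable_factory_create, db_session):
        # AgencyFactory builds the related contact info record via its SubFactory
        agency = AgencyFactory.create(agency_code="TEST-SUB")
        assert agency.agency_contact_info is not None

        # create_tgroups_agency spreads one agency across many key/value tgroups rows
        tgroups = create_tgroups_agency("TEST-SUB")
        assert all(t.keyfield.startswith("Agency-TEST-SUB-") for t in tgroups)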