From 55a758f342274d2bcb22e6e210398a00e47ae871 Mon Sep 17 00:00:00 2001 From: Tessa Walsh Date: Wed, 30 Oct 2024 15:33:22 -0400 Subject: [PATCH] Consolidate ops class initialization (#2117) Fixes #2111 The background job and operator entrypoints now use a shared function that initializes and returns the ops classes. This is not applied in the main entrypoint as that also initializes the backend API, which we don't want in the other entrypoints. --- backend/btrixcloud/main_bg.py | 93 +------------------------ backend/btrixcloud/main_op.py | 74 +++----------------- backend/btrixcloud/ops.py | 124 ++++++++++++++++++++++++++++++++++ 3 files changed, 137 insertions(+), 154 deletions(-) create mode 100644 backend/btrixcloud/ops.py diff --git a/backend/btrixcloud/main_bg.py b/backend/btrixcloud/main_bg.py index 8b56e6a8da..208c53333a 100644 --- a/backend/btrixcloud/main_bg.py +++ b/backend/btrixcloud/main_bg.py @@ -6,25 +6,9 @@ import traceback from uuid import UUID -from .crawlmanager import CrawlManager -from .db import init_db -from .emailsender import EmailSender - -# from .utils import register_exit_handler from .models import BgJobType +from .ops import init_ops -from .basecrawls import BaseCrawlOps -from .invites import InviteOps -from .users import init_user_manager -from .orgs import OrgOps -from .colls import CollectionOps -from .crawlconfigs import CrawlConfigOps -from .crawls import CrawlOps -from .profiles import ProfileOps -from .storages import StorageOps -from .webhooks import EventWebhookOps -from .background_jobs import BackgroundJobOps -from .pages import PageOps job_type = os.environ.get("BG_JOB_TYPE") oid = os.environ.get("OID") @@ -33,19 +17,7 @@ # ============================================================================ # pylint: disable=too-many-function-args, duplicate-code, too-many-locals async def main(): -    """main init""" -    email = EmailSender() -    crawl_manager = None - -    dbclient, mdb = init_db() - -    invite_ops = InviteOps(mdb, email) - - 
user_manager = init_user_manager(mdb, email, invite_ops) - - org_ops = OrgOps(mdb, invite_ops, user_manager) - - event_webhook_ops = EventWebhookOps(mdb, org_ops) + """run background job with access to ops classes""" # pylint: disable=import-outside-toplevel if not os.environ.get("KUBERNETES_SERVICE_HOST"): @@ -55,66 +27,7 @@ async def main(): ) sys.exit(1) - crawl_manager = CrawlManager() - - storage_ops = StorageOps(org_ops, crawl_manager) - - background_job_ops = BackgroundJobOps( - mdb, email, user_manager, org_ops, crawl_manager, storage_ops - ) - - profile_ops = ProfileOps( - mdb, org_ops, crawl_manager, storage_ops, background_job_ops - ) - - crawl_config_ops = CrawlConfigOps( - dbclient, - mdb, - user_manager, - org_ops, - crawl_manager, - profile_ops, - ) - - coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops) - - base_crawl_ops = BaseCrawlOps( - mdb, - user_manager, - org_ops, - crawl_config_ops, - coll_ops, - storage_ops, - event_webhook_ops, - background_job_ops, - ) - - crawl_ops = CrawlOps( - crawl_manager, - mdb, - user_manager, - org_ops, - crawl_config_ops, - coll_ops, - storage_ops, - event_webhook_ops, - background_job_ops, - ) - - page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops) - - base_crawl_ops.set_page_ops(page_ops) - crawl_ops.set_page_ops(page_ops) - - background_job_ops.set_ops(crawl_ops, profile_ops) - - org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops) - - user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops) - - background_job_ops.set_ops(base_crawl_ops, profile_ops) - - crawl_config_ops.set_coll_ops(coll_ops) + (org_ops, _, _, _, _, _, _, _, _, _, user_manager) = init_ops() # Run job if job_type == BgJobType.DELETE_ORG: diff --git a/backend/btrixcloud/main_op.py b/backend/btrixcloud/main_op.py index 573c3174b5..a6f6654be3 100644 --- a/backend/btrixcloud/main_op.py +++ b/backend/btrixcloud/main_op.py @@ -5,23 +5,10 @@ from fastapi import FastAPI -from .crawlmanager import 
CrawlManager -from .db import init_db -from .emailsender import EmailSender from .operator import init_operator_api +from .ops import init_ops from .utils import register_exit_handler -from .invites import InviteOps -from .users import init_user_manager -from .orgs import OrgOps -from .colls import CollectionOps -from .crawlconfigs import CrawlConfigOps -from .crawls import CrawlOps -from .profiles import ProfileOps -from .storages import init_storages_api -from .webhooks import EventWebhookOps -from .background_jobs import BackgroundJobOps -from .pages import PageOps app_root = FastAPI() @@ -29,19 +16,7 @@ # ============================================================================ # pylint: disable=too-many-function-args, duplicate-code def main(): - """main init""" - email = EmailSender() - crawl_manager = None - - dbclient, mdb = init_db() - - invite_ops = InviteOps(mdb, email) - - user_manager = init_user_manager(mdb, email, invite_ops) - - org_ops = OrgOps(mdb, invite_ops, user_manager) - - event_webhook_ops = EventWebhookOps(mdb, org_ops) + """init operator""" # pylint: disable=import-outside-toplevel if not os.environ.get("KUBERNETES_SERVICE_HOST"): @@ -51,48 +26,19 @@ def main(): ) sys.exit(1) - crawl_manager = CrawlManager() - - storage_ops = init_storages_api(org_ops, crawl_manager) - - background_job_ops = BackgroundJobOps( - mdb, email, user_manager, org_ops, crawl_manager, storage_ops - ) - - profile_ops = ProfileOps( - mdb, org_ops, crawl_manager, storage_ops, background_job_ops - ) - - crawl_config_ops = CrawlConfigOps( - dbclient, - mdb, - user_manager, - org_ops, - crawl_manager, - profile_ops, - ) - - user_manager.set_ops(org_ops, crawl_config_ops, None) - - coll_ops = CollectionOps(mdb, crawl_manager, org_ops, event_webhook_ops) - - crawl_ops = CrawlOps( - crawl_manager, - mdb, - user_manager, + ( org_ops, crawl_config_ops, + _, + crawl_ops, + page_ops, coll_ops, + _, storage_ops, - event_webhook_ops, background_job_ops, - ) - - page_ops = 
PageOps(mdb, crawl_ops, org_ops, storage_ops) - - crawl_ops.set_page_ops(page_ops) - - background_job_ops.set_ops(crawl_ops, profile_ops) + event_webhook_ops, + _, + ) = init_ops() return init_operator_api( app_root, diff --git a/backend/btrixcloud/ops.py b/backend/btrixcloud/ops.py new file mode 100644 index 0000000000..23629de2aa --- /dev/null +++ b/backend/btrixcloud/ops.py @@ -0,0 +1,124 @@ +""" shared helper to initialize ops classes """ + +from typing import Tuple + +from .crawlmanager import CrawlManager +from .db import init_db +from .emailsender import EmailSender + +from .background_jobs import BackgroundJobOps +from .basecrawls import BaseCrawlOps +from .colls import CollectionOps +from .crawls import CrawlOps +from .crawlconfigs import CrawlConfigOps +from .invites import InviteOps +from .orgs import OrgOps +from .pages import PageOps +from .profiles import ProfileOps +from .storages import StorageOps +from .users import UserManager +from .webhooks import EventWebhookOps + + +# pylint: disable=too-many-locals +def init_ops() -> Tuple[ + OrgOps, + CrawlConfigOps, + BaseCrawlOps, + CrawlOps, + PageOps, + CollectionOps, + ProfileOps, + StorageOps, + BackgroundJobOps, + EventWebhookOps, + UserManager, +]: + """Initialize and return ops classes""" + email = EmailSender() + + dbclient, mdb = init_db() + + invite_ops = InviteOps(mdb, email) + + user_manager = UserManager(mdb, email, invite_ops) + + org_ops = OrgOps(mdb, invite_ops, user_manager) + + event_webhook_ops = EventWebhookOps(mdb, org_ops) + + crawl_manager = CrawlManager() + + storage_ops = StorageOps(org_ops, crawl_manager) + + background_job_ops = BackgroundJobOps( + mdb, email, user_manager, org_ops, crawl_manager, storage_ops + ) + + profile_ops = ProfileOps( + mdb, org_ops, crawl_manager, storage_ops, background_job_ops + ) + + crawl_config_ops = CrawlConfigOps( + dbclient, + mdb, + user_manager, + org_ops, + crawl_manager, + profile_ops, + ) + + coll_ops = CollectionOps(mdb, crawl_manager, 
org_ops, event_webhook_ops) + + base_crawl_ops = BaseCrawlOps( + mdb, + user_manager, + org_ops, + crawl_config_ops, + coll_ops, + storage_ops, + event_webhook_ops, + background_job_ops, + ) + + crawl_ops = CrawlOps( + crawl_manager, + mdb, + user_manager, + org_ops, + crawl_config_ops, + coll_ops, + storage_ops, + event_webhook_ops, + background_job_ops, + ) + + page_ops = PageOps(mdb, crawl_ops, org_ops, storage_ops) + + base_crawl_ops.set_page_ops(page_ops) + + crawl_ops.set_page_ops(page_ops) + + background_job_ops.set_ops(crawl_ops, profile_ops) + + org_ops.set_ops(base_crawl_ops, profile_ops, coll_ops, background_job_ops) + + user_manager.set_ops(org_ops, crawl_config_ops, base_crawl_ops) + + background_job_ops.set_ops(base_crawl_ops, profile_ops) + + crawl_config_ops.set_coll_ops(coll_ops) + + return ( + org_ops, + crawl_config_ops, + base_crawl_ops, + crawl_ops, + page_ops, + coll_ops, + profile_ops, + storage_ops, + background_job_ops, + event_webhook_ops, + user_manager, + )