fix(datasets): BI-6085 load dataset connection before updating it from body (control-api) (#847)
KonstantAnxiety authored Feb 20, 2025
1 parent c89ac6a commit 8ac232d
Showing 3 changed files with 14 additions and 11 deletions.
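The substance of the change is in DatasetResource.get_dataset: it now loads the dataset's dependencies (the connection entries it references) itself, before the dataset is updated from the request body, and callers that do not need them can opt out through a new load_dependencies flag. A minimal sketch of that flow follows; USManager, Dataset and update_dataset_from_body are simplified stand-ins rather than the real implementations, and only the ordering and the flag mirror the diff.

from typing import Optional


class Dataset:
    def __init__(self, dataset_id: str, connection_ids: list[str]) -> None:
        self.id = dataset_id
        self.connection_ids = connection_ids


class USManager:
    def get_by_id(self, entry_id: str) -> Dataset:
        return Dataset(entry_id, connection_ids=["conn-1"])

    def load_dependencies(self, dataset: Dataset) -> None:
        # In the real code this fetches the referenced connection entries
        # into the entry buffer so later source updates can resolve them.
        print(f"loading dependencies of {dataset.id}: {dataset.connection_ids}")


def update_dataset_from_body(dataset: Dataset, body: dict) -> dict:
    # Stand-in for DatasetApiLoader.update_dataset_from_body; when the body
    # touches data sources, the connections must already be loaded.
    return {"updated": bool(body)}


def get_dataset(
    us_manager: USManager,
    dataset_id: Optional[str],
    body: dict,
    load_dependencies: bool = True,
) -> tuple[Dataset, dict]:
    dataset = us_manager.get_by_id(dataset_id) if dataset_id else Dataset("new", [])
    if load_dependencies:
        # The fix: dependencies are loaded before updating from the body,
        # instead of each caller doing it afterwards (or forgetting to).
        us_manager.load_dependencies(dataset)
    return dataset, update_dataset_from_body(dataset, body)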
2 changes: 1 addition & 1 deletion lib/dl_api_lib/dl_api_lib/api_common/dataset_loader.py
@@ -153,7 +153,7 @@ def _update_dataset_sources_from_body(
connection_id=connection_id,
source_type=source_type,
**source_data["parameters"],
- ) # not that this does not include title and raw_schema updates
+ ) # note that this does not include title and raw_schema updates
old_raw_schema = old_src_coll.get_cached_raw_schema(role=DataSourceRole.origin)
schema_updated = not are_raw_schemas_same(old_raw_schema, source_data["raw_schema"])

Second changed file:
@@ -124,7 +124,7 @@ class DatasetItem(BIResource):
def delete(self, dataset_id):
"""Delete dataset"""
us_manager = self.get_us_manager()
- ds, _ = DatasetResource.get_dataset(dataset_id=dataset_id, body={})
+ ds, _ = DatasetResource.get_dataset(dataset_id=dataset_id, body={}, load_dependencies=False)
utils.need_permission_on_entry(ds, USPermissionKind.admin)

us_manager.delete(ds)
@@ -141,7 +141,7 @@ class DatasetItemFields(BIResource):
},
)
def get(self, dataset_id): # type: ignore # TODO: fix
- ds, _ = DatasetResource.get_dataset(dataset_id=dataset_id, body={})
+ ds, _ = DatasetResource.get_dataset(dataset_id=dataset_id, body={}, load_dependencies=False)
fields = [
{
"title": f.title,
@@ -172,7 +172,6 @@ def post(self, dataset_id, body): # type: ignore # TODO: fix
copy_us_key = body["new_key"]
us_manager = self.get_us_manager()
ds, _ = self.get_dataset(dataset_id=dataset_id, body={})
- us_manager.load_dependencies(ds)
orig_ds_loc = ds.entry_key
copy_ds_loc: PathEntryLocation

@@ -216,7 +215,6 @@ def get(self, dataset_id, version): # type: ignore # TODO: fix

ds_dict["is_favorite"] = ds.is_favorite

- us_manager.load_dependencies(ds)
ds_dict.update(self.make_dataset_response_data(dataset=ds, us_entry_buffer=us_manager.get_entry_buffer()))
return ds_dict

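On the caller side, handlers that only delete the dataset or list its fields now opt out of dependency loading, while the copy and "get version" handlers drop their explicit us_manager.load_dependencies(ds) calls and rely on get_dataset doing it by default. A hedged sketch of the resulting call patterns; the stub class below is hypothetical, and only the load_dependencies usage mirrors the diff.

# Hypothetical stub standing in for DatasetResource, just to show the call patterns.
class DatasetResource:
    @classmethod
    def get_dataset(cls, dataset_id, body, load_dependencies=True):
        return object(), {"loaded_dependencies": load_dependencies}


# delete / fields handlers: connection data is not needed, so they opt out.
ds, _ = DatasetResource.get_dataset(dataset_id="ds-1", body={}, load_dependencies=False)

# copy / "get version" handlers: the explicit us_manager.load_dependencies(ds)
# call was removed because get_dataset now performs it by default.
ds, _ = DatasetResource.get_dataset(dataset_id="ds-1", body={})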
Third changed file:
@@ -4,10 +4,7 @@
import logging
from typing import (
Any,
- List,
Optional,
- Set,
- Tuple,
)

from dl_api_lib.api_common.dataset_loader import (
@@ -83,7 +80,12 @@ def create_dataset_api_loader(cls) -> DatasetApiLoader:
)

@classmethod
- def get_dataset(cls, dataset_id: Optional[str], body: dict) -> Tuple[Dataset, DatasetUpdateInfo]:
+ def get_dataset(
+     cls,
+     dataset_id: Optional[str],
+     body: dict,
+     load_dependencies: bool = True,
+ ) -> tuple[Dataset, DatasetUpdateInfo]:
us_manager = cls.get_us_manager()
if dataset_id:
try:
@@ -96,6 +98,9 @@ def get_dataset(cls, dataset_id: Optional[str], body: dict) -> Tuple[Dataset, DatasetUpdateInfo]:
us_manager=us_manager,
)

+ if load_dependencies:
+     us_manager.load_dependencies(dataset)

loader = cls.create_dataset_api_loader()
update_info = loader.update_dataset_from_body(
dataset=dataset,
@@ -254,7 +259,7 @@ def dump_option_data(

opt_data["sources"] = dict(items=[])
connection_ids = set()
- connection_types: Set[Optional[ConnectionType]] = set()
+ connection_types: set[Optional[ConnectionType]] = set()
for source_id in ds_accessor.get_data_source_id_list():
dsrc_coll_spec = ds_accessor.get_data_source_coll_spec_strict(source_id=source_id)
dsrc_coll = dsrc_coll_factory.get_data_source_collection(spec=dsrc_coll_spec)
@@ -297,7 +302,7 @@ def dump_option_data(
)
)

- compatible_conn_types: List[dict] = []
+ compatible_conn_types: list[dict] = []
for conn_type in capabilities.get_compatible_connection_types():
if connection_ids: # There already are connections in the dataset
continue
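The remaining hunks are a small typing cleanup: typing.Set, typing.List and typing.Tuple annotations are replaced by the built-in generics available since Python 3.9. A minimal illustration, with str standing in for ConnectionType just to keep it runnable:

from typing import Optional

# Before: Set[Optional[ConnectionType]], List[dict], Tuple[Dataset, DatasetUpdateInfo]
# After: the built-in types can be parameterized directly on Python 3.9+.
connection_types: set[Optional[str]] = set()
compatible_conn_types: list[dict] = []
pair: tuple[int, str] = (1, "a")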
