diff --git a/lib/dl_api_lib/dl_api_lib/dataset/validator.py b/lib/dl_api_lib/dl_api_lib/dataset/validator.py index aa025fcff..c75786850 100644 --- a/lib/dl_api_lib/dl_api_lib/dataset/validator.py +++ b/lib/dl_api_lib/dl_api_lib/dataset/validator.py @@ -1098,6 +1098,8 @@ def get_schema_info(exists: bool) -> Optional[SchemaInfo]: self._reload_sources() + if len(new_raw_schema) != len(self._ds.result_schema.get_direct_fields_by_source(source_id)): + force_update_fields = True if not self._are_schemas_identical(new_raw_schema, old_raw_schema) or force_update_fields: # try to match old and new schemas against each other # and update result_schema fields accordingly diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dataset_suite.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dataset_suite.py index 604a9a990..b6cac1a52 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dataset_suite.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dataset_suite.py @@ -1,8 +1,13 @@ import abc from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 -from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.primitives import ( + Action, + Dataset, + UpdateAction, +) from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_core.us_connection_base import ConnectionBase from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_testing.regulated_test import RegulatedTestCase @@ -33,3 +38,24 @@ def test_remove_connection( sync_us_manager.delete(sync_us_manager.get_by_id(saved_connection_id)) dataset_resp = control_api.load_dataset(saved_dataset) assert dataset_resp.status_code == 200, dataset_resp.json + + def test_result_field_available_after_deletion( + self, + saved_dataset: Dataset, + control_api: SyncHttpDatasetApiV1, + ) -> None: + # arrange + dataset = saved_dataset + result_field_to_remove = next(iter(dataset.result_schema)) + + # act + dataset_after_update = 
control_api.apply_updates(dataset, updates=[result_field_to_remove.delete()]).dataset + dataset_after_deletion = control_api.save_dataset(dataset_after_update).dataset + dataset_after_reload = control_api.refresh_dataset_sources( + dataset_after_deletion, dataset.sources._item_ids + ).dataset + + # assert + assert result_field_to_remove.title not in [item.title for item in dataset_after_deletion.result_schema] + assert result_field_to_remove.title in [item.title for item in dataset_after_reload.result_schema] + assert len(dataset_after_reload.result_schema) == len(dataset.result_schema) diff --git a/lib/dl_core/dl_core/fields.py b/lib/dl_core/dl_core/fields.py index db2fa45ef..0bd708f47 100644 --- a/lib/dl_core/dl_core/fields.py +++ b/lib/dl_core/dl_core/fields.py @@ -474,3 +474,10 @@ def remove_multiple(self, field_ids: Collection[str]) -> None: for field in fields_to_remove: self.fields.remove(field) self.clear_caches() + + def get_direct_fields_by_source(self, source_id: str) -> List[BIField]: + return [ + field + for field in self.fields + if isinstance(field.calc_spec, DirectCalculationSpec) and field.source == source_id + ]