diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/connection_base_fields.py b/lib/dl_api_connector/dl_api_connector/api_schema/connection_base_fields.py index 245673590..582ddc16a 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/connection_base_fields.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/connection_base_fields.py @@ -47,7 +47,7 @@ def secret_string_field( required: bool = True, allow_none: bool = False, default: Optional[str] = None, - bi_extra: FieldExtra = FieldExtra(editable=True), # noqa: B008 + bi_extra: FieldExtra = FieldExtra(editable=True, export_fake=True), # noqa: B008 ) -> ma_fields.String: return ma_fields.String( attribute=attribute, diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/connection_sql.py b/lib/dl_api_connector/dl_api_connector/api_schema/connection_sql.py index 9b9f02a6d..d1bcdcad4 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/connection_sql.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/connection_sql.py @@ -75,7 +75,7 @@ class ClassicSQLConnectionSchema(ConnectionSchema): host = DBHostField(attribute="data.host", required=True, bi_extra=FieldExtra(editable=True)) port = ma_fields.Integer(attribute="data.port", required=True, bi_extra=FieldExtra(editable=True)) username = ma_fields.String(attribute="data.username", required=True, bi_extra=FieldExtra(editable=True)) - password = secret_string_field(attribute="data.password", bi_extra=FieldExtra(editable=True)) + password = secret_string_field(attribute="data.password") db_name = ma_fields.String( attribute="data.db_name", allow_none=True, bi_extra=FieldExtra(editable=True), validate=db_name_no_query_params ) diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/extras.py b/lib/dl_api_connector/dl_api_connector/api_schema/extras.py index 0fa1b12e4..7c7efbee8 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/extras.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/extras.py @@ 
-20,11 +20,19 @@ class CreateMode(OperationsMode): test = enum.auto() +class ImportMode(OperationsMode): + create_from_import = enum.auto() + + class EditMode(OperationsMode): edit = enum.auto() test = enum.auto() +class ExportMode(OperationsMode): + export = enum.auto() + + class SchemaKWArgs(TypedDict): only: Optional[Sequence[str]] partial: Union[Sequence[str], bool] @@ -38,3 +46,4 @@ class FieldExtra: partial_in: Sequence[OperationsMode] = () exclude_in: Sequence[OperationsMode] = () editable: Union[bool, Sequence[OperationsMode]] = () + export_fake: Optional[bool] = False diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/top_level.py b/lib/dl_api_connector/dl_api_connector/api_schema/top_level.py index e04a34dd6..1c2072d77 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/top_level.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/top_level.py @@ -1,6 +1,7 @@ from __future__ import annotations import copy +from copy import deepcopy import itertools import logging import os @@ -19,6 +20,7 @@ import marshmallow from marshmallow import ( missing, + post_dump, post_load, pre_load, ) @@ -28,7 +30,9 @@ from dl_api_connector.api_schema.extras import ( CreateMode, EditMode, + ExportMode, FieldExtra, + ImportMode, OperationsMode, SchemaKWArgs, ) @@ -84,7 +88,7 @@ def get_field_extra(f: ma_fields.Field) -> Optional[FieldExtra]: return f.metadata.get("bi_extra", None) @property - def operations_mode(self) -> Optional[CreateMode]: + def operations_mode(self) -> Optional[CreateMode | ImportMode]: return self.context.get(self.CTX_KEY_OPERATIONS_MODE) @classmethod @@ -98,6 +102,13 @@ def all_fields_with_extra_info(cls) -> Iterable[tuple[str, ma_fields.Field, Fiel if extra is not None: yield field_name, field, extra + @classmethod + def fieldnames_with_extra_export_fake_info(cls) -> Iterable[str]: + for field_name, field in cls.all_fields_dict().items(): + extra = cls.get_field_extra(field) + if extra is not None and extra.export_fake is True: 
+ yield field_name + def _refine_init_kwargs(self, kw_args: SchemaKWArgs, operations_mode: Optional[OperationsMode]) -> SchemaKWArgs: if operations_mode is None: return kw_args @@ -159,7 +170,7 @@ def post_load(self, data: dict[str, Any], **_: Any) -> Union[_TARGET_OBJECT_TV, return data assert isinstance(editable_object, self.TARGET_CLS) return self.update_object(editable_object, data) - if isinstance(self.operations_mode, CreateMode): + if isinstance(self.operations_mode, CreateMode | ImportMode): return self.create_object(data) raise ValueError(f"Can not perform load. Unknown operations mode: {self.operations_mode!r}") @@ -171,6 +182,21 @@ def get_allowed_unknown_fields(self) -> set[str]: """ return set() + @final + def delete_unknown_fields(self, data: dict[str, Any]) -> dict[str, Any]: + LOGGER.info( + "Got unknown fields for schema %s/%s. Unknown fields will be removed.", + type(self).__qualname__, + self.operations_mode, + ) + + cleaned_data = {} + for field_name, field_value in data.items(): + if field_name in self.fields and not self.fields[field_name].dump_only: + cleaned_data[field_name] = field_value + + return cleaned_data + @final def handle_unknown_fields(self, data: dict[str, Any]) -> dict[str, Any]: refined_data = {} @@ -230,8 +256,20 @@ def pre_load(self, data: dict[str, Any], **_: Any) -> dict[str, Any]: schema_input_keys=all_data_keys, ), ) + + if isinstance(self.operations_mode, ImportMode): + return self.delete_unknown_fields(data) + return self.handle_unknown_fields(data) + @post_dump(pass_many=False) + def post_dump(self, data: dict[str, Any], **_: Any) -> dict[str, Any]: + if isinstance(self.operations_mode, ExportMode): + data = deepcopy(data) + for secret_field in self.fieldnames_with_extra_export_fake_info(): + data[secret_field] = "******" + return data + _US_ENTRY_TV = TypeVar("_US_ENTRY_TV", bound=USEntry) diff --git a/lib/dl_api_lib/dl_api_lib/app/control_api/app.py b/lib/dl_api_lib/dl_api_lib/app/control_api/app.py index 
7f34c719b..786e812d3 100644 --- a/lib/dl_api_lib/dl_api_lib/app/control_api/app.py +++ b/lib/dl_api_lib/dl_api_lib/app/control_api/app.py @@ -6,6 +6,7 @@ Generic, Optional, TypeVar, + final, ) import attr @@ -49,6 +50,12 @@ from dl_core.connection_models import ConnectOptions from dl_core.us_connection_base import ConnectionBase +from dl_api_lib.app.control_api.resources.connections import ( + BIResource, + ConnectionExportItem, +) +from dl_api_lib.app.control_api.resources.connections import ns as connections_namespace + @attr.s(frozen=True) class EnvSetupResult: @@ -62,6 +69,14 @@ class EnvSetupResult: class ControlApiAppFactory(SRFactoryBuilder, Generic[TControlApiAppSettings], abc.ABC): _settings: TControlApiAppSettings = attr.ib() + def get_connection_export_resource(self) -> type[BIResource]: + return ConnectionExportItem + + @final + def register_additional_handlers(self) -> None: + connection_export_resource = self.get_connection_export_resource() + connections_namespace.add_resource(connection_export_resource, "/export/") + @abc.abstractmethod def set_up_environment( self, @@ -159,6 +174,7 @@ def create_app( ma = Marshmallow() ma.init_app(app) + app.before_first_request(self.register_additional_handlers) init_apis(app) return app diff --git a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/connections.py b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/connections.py index f110c0fb5..abb3af991 100644 --- a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/connections.py +++ b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/connections.py @@ -17,6 +17,8 @@ from dl_api_connector.api_schema.extras import ( CreateMode, EditMode, + ExportMode, + ImportMode, ) from dl_api_lib import exc from dl_api_lib.api_decorators import schematic_request @@ -125,6 +127,48 @@ def post(self, connection_id): # type: ignore # TODO: fix _handle_conn_test_exc(e) +@ns.route("/import") +class ConnectionsImportList(BIResource): + 
@put_to_request_context(endpoint_code="ConnectionImport") + @schematic_request(ns=ns) + def post(self): # type: ignore # TODO: fix + us_manager = self.get_us_manager() + notifications = [] + + conn_data = request.json and request.json["data"]["connection"] + assert conn_data + + conn_type = conn_data["db_type"] + if not conn_type or conn_type not in ConnectionType: + raise exc.BadConnectionType(f"Invalid connection type value: {conn_type}") + + conn_availability = self.get_service_registry().get_connector_availability() + conn_type_is_available = conn_availability.check_connector_is_available(ConnectionType[conn_type]) + if not conn_type_is_available: + raise exc.UnsupportedForEntityType(f"Connector {conn_type} is not available in current env") + + conn_data["workbook_id"] = request.json and request.json["data"].get("workbook_id", None) + conn_data["type"] = conn_type + + schema = GenericConnectionSchema( + context=self.get_schema_ctx(schema_operations_mode=ImportMode.create_from_import) + ) + try: + conn: ConnectionBase = schema.load(conn_data) + except MValidationError as e: + return e.messages, 400 + + conn.validate_new_data_sync(services_registry=self.get_service_registry()) + + conn_warnings = conn.get_import_warnings_list() + if conn_warnings: + notifications.extend(conn_warnings) + + us_manager.save(conn) + + return dict(id=conn.uuid, notifications=notifications) + + @ns.route("/") class ConnectionsList(BIResource): @put_to_request_context(endpoint_code="ConnectionCreate") @@ -211,6 +255,33 @@ def put(self, connection_id): # type: ignore # TODO: fix us_manager.save(conn) +class ConnectionExportItem(BIResource): + @put_to_request_context(endpoint_code="ConnectionExport") + @schematic_request( + ns=ns, + responses={}, + ) + def get(self, connection_id: str) -> dict: + notifications: list[dict] = [] + + conn = self.get_us_manager().get_by_id(connection_id, expected_type=ConnectionBase) + need_permission_on_entry(conn, USPermissionKind.read) + assert 
isinstance(conn, ConnectionBase) + + if not conn.allow_export: + raise exc.UnsupportedForEntityType(f"Connector {conn.conn_type.name} does not support export") + + result = GenericConnectionSchema(context=self.get_schema_ctx(ExportMode.export)).dump(conn) + result.update(options=ConnectionOptionsSchema().dump(conn.get_options())) + result.pop("id") + + conn_warnings = conn.get_export_warnings_list() + if conn_warnings: + notifications.extend(conn_warnings) + + return dict(connection=result, notifications=notifications) + + def _dump_source_templates(tpls: Optional[list[DataSourceTemplate]]) -> Optional[list[dict[str, Any]]]: if tpls is None: return None diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py index 6ae2677fd..52ad55c2d 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py @@ -6,6 +6,8 @@ from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase from dl_api_lib_testing.connection_base import ConnectionTestBase +from dl_core.us_connection_base import ConnectionBase +from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_testing.regulated_test import RegulatedTestCase @@ -23,6 +25,76 @@ def test_create_connection( ) assert resp.status_code == 200, resp.json + def test_export_connection( + self, + control_api_sync_client: SyncHttpClientBase, + saved_connection_id: str, + bi_headers: Optional[dict[str, str]], + sync_us_manager: SyncUSManager, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, expected_type=ConnectionBase) + assert isinstance(conn, ConnectionBase) + + resp = control_api_sync_client.get( + url=f"/api/v1/connections/export/{saved_connection_id}", + headers=bi_headers, + ) + + if not conn.allow_export: + assert resp.status_code == 400 + return + + assert resp.status_code == 200, resp.json 
+ if hasattr(conn.data, "password"): + password = resp.json["connection"]["password"] + assert password == "******" + + def test_import_connection( + self, + control_api_sync_client: SyncHttpClientBase, + saved_connection_id: str, + bi_headers: Optional[dict[str, str]], + sync_us_manager: SyncUSManager, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, expected_type=ConnectionBase) + assert isinstance(conn, ConnectionBase) + if not conn.allow_export: + return + + export_resp = control_api_sync_client.get( + url=f"/api/v1/connections/export/{saved_connection_id}", + headers=bi_headers, + ) + + export_resp.json["connection"][ + "name" + ] = f"{self.conn_type.name} conn {uuid.uuid4()}" # in case of response with workbook, 'name'-field is in export response by default + + import_request = json.dumps( + { + "data": { + # "workbook_id" : "1234567890000", # can't test with workbook in case of ERR.DS_API.US.OBJ_NOT_FOUND + "connection": export_resp.json["connection"] + } + } + ) + + import_response = control_api_sync_client.post( + url="/api/v1/connections/import", + headers=bi_headers, + data=import_request, + content_type="application/json", + ) + assert import_response.status_code == 200, import_response.json + assert import_response.json["id"] + assert import_response.json["id"] != saved_connection_id + assert import_response.json["notifications"] + + export_resp = control_api_sync_client.delete( + url=f"/api/v1/connections/{import_response.json['id']}", + headers=bi_headers, + ) + def test_test_connection( self, control_api_sync_client: SyncHttpClientBase, diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py index e4053f49f..20029c9ef 100644 --- a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py @@ -35,6 +35,7 @@ class 
BitrixGDSConnectOptions(ConnectOptions): class BitrixGDSConnection(ConnectionBase): allow_cache: ClassVar[bool] = True source_type = SOURCE_TYPE_BITRIX_GDS + allow_export: ClassVar[bool] = True @attr.s(kw_only=True) class DataModel(ConnCacheableDataModelMixin, ConnectionBase.DataModel): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py index 77ecffc82..f23165a49 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py @@ -2,7 +2,10 @@ from typing import ( Any, + ClassVar, + Optional, Type, + final, ) import marshmallow as ma @@ -11,9 +14,14 @@ Schema, fields, post_load, + pre_load, ) -from dl_api_connector.api_schema.extras import FieldExtra +from dl_api_connector.api_schema.extras import ( + CreateMode, + FieldExtra, + ImportMode, +) from dl_api_connector.api_schema.source_base import ( SQLDataSourceSchema, SQLDataSourceTemplateSchema, @@ -34,15 +42,36 @@ class RawSchemaColumnSchema(Schema): class BaseFileSourceSchema(Schema): + CTX_KEY_OPERATIONS_MODE: ClassVar[str] = "operations_mode" + class Meta: unknown = RAISE target: Type[BaseFileS3Connection.FileDataSource] @post_load(pass_many=False) - def to_object(self, data: dict[str, Any], **kwargs: Any) -> BaseFileS3Connection.FileDataSource: + def post_load(self, data: dict[str, Any], **kwargs: Any) -> BaseFileS3Connection.FileDataSource: return self.Meta.target(**data) + @property + def operations_mode(self) -> Optional[CreateMode | ImportMode]: + return self.context.get(self.CTX_KEY_OPERATIONS_MODE) + + @final + def delete_unknown_fields(self, data: dict[str, Any]) -> dict[str, Any]: + cleaned_data = {} + for field_name, field_value in data.items(): + if field_name in self.fields and not self.fields[field_name].dump_only: + 
cleaned_data[field_name] = field_value + + return cleaned_data + + @pre_load(pass_many=False) + def pre_load(self, data: dict[str, Any], **_: Any) -> dict[str, Any]: + if isinstance(self.operations_mode, ImportMode): + return self.delete_unknown_fields(data) + return data + id = fields.String() file_id = fields.String(load_default=None) title = fields.String(bi_extra=FieldExtra(editable=True)) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py index af3d4b1a7..6e7cecab5 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py @@ -54,6 +54,7 @@ class BaseFileS3Connection(ConnectionHardcodedDataMixin[FileS3ConnectorSettings] is_always_internal_source: ClassVar[bool] = True allow_cache: ClassVar[bool] = True settings_type = FileS3ConnectorSettings + allow_export: ClassVar[bool] = True editable_data_source_parameters: ClassVar[tuple[str, ...]] = ( "file_id", diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py index 6f3d4a713..3dcefae73 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py @@ -28,7 +28,7 @@ class GSheetsFileS3ConnectionSchema(BaseFileS3ConnectionSchema): load_default=None, allow_none=True, load_only=True, - bi_extra=FieldExtra(editable=True), + bi_extra=FieldExtra(editable=True, export_fake=True), ) refresh_enabled = fields.Boolean(attribute="data.refresh_enabled", bi_extra=FieldExtra(editable=True)) authorized = fields.Boolean(dump_only=True) diff --git 
a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_yadocs/api/api_schema/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_yadocs/api/api_schema/connection.py index ec3e8f1a0..e13c07622 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_yadocs/api/api_schema/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_yadocs/api/api_schema/connection.py @@ -28,7 +28,7 @@ class YaDocsFileS3ConnectionSchema(BaseFileS3ConnectionSchema): load_default=None, allow_none=True, load_only=True, - bi_extra=FieldExtra(editable=True), + bi_extra=FieldExtra(editable=True, export_fake=True), ) refresh_enabled = fields.Boolean(attribute="data.refresh_enabled", bi_extra=FieldExtra(editable=True)) authorized = fields.Boolean(dump_only=True) diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py b/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py index d460e384e..3982f3687 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py @@ -28,7 +28,7 @@ class CHYTConnectionSchema(ConnectionMetaMixin, RawSQLLevelMixin, DataExportForb host = DBHostField(attribute="data.host", required=True, bi_extra=FieldExtra(editable=True)) port = ma.fields.Integer(attribute="data.port", required=True, bi_extra=FieldExtra(editable=True)) - token = secret_string_field(attribute="data.token", required=True, bi_extra=FieldExtra(editable=True)) + token = secret_string_field(attribute="data.token", required=True) alias = alias_string_field(attribute="data.alias") secure = ma.fields.Boolean(attribute="data.secure", bi_extra=FieldExtra(editable=True)) cache_ttl_sec = cache_ttl_field(attribute="data.cache_ttl_sec") diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py index 
5c4ff732b..84496e2b9 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py @@ -26,7 +26,6 @@ class ClickHouseConnectionSchema( attribute="data.password", required=False, allow_none=True, - bi_extra=FieldExtra(editable=True), ) secure = core_ma_fields.OnOffField(attribute="data.secure", bi_extra=FieldExtra(editable=True)) @@ -38,4 +37,4 @@ class ClickHouseConnectionSchema( load_default=None, load_only=True, ) - readonly = ma_fields.Integer(attribute="data.readonly", bi_extra=FieldExtra(editable=True)) + readonly = ma_fields.Integer(attribute="data.readonly", bi_extra=FieldExtra(editable=True), load_default=2) diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py index c5cf8e160..e4e0e789a 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py @@ -28,6 +28,7 @@ class ConnectionClickhouse(ConnectionClickhouseBase): allowed_source_types = frozenset((SOURCE_TYPE_CH_TABLE, SOURCE_TYPE_CH_SUBSELECT)) allow_dashsql: ClassVar[bool] = True allow_cache: ClassVar[bool] = True + allow_export: ClassVar[bool] = True is_always_user_source: ClassVar[bool] = False # TODO: should be `True`, but need some cleanup for that. 
def get_data_source_template_templates(self, localizer: Localizer) -> list[DataSourceTemplate]: diff --git a/lib/dl_connector_mssql/dl_connector_mssql/core/us_connection.py b/lib/dl_connector_mssql/dl_connector_mssql/core/us_connection.py index 4e5376287..eb7377ab9 100644 --- a/lib/dl_connector_mssql/dl_connector_mssql/core/us_connection.py +++ b/lib/dl_connector_mssql/dl_connector_mssql/core/us_connection.py @@ -29,6 +29,7 @@ class ConnectionMSSQL(ClassicConnectionSQL): allowed_source_types = frozenset((SOURCE_TYPE_MSSQL_TABLE, SOURCE_TYPE_MSSQL_SUBSELECT)) allow_dashsql: ClassVar[bool] = True allow_cache: ClassVar[bool] = True + allow_export: ClassVar[bool] = True is_always_user_source: ClassVar[bool] = True @attr.s(kw_only=True) diff --git a/lib/dl_connector_mysql/dl_connector_mysql/core/us_connection.py b/lib/dl_connector_mysql/dl_connector_mysql/core/us_connection.py index 6fa59142d..358f1379e 100644 --- a/lib/dl_connector_mysql/dl_connector_mysql/core/us_connection.py +++ b/lib/dl_connector_mysql/dl_connector_mysql/core/us_connection.py @@ -25,6 +25,7 @@ class ConnectionMySQL(ClassicConnectionSQL): allowed_source_types = frozenset((SOURCE_TYPE_MYSQL_TABLE, SOURCE_TYPE_MYSQL_SUBSELECT)) allow_dashsql: ClassVar[bool] = True allow_cache: ClassVar[bool] = True + allow_export: ClassVar[bool] = True is_always_user_source: ClassVar[bool] = True @attr.s(kw_only=True) diff --git a/lib/dl_connector_oracle/dl_connector_oracle/core/us_connection.py b/lib/dl_connector_oracle/dl_connector_oracle/core/us_connection.py index 20f5badb0..22ec7a088 100644 --- a/lib/dl_connector_oracle/dl_connector_oracle/core/us_connection.py +++ b/lib/dl_connector_oracle/dl_connector_oracle/core/us_connection.py @@ -32,6 +32,7 @@ class ConnectionSQLOracle(ClassicConnectionSQL): allowed_source_types = frozenset((SOURCE_TYPE_ORACLE_TABLE, SOURCE_TYPE_ORACLE_SUBSELECT)) allow_dashsql: ClassVar[bool] = True allow_cache: ClassVar[bool] = True + allow_export: ClassVar[bool] = True 
is_always_user_source: ClassVar[bool] = True @attr.s(kw_only=True) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py index 9ae7b4292..3eabf8ffa 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py @@ -19,6 +19,7 @@ class ConnectionPostgreSQLBase(ClassicConnectionSQL): has_schema = True default_schema_name = "public" + allow_export = True @attr.s(kw_only=True) class DataModel(ClassicConnectionSQL.DataModel): diff --git a/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py b/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py index 24afdb406..062221db7 100644 --- a/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py @@ -56,7 +56,6 @@ class PromQLConnectionSchema(ConnectionMetaMixin, ClassicSQLConnectionSchema): attribute="data.password", required=False, allow_none=True, - bi_extra=FieldExtra(editable=True), ) path = DBPathField( attribute="data.path", diff --git a/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py b/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py index b104c3cd0..4d63ba4f3 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py @@ -17,6 +17,7 @@ class PromQLConnection(ClassicConnectionSQL): allow_cache: ClassVar[bool] = True is_always_user_source: ClassVar[bool] = True allow_dashsql: ClassVar[bool] = True + allow_export: ClassVar[bool] = True source_type = SOURCE_TYPE_PROMQL @attr.s(kw_only=True) diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py 
b/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py index 8c640409d..9d82b0b72 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py @@ -36,7 +36,6 @@ class SnowFlakeConnectionSchema(ConnectionSchema, RawSQLLevelMixin): client_secret = secret_string_field( attribute="data.client_secret", required=True, - bi_extra=FieldExtra(editable=True), ) schema = ma_fields.String( attribute="data.schema", @@ -57,7 +56,6 @@ class SnowFlakeConnectionSchema(ConnectionSchema, RawSQLLevelMixin): refresh_token = secret_string_field( attribute="data.refresh_token", required=False, - bi_extra=FieldExtra(editable=True), ) refresh_token_expire_time = ma_fields.DateTime( attribute="data.refresh_token_expire_time", diff --git a/lib/dl_connector_ydb/dl_connector_ydb/core/ydb/us_connection.py b/lib/dl_connector_ydb/dl_connector_ydb/core/ydb/us_connection.py index 360237f8d..fa0c5894f 100644 --- a/lib/dl_connector_ydb/dl_connector_ydb/core/ydb/us_connection.py +++ b/lib/dl_connector_ydb/dl_connector_ydb/core/ydb/us_connection.py @@ -36,6 +36,7 @@ class YDBConnection(ClassicConnectionSQL): allow_cache: ClassVar[bool] = True is_always_user_source: ClassVar[bool] = True allow_dashsql: ClassVar[bool] = True + allow_export: ClassVar[bool] = True source_type = SOURCE_TYPE_YDB_TABLE diff --git a/lib/dl_connector_ydb/dl_connector_ydb_tests/db/api/test_connection.py b/lib/dl_connector_ydb/dl_connector_ydb_tests/db/api/test_connection.py index 89221454b..ea2cd2ce8 100644 --- a/lib/dl_connector_ydb/dl_connector_ydb_tests/db/api/test_connection.py +++ b/lib/dl_connector_ydb/dl_connector_ydb_tests/db/api/test_connection.py @@ -1,7 +1,35 @@ +from typing import Optional + +from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite +from 
dl_core.us_connection_base import ConnectionBase +from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_connector_ydb_tests.db.api.base import YDBConnectionTestBase class TestYDBConnection(YDBConnectionTestBase, DefaultConnectorConnectionTestSuite): - pass + # a separate test since password=self.data.token + def test_export_connection( + self, + control_api_sync_client: SyncHttpClientBase, + saved_connection_id: str, + bi_headers: Optional[dict[str, str]], + sync_us_manager: SyncUSManager, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, expected_type=ConnectionBase) + assert isinstance(conn, ConnectionBase) + + resp = control_api_sync_client.get( + url=f"/api/v1/connections/export/{saved_connection_id}", + headers=bi_headers, + ) + + if not conn.allow_export: + assert resp.status_code == 400 + return + + assert resp.status_code == 200, resp.json + if hasattr(conn.data, "token"): + token = resp.json["connection"]["token"] + assert token == "******" diff --git a/lib/dl_core/dl_core/us_connection_base.py b/lib/dl_core/dl_core/us_connection_base.py index 378819cf3..8f2a8ecf9 100644 --- a/lib/dl_core/dl_core/us_connection_base.py +++ b/lib/dl_core/dl_core/us_connection_base.py @@ -31,6 +31,7 @@ DataSourceRole, DataSourceType, MigrationStatus, + NotificationLevel, RawSQLLevel, UserDataType, ) @@ -144,6 +145,7 @@ class ConnectionBase(USEntry, metaclass=abc.ABCMeta): allowed_source_types: ClassVar[Optional[frozenset[DataSourceType]]] = None allow_dashsql: ClassVar[bool] = False allow_cache: ClassVar[bool] = False + allow_export: ClassVar[bool] = False is_always_internal_source: ClassVar[bool] = False is_always_user_source: ClassVar[bool] = False @@ -370,6 +372,17 @@ def validate_new_data_sync( def check_for_notifications(self) -> list[Optional[NotificationReportingRecord]]: return [] + def get_import_warnings_list(self, localizer: Optional[Localizer] = None) -> list[dict]: + return [ + dict( + message="Secret fields like password, token 
etc. must be changed and resaved", + level=NotificationLevel.info, + ) + ] # TODO: localize message + + def get_export_warnings_list(self, localizer: Optional[Localizer] = None) -> list[dict]: + return [] + def get_cache_key_part(self) -> LocalKeyRepresentation: local_key_rep = LocalKeyRepresentation() local_key_rep = local_key_rep.multi_extend(