diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 000000000..7bb86242b --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,12 @@ +[bumpversion] +current_version = 1.0.0 +commit = False +tag = False + +[bumpversion:file:lib/dl_version/dl_version/__init__.py] +search = __version__ = "{current_version}" +replace = __version__ = "{new_version}" + +[bumpversion:file:lib/dl_version/pyproject.toml] +search = version = "{current_version}" +replace = version = "{new_version}" diff --git a/ci/gh_list_changes.sh b/ci/gh_list_changes.sh index 2d84392c8..d534cf052 100644 --- a/ci/gh_list_changes.sh +++ b/ci/gh_list_changes.sh @@ -4,9 +4,12 @@ set -x BASE_SHA="${1:-main}" # If outside PR, defaulting to main HEAD_SHA="${2:-$(git symbolic-ref --short HEAD)}" # Default to the current branch +DIFF_FILTER="${DIFF_FILTER:-ACMRTD}" DIVERGE_COMMIT=$(git merge-base $BASE_SHA $HEAD_SHA) -CHANGED_FILES=$(git diff --no-commit-id --name-only --diff-filter=ACMRTD $DIVERGE_COMMIT $HEAD_SHA || echo "") +CHANGED_FILES=$(git diff --no-commit-id --name-only --diff-filter=$DIFF_FILTER $DIVERGE_COMMIT $HEAD_SHA || echo "") echo "$CHANGED_FILES" + +set +x diff --git a/dl-repo.yml b/dl-repo.yml index 46119d331..e7fa29852 100644 --- a/dl-repo.yml +++ b/dl-repo.yml @@ -3,20 +3,19 @@ dl_repo: fs_editor: git + default_boilerplate_path: lib/dl_package_boilerplate + package_types: - type: terrarium # Repository tools root_path: terrarium - boilerplate_path: lib/dl_package_boilerplate - type: lib # Main libraries root_path: lib - boilerplate_path: lib/dl_package_boilerplate tags: - main_dependency_group - type: app # Apps root_path: app - boilerplate_path: lib/dl_package_boilerplate tags: - main_dependency_group - own_dependency_group @@ -40,3 +39,8 @@ dl_repo: plugins: - type: dependency_registration + + edit_exclude_masks: # Files at paths that match these patterns (via re.match) will not be edited + - ".*\\.mo" + - ".*\\.xlsx" + - ".*/__pycache__" diff --git 
a/docker_build/bake_code_gen.hcl b/docker_build/bake_code_gen.hcl index c9a696297..a60c56374 100644 --- a/docker_build/bake_code_gen.hcl +++ b/docker_build/bake_code_gen.hcl @@ -1,7 +1,7 @@ target "gen_antlr" { contexts = { - src = "${DL_B_PROJECT_ROOT}/lib/bi_formula/bi_formula/parser/antlr/" + src = "${DL_B_PROJECT_ROOT}/lib/dl_formula/dl_formula/parser/antlr/" } dockerfile = "./target_gen_antlr/Dockerfile" - output = ["type=local,dest=${DL_B_PROJECT_ROOT}/lib/bi_formula/bi_formula/parser/antlr/gen"] + output = ["type=local,dest=${DL_B_PROJECT_ROOT}/lib/dl_formula/dl_formula/parser/antlr/gen"] } diff --git a/docker_build/target_gen_antlr/Dockerfile b/docker_build/target_gen_antlr/Dockerfile index 402c9270c..64ff1fb12 100644 --- a/docker_build/target_gen_antlr/Dockerfile +++ b/docker_build/target_gen_antlr/Dockerfile @@ -11,7 +11,7 @@ RUN apk add --no-cache sed python3 py3-pip RUN pip install pycodestyle==2.10.0 autopep8==2.0.2 autoflake isort ENV AUTOGEN_DIR=/gen -ENV ANTLR_PY_PACKAGE="bi_formula.parser.antlr.gen" +ENV ANTLR_PY_PACKAGE="dl_formula.parser.antlr.gen" COPY --from=src . 
/src diff --git a/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/data.py b/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/data.py index 0a7ad47c0..beaa4ff03 100644 --- a/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/data.py +++ b/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/data.py @@ -34,7 +34,6 @@ TreeRoleSpec, ) from dl_constants.enums import ( - BIType, FieldRole, FieldType, FieldVisibility, @@ -46,6 +45,7 @@ QueryBlockPlacementType, QueryItemRefType, RangeType, + UserDataType, WhereClauseOperation, ) @@ -184,7 +184,7 @@ class LegendItemSchema(DefaultSchema[LegendItem]): id = ma_fields.String() title = ma_fields.String() role_spec = ma_fields.Nested(RoleSpecSchema) - data_type = ma_fields.Enum(BIType) + data_type = ma_fields.Enum(UserDataType) field_type = ma_fields.Enum(FieldType) item_type = ma_fields.Enum(LegendItemType) diff --git a/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/dataset.py b/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/dataset.py index cc41dd9c2..de568e769 100644 --- a/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/dataset.py +++ b/lib/dl_api_client/dl_api_client/dsmaker/api/schemas/dataset.py @@ -57,17 +57,17 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, CalcMode, ComponentErrorLevel, ComponentType, ConditionPartCalcMode, - CreateDSFrom, + DataSourceType, FieldType, JoinConditionType, JoinType, ManagedBy, ParameterValueConstraintType, + UserDataType, WhereClauseOperation, ) from dl_model_tools.schema.dynamic_enum_field import DynamicEnumField @@ -157,22 +157,22 @@ class ValueSchema(OneOfSchemaWithDumpLoadHooks): CONTEXT_KEY = "bi_value_type" type_field = "type" type_schemas = { - BIType.string.name: StringValueSchema, - BIType.integer.name: IntegerValueSchema, - BIType.float.name: FloatValueSchema, - BIType.date.name: DateValueSchema, - BIType.datetime.name: DateTimeValueSchema, - BIType.datetimetz.name: DateTimeTZValueSchema, - BIType.genericdatetime.name: 
GenericDateTimeValueSchema, - BIType.boolean.name: BooleanValueSchema, - BIType.geopoint.name: GeoPointValueSchema, - BIType.geopolygon.name: GeoPolygonValueSchema, - BIType.uuid.name: UuidValueSchema, - BIType.markup.name: MarkupValueSchema, - BIType.array_str.name: ArrayStrValueSchema, - BIType.array_int.name: ArrayIntValueSchema, - BIType.array_float.name: ArrayFloatValueSchema, - BIType.tree_str.name: TreeStrParameterValue, + UserDataType.string.name: StringValueSchema, + UserDataType.integer.name: IntegerValueSchema, + UserDataType.float.name: FloatValueSchema, + UserDataType.date.name: DateValueSchema, + UserDataType.datetime.name: DateTimeValueSchema, + UserDataType.datetimetz.name: DateTimeTZValueSchema, + UserDataType.genericdatetime.name: GenericDateTimeValueSchema, + UserDataType.boolean.name: BooleanValueSchema, + UserDataType.geopoint.name: GeoPointValueSchema, + UserDataType.geopolygon.name: GeoPolygonValueSchema, + UserDataType.uuid.name: UuidValueSchema, + UserDataType.markup.name: MarkupValueSchema, + UserDataType.array_str.name: ArrayStrValueSchema, + UserDataType.array_int.name: ArrayIntValueSchema, + UserDataType.array_float.name: ArrayFloatValueSchema, + UserDataType.tree_str.name: TreeStrParameterValue, } @pre_load(pass_many=False) @@ -227,10 +227,10 @@ class ResultFieldSchema(DefaultSchema[ResultField]): hidden = ma_fields.Boolean(load_default=False) description = ma_fields.String() formula = ma_fields.String(load_default="") - initial_data_type = ma_fields.Enum(BIType, allow_none=True) - cast = ma_fields.Enum(BIType) + initial_data_type = ma_fields.Enum(UserDataType, allow_none=True) + cast = ma_fields.Enum(UserDataType) type = ma_fields.Enum(FieldType, readonly=True) - data_type = ma_fields.Enum(BIType, allow_none=True) + data_type = ma_fields.Enum(UserDataType, allow_none=True) valid = ma_fields.Boolean(allow_none=True) avatar_id = ma_fields.String(allow_none=True) aggregation = ma_fields.Enum(AggregationFunction, 
load_default=AggregationFunction.none.name) @@ -280,7 +280,7 @@ class ColumnSchema(DefaultSchema[Column]): name = ma_fields.String() title = ma_fields.String() native_type = ma_fields.Dict(allow_none=True) - user_type = ma_fields.Enum(BIType) + user_type = ma_fields.Enum(UserDataType) description = ma_fields.String(dump_default="", allow_none=True) has_auto_aggregation = ma_fields.Boolean(dump_default=False, allow_none=True) lock_aggregation = ma_fields.Boolean(dump_default=False, allow_none=True) @@ -293,7 +293,7 @@ class DataSourceSchema(DefaultSchema[DataSource]): id = ma_fields.String() title = ma_fields.String() connection_id = ma_fields.String(allow_none=True) - source_type = DynamicEnumField(CreateDSFrom) + source_type = DynamicEnumField(DataSourceType) raw_schema = ma_fields.Nested(ColumnSchema, allow_none=True, required=False, many=True) index_info_set = ma_fields.List(ma_fields.Dict, allow_none=True) parameters = ma_fields.Dict() diff --git a/lib/dl_api_client/dl_api_client/dsmaker/primitives.py b/lib/dl_api_client/dl_api_client/dsmaker/primitives.py index 9c4f85123..982b04748 100644 --- a/lib/dl_api_client/dl_api_client/dsmaker/primitives.py +++ b/lib/dl_api_client/dl_api_client/dsmaker/primitives.py @@ -26,12 +26,11 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, CalcMode, ComponentErrorLevel, ComponentType, ConditionPartCalcMode, - CreateDSFrom, + DataSourceType, FieldRole, FieldType, FieldVisibility, @@ -45,6 +44,7 @@ QueryBlockPlacementType, QueryItemRefType, RangeType, + UserDataType, WhereClauseOperation, ) @@ -215,7 +215,7 @@ def __eq__(self, other: Any) -> bool: @attr.s class DataSource(ApiProxyObject): connection_id: str = attr.ib(default=None) - source_type: Optional[CreateDSFrom] = attr.ib(default=None, converter=CreateDSFrom.normalize) + source_type: Optional[DataSourceType] = attr.ib(default=None, converter=DataSourceType.normalize) parameters: dict = attr.ib(default=None) raw_schema: list = 
attr.ib(factory=list) index_info_set: Optional[list] = attr.ib(default=None) @@ -331,7 +331,7 @@ class _Column: # use attr.s on superclass of the "real" class so that comparison methods from ConditionMakerMixin are used title: str = attr.ib(default=None) name: str = attr.ib(default=None) - user_type: Optional[BIType] = attr.ib(default=None, converter=BIType.normalize) + user_type: Optional[UserDataType] = attr.ib(default=None, converter=UserDataType.normalize) native_type: Optional[dict] = attr.ib(default=None) nullable: bool = attr.ib(default=True) description: str = attr.ib(default="") @@ -369,88 +369,88 @@ def on(self, *conditions: List[JoinCondition]): # type: ignore # TODO: fix @attr.s class ParameterValue(Generic[_INNER_TYPE]): - type: BIType + type: UserDataType value: _INNER_TYPE = attr.ib() @attr.s class StringParameterValue(ParameterValue[str]): - type: BIType = BIType.string + type: UserDataType = UserDataType.string @attr.s class IntegerParameterValue(ParameterValue[int]): - type: BIType = BIType.integer + type: UserDataType = UserDataType.integer @attr.s class FloatParameterValue(ParameterValue[float]): - type: BIType = BIType.float + type: UserDataType = UserDataType.float @attr.s class DateParameterValue(ParameterValue[date]): - type: BIType = BIType.date + type: UserDataType = UserDataType.date @attr.s class DateTimeParameterValue(ParameterValue[datetime]): - type: BIType = BIType.datetime + type: UserDataType = UserDataType.datetime @attr.s class DateTimeTZParameterValue(ParameterValue[datetime]): - type: BIType = BIType.datetimetz + type: UserDataType = UserDataType.datetimetz @attr.s class GenericDateTimeParameterValue(ParameterValue[datetime]): - type: BIType = BIType.genericdatetime + type: UserDataType = UserDataType.genericdatetime @attr.s class BooleanParameterValue(ParameterValue[bool]): - type: BIType = BIType.boolean + type: UserDataType = UserDataType.boolean @attr.s class GeoPointParameterValue(ParameterValue[List[Union[int, float]]]): 
- type: BIType = BIType.geopoint + type: UserDataType = UserDataType.geopoint @attr.s class GeoPolygonParameterValue(ParameterValue[List[List[List[Union[int, float]]]]]): - type: BIType = BIType.geopolygon + type: UserDataType = UserDataType.geopolygon @attr.s class UuidParameterValue(ParameterValue[str]): - type: BIType = BIType.uuid + type: UserDataType = UserDataType.uuid @attr.s class MarkupParameterValue(ParameterValue[str]): - type: BIType = BIType.markup + type: UserDataType = UserDataType.markup @attr.s class ArrayStrParameterValue(ParameterValue[List[str]]): - type: BIType = BIType.array_str + type: UserDataType = UserDataType.array_str @attr.s class ArrayIntParameterValue(ParameterValue[List[int]]): - type: BIType = BIType.array_int + type: UserDataType = UserDataType.array_int @attr.s class ArrayFloatParameterValue(ParameterValue[List[float]]): - type: BIType = BIType.array_float + type: UserDataType = UserDataType.array_float @attr.s class TreeStrParameterValue(ParameterValue[List[str]]): - type: BIType = BIType.tree_str + type: UserDataType = UserDataType.tree_str @attr.s @@ -517,9 +517,9 @@ class _ResultField(ApiProxyObject): hidden: bool = attr.ib(default=False) description: str = attr.ib(default="") formula: str = attr.ib(default="") - initial_data_type: Optional[BIType] = attr.ib(default=None, converter=BIType.normalize) - cast: Optional[BIType] = attr.ib(default=None, converter=BIType.normalize) - data_type: Optional[BIType] = attr.ib(default=None, converter=BIType.normalize) + initial_data_type: Optional[UserDataType] = attr.ib(default=None, converter=UserDataType.normalize) + cast: Optional[UserDataType] = attr.ib(default=None, converter=UserDataType.normalize) + data_type: Optional[UserDataType] = attr.ib(default=None, converter=UserDataType.normalize) valid: bool = attr.ib(default=True) has_auto_aggregation: bool = attr.ib(default=False) lock_aggregation: bool = attr.ib(default=False) @@ -728,7 +728,7 @@ class LegendItem(LegendItemBase): # 
noqa legend_item_id: int = attr.ib(kw_only=True) # redefine as strictly not None id: str = attr.ib(kw_only=True) title: str = attr.ib(kw_only=True) - data_type: BIType = attr.ib(kw_only=True) + data_type: UserDataType = attr.ib(kw_only=True) field_type: FieldType = attr.ib(kw_only=True) item_type: LegendItemType = attr.ib(kw_only=True) diff --git a/lib/dl_api_client/dl_api_client/dsmaker/shortcuts/dataset.py b/lib/dl_api_client/dl_api_client/dsmaker/shortcuts/dataset.py index 3170333e8..21bd20b20 100644 --- a/lib/dl_api_client/dl_api_client/dsmaker/shortcuts/dataset.py +++ b/lib/dl_api_client/dl_api_client/dsmaker/shortcuts/dataset.py @@ -17,7 +17,7 @@ def _add_anything_to_dataset( *, - api_v1: SyncHttpDatasetApiV1, + control_api: SyncHttpDatasetApiV1, dataset: Optional[Dataset] = None, dataset_id: Optional[str] = None, updater: Callable[[Dataset], Dataset], @@ -26,25 +26,25 @@ def _add_anything_to_dataset( ) -> Dataset: if dataset is None: assert dataset_id is not None - ds = api_v1.load_dataset(dataset=Dataset(id=dataset_id)).dataset + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset else: ds = dataset ds = updater(ds) - ds_resp = api_v1.apply_updates(dataset=ds, fail_ok=True) + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) assert ds_resp.status_code == exp_status, ds_resp.response_errors ds = ds_resp.dataset if save: - ds = api_v1.save_dataset(ds).dataset + ds = control_api.save_dataset(ds).dataset return ds def add_formulas_to_dataset( *, - api_v1: SyncHttpDatasetApiV1, + api_v1: SyncHttpDatasetApiV1, # FIXME: Rename to control_api dataset: Optional[Dataset] = None, dataset_id: Optional[str] = None, formulas: Dict[str, str], @@ -57,7 +57,7 @@ def _add_formulas(ds: Dataset) -> Dataset: return ds return _add_anything_to_dataset( - api_v1=api_v1, + control_api=api_v1, dataset=dataset, dataset_id=dataset_id, updater=_add_formulas, @@ -68,7 +68,7 @@ def _add_formulas(ds: Dataset) -> Dataset: def add_parameters_to_dataset( *, - 
api_v1: SyncHttpDatasetApiV1, + api_v1: SyncHttpDatasetApiV1, # FIXME: Rename to control_api dataset: Optional[Dataset] = None, dataset_id: Optional[str] = None, parameters: Dict[str, Tuple[ParameterValue, Optional[ParameterValueConstraint]]], @@ -83,7 +83,7 @@ def _add_parameters(ds: Dataset) -> Dataset: return ds return _add_anything_to_dataset( - api_v1=api_v1, + control_api=api_v1, dataset=dataset, dataset_id=dataset_id, updater=_add_parameters, @@ -94,7 +94,7 @@ def _add_parameters(ds: Dataset) -> Dataset: def create_basic_dataset( *, - api_v1: SyncHttpDatasetApiV1, + api_v1: SyncHttpDatasetApiV1, # FIXME: Rename to control_api connection_id: str, data_source_settings: Dict[str, Any], formulas: Optional[Dict[str, str]] = None, diff --git a/lib/dl_api_client/dl_api_client_tests/unit/test_primitives.py b/lib/dl_api_client/dl_api_client_tests/unit/test_primitives.py index c0c0262ed..a0942b03f 100644 --- a/lib/dl_api_client/dl_api_client_tests/unit/test_primitives.py +++ b/lib/dl_api_client/dl_api_client_tests/unit/test_primitives.py @@ -5,8 +5,8 @@ ) from dl_constants.enums import ( BinaryJoinOperator, - BIType, CalcMode, + UserDataType, ) @@ -17,18 +17,18 @@ def test_field_condition_operators(): field_1 = ResultField( title="field_1", id="12345", - data_type=BIType.string, - initial_data_type=BIType.string, - cast=BIType.string, + data_type=UserDataType.string, + initial_data_type=UserDataType.string, + cast=UserDataType.string, calc_mode=CalcMode.direct, source="column_1", ) field_2 = ResultField( title="field_2", id="67890", - data_type=BIType.string, - initial_data_type=BIType.string, - cast=BIType.string, + data_type=UserDataType.string, + initial_data_type=UserDataType.string, + cast=UserDataType.string, calc_mode=CalcMode.direct, source="column_2", ) diff --git a/lib/dl_api_commons/dl_api_commons/logging.py b/lib/dl_api_commons/dl_api_commons/logging.py index 87132c748..a9c414a2c 100644 --- a/lib/dl_api_commons/dl_api_commons/logging.py +++ 
b/lib/dl_api_commons/dl_api_commons/logging.py @@ -17,7 +17,7 @@ import attr from dl_api_commons.headers import normalize_header_name -from dl_app_tools import log +from dl_app_tools.log import context LOGGER = logging.getLogger(__name__) @@ -340,4 +340,4 @@ def put_to_context(self, key: str, value: Any) -> None: if key in self.allowed_keys: # Each request assumed to be executed in individual ContextVars context so we don't need to pop it back # see `bi_core.flask_utils.context_var_middleware` - log.context.put_to_context(key, value) + context.put_to_context(key, value) diff --git a/lib/dl_api_commons/dl_api_commons/reporting/models.py b/lib/dl_api_commons/dl_api_commons/reporting/models.py index b625daa50..692359776 100644 --- a/lib/dl_api_commons/dl_api_commons/reporting/models.py +++ b/lib/dl_api_commons/dl_api_commons/reporting/models.py @@ -11,7 +11,7 @@ from dl_constants.enums import ( ConnectionType, NotificationLevel, - QueryType, + ReportingQueryType, ) @@ -23,10 +23,11 @@ class QueryExecutionReportingRecord(ReportingRecord): @attr.s(frozen=True, auto_attribs=True) class QueryExecutionStartReportingRecord(QueryExecutionReportingRecord): dataset_id: Optional[str] - query_type: Optional[QueryType] + query_type: Optional[ReportingQueryType] connection_type: ConnectionType conn_reporting_data: dict query: str # SQL query + workbook_id: Optional[str] @attr.s(frozen=True, auto_attribs=True) diff --git a/lib/dl_api_commons/dl_api_commons/reporting/profiler.py b/lib/dl_api_commons/dl_api_commons/reporting/profiler.py index fe61326d7..f7c46f7d7 100644 --- a/lib/dl_api_commons/dl_api_commons/reporting/profiler.py +++ b/lib/dl_api_commons/dl_api_commons/reporting/profiler.py @@ -149,6 +149,7 @@ def flush_query_report(self, query_id: str) -> None: chart_id=x_dl_context.get(DLContextKey.CHART_ID), chart_kind=x_dl_context.get(DLContextKey.CHART_KIND), response_status_code=response_status_code, + workbook_id=start_record.workbook_id, **start_record.conn_reporting_data, ) 
# TODO FIX: Change logger diff --git a/lib/dl_api_commons/dl_api_commons/sentry_config.py b/lib/dl_api_commons/dl_api_commons/sentry_config.py index 038145751..54780cddc 100644 --- a/lib/dl_api_commons/dl_api_commons/sentry_config.py +++ b/lib/dl_api_commons/dl_api_commons/sentry_config.py @@ -30,6 +30,7 @@ def configure_sentry(cfg: SentryConfig, extra_integrations: Sequence[Any] = ()) # from sentry_sdk.integrations.logging import LoggingIntegration sentry_sdk.init( dsn=cfg.dsn, + release=cfg.release, default_integrations=False, before_send=cleanup_common_secret_data, integrations=[ diff --git a/lib/dl_api_commons/dl_api_commons_tests/unit/test_reporting.py b/lib/dl_api_commons/dl_api_commons_tests/unit/test_reporting.py index 322fedf62..c665c003b 100644 --- a/lib/dl_api_commons/dl_api_commons_tests/unit/test_reporting.py +++ b/lib/dl_api_commons/dl_api_commons_tests/unit/test_reporting.py @@ -16,7 +16,7 @@ from dl_constants.api_constants import DLContextKey from dl_constants.enums import ( ConnectionType, - QueryType, + ReportingQueryType, ) @@ -35,8 +35,9 @@ "connection_id": "conn_123", "host": "8.8.8.8", }, - query_type=QueryType.external, + query_type=ReportingQueryType.external, query="SELECT 1", + workbook_id="wb_123", ) _DEFAULT_REPORT_FIELDS_FROM_START = dict( @@ -44,8 +45,9 @@ connection_id="conn_123", connection_type=CONNECTION_TYPE_TEST.name, host="8.8.8.8", - query_type=QueryType.external.name, + query_type=ReportingQueryType.external.name, query="SELECT 1", + workbook_id="wb_123", ) _CHYT_START_RECORD_TS_0 = QueryExecutionStartReportingRecord( @@ -58,8 +60,9 @@ "cluster": "my_cluster", "clique_alias": "*ch_my_clique", }, - query_type=QueryType.external, + query_type=ReportingQueryType.external, query="SELECT 1", + workbook_id=None, ) _CHYT_REPORT_FIELDS_FROM_START = dict( @@ -68,8 +71,9 @@ connection_type=CONNECTION_TYPE_CHYT_TEST.name, cluster="my_cluster", clique_alias="*ch_my_clique", - query_type=QueryType.external.name, + 
query_type=ReportingQueryType.external.name, query="SELECT 1", + workbook_id=None, ) @@ -225,6 +229,7 @@ def test_db_query_report_generation(case_name, records_seq, expected_query_data, "chart_id", "chart_kind", "response_status_code", + "workbook_id", ) if "chyt" in case_name: required_extras += ( diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/source.py b/lib/dl_api_connector/dl_api_connector/api_schema/source.py index 46bc7940c..6dba472af 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/source.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/source.py @@ -21,7 +21,7 @@ DataSourceBaseSchema, DataSourceTemplateBaseSchema, ) -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType class DataSourceSchema(OneOfSchema): @@ -65,11 +65,11 @@ def get_obj_type(self, obj: dict[str, Any]) -> str: return obj[self.type_field].name -def register_source_api_schema(source_type: CreateDSFrom, schema_cls: Type[DataSourceBaseSchema]) -> None: +def register_source_api_schema(source_type: DataSourceType, schema_cls: Type[DataSourceBaseSchema]) -> None: DataSourceSchema.type_schemas[source_type.name] = schema_cls def register_source_template_api_schema( - source_type: CreateDSFrom, schema_cls: Type[DataSourceTemplateBaseSchema] + source_type: DataSourceType, schema_cls: Type[DataSourceTemplateBaseSchema] ) -> None: DataSourceTemplateResponseSchema.type_schemas[source_type.name] = schema_cls diff --git a/lib/dl_api_connector/dl_api_connector/api_schema/source_base.py b/lib/dl_api_connector/dl_api_connector/api_schema/source_base.py index 617d434ac..c1c457f26 100644 --- a/lib/dl_api_connector/dl_api_connector/api_schema/source_base.py +++ b/lib/dl_api_connector/dl_api_connector/api_schema/source_base.py @@ -15,13 +15,13 @@ from dl_constants.enums import ( BinaryJoinOperator, - BIType, ConditionPartCalcMode, - CreateDSFrom, + DataSourceType, IndexKind, JoinConditionType, JoinType, ManagedBy, + UserDataType, ) from 
dl_core.db import ( IndexInfo, @@ -49,7 +49,7 @@ class RawSchemaColumnSchema(BaseSchema): native_type = ma_fields.Nested(OneOfNativeTypeSchema, allow_none=True) - user_type = ma_fields.Enum(BIType) + user_type = ma_fields.Enum(UserDataType) description = ma_fields.String(dump_default="", allow_none=True) has_auto_aggregation = ma_fields.Boolean(dump_default=False, allow_none=True) lock_aggregation = ma_fields.Boolean(dump_default=False, allow_none=True) @@ -100,7 +100,7 @@ class SubselectParametersSchema(SimpleParametersSchema): class DataSourceCommonSchema(BaseSchema): title = ma_fields.String(required=True) connection_id = ma_fields.String(allow_none=True) - source_type = DynamicEnumField(CreateDSFrom) + source_type = DynamicEnumField(DataSourceType) raw_schema = ma_fields.Nested(RawSchemaColumnSchema, many=True, allow_none=True) index_info_set = FrozenSetField( ma_fields.Nested(IndexInfoSchema), diff --git a/lib/dl_api_connector/dl_api_connector/connector.py b/lib/dl_api_connector/dl_api_connector/connector.py index 48cb5a072..9a996641d 100644 --- a/lib/dl_api_connector/dl_api_connector/connector.py +++ b/lib/dl_api_connector/dl_api_connector/connector.py @@ -17,6 +17,8 @@ DefaultDashSQLParamLiteralizer, ) from dl_api_connector.form_config.models.base import ConnectionFormFactory +from dl_api_lib.query.registry import MQMFactorySettingItem +from dl_constants.enums import QueryProcessingMode from dl_core.connectors.base.connector import ( CoreConnectionDefinition, CoreConnector, @@ -33,7 +35,7 @@ ) from dl_query_processing.multi_query.factory import ( DefaultMultiQueryMutatorFactory, - MultiQueryMutatorFactoryBase, + NoCompengMultiQueryMutatorFactory, ) @@ -46,7 +48,7 @@ class ApiSourceDefinition(abc.ABC): class ApiConnectionDefinition(abc.ABC): core_conn_def_cls: ClassVar[Type[CoreConnectionDefinition]] api_generic_schema_cls: ClassVar[Type[ConnectionSchema]] - alias: ClassVar[Optional[str]] = None + alias: ClassVar[Optional[str]] = None # TODO remove in favor 
of info provider info_provider_cls: ClassVar[Type[ConnectionInfoProvider]] form_factory_cls: ClassVar[Optional[Type[ConnectionFormFactory]]] = None @@ -54,9 +56,16 @@ class ApiConnectionDefinition(abc.ABC): class ApiConnector(abc.ABC): # backend_type-bound properties - TODO: move to a separate entity formula_dialect_name: ClassVar[DialectName] = DialectName.DUMMY - default_multi_query_mutator_factory_cls: ClassVar[ - Type[MultiQueryMutatorFactoryBase] - ] = DefaultMultiQueryMutatorFactory + multi_query_mutation_factories: tuple[MQMFactorySettingItem, ...] = ( + MQMFactorySettingItem( + query_proc_mode=QueryProcessingMode.basic, + factory_cls=DefaultMultiQueryMutatorFactory, + ), + MQMFactorySettingItem( + query_proc_mode=QueryProcessingMode.no_compeng, + factory_cls=NoCompengMultiQueryMutatorFactory, + ), + ) is_forkable: ClassVar[bool] = True is_compeng_executable: ClassVar[bool] = False filter_formula_compiler_cls: ClassVar[Type[FilterFormulaCompiler]] = MainFilterFormulaCompiler diff --git a/lib/dl_api_connector/dl_api_connector/dashsql.py b/lib/dl_api_connector/dl_api_connector/dashsql.py index b4170c1ec..985995979 100644 --- a/lib/dl_api_connector/dl_api_connector/dashsql.py +++ b/lib/dl_api_connector/dl_api_connector/dashsql.py @@ -3,32 +3,32 @@ import sqlalchemy as sa from sqlalchemy.types import TypeEngine -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.exc import DashSQLError TValueBase = str | list[str] | tuple[str, ...] 
-BI_TYPE_TO_SA_TYPE: dict[BIType, TypeEngine] = { - BIType.string: sa.TEXT(), - BIType.integer: sa.BIGINT(), - BIType.float: sa.FLOAT(), - BIType.date: sa.DATE(), - BIType.datetime: sa.DATETIME(), - BIType.boolean: sa.BOOLEAN(), - BIType.datetimetz: sa.DATETIME(timezone=True), - BIType.genericdatetime: sa.DATETIME(), +BI_TYPE_TO_SA_TYPE: dict[UserDataType, TypeEngine] = { + UserDataType.string: sa.TEXT(), + UserDataType.integer: sa.BIGINT(), + UserDataType.float: sa.FLOAT(), + UserDataType.date: sa.DATE(), + UserDataType.datetime: sa.DATETIME(), + UserDataType.boolean: sa.BOOLEAN(), + UserDataType.datetimetz: sa.DATETIME(timezone=True), + UserDataType.genericdatetime: sa.DATETIME(), } class DashSQLParamLiteralizer(abc.ABC): @abc.abstractmethod - def get_sa_type(self, bi_type: BIType, value_base: TValueBase) -> TypeEngine: + def get_sa_type(self, bi_type: UserDataType, value_base: TValueBase) -> TypeEngine: raise NotImplementedError class DefaultDashSQLParamLiteralizer(DashSQLParamLiteralizer): - def get_sa_type(self, bi_type: BIType, value_base: TValueBase) -> TypeEngine: + def get_sa_type(self, bi_type: UserDataType, value_base: TValueBase) -> TypeEngine: try: sa_type = BI_TYPE_TO_SA_TYPE[bi_type] return sa_type diff --git a/lib/dl_api_lib/dl_api_lib/api_common/data_types.py b/lib/dl_api_lib/dl_api_lib/api_common/data_types.py index 1ea17993f..39cf23555 100644 --- a/lib/dl_api_lib/dl_api_lib/api_common/data_types.py +++ b/lib/dl_api_lib/dl_api_lib/api_common/data_types.py @@ -1,29 +1,29 @@ from __future__ import annotations -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType _BI_TO_YQL = { - BIType.string: "String", - BIType.integer: "Int64", - BIType.float: "Double", - BIType.date: "Date", - BIType.datetime: "Datetime", - BIType.datetimetz: "DatetimeTZ", - BIType.genericdatetime: "GenericDatetime", - BIType.boolean: "Bool", - BIType.geopoint: "GeoPoint", - BIType.geopolygon: "GeoPolygon", - BIType.uuid: "UUID", - BIType.markup: 
"Markup", - BIType.array_str: "ArrayStr", - BIType.array_int: "ArrayInt", - BIType.array_float: "ArrayFloat", - BIType.tree_str: "TreeStr", - # Should not ever be in the output: `BIType.unsupported` + UserDataType.string: "String", + UserDataType.integer: "Int64", + UserDataType.float: "Double", + UserDataType.date: "Date", + UserDataType.datetime: "Datetime", + UserDataType.datetimetz: "DatetimeTZ", + UserDataType.genericdatetime: "GenericDatetime", + UserDataType.boolean: "Bool", + UserDataType.geopoint: "GeoPoint", + UserDataType.geopolygon: "GeoPolygon", + UserDataType.uuid: "UUID", + UserDataType.markup: "Markup", + UserDataType.array_str: "ArrayStr", + UserDataType.array_int: "ArrayInt", + UserDataType.array_float: "ArrayFloat", + UserDataType.tree_str: "TreeStr", + # Should not ever be in the output: `UserDataType.unsupported` } # TODO: Legacy stuff. Should be removed with data-api-v1 -def bi_to_yql(bi_type: BIType) -> str: +def bi_to_yql(bi_type: UserDataType) -> str: return _BI_TO_YQL[bi_type] diff --git a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/dataset_base.py b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/dataset_base.py index a5af575d7..fd08bd72f 100644 --- a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/dataset_base.py +++ b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/dataset_base.py @@ -33,9 +33,9 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, ConnectionType, ManagedBy, + UserDataType, ) from dl_constants.exc import ( DEFAULT_ERR_CODE_API_PREFIX, @@ -246,7 +246,7 @@ def dump_option_data( casts=CASTS_BY_TYPE.get(user_type, []), filter_operations=sfm.get_supported_filters(dialect=funcs_dialect, user_type=user_type), ) - for user_type in BIType + for user_type in UserDataType ], ) diff --git a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/info.py b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/info.py index ca8dac064..0bdba093e 100644 --- 
a/lib/dl_api_lib/dl_api_lib/app/control_api/resources/info.py +++ b/lib/dl_api_lib/dl_api_lib/app/control_api/resources/info.py @@ -16,8 +16,8 @@ from dl_api_lib.public.entity_usage_checker import PublicEnvEntityUsageChecker from dl_api_lib.schemas.main import BadRequestResponseSchema from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core.exc import EntityUsageNotAllowed from dl_core.us_dataset import Dataset @@ -56,7 +56,14 @@ def get(self): # type: ignore # TODO: fix "types": [ {"name": k.name, "aggregations": [x.name for x in v]} for k, v in BI_TYPE_AGGREGATIONS.items() - if k not in (BIType.uuid, BIType.markup, BIType.datetimetz, BIType.datetime, BIType.unsupported) + if k + not in ( + UserDataType.uuid, + UserDataType.markup, + UserDataType.datetimetz, + UserDataType.datetime, + UserDataType.unsupported, + ) ] } diff --git a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dashsql.py b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dashsql.py index df3424c96..6687464bf 100644 --- a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dashsql.py +++ b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dashsql.py @@ -28,8 +28,8 @@ from dl_api_lib.utils.base import need_permission_on_entry from dl_app_tools.profiling_base import generic_profiler_async from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core.backend_types import get_backend_type from dl_core.data_processing.dashsql import ( @@ -61,20 +61,20 @@ TRowProcessor = Callable[[TRow], TRow] -def parse_value(value: Optional[str], bi_type: BIType) -> Any: +def parse_value(value: Optional[str], bi_type: UserDataType) -> Any: if value is None: return None - if bi_type == BIType.string: + if bi_type == UserDataType.string: return value - if bi_type == BIType.integer: + if bi_type == UserDataType.integer: return int(value) - if bi_type == BIType.float: + if bi_type == UserDataType.float: return float(value) - if bi_type == BIType.date: + if bi_type 
== UserDataType.date: return datetime.datetime.strptime(value, "%Y-%m-%d").date() - if bi_type == BIType.datetime: + if bi_type == UserDataType.datetime: return parse_datetime(value) - if bi_type == BIType.boolean: + if bi_type == UserDataType.boolean: if value == "true": return True if value == "false": @@ -88,7 +88,7 @@ def make_param_obj(name: str, param: dict, conn_type: ConnectionType) -> BindPar value_base: TValueBase = param["value"] try: - bi_type = BIType[type_name] + bi_type = UserDataType[type_name] except KeyError: raise DashSQLError(f"Unknown type name {type_name!r}") @@ -248,8 +248,8 @@ async def post(self) -> web.Response: need_permission_on_entry(conn, USPermissionKind.execute) # TODO: instead of this, use something like: - # formula_dialect = bi_formula.core.dialect.from_name_and_version(conn.get_dialect().name) - # bindparam = bi_formula.definitions.literals.literal(parsed_value, formula_dialect) + # formula_dialect = dl_formula.core.dialect.from_name_and_version(conn.get_dialect().name) + # bindparam = dl_formula.definitions.literals.literal(parsed_value, formula_dialect) # (but account for `expanding`) conn_type = conn.conn_type param_objs = None diff --git a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/base.py b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/base.py index 5033d8e28..f920bea8c 100644 --- a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/base.py +++ b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/base.py @@ -408,8 +408,7 @@ async def execute_all_queries( post_paginator = paginator.get_post_paginator() block_legend = pre_paginator.pre_paginate(block_legend=block_legend) - concurrency_limit = int(os.environ.get("DATASET_CONCURRENCY_LIMIT", 5)) - runner = ConcurrentTaskRunner(concurrency_limit=concurrency_limit) + runner = ConcurrentTaskRunner() for block_spec in block_legend.blocks: await runner.schedule(self.execute_query(block_spec=block_spec)) executed_queries = await runner.finalize() 
diff --git a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/result.py b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/result.py index 4dbbb13c3..5b591cd0f 100644 --- a/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/result.py +++ b/lib/dl_api_lib/dl_api_lib/app/data_api/resources/dataset/result.py @@ -23,9 +23,9 @@ import dl_api_lib.schemas.main from dl_app_tools.profiling_base import generic_profiler_async from dl_constants.enums import ( - BIType, FieldRole, FieldType, + UserDataType, ) from dl_query_processing.enums import ( EmptyQueryMode, @@ -130,7 +130,7 @@ async def _make_totals( updated_item = item.clone( obj=PlaceholderObjSpec(), role_spec=TemplateRoleSpec(role=FieldRole.template, template=None), - data_type=BIType.string, + data_type=UserDataType.string, ) else: assert isinstance(item.obj, FieldObjSpec) diff --git a/lib/dl_api_lib/dl_api_lib/app_common.py b/lib/dl_api_lib/dl_api_lib/app_common.py index a8659e1eb..81b91f4e2 100644 --- a/lib/dl_api_lib/dl_api_lib/app_common.py +++ b/lib/dl_api_lib/dl_api_lib/app_common.py @@ -179,5 +179,6 @@ def get_sr_factory( connector_availability=self._get_connector_availability(settings), inst_specific_sr_factory=self._get_inst_specific_sr_factory(settings), force_non_rqe_mode=settings.RQE_FORCE_OFF, + query_proc_mode=settings.QUERY_PROCESSING_MODE, ) return sr_factory diff --git a/lib/dl_api_lib/dl_api_lib/app_settings.py b/lib/dl_api_lib/dl_api_lib/app_settings.py index bdf7f549d..934bc4710 100644 --- a/lib/dl_api_lib/dl_api_lib/app_settings.py +++ b/lib/dl_api_lib/dl_api_lib/app_settings.py @@ -21,7 +21,10 @@ from dl_configs.settings_loaders.settings_obj_base import SettingsBase from dl_configs.settings_submodels import RedisSettings from dl_configs.utils import split_by_comma -from dl_constants.enums import USAuthMode +from dl_constants.enums import ( + QueryProcessingMode, + USAuthMode, +) from dl_core.components.ids import FieldIdGeneratorType from dl_formula.parser.factory import 
ParserType @@ -131,6 +134,12 @@ class AppSettings: DEFAULT_LOCALE: Optional[str] = "en" + QUERY_PROCESSING_MODE: QueryProcessingMode = s_attrib( # type: ignore + "QUERY_PROCESSING_MODE", + env_var_converter=lambda s: QueryProcessingMode[s.lower()], + missing=QueryProcessingMode.basic, + ) + @attr.s(frozen=True) class ControlApiAppSettings(AppSettings): diff --git a/lib/dl_api_lib/dl_api_lib/connector_registrator.py b/lib/dl_api_lib/dl_api_lib/connector_registrator.py index 4fbcba3b3..d416b9ecd 100644 --- a/lib/dl_api_lib/dl_api_lib/connector_registrator.py +++ b/lib/dl_api_lib/dl_api_lib/connector_registrator.py @@ -51,11 +51,13 @@ def register_connector(cls, connector: Type[ApiConnector]) -> None: # backend_type-related stuff - TODO: Move to a separate entity backend_type = connector.core_connector_cls.backend_type register_dialect_name(backend_type=backend_type, dialect_name=connector.formula_dialect_name) - register_multi_query_mutator_factory_cls( - backend_type=backend_type, - dialects=(None,), - factory_cls=connector.default_multi_query_mutator_factory_cls, - ) + for mqm_setting_item in connector.multi_query_mutation_factories: + register_multi_query_mutator_factory_cls( + query_proc_mode=mqm_setting_item.query_proc_mode, + backend_type=backend_type, + dialects=mqm_setting_item.dialects, + factory_cls=mqm_setting_item.factory_cls, + ) register_is_forkable_source(backend_type=backend_type, is_forkable=connector.is_forkable) register_is_compeng_executable(backend_type=backend_type, is_compeng_executable=connector.is_compeng_executable) register_filter_formula_compiler_cls( diff --git a/lib/dl_api_lib/dl_api_lib/dataset/base_wrapper.py b/lib/dl_api_lib/dl_api_lib/dataset/base_wrapper.py index c98595202..5057dcaac 100644 --- a/lib/dl_api_lib/dl_api_lib/dataset/base_wrapper.py +++ b/lib/dl_api_lib/dl_api_lib/dataset/base_wrapper.py @@ -14,7 +14,6 @@ from dl_api_lib.query.formalization.query_formalizer_base import QuerySpecFormalizerBase from 
dl_api_lib.query.registry import ( get_compeng_dialect, - get_multi_query_mutator_factory, is_compeng_enabled, ) from dl_api_lib.service_registry.service_registry import ApiServiceRegistry @@ -236,13 +235,12 @@ def make_query_mutators(self) -> Sequence[QueryMutator]: def make_multi_query_mutators(self) -> Sequence[MultiQueryMutatorBase]: backend_type = self.get_backend_type() - factory = get_multi_query_mutator_factory( + mqm_factory_factory = self._service_registry.get_multi_query_mutator_factory_factory() + return mqm_factory_factory.get_multi_query_mutators( backend_type=backend_type, + dataset=self._ds, dialect=self.dialect, - result_schema=self._ds.result_schema, ) - mutators = factory.get_mutators() - return mutators def make_multi_query_translator(self) -> MultiLevelQueryTranslator: assert self.inspect_env is not None diff --git a/lib/dl_api_lib/dl_api_lib/dataset/validator.py b/lib/dl_api_lib/dl_api_lib/dataset/validator.py index 43a2ffd5c..ec7d8a9a7 100644 --- a/lib/dl_api_lib/dl_api_lib/dataset/validator.py +++ b/lib/dl_api_lib/dl_api_lib/dataset/validator.py @@ -55,8 +55,8 @@ ComponentErrorLevel, ComponentType, ConnectionType, - CreateDSFrom, DataSourceRole, + DataSourceType, ManagedBy, TopLevelComponentId, ) @@ -1448,7 +1448,7 @@ def _migrate_source_parameters( old_connection: Optional[ConnectionBase], new_connection: ConnectionBase, dsrc: DataSource, - ) -> tuple[dict, CreateDSFrom]: + ) -> tuple[dict, DataSourceType]: old_conn_type: ConnectionType if old_connection is not None: old_conn_type = old_connection.conn_type diff --git a/lib/dl_api_lib/dl_api_lib/enums.py b/lib/dl_api_lib/dl_api_lib/enums.py index 851c37c8b..bc423dc3c 100644 --- a/lib/dl_api_lib/dl_api_lib/enums.py +++ b/lib/dl_api_lib/dl_api_lib/enums.py @@ -11,7 +11,7 @@ from typing import Set from dl_constants.enums import ( - BIType, + UserDataType, WhereClauseOperation, ) from dl_constants.enums import AggregationFunction as ag @@ -54,98 +54,119 @@ ) FILTERS_BY_TYPE = { - 
BIType.datetime: _FILT_NULL | _FILT_COMMON, - BIType.datetimetz: _FILT_NULL | _FILT_COMMON, - BIType.genericdatetime: _FILT_NULL | _FILT_COMMON, - BIType.date: _FILT_NULL | _FILT_COMMON, - BIType.boolean: _FILT_NULL | _FILT_MINIMAL, - BIType.integer: _FILT_NULL | _FILT_COMMON | _FILT_STR, - BIType.float: _FILT_NULL | _FILT_COMMON, - BIType.string: _FILT_NULL | _FILT_COMMON | _FILT_STR, - BIType.geopoint: _FILT_NULL | _FILT_MINIMAL, - BIType.geopolygon: _FILT_NULL | _FILT_MINIMAL, - BIType.uuid: _FILT_NULL | _FILT_COMMON | _FILT_STR, + UserDataType.datetime: _FILT_NULL | _FILT_COMMON, + UserDataType.datetimetz: _FILT_NULL | _FILT_COMMON, + UserDataType.genericdatetime: _FILT_NULL | _FILT_COMMON, + UserDataType.date: _FILT_NULL | _FILT_COMMON, + UserDataType.boolean: _FILT_NULL | _FILT_MINIMAL, + UserDataType.integer: _FILT_NULL | _FILT_COMMON | _FILT_STR, + UserDataType.float: _FILT_NULL | _FILT_COMMON, + UserDataType.string: _FILT_NULL | _FILT_COMMON | _FILT_STR, + UserDataType.geopoint: _FILT_NULL | _FILT_MINIMAL, + UserDataType.geopolygon: _FILT_NULL | _FILT_MINIMAL, + UserDataType.uuid: _FILT_NULL | _FILT_COMMON | _FILT_STR, # intentionally not supporting comparison between markups: # it has too much dependency on internal optimizations. 
- BIType.markup: _FILT_NULL, - BIType.unsupported: _FILT_NULL, - BIType.array_float: _FILT_ARRAY, - BIType.array_int: _FILT_ARRAY, - BIType.array_str: _FILT_ARRAY, - BIType.tree_str: _FILT_ARRAY, + UserDataType.markup: _FILT_NULL, + UserDataType.unsupported: _FILT_NULL, + UserDataType.array_float: _FILT_ARRAY, + UserDataType.array_int: _FILT_ARRAY, + UserDataType.array_str: _FILT_ARRAY, + UserDataType.tree_str: _FILT_ARRAY, } CASTS_BY_TYPE = { - BIType.datetime: [ - BIType.genericdatetime, - BIType.datetime, - BIType.date, - BIType.string, - BIType.integer, - BIType.float, - BIType.boolean, + UserDataType.datetime: [ + UserDataType.genericdatetime, + UserDataType.datetime, + UserDataType.date, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.boolean, ], - BIType.datetimetz: [ - BIType.datetimetz, - BIType.genericdatetime, - BIType.date, - BIType.string, - BIType.integer, - BIType.float, - BIType.boolean, + UserDataType.datetimetz: [ + UserDataType.datetimetz, + UserDataType.genericdatetime, + UserDataType.date, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.boolean, ], - BIType.genericdatetime: [ - BIType.genericdatetime, - BIType.date, - BIType.string, - BIType.integer, - BIType.float, - BIType.boolean, + UserDataType.genericdatetime: [ + UserDataType.genericdatetime, + UserDataType.date, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.boolean, ], - BIType.date: [BIType.date, BIType.genericdatetime, BIType.string, BIType.integer, BIType.float, BIType.boolean], - BIType.boolean: [BIType.boolean, BIType.integer, BIType.float, BIType.string], - BIType.integer: [BIType.integer, BIType.float, BIType.string, BIType.boolean, BIType.date, BIType.genericdatetime], - BIType.float: [BIType.float, BIType.integer, BIType.string, BIType.boolean, BIType.date, BIType.genericdatetime], - BIType.string: [ - BIType.string, - BIType.date, - BIType.genericdatetime, - 
BIType.boolean, - BIType.integer, - BIType.float, - BIType.geopoint, - BIType.geopolygon, + UserDataType.date: [ + UserDataType.date, + UserDataType.genericdatetime, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.boolean, ], - BIType.geopoint: [BIType.geopoint, BIType.string], - BIType.geopolygon: [BIType.geopolygon, BIType.string], - BIType.uuid: [BIType.uuid, BIType.string], - BIType.markup: [BIType.markup], - BIType.unsupported: [BIType.unsupported], # `, BIType.string` would be too implicit. - BIType.array_float: [BIType.array_float, BIType.string], - BIType.array_int: [BIType.array_int, BIType.string], - BIType.array_str: [BIType.array_str, BIType.string], - BIType.tree_str: [BIType.tree_str], + UserDataType.boolean: [UserDataType.boolean, UserDataType.integer, UserDataType.float, UserDataType.string], + UserDataType.integer: [ + UserDataType.integer, + UserDataType.float, + UserDataType.string, + UserDataType.boolean, + UserDataType.date, + UserDataType.genericdatetime, + ], + UserDataType.float: [ + UserDataType.float, + UserDataType.integer, + UserDataType.string, + UserDataType.boolean, + UserDataType.date, + UserDataType.genericdatetime, + ], + UserDataType.string: [ + UserDataType.string, + UserDataType.date, + UserDataType.genericdatetime, + UserDataType.boolean, + UserDataType.integer, + UserDataType.float, + UserDataType.geopoint, + UserDataType.geopolygon, + ], + UserDataType.geopoint: [UserDataType.geopoint, UserDataType.string], + UserDataType.geopolygon: [UserDataType.geopolygon, UserDataType.string], + UserDataType.uuid: [UserDataType.uuid, UserDataType.string], + UserDataType.markup: [UserDataType.markup], + UserDataType.unsupported: [UserDataType.unsupported], # `, UserDataType.string` would be too implicit. 
+ UserDataType.array_float: [UserDataType.array_float, UserDataType.string], + UserDataType.array_int: [UserDataType.array_int, UserDataType.string], + UserDataType.array_str: [UserDataType.array_str, UserDataType.string], + UserDataType.tree_str: [UserDataType.tree_str], } _AGG_BASIC = [ag.none, ag.count] BI_TYPE_AGGREGATIONS = { - BIType.string: _AGG_BASIC + [ag.countunique], - BIType.integer: _AGG_BASIC + [ag.sum, ag.avg, ag.min, ag.max, ag.countunique], - BIType.float: _AGG_BASIC + [ag.sum, ag.avg, ag.min, ag.max, ag.countunique], - BIType.date: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], - BIType.datetime: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], - BIType.datetimetz: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], - BIType.genericdatetime: _AGG_BASIC + [ag.min, ag.max, ag.countunique], # TODO: 'avg'? - BIType.boolean: _AGG_BASIC + [], - BIType.geopoint: _AGG_BASIC + [], - BIType.geopolygon: _AGG_BASIC + [], - BIType.uuid: _AGG_BASIC + [ag.countunique], - BIType.markup: _AGG_BASIC + [], # TODO: 'any' - BIType.unsupported: [ag.none], # only explicit formula-based processing is allowed - BIType.array_float: _AGG_BASIC + [ag.countunique], - BIType.array_int: _AGG_BASIC + [ag.countunique], - BIType.array_str: _AGG_BASIC + [ag.countunique], - BIType.tree_str: _AGG_BASIC + [ag.countunique], + UserDataType.string: _AGG_BASIC + [ag.countunique], + UserDataType.integer: _AGG_BASIC + [ag.sum, ag.avg, ag.min, ag.max, ag.countunique], + UserDataType.float: _AGG_BASIC + [ag.sum, ag.avg, ag.min, ag.max, ag.countunique], + UserDataType.date: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], + UserDataType.datetime: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], + UserDataType.datetimetz: _AGG_BASIC + [ag.min, ag.max, ag.countunique, ag.avg], + UserDataType.genericdatetime: _AGG_BASIC + [ag.min, ag.max, ag.countunique], # TODO: 'avg'? 
+ UserDataType.boolean: _AGG_BASIC + [], + UserDataType.geopoint: _AGG_BASIC + [], + UserDataType.geopolygon: _AGG_BASIC + [], + UserDataType.uuid: _AGG_BASIC + [ag.countunique], + UserDataType.markup: _AGG_BASIC + [], # TODO: 'any' + UserDataType.unsupported: [ag.none], # only explicit formula-based processing is allowed + UserDataType.array_float: _AGG_BASIC + [ag.countunique], + UserDataType.array_int: _AGG_BASIC + [ag.countunique], + UserDataType.array_str: _AGG_BASIC + [ag.countunique], + UserDataType.tree_str: _AGG_BASIC + [ag.countunique], } diff --git a/lib/dl_api_lib/dl_api_lib/error_handling.py b/lib/dl_api_lib/dl_api_lib/error_handling.py index 08171e6fc..592443c78 100644 --- a/lib/dl_api_lib/dl_api_lib/error_handling.py +++ b/lib/dl_api_lib/dl_api_lib/error_handling.py @@ -56,7 +56,6 @@ exc.RLSConfigParsingError: status.BAD_REQUEST, common_exc.RLSSubjectNotFound: status.BAD_REQUEST, exc.FeatureNotAvailable: status.BAD_REQUEST, - dl_query_processing.exc.ObligatoryFilterMissing: status.BAD_REQUEST, dl_query_processing.exc.FilterError: status.BAD_REQUEST, exc.UnsupportedForEntityType: status.BAD_REQUEST, common_exc.SourceAvatarNotFound: status.BAD_REQUEST, diff --git a/lib/dl_api_lib/dl_api_lib/pivot/sort_strategy.py b/lib/dl_api_lib/dl_api_lib/pivot/sort_strategy.py index d1d6debde..c54a7e9cd 100644 --- a/lib/dl_api_lib/dl_api_lib/pivot/sort_strategy.py +++ b/lib/dl_api_lib/dl_api_lib/pivot/sort_strategy.py @@ -28,10 +28,10 @@ MeasureNameValue, ) from dl_constants.enums import ( - BIType, FieldRole, OrderDirection, PivotRole, + UserDataType, ) @@ -165,9 +165,9 @@ def _make_value_converter(self) -> Optional[Callable[[Any], Any]]: assert len(data_types) == 1, "Only single data type is supported within a pivot dimension" data_type = next(iter(data_types)) # Normalize numbers for correct sorting - if data_type is BIType.integer: + if data_type is UserDataType.integer: return int - if data_type is BIType.float: + if data_type is UserDataType.float: return 
float return None diff --git a/lib/dl_api_lib/dl_api_lib/query/formalization/legend_formalizer.py b/lib/dl_api_lib/dl_api_lib/query/formalization/legend_formalizer.py index a06122a09..aa059d94d 100644 --- a/lib/dl_api_lib/dl_api_lib/query/formalization/legend_formalizer.py +++ b/lib/dl_api_lib/dl_api_lib/query/formalization/legend_formalizer.py @@ -29,11 +29,11 @@ RawTreeRoleSpec, ) from dl_constants.enums import ( - BIType, CalcMode, FieldRole, FieldType, ManagedBy, + UserDataType, WhereClauseOperation, ) import dl_core.exc as core_exc @@ -62,7 +62,7 @@ DATA_TYPES_SUPPORTING_TREE = frozenset( { - BIType.tree_str, + UserDataType.tree_str, } ) @@ -159,7 +159,7 @@ def _resolve_item_spec( obj=MeasureNameObjSpec(), block_id=item_spec.block_id, role_spec=role_spec, - data_type=BIType.string, + data_type=UserDataType.string, field_type=FieldType.DIMENSION, ) elif isinstance(item_spec.ref, PlaceholderRef): @@ -168,7 +168,7 @@ def _resolve_item_spec( obj=PlaceholderObjSpec(), block_id=item_spec.block_id, role_spec=role_spec, - data_type=BIType.string, + data_type=UserDataType.string, field_type=FieldType.DIMENSION, ) else: @@ -439,7 +439,7 @@ def patch_legend(self, legend: Legend, id_gen: IdGenerator) -> None: obj=MeasureNameObjSpec(), # using `row` here would corrupt the expected structure, so use `info` role_spec=RoleSpec(role=FieldRole.info), - data_type=BIType.string, + data_type=UserDataType.string, field_type=FieldType.DIMENSION, ) ) @@ -453,7 +453,7 @@ def patch_legend(self, legend: Legend, id_gen: IdGenerator) -> None: obj=DimensionNameObjSpec(), # using `row` here would corrupt the expected structure, so use `info` role_spec=RoleSpec(role=FieldRole.info), - data_type=BIType.string, + data_type=UserDataType.string, field_type=FieldType.DIMENSION, ) ) diff --git a/lib/dl_api_lib/dl_api_lib/query/formalization/query_formalizer.py b/lib/dl_api_lib/dl_api_lib/query/formalization/query_formalizer.py index ea637895b..87f8db234 100644 --- 
a/lib/dl_api_lib/dl_api_lib/query/formalization/query_formalizer.py +++ b/lib/dl_api_lib/dl_api_lib/query/formalization/query_formalizer.py @@ -21,7 +21,6 @@ from dl_api_lib.query.formalization.field_resolver import FieldResolver from dl_api_lib.query.formalization.query_formalizer_base import QuerySpecFormalizerBase from dl_constants.enums import ( - BIType, CalcMode, DataSourceRole, FieldRole, @@ -29,6 +28,7 @@ OrderDirection, RangeType, RLSSubjectType, + UserDataType, WhereClauseOperation, ) from dl_core.components.accessor import DatasetComponentAccessor @@ -363,7 +363,7 @@ def _ensure_not_measure(self, field: BIField) -> None: ) def _ensure_not_unsupported_type(self, field: BIField) -> None: - if field.cast == BIType.unsupported: + if field.cast == UserDataType.unsupported: raise dl_query_processing.exc.LogicError(f"Cannot select fields of unsupported type: {field.title}") def validate_select_field(self, block_spec: BlockSpec, field: BIField) -> None: diff --git a/lib/dl_api_lib/dl_api_lib/query/registry.py b/lib/dl_api_lib/dl_api_lib/query/registry.py index 3b7f81cb7..0ff165fca 100644 --- a/lib/dl_api_lib/dl_api_lib/query/registry.py +++ b/lib/dl_api_lib/dl_api_lib/query/registry.py @@ -4,7 +4,12 @@ Type, ) -from dl_constants.enums import SourceBackendType +import attr + +from dl_constants.enums import ( + QueryProcessingMode, + SourceBackendType, +) from dl_core.fields import ResultSchema from dl_formula.core.dialect import DialectCombo from dl_query_processing.compilation.filter_compiler import ( @@ -65,31 +70,53 @@ def register_is_compeng_executable(backend_type: SourceBackendType, is_compeng_e _IS_COMPENG_EXECUTABLE_BACKEND_TYPE[backend_type] = is_compeng_executable -_MQM_FACTORY_REGISTRY: dict[tuple[SourceBackendType, Optional[DialectCombo]], Type[MultiQueryMutatorFactoryBase]] = {} +@attr.s(frozen=True, auto_attribs=True, kw_only=True) +class MQMFactoryKey: + query_proc_mode: QueryProcessingMode + backend_type: SourceBackendType + dialect: 
Optional[DialectCombo] + + +@attr.s(frozen=True, auto_attribs=True, kw_only=True) +class MQMFactorySettingItem: + query_proc_mode: QueryProcessingMode + factory_cls: Type[MultiQueryMutatorFactoryBase] + dialects: Collection[Optional[DialectCombo]] = attr.ib(default=(None,)) + + +_MQM_FACTORY_REGISTRY: dict[MQMFactoryKey, Type[MultiQueryMutatorFactoryBase]] = {} def get_multi_query_mutator_factory( + query_proc_mode: QueryProcessingMode, backend_type: SourceBackendType, dialect: DialectCombo, result_schema: ResultSchema, -) -> MultiQueryMutatorFactoryBase: +) -> Optional[MultiQueryMutatorFactoryBase]: factory_cls = _MQM_FACTORY_REGISTRY.get( - (backend_type, dialect), # First try with exact dialect + # First try with exact dialect + MQMFactoryKey(query_proc_mode=query_proc_mode, backend_type=backend_type, dialect=dialect), _MQM_FACTORY_REGISTRY.get( - (backend_type, None), # Then try without the dialect, just the backend + # Then try without the dialect, just the backend + MQMFactoryKey(query_proc_mode=query_proc_mode, backend_type=backend_type, dialect=None), DefaultMultiQueryMutatorFactory, # If still nothing, then use the default ), ) + + if factory_cls is None: + return None + return factory_cls(result_schema=result_schema) def register_multi_query_mutator_factory_cls( + query_proc_mode: QueryProcessingMode, backend_type: SourceBackendType, dialects: Collection[Optional[DialectCombo]], factory_cls: Type[MultiQueryMutatorFactoryBase], ) -> None: for dialect in dialects: - key = (backend_type, dialect) + key = MQMFactoryKey(query_proc_mode=query_proc_mode, backend_type=backend_type, dialect=dialect) if key in _MQM_FACTORY_REGISTRY: assert _MQM_FACTORY_REGISTRY[key] is factory_cls else: diff --git a/lib/dl_api_lib/dl_api_lib/request_model/data.py b/lib/dl_api_lib/dl_api_lib/request_model/data.py index 3d32c3421..a4234817d 100644 --- a/lib/dl_api_lib/dl_api_lib/request_model/data.py +++ b/lib/dl_api_lib/dl_api_lib/request_model/data.py @@ -18,10 +18,10 @@ ) from 
dl_constants.enums import ( AggregationFunction, - BIType, CalcMode, FieldType, ManagedBy, + UserDataType, ) from dl_core.fields import CalculationSpec from dl_core.values import BIValue @@ -81,7 +81,7 @@ class UpdateField(FieldBase): lock_aggregation: Optional[bool] = attr.ib(default=None) formula: Optional[str] = attr.ib(default=None) guid_formula: Optional[str] = attr.ib(default=None) - cast: Optional[BIType] = attr.ib(default=None) + cast: Optional[UserDataType] = attr.ib(default=None) avatar_id: Optional[str] = attr.ib(default=None) new_id: Optional[str] = attr.ib(default=None) default_value: Optional[BIValue] = attr.ib(default=None) diff --git a/lib/dl_api_lib/dl_api_lib/schemas/action.py b/lib/dl_api_lib/dl_api_lib/schemas/action.py index 0ac9c148c..275febe84 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/action.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/action.py @@ -40,8 +40,8 @@ ) from dl_constants.enums import ( AggregationFunction, - BIType, CalcMode, + UserDataType, ) from dl_model_tools.schema.base import ( BaseSchema, @@ -77,7 +77,7 @@ class UpdateFieldBaseSchema(WithNestedValueSchema, FieldActionBaseSchema.FieldBa aggregation = ma_fields.Enum(AggregationFunction) formula = ma_fields.String() guid_formula = ma_fields.String() - cast = ma_fields.Enum(BIType, allow_none=True) + cast = ma_fields.Enum(UserDataType, allow_none=True) avatar_id = ma_fields.String(allow_none=True) new_id = ma_fields.String(allow_none=True) default_value = ma_fields.Nested(ValueSchema, allow_none=True) @@ -109,7 +109,7 @@ class CloneFieldSchema(FieldActionBaseSchema.FieldBaseSchema, DefaultValidateSch title = ma_fields.String() from_guid = ma_fields.String() aggregation = ma_fields.Enum(AggregationFunction, allow_none=True) - cast = ma_fields.Enum(BIType, allow_none=True) + cast = ma_fields.Enum(UserDataType, allow_none=True) field = ma_fields.Nested(CloneFieldSchema, required=True) diff --git a/lib/dl_api_lib/dl_api_lib/schemas/data.py 
b/lib/dl_api_lib/dl_api_lib/schemas/data.py index 782631096..09388dbf0 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/data.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/data.py @@ -55,7 +55,6 @@ from dl_api_lib.schemas.legend import LegendItemSchema from dl_api_lib.schemas.pivot import RequestPivotSpecSchema from dl_constants.enums import ( - BIType, CalcMode, FieldRole, FieldType, @@ -65,6 +64,7 @@ QueryBlockPlacementType, QueryItemRefType, RangeType, + UserDataType, WhereClauseOperation, ) from dl_core.constants import DataAPILimits @@ -122,7 +122,7 @@ def _make_drm(self, raw_query_spec_union: RawQuerySpecUnion, data: Dict[str, Any class FieldsResponseFieldSchema(BaseSchema): title = ma_fields.String() guid = ma_fields.String() - data_type = ma_fields.Enum(BIType) + data_type = ma_fields.Enum(UserDataType) hidden = ma_fields.Boolean() type = ma_fields.Enum(FieldType) calc_mode = ma_fields.Enum(CalcMode) diff --git a/lib/dl_api_lib/dl_api_lib/schemas/dataset_base.py b/lib/dl_api_lib/dl_api_lib/schemas/dataset_base.py index 65ade82d9..d73af7da1 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/dataset_base.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/dataset_base.py @@ -33,10 +33,10 @@ ) from dl_constants.enums import ( AggregationFunction, - BIType, CalcMode, FieldType, ManagedBy, + UserDataType, ) from dl_core.fields import ( BIField, @@ -93,10 +93,10 @@ def get_obj_type(self, obj: CalculationSpec) -> str: guid = ma_fields.String() hidden = ma_fields.Boolean(load_default=False) description = ma_fields.String() - initial_data_type = ma_fields.Enum(BIType, allow_none=True) - cast = ma_fields.Enum(BIType) + initial_data_type = ma_fields.Enum(UserDataType, allow_none=True) + cast = ma_fields.Enum(UserDataType) type = ma_fields.Enum(FieldType, readonly=True) - data_type = ma_fields.Enum(BIType, allow_none=True) + data_type = ma_fields.Enum(UserDataType, allow_none=True) valid = ma_fields.Boolean(allow_none=True) # this will be flattened on dump and un-flattened before load 
diff --git a/lib/dl_api_lib/dl_api_lib/schemas/legend.py b/lib/dl_api_lib/dl_api_lib/schemas/legend.py index 5e6ef8be3..9dd10b0af 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/legend.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/legend.py @@ -5,13 +5,13 @@ from marshmallow_oneofschema import OneOfSchema from dl_constants.enums import ( - BIType, FieldRole, FieldType, FieldVisibility, LegendItemType, OrderDirection, RangeType, + UserDataType, ) from dl_model_tools.schema.base import BaseSchema from dl_query_processing.legend.field_legend import RoleSpec @@ -80,7 +80,7 @@ class LegendItemSchema(BaseSchema): id = ma_fields.String() title = ma_fields.String() role_spec = ma_fields.Nested(RoleSpecSchema) - data_type = ma_fields.Enum(BIType) + data_type = ma_fields.Enum(UserDataType) field_type = ma_fields.Enum(FieldType) item_type = ma_fields.Enum(LegendItemType) diff --git a/lib/dl_api_lib/dl_api_lib/schemas/options.py b/lib/dl_api_lib/dl_api_lib/schemas/options.py index cad0405ab..8155ad15f 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/options.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/options.py @@ -8,10 +8,10 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, ConnectionType, - CreateDSFrom, + DataSourceType, JoinType, + UserDataType, ) from dl_model_tools.schema.base import BaseSchema from dl_model_tools.schema.dynamic_enum_field import DynamicEnumField @@ -31,9 +31,9 @@ class PreviewSchema(BaseSchema): class DataTypesSchema(BaseSchema): class DataTypeListItemSchema(BaseSchema): - type = ma_fields.Enum(BIType) + type = ma_fields.Enum(UserDataType) aggregations = ma_fields.List(ma_fields.Enum(AggregationFunction)) - casts = ma_fields.List(ma_fields.Enum(BIType)) + casts = ma_fields.List(ma_fields.Enum(UserDataType)) filter_operations = ma_fields.List(ma_fields.Enum(WhereClauseOperation)) items = ma_fields.List(ma_fields.Nested(DataTypeListItemSchema)) @@ -42,7 +42,7 @@ class DataTypeListItemSchema(BaseSchema): class 
FieldsSchema(BaseSchema): class FieldListItemSchema(BaseSchema): guid = ma_fields.String() - casts = ma_fields.List(ma_fields.Enum(BIType)) + casts = ma_fields.List(ma_fields.Enum(UserDataType)) aggregations = ma_fields.List(ma_fields.Enum(AggregationFunction)) items = ma_fields.List(ma_fields.Nested(FieldListItemSchema)) @@ -63,7 +63,7 @@ class ConnectionListItemSchema(BaseSchema): class CompatSourceTypeListItemSchema(BaseSchema): - source_type = DynamicEnumField(CreateDSFrom) + source_type = DynamicEnumField(DataSourceType) class SourcesSchema(BaseSchema): diff --git a/lib/dl_api_lib/dl_api_lib/schemas/values.py b/lib/dl_api_lib/dl_api_lib/schemas/values.py index 3bc7803eb..667f8f482 100644 --- a/lib/dl_api_lib/dl_api_lib/schemas/values.py +++ b/lib/dl_api_lib/dl_api_lib/schemas/values.py @@ -18,7 +18,7 @@ ) from marshmallow import fields as ma_fields -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.values import ( ArrayFloatValue, ArrayIntValue, @@ -73,7 +73,7 @@ def extract_value(self, data: Dict[str, Any], **_: Any) -> Any: return data["value"] class BaseValueSchema(DefaultSchema): - type = ma_fields.Enum(BIType) + type = ma_fields.Enum(UserDataType) value = ma_fields.Field() class StringValueSchema(BaseValueSchema): @@ -141,22 +141,22 @@ class TreeStrValueSchema(BaseValueSchema): value = ma_fields.List(ma_fields.String()) type_schemas = { - BIType.string.name: StringValueSchema, - BIType.integer.name: IntegerValueSchema, - BIType.float.name: FloatValueSchema, - BIType.date.name: DateValueSchema, - BIType.datetime.name: DateTimeValueSchema, - BIType.datetimetz.name: DateTimeTZValueSchema, - BIType.genericdatetime.name: GenericDateTimeValueSchema, - BIType.boolean.name: BooleanValueSchema, - BIType.geopoint.name: GeoPointValueSchema, - BIType.geopolygon.name: GeoPolygonValueSchema, - BIType.uuid.name: UuidValueSchema, - BIType.markup.name: MarkupValueSchema, - BIType.array_str.name: ArrayStrValueSchema, - 
BIType.array_int.name: ArrayIntValueSchema, - BIType.array_float.name: ArrayFloatValueSchema, - BIType.tree_str.name: TreeStrValueSchema, + UserDataType.string.name: StringValueSchema, + UserDataType.integer.name: IntegerValueSchema, + UserDataType.float.name: FloatValueSchema, + UserDataType.date.name: DateValueSchema, + UserDataType.datetime.name: DateTimeValueSchema, + UserDataType.datetimetz.name: DateTimeTZValueSchema, + UserDataType.genericdatetime.name: GenericDateTimeValueSchema, + UserDataType.boolean.name: BooleanValueSchema, + UserDataType.geopoint.name: GeoPointValueSchema, + UserDataType.geopolygon.name: GeoPolygonValueSchema, + UserDataType.uuid.name: UuidValueSchema, + UserDataType.markup.name: MarkupValueSchema, + UserDataType.array_str.name: ArrayStrValueSchema, + UserDataType.array_int.name: ArrayIntValueSchema, + UserDataType.array_float.name: ArrayFloatValueSchema, + UserDataType.tree_str.name: TreeStrValueSchema, } def get_obj_type(self, obj: BIValue) -> str: diff --git a/lib/dl_api_lib/dl_api_lib/service_registry/multi_query_mutator_factory.py b/lib/dl_api_lib/dl_api_lib/service_registry/multi_query_mutator_factory.py new file mode 100644 index 000000000..bc8c0dd9e --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib/service_registry/multi_query_mutator_factory.py @@ -0,0 +1,64 @@ +import abc +from typing import Sequence + +import attr + +from dl_api_lib.query.registry import get_multi_query_mutator_factory +from dl_constants.enums import ( + QueryProcessingMode, + SourceBackendType, +) +from dl_core.us_dataset import Dataset +from dl_formula.core.dialect import DialectCombo +from dl_query_processing.multi_query.factory import MultiQueryMutatorFactoryBase +from dl_query_processing.multi_query.mutators.base import MultiQueryMutatorBase + + +class SRMultiQueryMutatorFactory(abc.ABC): + @abc.abstractmethod + def get_mqm_factory( + self, + backend_type: SourceBackendType, + dialect: DialectCombo, + dataset: Dataset, + ) -> MultiQueryMutatorFactoryBase: + 
raise NotImplementedError + + def get_multi_query_mutators( + self, + backend_type: SourceBackendType, + dialect: DialectCombo, + dataset: Dataset, + ) -> Sequence[MultiQueryMutatorBase]: + mqm_factory = self.get_mqm_factory(backend_type=backend_type, dialect=dialect, dataset=dataset) + return mqm_factory.get_mutators() + + +@attr.s +class DefaultSRMultiQueryMutatorFactory(SRMultiQueryMutatorFactory): + _query_proc_mode: QueryProcessingMode = attr.ib(kw_only=True) + + def get_mqm_factory( + self, + backend_type: SourceBackendType, + dialect: DialectCombo, + dataset: Dataset, + ) -> MultiQueryMutatorFactoryBase: + # Try to get for the specified query mode + factory = get_multi_query_mutator_factory( + query_proc_mode=self._query_proc_mode, + backend_type=backend_type, + dialect=dialect, + result_schema=dataset.result_schema, + ) + if factory is None: + # Try again for the basic mode + factory = get_multi_query_mutator_factory( + query_proc_mode=QueryProcessingMode.basic, + backend_type=backend_type, + dialect=dialect, + result_schema=dataset.result_schema, + ) + + assert factory is not None + return factory diff --git a/lib/dl_api_lib/dl_api_lib/service_registry/service_registry.py b/lib/dl_api_lib/dl_api_lib/service_registry/service_registry.py index 77ad7e907..d376262e4 100644 --- a/lib/dl_api_lib/dl_api_lib/service_registry/service_registry.py +++ b/lib/dl_api_lib/dl_api_lib/service_registry/service_registry.py @@ -11,8 +11,13 @@ from dl_api_lib.connector_availability.base import ConnectorAvailabilityConfig from dl_api_lib.service_registry.field_id_generator_factory import FieldIdGeneratorFactory from dl_api_lib.service_registry.formula_parser_factory import FormulaParserFactory +from dl_api_lib.service_registry.multi_query_mutator_factory import ( + DefaultSRMultiQueryMutatorFactory, + SRMultiQueryMutatorFactory, +) from dl_api_lib.service_registry.supported_functions_manager import SupportedFunctionsManager from dl_api_lib.utils.rls import BaseSubjectResolver 
+from dl_constants.enums import QueryProcessingMode from dl_core.services_registry.top_level import ( DefaultServicesRegistry, ServicesRegistry, @@ -58,6 +63,10 @@ def get_localizer(self) -> Localizer: def get_connector_availability(self) -> ConnectorAvailabilityConfig: raise NotImplementedError + @abc.abstractmethod + def get_multi_query_mutator_factory_factory(self) -> SRMultiQueryMutatorFactory: + raise NotImplementedError + @attr.s class DefaultApiServiceRegistry(DefaultServicesRegistry, ApiServiceRegistry): # noqa @@ -69,6 +78,7 @@ class DefaultApiServiceRegistry(DefaultServicesRegistry, ApiServiceRegistry): # _localizer_factory: Optional[LocalizerFactory] = attr.ib(kw_only=True, default=None) _localizer_fallback: Optional[Localizer] = attr.ib(kw_only=True, default=None) _connector_availability: Optional[ConnectorAvailabilityConfig] = attr.ib(kw_only=True, default=None) + _query_proc_mode: QueryProcessingMode = attr.ib(kw_only=True, default=QueryProcessingMode.basic) @_formula_parser_factory.default # noqa def _default_formula_parser_factory(self) -> FormulaParserFactory: @@ -109,6 +119,9 @@ def get_connector_availability(self) -> ConnectorAvailabilityConfig: assert self._connector_availability is not None return self._connector_availability + def get_multi_query_mutator_factory_factory(self) -> SRMultiQueryMutatorFactory: + return DefaultSRMultiQueryMutatorFactory(query_proc_mode=self._query_proc_mode) + def close(self) -> None: if self._formula_parser_factory is not None: self._formula_parser_factory.close() diff --git a/lib/dl_api_lib/dl_api_lib/service_registry/sr_factory.py b/lib/dl_api_lib/dl_api_lib/service_registry/sr_factory.py index 1166cc6b6..93652e6a5 100644 --- a/lib/dl_api_lib/dl_api_lib/service_registry/sr_factory.py +++ b/lib/dl_api_lib/dl_api_lib/service_registry/sr_factory.py @@ -3,7 +3,6 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Optional, ) @@ -14,6 +13,7 @@ from dl_api_lib.service_registry.field_id_generator_factory import 
FieldIdGeneratorFactory from dl_api_lib.service_registry.service_registry import DefaultApiServiceRegistry from dl_api_lib.service_registry.supported_functions_manager import SupportedFunctionsManager +from dl_constants.enums import QueryProcessingMode from dl_core.components.ids import FieldIdGeneratorType from dl_core.services_registry.sr_factories import DefaultSRFactory from dl_core.services_registry.top_level import ServicesRegistry @@ -38,12 +38,13 @@ class DefaultApiSRFactory(DefaultSRFactory[DefaultApiServiceRegistry]): _localizer_factory: Optional[LocalizerFactory] = attr.ib(default=None) _localizer_fallback: Optional[Localizer] = attr.ib(default=None) _connector_availability: Optional[ConnectorAvailabilityConfig] = attr.ib(default=None) + _query_proc_mode: QueryProcessingMode = attr.ib(kw_only=True, default=QueryProcessingMode.basic) def additional_sr_constructor_kwargs( self, request_context_info: RequestContextInfo, sr_ref: FutureRef[ServicesRegistry], - ) -> Dict[str, Any]: + ) -> dict[str, Any]: return dict( default_formula_parser_type=self._default_formula_parser_type, dataset_validator_factory=DefaultDatasetValidatorFactory( @@ -56,4 +57,5 @@ def additional_sr_constructor_kwargs( localizer_factory=self._localizer_factory, localizer_fallback=self._localizer_fallback, connector_availability=self._connector_availability, + query_proc_mode=self._query_proc_mode, ) diff --git a/lib/dl_api_lib/dl_api_lib/service_registry/supported_functions_manager.py b/lib/dl_api_lib/dl_api_lib/service_registry/supported_functions_manager.py index e36e919a9..69ed09bd1 100644 --- a/lib/dl_api_lib/dl_api_lib/service_registry/supported_functions_manager.py +++ b/lib/dl_api_lib/dl_api_lib/service_registry/supported_functions_manager.py @@ -14,7 +14,7 @@ ) from dl_constants.enums import ( AggregationFunction, - BIType, + UserDataType, WhereClauseOperation, ) from dl_formula.core.dialect import DialectCombo @@ -47,7 +47,7 @@ class SupportedFunctionsManager: 
_operation_registry: OperationRegistry = attr.ib(default=OPERATION_REGISTRY) @method_lru(maxsize=1000) - def get_supported_filters(self, dialect: DialectCombo, user_type: BIType) -> list[WhereClauseOperation]: + def get_supported_filters(self, dialect: DialectCombo, user_type: UserDataType) -> list[WhereClauseOperation]: return [ op for op in self._get_supported_filters_for_dialect(dialect=dialect) @@ -55,7 +55,7 @@ def get_supported_filters(self, dialect: DialectCombo, user_type: BIType) -> lis ] @method_lru(maxsize=1000) - def get_supported_aggregations(self, dialect: DialectCombo, user_type: BIType) -> list[AggregationFunction]: + def get_supported_aggregations(self, dialect: DialectCombo, user_type: UserDataType) -> list[AggregationFunction]: supported_func_names = set( name for name, *_ in self._get_supported_functions( diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/base.py b/lib/dl_api_lib/dl_api_lib_tests/db/base.py index 638a86408..5da2df725 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/db/base.py +++ b/lib/dl_api_lib/dl_api_lib_tests/db/base.py @@ -6,17 +6,19 @@ from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration from dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import DataApiTestBase +from dl_api_lib_testing.dataset_base import DatasetTestBase from dl_api_lib_tests.db.config import ( API_TEST_CONFIG, DB_CORE_URL, CoreConnectionSettings, ) + from dl_connector_clickhouse.core.clickhouse.constants import SOURCE_TYPE_CH_TABLE from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig -class DefaultApiTestBase(DataApiTestBase, ConnectionTestBase): +class DefaultApiTestBase(DataApiTestBase, DatasetTestBase, ConnectionTestBase): """The knowledge that this is a ClickHouse connector should not go beyond this class""" bi_compeng_pg_on = True @@ -48,13 +50,13 @@ def 
connection_params(self) -> dict: password=CoreConnectionSettings.PASSWORD, ) - @pytest.fixture(scope="session") - def dataset_params(self) -> dict: + @pytest.fixture(scope="class") + def dataset_params(self, sample_table) -> dict: return dict( source_type=SOURCE_TYPE_CH_TABLE.name, parameters=dict( - db_name="test_data", - table_name="sample_superstore", + db_name=sample_table.db.name, + table_name=sample_table.name, ), ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/config.py b/lib/dl_api_lib/dl_api_lib_tests/db/config.py index 4b4e597da..c77c1f7b0 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/db/config.py +++ b/lib/dl_api_lib/dl_api_lib_tests/db/config.py @@ -1,10 +1,11 @@ from typing import ClassVar from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_core_testing.configuration import DefaultCoreTestConfiguration from dl_testing.containers import get_test_container_hostport +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + CORE_TEST_CONFIG = DefaultCoreTestConfiguration( host_us_http=get_test_container_hostport("us", fallback_port=52500).host, diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/control_api/__init__.py b/lib/dl_api_lib/dl_api_lib_tests/db/control_api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/control_api/test_dataset.py b/lib/dl_api_lib/dl_api_lib_tests/db/control_api/test_dataset.py new file mode 100644 index 000000000..ee782054b --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/control_api/test_dataset.py @@ -0,0 +1,39 @@ +import shortuuid + +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_core.base_models import PathEntryLocation + + +class TestDataset(DefaultApiTestBase): + def test_invalid_dataset_id(self, control_api, saved_connection_id, saved_dataset, 
sync_us_manager): + usm = sync_us_manager + us_client = usm._us_client + path = PathEntryLocation(shortuuid.uuid()) + dash = us_client.create_entry(scope="dash", key=path) + dash_id = dash["entryId"] + + resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(saved_dataset.id)) + assert resp.status_code == 200 + + resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(saved_connection_id)) + assert resp.status_code == 404 + + resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(dash_id)) + assert resp.status_code == 404 + + def test_create_entity_with_existing_name(self, control_api, saved_connection_id, saved_dataset, dataset_params): + name = saved_dataset.name + + second_ds = Dataset(name=name) + second_ds.sources["source_1"] = second_ds.source( + connection_id=saved_connection_id, + **dataset_params, + ) + second_ds.source_avatars["avatar_1"] = second_ds.sources["source_1"].avatar() + + second_ds = control_api.apply_updates(dataset=second_ds).dataset + resp = control_api.save_dataset(dataset=second_ds, fail_ok=True) + assert resp.status_code == 400 + assert resp.json["message"] == "The entry already exists" + assert resp.json["code"] == "ERR.DS_API.US.BAD_REQUEST.ALREADY_EXISTS" diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/__init__.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_annotations.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_annotations.py new file mode 100644 index 000000000..8764f1bbe --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_annotations.py @@ -0,0 +1,195 @@ +from http import HTTPStatus + +import pytest + +from dl_api_client.dsmaker.pivot_utils import ( + check_pivot_response, + get_all_measure_cells, +) +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from 
dl_api_client.dsmaker.shortcuts.result_data import get_regular_result_data +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import PivotRole +from dl_constants.internal_constants import ( + DIMENSION_NAME_TITLE, + MEASURE_NAME_TITLE, +) + + +class TestPivotWithAnnotations(DefaultApiTestBase): + def test_pivot_multiple_measures_with_annotation(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + "order count": "COUNT([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + annotations=["order count"], + min_col_cnt=10, + min_row_cnt=100, + min_value_cnt=100, + ) + + def test_pivot_multiple_measures_with_targeted_annotation(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + "order count": "COUNT([sales])", + }, + ) + + data_by_field = get_regular_result_data( + ds, + data_api, + field_names=[ + "category", + "sub_category", + "order_date", + "city", + "sales sum", + "profit sum", + "order count", + ], + ) + category_leg_id = 0 + subcategory_leg_id = 1 + orderdate_leg_id = 2 + city_leg_id = 3 + mname_leg_id = 4 + profit_leg_id = 5 + sales_leg_id = 6 + anno_leg_id = 7 + + result_resp = data_api.get_pivot( + dataset=ds, + fields=[ + ds.find_field(title="category").as_req_legend_item(legend_item_id=category_leg_id), + ds.find_field(title="sub_category").as_req_legend_item(legend_item_id=subcategory_leg_id), + ds.find_field(title="order_date").as_req_legend_item(legend_item_id=orderdate_leg_id), + ds.find_field(title="city").as_req_legend_item(legend_item_id=city_leg_id), + 
ds.measure_name_as_req_legend_item(legend_item_id=mname_leg_id), + ds.find_field(title="sales sum").as_req_legend_item(legend_item_id=sales_leg_id), + ds.find_field(title="profit sum").as_req_legend_item(legend_item_id=profit_leg_id), + ds.find_field(title="order count").as_req_legend_item(legend_item_id=anno_leg_id), + ], + pivot_structure=[ + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[category_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[subcategory_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[orderdate_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[city_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[mname_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_measure, legend_item_ids=[sales_leg_id]), + ds.make_req_pivot_item(role=PivotRole.pivot_measure, legend_item_ids=[profit_leg_id]), + ds.make_req_pivot_item( + role=PivotRole.pivot_annotation, + legend_item_ids=[anno_leg_id], + annotation_type="color", + target_legend_item_ids=[profit_leg_id], + ), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK + result_data = result_resp.data + + # Check legend + legend_tuples = [(item.title, item.role_spec.role) for item in result_data["pivot"]["structure"]] + assert legend_tuples == [ + ("category", PivotRole.pivot_column), + ("sub_category", PivotRole.pivot_column), + ("order_date", PivotRole.pivot_row), + ("city", PivotRole.pivot_row), + (MEASURE_NAME_TITLE, PivotRole.pivot_row), + ("sales sum", PivotRole.pivot_measure), + ("profit sum", PivotRole.pivot_measure), + ("order count", PivotRole.pivot_annotation), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ] + + # Check measure values + pivot_rows = result_data["pivot_data"]["rows"] + # Check annotation values + all_measure_cells = get_all_measure_cells(pivot_rows) + sales_annotation_values = sorted( + {float(cell[1][0]) for cell in 
all_measure_cells if cell[0][1] == sales_leg_id and len(cell) > 1} + ) + profit_annotation_values = sorted( + {float(cell[1][0]) for cell in all_measure_cells if cell[0][1] == profit_leg_id and len(cell) > 1} + ) + original_annotation_values = sorted({float(val) for val in data_by_field["order count"]}) + assert sales_annotation_values == [] + assert profit_annotation_values == pytest.approx(original_annotation_values) + + def test_pivot_multi_measures_with_annotation_same_as_one_measure(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "Count": "COUNT()", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city", MEASURE_NAME_TITLE], + measures=["sales sum", "Count"], + annotations=["sales sum"], + min_col_cnt=10, + min_row_cnt=100, + min_value_cnt=100, + ) + + def test_pivot_with_multiple_annotations(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + "order count": "COUNT([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + annotations=["profit sum", "order count"], + ) + + def test_pivot_with_annotation_by_dimension(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "order count": "COUNT([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + annotations=["order_date"], + ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_basic.py 
b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_basic.py new file mode 100644 index 000000000..69734964a --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_basic.py @@ -0,0 +1,205 @@ +from http import HTTPStatus +from typing import Optional + +from dl_api_client.dsmaker.pivot_utils import ( + check_pivot_response, + get_pivot_response, +) +from dl_api_client.dsmaker.primitives import PivotPagination +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_regular_result_data +from dl_api_lib.pivot.primitives import ( + PivotHeaderValue, + PivotMeasureSorting, + PivotMeasureSortingSettings, +) +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import OrderDirection +from dl_constants.internal_constants import MEASURE_NAME_TITLE + + +class TestBasicPivot(DefaultApiTestBase): + def test_basic_pivot(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + min_col_cnt=3, + min_row_cnt=100, + min_value_cnt=100, + ) + + def test_pivot_multiple_measures(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", MEASURE_NAME_TITLE, "city"], + measures=["sales sum", "profit sum"], + measures_sorting_settings=[ + PivotMeasureSorting( + row=PivotMeasureSortingSettings( + header_values=[ + PivotHeaderValue(value="2014-01-04"), + PivotHeaderValue(value="sales sum"), + PivotHeaderValue(value="Naperville"), + ], + 
) + ), + None, + ], + min_col_cnt=10, + min_row_cnt=100, + min_value_cnt=100, + ) + + sorting_row_idx = None + for row in pivot_abs.iter_rows(): + if row.get_compound_header() == ("2014-01-04", "sales sum", "Naperville"): + sorting_row_idx = row.row_idx + break + assert sorting_row_idx is not None + + def _get_value(value: Optional[tuple]) -> float: + if value is None: + return float("-inf") + return float(value[0][0]) + + row_values = list(map(_get_value, pivot_abs.resp_data["pivot_data"]["rows"][sorting_row_idx]["values"])) + assert sorted(row_values) == row_values + + def test_pivot_with_order_by(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + data_by_field = get_regular_result_data( + ds, + data_api, + field_names=[ + "category", + "sub_category", + "order_date", + "city", + "sales sum", + "profit sum", + ], + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + order_fields={"category": OrderDirection.desc, "order_date": OrderDirection.desc}, + ) + assert pivot_resp.status_code == HTTPStatus.OK + result_data = pivot_resp.data + + # Check first column dimension + category_values = [] + for col in result_data["pivot_data"]["columns"]: + value = col[0][0][0] + if value not in category_values: + category_values.append(value) + assert category_values == sorted(set(data_by_field["category"]), reverse=True) + + # Check first row dimension + pivot_rows = result_data["pivot_data"]["rows"] + date_values = [] + for row in pivot_rows: + value = row["header"][0][0][0] + if value not in date_values: + date_values.append(value) + assert date_values == sorted(set(data_by_field["order_date"]), reverse=True) + + def test_pivot_with_pagination(self, control_api, 
data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + def get_pivot(pivot_pagination: Optional[PivotPagination] = None): + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + pivot_pagination=pivot_pagination, + ) + assert pivot_resp.status_code == HTTPStatus.OK, pivot_resp.response_errors + return pivot_resp.data + + # Save unpaginated table: + result_data = get_pivot() + initial_columns = result_data["pivot_data"]["columns"] + initial_rows = result_data["pivot_data"]["rows"] + + # Get paginated table and compare + result_data = get_pivot(pivot_pagination=PivotPagination(offset_rows=1, limit_rows=2)) + paginated_columns = result_data["pivot_data"]["columns"] + paginated_rows = result_data["pivot_data"]["rows"] + assert paginated_columns == initial_columns + assert paginated_rows == initial_rows[1:3] + + # Pseudo-pagination + result_data = get_pivot(pivot_pagination=PivotPagination(offset_rows=0, limit_rows=None)) + paginated_columns = result_data["pivot_data"]["columns"] + paginated_rows = result_data["pivot_data"]["rows"] + assert paginated_columns == initial_columns + assert paginated_rows == initial_rows + + def test_pivot_with_remapped_titles(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + title_mapping={ + "category": "My Dimension 1", + "order_date": "My Dimension 2", + "sales sum": "Measure", + }, + min_col_cnt=3, + min_row_cnt=100, + min_value_cnt=100, + ) diff --git 
a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_corner_cases.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_corner_cases.py new file mode 100644 index 000000000..49dc4b2b7 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_corner_cases.py @@ -0,0 +1,404 @@ +from http import HTTPStatus +import json +from typing import Any + +from dl_api_client.dsmaker.data_abstraction.pivot import PivotDataAbstraction +from dl_api_client.dsmaker.pivot_utils import ( + check_pivot_response, + get_all_measure_cells, +) +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_regular_result_data +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import ( + OrderDirection, + PivotRole, +) +from dl_constants.internal_constants import ( + DIMENSION_NAME_TITLE, + MEASURE_NAME_TITLE, +) + + +class TestPivotCornerCases(DefaultApiTestBase): + def test_pivot_with_markup(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "category MU": "BOLD([category])", + "order_date MU": "ITALIC(STR([order_date]))", + "measure MU": 'BOLD(STR(SUM([sales]))) + " - " + ITALIC(STR(SUM([profit])))', + }, + ) + + data_by_field = get_regular_result_data( + ds, data_api, field_names=["category MU", "order_date MU", "measure MU"] + ) + + category_liid = 0 + orderdate_liid = 1 + measure_liid = 2 + + result_resp = data_api.get_pivot( + dataset=ds, + fields=[ + ds.find_field(title="category MU").as_req_legend_item(legend_item_id=category_liid), + ds.find_field(title="order_date MU").as_req_legend_item(legend_item_id=orderdate_liid), + ds.find_field(title="measure MU").as_req_legend_item(legend_item_id=measure_liid), + ], + pivot_structure=[ + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[category_liid]), + 
ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[orderdate_liid]), + ds.make_req_pivot_item(role=PivotRole.pivot_measure, legend_item_ids=[measure_liid]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK + result_data = result_resp.data + + # Check legend + legend_tuples = [(item.title, item.role_spec.role) for item in result_data["pivot"]["structure"]] + assert legend_tuples == [ + ("category MU", PivotRole.pivot_column), + ("order_date MU", PivotRole.pivot_row), + ("measure MU", PivotRole.pivot_measure), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ] + + def dumps(value: Any) -> str: + return json.dumps(value, sort_keys=True) + + # Check columns + category_values = {dumps(col[0][0][0]) for col in result_data["pivot_data"]["columns"]} + assert category_values == { + dumps({"type": "bold", "content": {"type": "text", "content": "Office Supplies"}}), + dumps({"type": "bold", "content": {"type": "text", "content": "Furniture"}}), + dumps({"type": "bold", "content": {"type": "text", "content": "Technology"}}), + } + + # Check row headers + pivot_rows = result_data["pivot_data"]["rows"] + date_values = {dumps(row["header"][0][0][0]) for row in pivot_rows} + assert date_values.issuperset( + { + dumps({"type": "italics", "content": {"type": "text", "content": "2014-01-03"}}), + dumps({"type": "italics", "content": {"type": "text", "content": "2014-01-06"}}), + dumps({"type": "italics", "content": {"type": "text", "content": "2014-01-09"}}), + } + ) + + # Check measure values + assert len(pivot_rows) > 100 + all_measure_cells = get_all_measure_cells(pivot_rows) + measure_values = sorted({dumps(cell[0][0]) for cell in all_measure_cells}) + original_measure_values = sorted({dumps(val) for val in data_by_field["measure MU"]}) + assert measure_values == original_measure_values + + def test_pivot_no_measures(self, control_api, data_api, dataset_id): + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset + + 
check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city"], + measures=[], + min_col_cnt=10, + min_row_cnt=100, + max_value_cnt=0, + ) + + def test_pivot_no_dimensions_multiple_measures(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[], + rows=[], + measures=["sales sum", "profit sum"], + min_col_cnt=2, + max_col_cnt=2, + min_row_cnt=1, + max_row_cnt=1, + min_value_cnt=2, + max_value_cnt=2, + custom_pivot_legend_check=[ + ("sales sum", PivotRole.pivot_measure), + ("profit sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_column), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + def test_pivot_only_row_dimensions_one_measure(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[], + rows=["category", "order_date"], + measures=["sales sum"], + min_col_cnt=1, + max_col_cnt=1, + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_row), + ("order_date", PivotRole.pivot_row), + ("sales sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_column), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + def test_pivot_only_column_dimensions_multiple_measures_no_mnames(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "order_date"], + rows=[], + measures=["sales sum", "profit sum"], + 
min_row_cnt=2, + max_row_cnt=2, + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_column), + ("order_date", PivotRole.pivot_column), + ("sales sum", PivotRole.pivot_measure), + ("profit sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_row), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + def test_pivot_only_column_dimensions_multiple_measures_with_mnames(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "order_date", MEASURE_NAME_TITLE], + rows=[], + measures=["sales sum", "profit sum"], + min_row_cnt=1, + max_row_cnt=1, + ) + + def test_single_measure_with_duplicate_measure_name(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + category_liid = 0 + orderdate_liid = 1 + sales_liid = 2 + mname_liid = 3 + + def get_pivot(duplicate_measure_name: bool) -> dict: + result_resp = data_api.get_pivot( + dataset=ds, + fields=[ + ds.find_field(title="category").as_req_legend_item(legend_item_id=category_liid), + ds.find_field(title="order_date").as_req_legend_item(legend_item_id=orderdate_liid), + ds.find_field(title="sales sum").as_req_legend_item(legend_item_id=sales_liid), + ds.measure_name_as_req_legend_item(legend_item_id=mname_liid), + ], + pivot_structure=[ + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[category_liid]), + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[mname_liid]), + ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[orderdate_liid]), + *( + (ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[mname_liid]),) + if duplicate_measure_name + else () + ), + 
ds.make_req_pivot_item(role=PivotRole.pivot_measure, legend_item_ids=[sales_liid]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK + return result_resp.data + + single_result_data = get_pivot(duplicate_measure_name=False) + double_result_data = get_pivot(duplicate_measure_name=True) + + # Check legend + legend_tuples = [(item.title, item.role_spec.role) for item in double_result_data["pivot"]["structure"]] + assert legend_tuples == [ + ("category", PivotRole.pivot_column), + ("Measure Names", PivotRole.pivot_column), + ("order_date", PivotRole.pivot_row), + ("Measure Names", PivotRole.pivot_row), + ("sales sum", PivotRole.pivot_measure), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ] + + # Check column headers + for col in double_result_data["pivot_data"]["columns"]: + assert len(col) == 2 + + # Check row headers + for row in double_result_data["pivot_data"]["rows"]: + assert len(row["header"]) == 2 + + assert len(double_result_data["pivot_data"]["rows"]) == len(single_result_data["pivot_data"]["rows"]) + for single_row, double_row in zip( + single_result_data["pivot_data"]["rows"], double_result_data["pivot_data"]["rows"] + ): + assert len(single_row["values"]) == len(double_row["values"]) + for single_cell, double_cell in zip(single_row["values"], double_row["values"]): + if single_cell is None or double_cell is None: + assert single_cell == double_cell + else: + assert len(single_cell) == len(double_cell) == 1 + assert single_cell[0][0:2] == double_cell[0][0:2] # exclude pivot_item_id (idx=2) from comparison + + def test_pivot_only_row_dimensions_no_measures(self, control_api, data_api, dataset_id): + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[], + rows=["category", "order_date"], + measures=[], + max_col_cnt=1, # There will be 1 column without any headers or values + min_row_cnt=100, + max_value_cnt=0, + ) + + def 
test_pivot_only_single_column_dimension_no_measures(self, control_api, data_api, dataset_id): + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["order_date"], + rows=[], + measures=[], + max_row_cnt=1, # There will be 1 row without any headers or values + min_col_cnt=100, + max_value_cnt=0, + ) + + def test_pivot_duplicate_measures(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", "sub_category"], + rows=["order_date", "city", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum", "sales sum"], + min_col_cnt=10, + min_row_cnt=100, + min_value_cnt=100, + ) + + def test_pivot_empty_string_dimension_values(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "empty city": 'IF [city] = "New York" THEN "" ELSE [city] END', + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["empty city"], + rows=["category"], + measures=["sales sum"], + order_fields={"empty city": OrderDirection.desc}, + with_totals=True, + ) + + def test_pivot_null_dimension_values(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "null city": 'IF [city] = "New York" THEN NULL ELSE [city] END', + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["null city"], + rows=["category"], + measures=["sales sum"], + with_totals=True, + ) + + def test_pivot_sorting_with_totals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, 
+ formulas={ + "sales sum": "SUM([sales])", + }, + ) + + def get_pivot(direction: OrderDirection) -> PivotDataAbstraction: + return check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + order_fields={"category": direction}, + with_totals=True, + ) + + pivot_abs = get_pivot(OrderDirection.asc) + col_titles = pivot_abs.get_flat_column_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + + pivot_abs = get_pivot(OrderDirection.desc) + col_titles = pivot_abs.get_flat_column_headers() + assert col_titles == ["Technology", "Office Supplies", "Furniture", ""] diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_errors.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_errors.py new file mode 100644 index 000000000..315755c89 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_errors.py @@ -0,0 +1,291 @@ +from http import HTTPStatus + +from dl_api_client.dsmaker.pivot_utils import get_pivot_response +from dl_api_client.dsmaker.primitives import ( + Dataset, + PivotPagination, + PivotTotals, +) +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_lib.pivot.primitives import ( + PivotHeaderValue, + PivotMeasureSorting, + PivotMeasureSortingSettings, +) +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import ( + FieldRole, + PivotRole, +) +from dl_constants.internal_constants import MEASURE_NAME_TITLE + + +class TestPivotErrors(DefaultApiTestBase): + def test_multiple_measures_without_measure_name(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum", "profit sum"], + 
) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.MEASURE_NAME.REQUIRED" + + def test_no_measures_with_measure_name(self, control_api, data_api, dataset_id): + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date", MEASURE_NAME_TITLE], + measures=[], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.MEASURE_NAME.FORBIDDEN" + + def test_multiple_measures_with_double_measure_name(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", MEASURE_NAME_TITLE], + rows=["order_date", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.MEASURE_NAME.DUPLICATE" + + def test_pagination_range(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + def check_pagination_error(pivot_pagination: PivotPagination) -> None: + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + pivot_pagination=pivot_pagination, + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + + check_pagination_error(PivotPagination(limit_rows=-9, offset_rows=8)) + check_pagination_error(PivotPagination(limit_rows=9, offset_rows=-8)) + check_pagination_error(PivotPagination(limit_rows=0, offset_rows=1)) + + def test_measure_as_dimension(self, control_api, data_api, 
dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["sales sum"], + measures=["profit sum"], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.LEGEND.INVALID_ROLE" + + def test_dimension_as_measure(self, control_api, data_api, dataset_id): + ds = control_api.load_dataset(dataset=Dataset(id=dataset_id)).dataset + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["city"], + measures=["order_date"], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.LEGEND.INVALID_ROLE" + + def test_uneven_data_columns(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_resp = data_api.get_pivot( + dataset=ds, + fields=[ + ds.find_field(title="category").as_req_legend_item(legend_item_id=0, block_id=0), + ds.find_field(title="sales sum").as_req_legend_item(legend_item_id=1, block_id=0), + ds.find_field(title="order_date").as_req_legend_item(legend_item_id=2, block_id=1), + ds.find_field(title="sales sum").as_req_legend_item(legend_item_id=3, role=FieldRole.total, block_id=1), + ], + pivot_structure=[ + ds.make_req_pivot_item(role=PivotRole.pivot_row, legend_item_ids=[0]), + ds.make_req_pivot_item(role=PivotRole.pivot_column, legend_item_ids=[2]), + ds.make_req_pivot_item(role=PivotRole.pivot_measure, legend_item_ids=[1, 3]), + ], + fail_ok=True, + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.UNEVEN_DATA_COLUMNS" + + def test_wrong_column_in_measure_sorting(self, control_api, 
data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + measures_sorting_settings=[ + PivotMeasureSorting( + column=PivotMeasureSortingSettings(header_values=[PivotHeaderValue(value="Not found")]) + ) + ], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.SORTING.ROW_OR_COLUMN_NOT_FOUND" + + def test_wrong_role_spec_in_measure_sorting(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date"], + measures=["sales sum"], + totals=[()], + measures_sorting_settings=[ + PivotMeasureSorting( + column=PivotMeasureSortingSettings( + header_values=[PivotHeaderValue(value="")] + ) # sort by data, not totals! 
+ ) + ], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.SORTING.ROW_OR_COLUMN_NOT_FOUND" + + def test_measure_sorting_multiple_sort_on_the_same_axis(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + sorting_settings = PivotMeasureSorting( + column=PivotMeasureSortingSettings( + header_values=[PivotHeaderValue(value="Furniture"), PivotHeaderValue(value="sales sum")] + ) + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", MEASURE_NAME_TITLE], + rows=["order_date"], + measures=["sales sum", "profit sum"], + measures_sorting_settings=[sorting_settings, sorting_settings], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.SORTING.MULTIPLE_COLUMNS_OR_ROWS" + + def test_measure_sorting_by_column_with_multiple_measures_in_rows(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + sorting_settings = PivotMeasureSorting( + column=PivotMeasureSortingSettings( + header_values=[PivotHeaderValue(value="Furniture"), PivotHeaderValue(value="sales sum")] + ) + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["order_date", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + measures_sorting_settings=[sorting_settings, None], + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.SORTING.AGAINST_MULTIPLE_MEASURES" + + def test_measure_sorting_with_subtotals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + 
dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_resp = get_pivot_response( + dataset=ds, + data_api=data_api, + columns=[MEASURE_NAME_TITLE], + rows=["category", "order_date"], + measures=["sales sum", "profit sum"], + measures_sorting_settings=[ + PivotMeasureSorting( + column=PivotMeasureSortingSettings(header_values=[PivotHeaderValue(value="sales sum")]) + ), + None, + ], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=1)], + ), + ) + assert pivot_resp.status_code == HTTPStatus.BAD_REQUEST + assert pivot_resp.bi_status_code == "ERR.DS_API.PIVOT.SORTING.SUBTOTALS_ARE_NOT_ALLOWED" diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_simple_totals.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_simple_totals.py new file mode 100644 index 000000000..f5c4e93a4 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_simple_totals.py @@ -0,0 +1,301 @@ +from dl_api_client.dsmaker.pivot_utils import check_pivot_response +from dl_api_client.dsmaker.primitives import PivotTotals +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import PivotRole +from dl_constants.internal_constants import ( + DIMENSION_NAME_TITLE, + MEASURE_NAME_TITLE, +) + + +class TestPivotWithSimpleTotals(DefaultApiTestBase): + def test_main_totals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + columns=[PivotTotals.item(level=0)], + ), + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + 
assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["Central", "East", "South", "West", ""] + + def test_with_totals_flag(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + with_totals=True, + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["Central", "East", "South", "West", ""] + + def test_corner_case_totals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=[], + measures=["sales sum"], + simple_totals=PivotTotals( + rows=[], + columns=[PivotTotals.item(level=0)], + ), + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_column), + ("sales sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_row), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[], + rows=["category"], + measures=["sales sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + columns=[], + ), + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_row), + ("sales sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_column), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=[], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + 
columns=[PivotTotals.item(level=0)], + ), + ) + + def test_multi_measure_corner_case_totals_flag(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + }, + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=[], + measures=["sales sum", "profit sum"], + with_totals=True, + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_column), + ("sales sum", PivotRole.pivot_measure), + ("profit sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_row), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[], + rows=["category"], + measures=["sales sum", "profit sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + columns=[], + ), + custom_pivot_legend_check=[ + ("category", PivotRole.pivot_row), + ("sales sum", PivotRole.pivot_measure), + ("profit sum", PivotRole.pivot_measure), + (MEASURE_NAME_TITLE, PivotRole.pivot_column), + (DIMENSION_NAME_TITLE, PivotRole.pivot_info), + ], + ) + + def test_main_totals_with_annotation(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + annotations=["profit sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + columns=[PivotTotals.item(level=0)], + ), + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["Central", "East", "South", "West", ""] + + def 
test_main_totals_with_multiple_measures(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region", MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=0)], + columns=[PivotTotals.item(level=0)], + ), + ) + col_titles = pivot_abs.get_flat_column_headers() + row_compound_titles = pivot_abs.get_compound_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_compound_titles == [ + ("Central", "sales sum"), + ("Central", "profit sum"), + ("East", "sales sum"), + ("East", "profit sum"), + ("South", "sales sum"), + ("South", "profit sum"), + ("West", "sales sum"), + ("West", "profit sum"), + ("", "sales sum"), + ("", "profit sum"), + ] + + def test_subtotals_with_multiple_measures(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=[MEASURE_NAME_TITLE, "region"], + measures=["sales sum", "profit sum"], + simple_totals=PivotTotals( + rows=[PivotTotals.item(level=1)], + columns=[PivotTotals.item(level=0)], + ), + ) + col_titles = pivot_abs.get_flat_column_headers() + row_compound_titles = pivot_abs.get_compound_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_compound_titles == [ + ("sales sum", "Central"), + ("sales sum", "East"), + ("sales sum", "South"), + ("sales sum", "West"), + ("sales sum", ""), + ("profit sum", "Central"), + ("profit sum", "East"), + ("profit sum", "South"), + ("profit sum", 
"West"), + ("profit sum", ""), + ] + + def test_main_totals_with_only_mnames_one_one_side(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([sales])", + }, + ) + + # 1. Only Measure Names in rows + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=[MEASURE_NAME_TITLE], + measures=["sales sum", "profit sum"], + with_totals=True, + check_totals=[()], # override the autogenerated value because this is a corner case + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["sales sum", "profit sum"] + + # 2. Only Measure Names in columns + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=[MEASURE_NAME_TITLE], + rows=["category"], + measures=["sales sum", "profit sum"], + with_totals=True, + check_totals=[()], # override the autogenerated value because this is a corner case + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["sales sum", "profit sum"] + assert row_titles == ["Furniture", "Office Supplies", "Technology", ""] diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_totals.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_totals.py new file mode 100644 index 000000000..8a6b3edb2 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/pivot/test_totals.py @@ -0,0 +1,180 @@ +from typing import Optional + +import pytest + +from dl_api_client.dsmaker.pivot_utils import check_pivot_response +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_lib.pivot.primitives import ( + PivotHeaderRoleSpec, + PivotHeaderValue, + PivotMeasureSorting, + PivotMeasureSortingSettings, +) 
+from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import ( + NotificationType, + OrderDirection, + PivotHeaderRole, + WhereClauseOperation, +) +from dl_constants.internal_constants import MEASURE_NAME_TITLE + + +class TestPivotWithTotals(DefaultApiTestBase): + def test_basic_pivot_with_grand_total(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + totals=[()], + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["Central", "East", "South", "West", ""] + + def test_pivot_empty_data_with_grand_totals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "profit sum": "SUM([profit])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category", MEASURE_NAME_TITLE], + rows=[], + measures=["sales sum", "profit sum"], + measures_sorting_settings=[ + PivotMeasureSorting( + column=PivotMeasureSortingSettings( + header_values=[PivotHeaderValue(value=""), PivotHeaderValue(value="sales sum")], + direction=OrderDirection.desc, + role_spec=PivotHeaderRoleSpec(role=PivotHeaderRole.total), + ) + ), + None, + ], + totals=[()], + filters=[ds.find_field("category").filter(WhereClauseOperation.STARTSWITH, ["lol"])], + ) + + assert pivot_abs.get_compound_column_headers() == [("", "sales sum"), ("", "profit sum")] + assert pivot_abs.get_compound_row_headers() == [()] + assert pivot_abs.get_1d_mapper().get_value_count() == 2 # no values except grand totals + + @pytest.mark.parametrize("role", 
[PivotHeaderRole.total, PivotHeaderRole.data]) + def test_pivot_measure_sorting_with_total_and_empty_name_column(self, control_api, data_api, dataset_id, role): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + "category without Furniture": "IF([category] = 'Furniture', '', [category])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category without Furniture"], + rows=["region"], + measures=["sales sum"], + measures_sorting_settings=[ + PivotMeasureSorting( + column=PivotMeasureSortingSettings( + header_values=[PivotHeaderValue(value="")], + direction=OrderDirection.desc, + role_spec=PivotHeaderRoleSpec(role=role), + ) + ) + ], + totals=[()], + ) + col_titles = pivot_abs.get_flat_column_headers() + assert col_titles == ["", "Office Supplies", "Technology", ""] # first '' is from formula, second is from total + + def _get_value(value: Optional[tuple]) -> float: + if value is None: + return float("-inf") + return float(value[0][0]) + + column_idx = 0 if role == PivotHeaderRole.data else -1 + total_values = [_get_value(row["values"][column_idx]) for row in pivot_abs.resp_data["pivot_data"]["rows"]] + last_value = total_values.pop() + # pivot is sorted by values of '' column with selected role, + # except for the last value, which is from totals row + assert sorted(total_values, reverse=True) == total_values + if role == PivotHeaderRole.total: # total column: the last value is a grand total => is not None: + assert last_value != float("-inf") + else: # data column: the last value is a column total => is None + assert last_value == float("-inf") + + def test_basic_pivot_with_grand_total_and_simple_subtotals(self, control_api, data_api, dataset_id): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + 
data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + totals=[ + ("category",), # region total by category + ("region",), # category total by region + (), # Grand Total + ], + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology", ""] + assert row_titles == ["Central", "East", "South", "West", ""] + + def test_pivot_with_totals_and_measure_filters(self, control_api, data_api, dataset_id): + """ + Check that if measure filters are added to the request, + then totals are disabled. + """ + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + "sales sum": "SUM([sales])", + }, + ) + + pivot_abs = check_pivot_response( + dataset=ds, + data_api=data_api, + columns=["category"], + rows=["region"], + measures=["sales sum"], + totals=[("category",), ("region",), ()], + filters=[ds.find_field("sales sum").filter(WhereClauseOperation.GT, [10.0])], + expected_notifications=[NotificationType.totals_removed_due_to_measure_filter], + ) + col_titles = pivot_abs.get_flat_column_headers() + row_titles = pivot_abs.get_flat_row_headers() + assert col_titles == ["Furniture", "Office Supplies", "Technology"] # No totals + assert row_titles == ["Central", "East", "South", "West"] # No totals diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/__init__.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/__init__.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/generator.py 
b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/generator.py new file mode 100644 index 000000000..baaed03cd --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/generator.py @@ -0,0 +1,208 @@ +from __future__ import annotations + +import random +from typing import ( + Any, + Sequence, +) + +import attr + +from dl_constants.enums import WhereClauseOperation + + +@attr.s +class AutoGeneratorSettings: + dimensions: Sequence[str] = attr.ib(kw_only=True) + dates: frozenset[str] = attr.ib(kw_only=True, factory=frozenset) + filters: dict[str, tuple[WhereClauseOperation, list[str]]] = attr.ib(kw_only=True) + dimension_cnts: Sequence[int] = attr.ib(kw_only=True) + aggregations: Sequence[str] = attr.ib(kw_only=True) + formula_cnts: Sequence[int] = attr.ib(kw_only=True) + measure_base_expressions: Sequence[str] = attr.ib(kw_only=True) + filter_probability: float = attr.ib(kw_only=True) + bfb_probability: float = attr.ib(kw_only=True) + lookup_probability: float = attr.ib(kw_only=True) + + +@attr.s(frozen=True) +class FormulaTemplate: + template_str: str = attr.ib() + placeholder: str = attr.ib(kw_only=True) + + def wrap(self, nested_formula: str) -> str: + return self.template_str.format(**{self.placeholder: nested_formula}) + + +@attr.s(frozen=True) +class FormulaRecursionState: + agg_level: int = attr.ib(kw_only=True) + effective_dims: list[str] = attr.ib(kw_only=True) + remaining_dims: list[str] = attr.ib(kw_only=True) + available_filters: frozenset[str] = attr.ib(kw_only=True, factory=frozenset) + top_level: bool = attr.ib(kw_only=True, default=False) + + def clone(self, **kwargs: Any) -> FormulaRecursionState: + return attr.evolve(self, **kwargs) + + def pop_agg_level(self) -> FormulaRecursionState: + assert self.agg_level > 0 + return self.clone(top_level=False, agg_level=self.agg_level - 1) + + def pop_dimension(self) -> tuple[FormulaRecursionState, str]: + assert self.remaining_dims + 
chosen_dim = random.choice(self.remaining_dims) + new_effective_dims = self.effective_dims + [chosen_dim] + new_remaining_dims = [dim for dim in self.remaining_dims if dim != chosen_dim] + new_state = self.clone( + remaining_dims=new_remaining_dims, + effective_dims=new_effective_dims, + ) + return new_state, chosen_dim + + def pop_bfb_field(self) -> tuple[FormulaRecursionState, str]: + bfb_field = next(iter(self.available_filters)) + new_available_filters = self.available_filters - frozenset((bfb_field,)) + return self.clone(available_filters=new_available_filters), bfb_field + + +@attr.s +class TestSettings: + base_dimensions: Sequence[str] = attr.ib(kw_only=True) + measure_formulas: Sequence[str] = attr.ib(kw_only=True) + filters: dict[str, tuple[WhereClauseOperation, list[str]]] = attr.ib(kw_only=True, factory=dict) + + def serialize(self) -> dict: + return { + "base_dimensions": list(self.base_dimensions), + "measure_formulas": list(self.measure_formulas), + "filters": {name: {"op": op.name, "values": values} for name, (op, values) in self.filters.items()}, + } + + @classmethod + def deserialize(cls, data: dict) -> TestSettings: + return cls( + base_dimensions=data["base_dimensions"], + measure_formulas=data["measure_formulas"], + filters={ + name: (WhereClauseOperation[params["op"]], params["values"]) for name, params in data["filters"].items() + }, + ) + + +@attr.s +class LODTestAutoGenerator: + settings: AutoGeneratorSettings = attr.ib(kw_only=True) + + def _match_probability(self, value: float) -> bool: + return random.random() <= value + + def _generate_formula_iteration_generic_func( + self, + recursion_state: FormulaRecursionState, + func_name: str, + add_args: Sequence[str] = (), + agg_level: bool = False, + lod: bool = False, + bfb: bool = False, + igdim: bool = False, + ) -> tuple[FormulaTemplate, FormulaRecursionState]: + top_level = recursion_state.top_level + + if agg_level: + recursion_state = recursion_state.pop_agg_level() + + lod_str: str = "" 
+ if lod and not top_level: + recursion_state, chosen_dim = recursion_state.pop_dimension() + lod_str = f" INCLUDE [{chosen_dim}]" + + bfb_str: str = "" + if bfb and recursion_state.available_filters: + if self._match_probability(self.settings.bfb_probability): + recursion_state, bfb_field = recursion_state.pop_bfb_field() + bfb_str = f" BEFORE FILTER BY [{bfb_field}]" + + add_args_str = (", " + ", ".join(add_args)) if add_args else "" + formula_tmpl = FormulaTemplate( + f"{func_name}({{nested_formula}}{add_args_str}{lod_str}{bfb_str})", + placeholder="nested_formula", + ) + return formula_tmpl, recursion_state + + def _generate_formula_iteration_agg( + self, + recursion_state: FormulaRecursionState, + ) -> tuple[FormulaTemplate, FormulaRecursionState]: + return self._generate_formula_iteration_generic_func( + recursion_state=recursion_state, + func_name=random.choice(self.settings.aggregations), + agg_level=True, + lod=True, + bfb=True, + ) + + def _generate_formula_iteration_lookup( + self, + recursion_state: FormulaRecursionState, + ) -> tuple[FormulaTemplate, FormulaRecursionState]: + date_dims = list(set(recursion_state.effective_dims) & self.settings.dates) + assert date_dims + return self._generate_formula_iteration_generic_func( + recursion_state=recursion_state, + func_name="AGO", + add_args=[f"[{random.choice(date_dims)}]"], + bfb=True, + ) + + def _generate_formula_iteration( + self, + recursion_state: FormulaRecursionState, + ) -> tuple[FormulaTemplate, FormulaRecursionState]: + date_dims = set(recursion_state.effective_dims) & self.settings.dates + if date_dims and recursion_state.agg_level > 0 and self._match_probability(self.settings.lookup_probability): + return self._generate_formula_iteration_lookup(recursion_state=recursion_state) + + return self._generate_formula_iteration_agg(recursion_state=recursion_state) + + def generate_formula(self, recursion_state: FormulaRecursionState) -> str: + if recursion_state.agg_level == 0: + return 
random.choice(self.settings.measure_base_expressions) + + formula_tmpl, child_recursion_state = self._generate_formula_iteration(recursion_state=recursion_state) + nested_formula = self.generate_formula(recursion_state=child_recursion_state) + formula = formula_tmpl.wrap(nested_formula) + return formula + + def generate_test_settings(self) -> TestSettings: + dimension_cnt = random.choice(self.settings.dimension_cnts) + base_dimensions = random.sample(self.settings.dimensions, k=dimension_cnt) + effective_dims = [dim for dim in self.settings.dimensions if dim in base_dimensions] + remaining_dims = [dim for dim in self.settings.dimensions if dim not in base_dimensions] + formula_cnt = random.choice(self.settings.formula_cnts) + + # Generate filters + chosen_filters: dict[str, tuple[WhereClauseOperation, list[str]]] = {} + for name, params in self.settings.filters.items(): + if random.random() <= self.settings.filter_probability: + chosen_filters[name] = params + + recursion_state = FormulaRecursionState( + effective_dims=effective_dims, + remaining_dims=remaining_dims, + available_filters=frozenset(chosen_filters.keys()), + top_level=True, + agg_level=random.randrange(1, len(remaining_dims) + 1), + ) + + measure_formulas = [self.generate_formula(recursion_state=recursion_state) for i in range(formula_cnt)] + + test_settings = TestSettings( + base_dimensions=base_dimensions, + measure_formulas=measure_formulas, + filters=chosen_filters, + ) + return test_settings + + def generate_setting_list(self, test_cnt: int) -> list[TestSettings]: + return [self.generate_test_settings() for i in range(test_cnt)] diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/runner.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/runner.py new file mode 100644 index 000000000..23eccd756 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/generation/runner.py @@ -0,0 +1,91 @@ +from 
__future__ import annotations + +from http import HTTPStatus +import json +from typing import Optional + +import attr +import pytest + +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_tests.db.data_api.result.complex_queries.generation.generator import TestSettings + + +class Error400(Exception): + pass + + +@attr.s +class PreGeneratedLODTestRunner: + dataset_id: str = attr.ib(kw_only=True) + control_api: SyncHttpDatasetApiV1 = attr.ib(kw_only=True) + data_api: SyncHttpDataApiV2 = attr.ib(kw_only=True) + + def get_measure_data( + self, + ds: Dataset, + test_settings: TestSettings, + measures: list[str], + ) -> dict[str, list[float]]: + result_resp = self.data_api.get_result( + dataset=ds, + fields=[ + *[ds.find_field(title=name) for name in test_settings.base_dimensions], + *[ds.find_field(title=name) for name in measures], + ], + filters=[ + ds.find_field(title=name).filter(op, values) for name, (op, values) in test_settings.filters.items() + ], + order_by=[ + *[ds.find_field(title=name) for name in test_settings.base_dimensions], + ], + fail_ok=True, + ) + + if result_resp.status_code == HTTPStatus.BAD_REQUEST: + raise Error400(result_resp.json) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + + def float_or_none(value: Optional[str]) -> Optional[float]: + if value is None: + return None + return float(value) + + result: dict[str, list[float]] = {} + dim_cnt = len(test_settings.base_dimensions) + for measure_idx, name in enumerate(measures): + result[name] = [float_or_none(row[measure_idx + dim_cnt]) for row in data_rows] + + return result + + def run_test(self, test_settings: TestSettings, ignore_400_error: bool = False) -> None: + ds = 
self.control_api.load_dataset(Dataset(id=self.dataset_id)).dataset + measure_fields = [ + (f"Measure {measure_idx}", measure_formula) + for measure_idx, measure_formula in enumerate(test_settings.measure_formulas) + ] + for measure_name, measure_formula in measure_fields: + ds.result_schema[measure_name] = ds.field(formula=measure_formula) + + try: + all_measures = self.get_measure_data( + ds=ds, test_settings=test_settings, measures=[name for name, _ in measure_fields] + ) + for measure_name, measure_formula in measure_fields: + one_measure = self.get_measure_data(ds=ds, test_settings=test_settings, measures=[measure_name]) + assert pytest.approx(all_measures[measure_name]) == one_measure[measure_name] + except Error400: + if not ignore_400_error: + raise + print("Ignoring Error400") + + def run_test_list(self, test_list: list[TestSettings], ignore_400_error: bool) -> None: + for test_idx, test_settings in enumerate(test_list): + print(f"\nTest # {test_idx}\n{json.dumps(test_settings.serialize())}\n") + self.run_test(test_settings, ignore_400_error=ignore_400_error) diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_compeng_corner_cases.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_compeng_corner_cases.py new file mode 100644 index 000000000..57a24e35b --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_compeng_corner_cases.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +from http import HTTPStatus + +import pytest + +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_tests.db.base import DefaultApiTestBase + + +class TestCompengCornerCases(DefaultApiTestBase): + def test_zero_division_in_compeng(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + 
"Measure Reg Div": "1 / (SUM(SUM([sales]) AMONG) * 0)", + "Measure Int Div": "DIV(1, SUM(COUNT([sales]) AMONG) * 0)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Measure Reg Div"), + ds.find_field(title="Measure Int Div"), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + row = data_rows[0] + assert row[0] is None + assert row[1] is None + + def test_integer_division_in_compeng(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "COUNT([sales])", + "Fraction": 'DB_CAST([Measure], "Int64") / DB_CAST(SUM([Measure] TOTAL), "integer")', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Fraction"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) > 1 + + fraction_sum = sum(float(row[1]) for row in data_rows) + assert pytest.approx(fraction_sum) == 1.0 diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_basic.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_basic.py new file mode 100644 index 000000000..3e2030151 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_basic.py @@ -0,0 +1,997 @@ +from collections import defaultdict +import datetime +from http import HTTPStatus +from typing import ( + Any, + Iterable, +) + +import pytest + +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import ( + get_data_rows, + get_regular_result_data, +) +from 
dl_api_lib_testing.connector.complex_queries import DefaultBasicExtAggregationTestSuite +from dl_api_lib_testing.helpers.data_source import data_source_settings_from_table +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import ( + UserDataType, + WhereClauseOperation, +) +from dl_core_testing.database import ( + C, + make_table, +) +from dl_query_processing.compilation.primitives import CompiledMultiQueryBase +from dl_query_processing.translation.multi_level_translator import MultiLevelQueryTranslator + + +class TestBasicExtendedAggregations(DefaultApiTestBase, DefaultBasicExtAggregationTestSuite): + # Add to the default set of tests + + def test_fixed_same_as_exclude(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "sales sum fx city": "SUM([sales] FIXED [city])", + "sales sum exc category": "SUM([sales] EXCLUDE [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="category"), + ds.find_field(title="sales sum"), + ds.find_field(title="sales sum fx city"), + ds.find_field(title="sales sum exc category"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + for row in data_rows: + assert float(row[3]) == float(row[4]) + + def test_nested_lod(self, control_api, data_api, saved_dataset): + """ + Check nested LODs. 
+ + Calculate the per-city average per-category sum of sales + (first the dimensions expand to allow `category` in, and then collapse + to the request default of just `city`) + """ + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "sales sum inc category": "SUM([sales] INCLUDE [category])", + "nested LOD AVG": "AVG([sales sum inc category])", + "nested LOD SUM": "SUM([sales sum inc category])", + }, + ) + + def get_expected_data() -> tuple[dict[str, float], dict[str, float]]: + # 1. Get ungrouped data + ungrouped_result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="category"), + ds.find_field(title="sales"), + ], + disable_group_by=True, + fail_ok=True, + ) + assert ungrouped_result_resp.status_code == HTTPStatus.OK, ungrouped_result_resp.json + + # 2. Group data by city and category and calculate the sum of sales per group + data_rows = get_data_rows(ungrouped_result_resp) + sum_by_city_and_cat: dict[tuple[str, str], float] = defaultdict(lambda: 0) + for row in data_rows: + sum_by_city_and_cat[(row[0], row[1])] += float(row[2]) + sum_list_by_city: dict[str, list[float]] = defaultdict(list) + for (city, cat), sum_value in sum_by_city_and_cat.items(): + sum_list_by_city[city].append(sum_value) + + # 3. 
Group by city and calculate average of sales sums + avg_sum_by_city = {city: sum(sum_list) / len(sum_list) for city, sum_list in sum_list_by_city.items()} + sum_sum_by_city = {city: sum(sum_list) for city, sum_list in sum_list_by_city.items()} + return avg_sum_by_city, sum_sum_by_city + + # Calculate expected results + exp_avg_by_city, exp_sum_by_city = get_expected_data() + # Get actual results + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + # ds.find_field(title='sales sum'), + ds.find_field(title="nested LOD AVG"), + ds.find_field(title="nested LOD SUM"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + # Compare values for each `city` + data_rows = get_data_rows(result_resp) + for row in data_rows: + city = row[0] + city_sum_avg = float(row[1]) + # Use approx here because DB and Python float calculations do not match exactly + assert city_sum_avg == pytest.approx(exp_avg_by_city[city]) + city_sum_sum = float(row[2]) + assert city_sum_sum == pytest.approx(exp_sum_by_city[city]) + + def test_lod_nested_zero_dim_aggregation_with_filter(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "sales sum total": "SUM(SUM([sales] INCLUDE [city]))", + }, + ) + + def get_total_value() -> float: + total_sum_result = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="sales sum")], + filters=[ + ds.find_field(title="category").filter( + WhereClauseOperation.EQ, + values=["Office Supplies"], + ), + ], + ) + data_rows = get_data_rows(total_sum_result) + return float(data_rows[0][0]) + + expected_total_value = get_total_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="sales sum total"), + ], + filters=[ + ds.find_field(title="category").filter( + WhereClauseOperation.EQ, + values=["Office Supplies"], + ), + ], 
+ fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + for row_idx, row in enumerate(data_rows): + assert float(row[0]) == expected_total_value, f"total sum doesn't match expected number in row {row_idx}" + + def test_double_agg_ratio(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "simple ratio": "SUM([sales]) / SUM([sales] FIXED)", + "double agg ratio": "SUM([sales]) / SUM(SUM([sales]) FIXED)", + }, + ) + + def get_total_value() -> float: + total_sum_result = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="sales sum")], + ) + data_rows = get_data_rows(total_sum_result) + return float(data_rows[0][0]) + + expected_total_value = get_total_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="sales sum"), + ds.find_field(title="simple ratio"), + ds.find_field(title="double agg ratio"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + for row_idx, row in enumerate(data_rows): + expected_value = float(row[1]) / expected_total_value + assert ( + float(row[3]) == float(row[2]) == expected_value + ), f"total sum doesn't match expected number in row {row_idx}" + + def test_nested_zero_dim_aggregation(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "city avg sales sum": "AVG(SUM([sales] INCLUDE [city]))", + }, + ) + + def get_expected_value() -> float: + # 1. 
Get per-city data + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="sales sum"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + # Calculate the average + return sum(float(row[1]) for row in data_rows) / len(data_rows) + + expected_avg_value = get_expected_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city avg sales sum"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + assert float(data_rows[0][0]) == pytest.approx(expected_avg_value) + + def test_lod_with_bfb(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "sales sum fx Date": "SUM([sales] FIXED [order_date])", + "sales sum fx Date BFB": "SUM([sales] FIXED [order_date] BEFORE FILTER BY [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="category"), + ds.find_field(title="sales sum"), + ds.find_field(title="sales sum fx Date"), + ds.find_field(title="sales sum fx Date BFB"), + ], + filters=[ + ds.find_field(title="category").filter( + WhereClauseOperation.EQ, + values=["Office Supplies"], + ), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + sales_by_date = defaultdict(lambda: 0) + lod_sales_by_date = {} + case_1_cnt = 0 + case_2_cnt = 0 + for row in data_rows: + sales_by_date[row[0]] += float(row[2]) + lod_sales_by_date[row[0]] = float(row[4]) + # BFB sum is equal to non-BFB sum only when there is exactly one category value, + # so it also equals the regular per-row sum + if float(row[4]) == 
float(row[3]): + assert float(row[4]) == float(row[3]) == float(row[2]) + case_1_cnt += 1 + else: # Otherwise BFB is greater than non-BFB + assert float(row[4]) > float(row[3]) + case_2_cnt += 1 + + assert lod_sales_by_date == pytest.approx(lod_sales_by_date) + assert case_1_cnt > 0 + assert case_2_cnt > 0 + + def test_workaround_for_inconsistent_agg(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "My Field": "SUM(SUM([sales] INCLUDE [Order ID]) / SUM([sales] FIXED))", + }, + ) + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="My Field"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + assert pytest.approx(float(data_rows[0][0])) == 1 + + def test_lod_nested_multiple_times(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + # Plain and simple row count + "Agg 1": "COUNT([sales])", + # All the others are pretty much the same thing, but with multiple aggregation levels + "Agg 2": "SUM(COUNT([sales] INCLUDE [city]))", + "Agg 3": "SUM(SUM(COUNT([sales] INCLUDE [city]) INCLUDE [category]))", + "Agg 4": "SUM(SUM(SUM(COUNT([sales] INCLUDE [city]) INCLUDE [category]) INCLUDE [order_date]))", + "Agg 5": ( + "SUM(SUM(SUM(SUM(COUNT([sales] INCLUDE [city]) INCLUDE [category]) " + "INCLUDE [order_date]) INCLUDE [region]))" + ), + }, + ) + + def get_single_row_data(field_names: Iterable[str]) -> tuple[int, ...]: + result_resp = data_api.get_result( + dataset=ds, + fail_ok=True, + fields=[ds.find_field(title=field_name) for field_name in field_names], + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1, "There must be exactly 1 row of data" + return tuple(int(item) 
for item in data_rows[0])
+
+        value_1 = get_single_row_data(["Agg 1"])[0]
+        value_2 = get_single_row_data(["Agg 2"])[0]
+        assert value_2 == value_1
+        value_3 = get_single_row_data(["Agg 3"])[0]
+        assert value_3 == value_1
+        value_4 = get_single_row_data(["Agg 4"])[0]
+        assert value_4 == value_1
+        value_5 = get_single_row_data(["Agg 5"])[0]
+        assert value_5 == value_1
+
+        # Check the two at a time with the order of aggregation increasing by 1
+        simultaneous_values = get_single_row_data(["Agg 1", "Agg 2"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        simultaneous_values = get_single_row_data(["Agg 2", "Agg 3"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        simultaneous_values = get_single_row_data(["Agg 3", "Agg 4"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        simultaneous_values = get_single_row_data(["Agg 4", "Agg 5"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        # Check with diffs 2, 3 and 4
+        simultaneous_values = get_single_row_data(["Agg 1", "Agg 3"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        simultaneous_values = get_single_row_data(["Agg 1", "Agg 4"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        simultaneous_values = get_single_row_data(["Agg 1", "Agg 5"])
+        assert len(set(simultaneous_values)) == 1
+        assert next(iter(simultaneous_values)) == value_1
+
+        # The killer query - do them all at once!
+ simultaneous_values = get_single_row_data(["Agg 1", "Agg 2", "Agg 3", "Agg 4", "Agg 5"]) + assert len(set(simultaneous_values)) == 1 + assert next(iter(simultaneous_values)) == value_1 + + def test_two_same_dim_lods_in_different_subqueries(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "same dim LOD": "SUM([sales] FIXED [city], [category])", + "by city": "SUM(SUM([same dim LOD] FIXED [city]))", + "by category": "SUM(SUM([same dim LOD] FIXED [category]))", + }, + ) + + def get_total_value() -> float: + total_sum_result = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="sales sum")], + ) + data_rows = get_data_rows(total_sum_result) + return float(data_rows[0][0]) + + total_value = get_total_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="by city"), + ds.find_field(title="by category"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + by_city, by_category = float(data_rows[0][0]), float(data_rows[0][1]) + assert by_city == pytest.approx(by_category) == pytest.approx(total_value) + + def test_two_total_sums_with_different_nested_includes(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "By City": "SUM(SUM([sales] INCLUDE [city]))", + "By Category": "SUM(SUM([sales] INCLUDE [category]))", + }, + ) + + def get_total_value() -> float: + total_sum_result = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="Sales Sum")], + ) + data_rows = get_data_rows(total_sum_result) + return float(data_rows[0][0]) + + total_value = get_total_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="By City"), + 
ds.find_field(title="By Category"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + by_city, by_category = float(data_rows[0][0]), float(data_rows[0][1]) + assert by_city == pytest.approx(by_category) == pytest.approx(total_value) + + def test_agg_with_lod_over_window_function(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Lod Over Win 1": "MIN(RANK_UNIQUE(SUM([sales] INCLUDE [city]) TOTAL) FIXED)", + "Lod Over Win 2": "MAX(RANK_UNIQUE(SUM([sales] INCLUDE [city]) TOTAL) FIXED)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Lod Over Win 1"), + ds.find_field(title="Lod Over Win 2"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) > 10 # There should be a lot of them + for row in data_rows: + assert int(row[1]) == 1 + assert int(row[2]) == len(data_rows) + + def test_lod_compatibility_error(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + exp_status=HTTPStatus.BAD_REQUEST, + formulas={ + "Invalid Field": "SUM(AVG([sales] INCLUDE [city]) - AVG([sales] INCLUDE [category]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Invalid Field"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + assert result_resp.bi_status_code == "ERR.DS_API.FORMULA.VALIDATION.LOD.INCOMPATIBLE_DIMENSIONS" + + def test_dimension_with_single_and_double_agg(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + 
formulas={ + "Single Agg": "SUM([sales])", + "Double Agg": "SUM(SUM([sales] INCLUDE [city]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Single Agg"), + ], + ) + data_rows = get_data_rows(result_resp) + expected_by_date = {row[0]: float(row[1]) for row in data_rows} + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Single Agg"), + ds.find_field(title="Double Agg"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == len(expected_by_date) + for row in data_rows: + date_str, single_agg_value, double_agg_value = row[0], float(row[1]), float(row[2]) + assert single_agg_value == pytest.approx(double_agg_value) == pytest.approx(expected_by_date[date_str]) + + def test_lod_with_ago(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Lod With Ago": """ + AVG( + SUM( /* Doesn't really aggregate anything + * because it has the same dims as the nested SUMs */ + SUM([sales]) - ZN(AGO(SUM([sales]), [order_date], "day")) + INCLUDE [order_date] + ) + ) + """, + }, + ) + + def get_expected_value() -> float: + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Sales Sum"), + ], + ) + data_rows = get_data_rows(result_resp) + sales_by_date = {row[0]: float(row[1]) for row in data_rows} + + total_sum = 0.0 + for date_str in sorted(sales_by_date): + ago_date_str = str(datetime.date.fromisoformat(date_str) - datetime.timedelta(days=1)) + sales_value = sales_by_date[date_str] + ago_sales_value = sales_by_date.get(ago_date_str, 0) + total_sum += sales_value - ago_sales_value + + return total_sum / len(sales_by_date) + + expected_value 
= get_expected_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Lod With Ago"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + actual_value = float(data_rows[0][0]) + assert actual_value == pytest.approx(expected_value) + + def test_toplevel_lod_extra_dimension_error(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Extra Dim Field": "AVG([sales] INCLUDE [category])", + "Missing Dim Field": "AVG([sales] FIXED)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Extra Dim Field"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + assert result_resp.bi_status_code == "ERR.DS_API.FORMULA.VALIDATION.LOD.INVALID_TOPLEVEL_DIMENSIONS" + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Missing Dim Field"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_double_agg_no_lod(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Double Agg": "COUNT(COUNT([sales]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Double Agg"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + assert len(data_rows) == 1 + assert int(data_rows[0][0]) == 1 + + def test_bfb_no_lod(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Bfb No Lod": 
"SUM([sales] BEFORE FILTER BY [category])", + }, + ) + + result_resp = data_api.get_result(dataset=ds, fields=[ds.find_field(title="Sales Sum")]) + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + expected_total_value = float(data_rows[0][0]) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Bfb No Lod"), + ], + filters=[ + ds.find_field(title="category").filter(op=WhereClauseOperation.EQ, values=["Office Supplies"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + assert len(data_rows) == 1 + assert float(data_rows[0][0]) == expected_total_value + + def test_regular_agg_with_bfb_agg(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Agg Bfb": "SUM([sales] EXCLUDE [category] BEFORE FILTER BY [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="region"), + ds.find_field(title="category"), + ds.find_field(title="Sales Sum"), + ], + filters=[ + ds.find_field(title="category").filter(WhereClauseOperation.EQ, ["Office Supplies"]), + ], + ) + data_rows = get_data_rows(result_resp) + expected_by_region = {row[0]: float(row[2]) for row in data_rows} + + # Same request, but with `Agg Bfb` + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="region"), + ds.find_field(title="category"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Agg Bfb"), + ], + filters=[ + ds.find_field(title="category").filter(WhereClauseOperation.EQ, ["Office Supplies"]), + ], + ) + data_rows = get_data_rows(result_resp) + actual_by_region = {row[0]: float(row[2]) for row in data_rows} + + assert actual_by_region == expected_by_region + + def test_double_aggregation_optimization(self, control_api, data_api, saved_dataset, monkeypatch): + 
""" + Test double aggregation optimizations in formulas like `SUM(SUM([sales]))` (-> `SUM([sales])`) + """ + data_container: dict[str, Any] = {} + dataset_id = saved_dataset.id + + def _log_query_complexity_stats(self, compiled_multi_query: CompiledMultiQueryBase) -> None: + data_container.update( + { + "query_count": compiled_multi_query.query_count(), + } + ) + + # Capture query complexity info + monkeypatch.setattr(MultiLevelQueryTranslator, "_log_query_complexity_stats", _log_query_complexity_stats) + + def check_agg(first_agg_name: str, second_agg_name: str) -> None: + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset_id=dataset_id, + formulas={ + f"{first_agg_name} {second_agg_name} Opt": f"{second_agg_name}({first_agg_name}([sales]))", + f"{first_agg_name} {second_agg_name}": f"{second_agg_name}({first_agg_name}([sales])+0)", + }, + ) + + data = get_regular_result_data( + ds, + data_api, + field_names=["category", f"{first_agg_name} {second_agg_name}"], + )[f"{first_agg_name} {second_agg_name}"] + assert data_container["query_count"] >= 3 + data_opt = get_regular_result_data( + ds, + data_api, + field_names=["category", f"{first_agg_name} {second_agg_name} Opt"], + )[f"{first_agg_name} {second_agg_name} Opt"] + assert data_container["query_count"] == 1 + assert sorted([float(val) for val in data_opt]) == pytest.approx( + sorted([float(val) for val in data]) + ), f"Got different values for {first_agg_name}({second_agg_name}(...))" + + for first_agg_name in ("avg", "count"): + for second_agg_name in ("sum", "any", "max", "min", "count", "countd"): + # no optimization for AVG + check_agg(first_agg_name.upper(), second_agg_name.upper()) + + def test_lod_fixed_markup(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Bold city": "BOLD([city])", + "Sales Sum fx": "AVG(SUM([sales] FIXED))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + 
ds.find_field(title="Bold city"), + ds.find_field(title="Sales Sum fx"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_bug_bi_3425_deeply_nested_bfb(self, control_api, data_api, db, saved_connection_id): + raw_data = [ + {"id": 10, "city": "New York", "category": "Office Supplies", "sales": 1}, + {"id": 11, "city": "New York", "category": "Office Supplies", "sales": 10}, + {"id": 12, "city": "New York", "category": "Furniture", "sales": 100}, + {"id": 13, "city": "New York", "category": "Furniture", "sales": 1}, + {"id": 14, "city": "New Rochelle", "category": "Office Supplies", "sales": 10000}, + {"id": 15, "city": "New Rochelle", "category": "Office Supplies", "sales": 100000}, + {"id": 16, "city": "New Rochelle", "category": "Furniture", "sales": 10000}, + {"id": 17, "city": "New Rochelle", "category": "Furniture", "sales": 10000000}, + {"id": 18, "city": "Detroit", "category": "Office Supplies", "sales": 1}, + {"id": 19, "city": "Detroit", "category": "Office Supplies", "sales": 100}, + {"id": 20, "city": "Detroit", "category": "Furniture", "sales": 10000}, + {"id": 21, "city": "Detroit", "category": "Furniture", "sales": 1000000}, + ] + columns = [ + C("id", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["id"]), + C("city", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["city"]), + C("category", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["category"]), + C("sales", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["sales"]), + ] + db_table = make_table(db, columns=columns, rows=len(raw_data)) + + ds = Dataset() + ds.sources["source_1"] = ds.source( + connection_id=saved_connection_id, **data_source_settings_from_table(db_table) + ) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=ds, + formulas={ + "city_1st": 'GET_ITEM(SPLIT([city], " "), 1)', + "sales_sum_bfb": 
"SUM(SUM([sales]) EXCLUDE [city] BEFORE FILTER BY [city_1st])", + "sales_sum_if": 'SUM_IF([sales], [city_1st] = "New")', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="sales_sum_if"), + ds.find_field(title="sales_sum_bfb"), + ], + order_by=[ + ds.find_field(title="city"), + ], + filters=[ + ds.find_field(title="city_1st").filter(WhereClauseOperation.EQ, ["New"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows_1 = get_data_rows(result_resp) + data_rows_1_stripped = [row[:2] for row in data_rows_1] # Strip off `sales_sum_bfb` values + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="sales_sum_if"), + # This time without `sales_sum_bfb` + ], + order_by=[ + ds.find_field(title="city"), + ], + filters=[ + ds.find_field(title="city_1st").filter(WhereClauseOperation.EQ, ["New"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows_2 = get_data_rows(result_resp) + + assert data_rows_1_stripped == data_rows_2 + + def test_lod_only_in_filter(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Daily Profit Sum": "AVG(SUM([sales] INCLUDE [order_date]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Sales Sum"), + ], + filters=[ + ds.find_field(title="Daily Profit Sum").filter(WhereClauseOperation.GT, ["10.0"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) > 0 + + def test_lod_in_filter_and_select(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + 
dataset=saved_dataset, + formulas={ + "Daily Profit Sum": "AVG(SUM([sales] INCLUDE [order_date]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Daily Profit Sum"), + ], + filters=[ + ds.find_field(title="Daily Profit Sum").filter(WhereClauseOperation.GT, ["10.0"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) > 0 + + def test_replace_original_dim_with_another(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "AVG(SUM([sales] FIXED [city]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 3 # There are 3 categories + + def test_bi_4534_inconsistent_aggregation(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sum": "SUM([sales])", + "Measure": "SUM([Sum]) - SUM(AGO([Sum], [order_date]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK + + def test_bi_4652_measure_filter_with_total_in_select(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "SUM([sales])", + "Total Measure": "SUM([sales] FIXED)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Total Measure"), + ], + filters=[ + 
ds.find_field(title="Measure").filter(WhereClauseOperation.GT, ["10.0"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + dim_values = [row[0] for row in data_rows] + assert len(dim_values) == len(set(dim_values)), "Dimension values are not unique" diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_corner_cases.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_corner_cases.py new file mode 100644 index 000000000..ae965934a --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_ext_agg_corner_cases.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +from http import HTTPStatus + +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_tests.db.base import DefaultApiTestBase + + +class TestExtendedAggregationCornerCases(DefaultApiTestBase): + def test_lod_with_const_dim(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Const Dim": '"something"', + "Measure": "SUM([sales] EXCLUDE [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Const Dim"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + assert len(data_rows) == 3 + assert len(set(row[2] for row in data_rows)) == 1 # They should all be the same + + def test_lod_with_date_dimension(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Date Dim": 'DB_CAST([order_date], "Date") + 1', + "Measure": "SUM([sales] EXCLUDE 
[category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Date Dim"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + assert len(data_rows) > 1 + + def test_duplicate_main_dimensions(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Category 2": "[category]", + "Measure": "AVG(SUM([sales] INCLUDE [city]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Category 2"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 3 # There are 3 categories + for row in data_rows: + assert row[0] == row[1] + + def test_duplicate_lod_dimensions(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "City 2": "[city]", + "Measure Double Dim": "AVG(SUM([sales] INCLUDE [city], [City 2]))", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Measure Double Dim"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 3 # There are 3 categories + + def test_lod_include_measure(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "SUM(SUM([sales] INCLUDE SUM([profit])))", + }, + exp_status=HTTPStatus.BAD_REQUEST, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + 
ds.find_field(title="category"), + ds.find_field(title="Measure"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_generated.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_generated.py new file mode 100644 index 000000000..1bcf6e45f --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_generated.py @@ -0,0 +1,122 @@ +import pytest + +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_api_lib_tests.db.data_api.result.complex_queries.generation.generator import ( + AutoGeneratorSettings, + LODTestAutoGenerator, + TestSettings, +) +from dl_api_lib_tests.db.data_api.result.complex_queries.generation.runner import PreGeneratedLODTestRunner +from dl_constants.enums import WhereClauseOperation + + +GENERATED_TESTS = [ + { + "base_dimensions": ["category"], + "measure_formulas": [ + "SUM(AVG(AVG(SUM(AVG(SUM(AVG([sales] INCLUDE [region]) INCLUDE [ship_date]) INCLUDE [city]) INCLUDE [sub_category]) INCLUDE [order_date]) INCLUDE [ship_mode] BEFORE FILTER BY [category]))", # noqa + "AVG(SUM(AVG(SUM(SUM([sales] INCLUDE [region]) INCLUDE [city]) INCLUDE [sub_category]) INCLUDE [ship_mode]))", + ], + "filters": {"category": {"op": "EQ", "values": ["Furniture"]}}, + }, + { + "base_dimensions": ["sub_category", "city"], + "measure_formulas": [ + "SUM(SUM([sales] INCLUDE [region]) BEFORE FILTER BY [profit])", + "AVG(SUM(AVG(AVG([sales] INCLUDE [region]) INCLUDE [order_date]) INCLUDE [ship_mode]))", + "SUM([sales])", + "AVG(SUM(AVG([sales] INCLUDE [order_date]) INCLUDE [category]) BEFORE FILTER BY [profit])", + ], + "filters": {"profit": {"op": "GT", "values": ["1.0"]}}, + }, + { + "base_dimensions": ["region"], + "measure_formulas": [ + "SUM(SUM(SUM(SUM([sales] INCLUDE [category]) INCLUDE [city] BEFORE FILTER BY [profit]) INCLUDE [ship_date]))", + "AVG([sales])", + ], + "filters": 
{"profit": {"op": "GT", "values": ["1.0"]}}, + }, + { + "base_dimensions": ["region"], + "measure_formulas": [ + "AVG(AVG(AVG(AVG([sales] INCLUDE [category]) INCLUDE [ship_mode]) INCLUDE [sub_category]))", + "SUM(AVG(AVG(SUM(AVG(SUM(SUM([sales] INCLUDE [order_date]) INCLUDE [city]) INCLUDE [sub_category]) INCLUDE [ship_date]) INCLUDE [category]) INCLUDE [ship_mode] BEFORE FILTER BY [order_date]))", # noqa + "SUM(AVG([sales] INCLUDE [city]) BEFORE FILTER BY [order_date])", + ], + "filters": {"order_date": {"op": "GT", "values": ["2014-03-01"]}}, + }, + { + "base_dimensions": ["category"], + "measure_formulas": [ + "SUM(AVG([sales] INCLUDE [ship_mode] BEFORE FILTER BY [category]))", + "SUM(SUM(SUM(SUM(SUM(AVG(SUM([sales] INCLUDE [ship_mode] BEFORE FILTER BY [category]) INCLUDE [region]) INCLUDE [order_date]) INCLUDE [city]) INCLUDE [ship_date]) INCLUDE [sub_category]))", # noqa + "AVG(AVG(SUM([sales] INCLUDE [ship_mode]) INCLUDE [region] BEFORE FILTER BY [category]))", + ], + "filters": {"category": {"op": "EQ", "values": ["Furniture"]}}, + }, + { + "base_dimensions": ["region"], + "measure_formulas": [ + "SUM(SUM(AVG(SUM(SUM(AVG(AVG([sales] INCLUDE [order_date]) INCLUDE [sub_category]) INCLUDE [city]) INCLUDE [ship_mode]) INCLUDE [ship_date]) INCLUDE [category]) BEFORE FILTER BY [category])", # noqa + "SUM(AVG(AVG(AVG(SUM(AVG([sales] INCLUDE [ship_mode]) INCLUDE [city]) INCLUDE [sub_category]) INCLUDE [ship_date]) INCLUDE [category]))", + "SUM(AVG(AVG(SUM(SUM([sales] INCLUDE [ship_mode]) INCLUDE [city] BEFORE FILTER BY [category]) INCLUDE [ship_date]) INCLUDE [category]))", + "AVG(AVG(SUM(AVG([sales] INCLUDE [ship_date]) INCLUDE [ship_mode] BEFORE FILTER BY [category]) INCLUDE [city]))", + ], + "filters": {"category": {"op": "EQ", "values": ["Furniture"]}}, + }, + { + "base_dimensions": ["region"], + "measure_formulas": [ + "AVG(SUM([sales] INCLUDE [city]))", + "SUM(AVG(AVG(SUM(SUM(AVG([sales] INCLUDE [order_date]) INCLUDE [ship_date] BEFORE FILTER BY [category]) 
INCLUDE [city]) INCLUDE [category]) INCLUDE [sub_category]))", + "AVG(AVG(SUM(AVG(SUM([sales] INCLUDE [order_date]) INCLUDE [ship_date]) INCLUDE [category]) INCLUDE [ship_mode]) BEFORE FILTER BY [category])", + "SUM(SUM([sales] INCLUDE [order_date]))", + ], + "filters": {"category": {"op": "EQ", "values": ["Furniture"]}}, + }, +] + + +class TestPreGeneratedComplexQueryTests(DefaultApiTestBase): + @pytest.mark.parametrize("raw_test_settings", GENERATED_TESTS) + def test_pre_generated(self, control_api, data_api, dataset_id, raw_test_settings): + test_runner = PreGeneratedLODTestRunner(control_api=control_api, data_api=data_api, dataset_id=dataset_id) + test_runner.run_test( + test_settings=TestSettings.deserialize(raw_test_settings), + ) + + +class TestNewGeneratedComplexQueryTests(DefaultApiTestBase): + @pytest.mark.skip # Should only be used for generating new tests + def test_new_auto_generated(self, control_api, data_api, dataset_id): + autogen_settings = AutoGeneratorSettings( + dimensions=( + "category", + "city", + "order_date", + "region", + "ship_date", + "ship_mode", + "sub_category", + ), + dates=frozenset(("order_date", "ship_date")), + filters={ + "city": (WhereClauseOperation.STARTSWITH, ["New"]), + "order_date": (WhereClauseOperation.GT, ["2014-03-01"]), + "profit": (WhereClauseOperation.GT, ["1.0"]), + "category": (WhereClauseOperation.EQ, ["Furniture"]), + }, + dimension_cnts=(1, 2), + aggregations=("AVG", "SUM"), + formula_cnts=(2, 3, 4), + measure_base_expressions=("[sales]",), + filter_probability=0.1, + bfb_probability=0.2, + lookup_probability=0.1, + ) + auto_gen = LODTestAutoGenerator(settings=autogen_settings) + setting_list = auto_gen.generate_setting_list(100) + + test_runner = PreGeneratedLODTestRunner(control_api=control_api, data_api=data_api, dataset_id=dataset_id) + test_runner.run_test_list(setting_list, ignore_400_error=False) diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_lookup_functions.py 
b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_lookup_functions.py new file mode 100644 index 000000000..228c1d4dd --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_lookup_functions.py @@ -0,0 +1,533 @@ +import datetime +from http import HTTPStatus +from typing import Optional + +from dl_api_client.dsmaker.primitives import WhereClause +from dl_api_client.dsmaker.shortcuts.dataset import ( + add_formulas_to_dataset, + create_basic_dataset, +) +from dl_api_client.dsmaker.shortcuts.range_data import get_range_values +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_testing.connector.complex_queries import DefaultBasicLookupFunctionTestSuite +from dl_api_lib_testing.helpers.data_source import data_source_settings_from_table +from dl_api_lib_testing.helpers.lookup_checkers import ( + check_ago_data, + check_at_date_data, + read_date, +) +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import WhereClauseOperation +from dl_core_testing.database import make_table + + +class TestBasicLookupFunctions(DefaultApiTestBase, DefaultBasicLookupFunctionTestSuite): + def test_ago(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Sales Sum Yesterday": 'AGO([Sales Sum], [order_date], "day", 1)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="order_date"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Sales Sum Yesterday"), + ], + order_by=[ + ds.find_field(title="category"), + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="category").filter(op=WhereClauseOperation.EQ, values=["Office Supplies"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = 
get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=1, value_idx=2, ago_idx=3, day_offset=1) + + def test_ago_variants(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Ago 2": "AGO([Sales Sum], [order_date])", + "Ago 3 unit": 'AGO([Sales Sum], [order_date], "day")', + "Ago 3 number": "AGO([Sales Sum], [order_date], 1)", + "Ago 4": 'AGO([Sales Sum], [order_date], "day", 1)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Ago 2"), + ds.find_field(title="Ago 3 unit"), + ds.find_field(title="Ago 3 number"), + ds.find_field(title="Ago 4"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=3, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=4, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=5, day_offset=1) + + def test_ago_errors(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Ago Sum": 'AGO([Sales Sum], [order_date], "day", 1)', + }, + ) + + # Dimension in AGO doesn't match dimensions in the query + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Ago Sum"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + # FIXME: The more specific error for AGO is temporarily 
reverted to the generic inconsistent agg error + assert result_resp.bi_status_code == "ERR.DS_API.FORMULA.VALIDATION.AGG.INCONSISTENT" + # assert result_resp.bi_status_code == 'ERR.DS_API.FORMULA.VALIDATION.LOOKUP_FUNC.UNSELECTED_DIMENSION' + + # There are no dimensions in the query + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Ago Sum"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + # FIXME: Same as above + assert result_resp.bi_status_code == "ERR.DS_API.FORMULA.VALIDATION.AGG.INCONSISTENT" + # assert result_resp.bi_status_code == 'ERR.DS_API.FORMULA.VALIDATION.LOOKUP_FUNC.UNSELECTED_DIMENSION' + + def test_ago_in_compeng(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales RSum": "RSUM(SUM([sales]))", + "Ago RSum": 'AGO([Sales RSum], [order_date], "day", 1)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Sales RSum"), + ds.find_field(title="Ago RSum"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=1) + + def test_dimensions_in_ago_identical_to_dims_in_query(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "order_date Clone": "[order_date]", + "Group Sales": "SUM([sales])", + "Ago Along Clone": "AGO([Group Sales], [order_date Clone])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Ago Along Clone"), + ], + order_by=[ + ds.find_field(title="order_date"), + 
], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=1) + + def test_ago_with_non_ago_aggregation(self, control_api, data_api, saved_dataset): + """ + Check that an expression containing ago and a simple aggregation + at the same level is sliced correctly. + """ + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Ago": "AGO([Group Sales], [order_date])", + "Ago And not Ago Agg": "[Group Sales] - [Ago]", + # In this formula the left part ([Group Sales]) has no AGO (QueryFork) nodes in it, + # while the right part ([Ago]) does. + # This means that fork slicing will be used, but it needs to slice above aggregations (not below) + # in the parts of the expression where there are no QueryFork nodes + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Ago And not Ago Agg"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_ago_with_avatarless_measure(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Avatarless Measure": "COUNT()", + "Ago": "AGO([Avatarless Measure], [order_date])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Avatarless Measure"), + ds.find_field(title="Ago"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, 
day_offset=1) + + def test_ago_with_ignore_dimensions(self, control_api, data_api, saved_connection_id, db): + db_table = make_table(db) + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "sum": "SUM([int_value])", + # Create a dimension that would cause regular AGO to always return NULL: + "day": "DAY([date_value])", + # Regular AGO: + "ago": "AGO([sum], [date_value])", + # AGO that ignores [day] in JOIN + "ago_igdim": "AGO([sum], [date_value] IGNORE DIMENSIONS [day])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago"), + ds.find_field(title="ago_igdim"), + # "Bad" dimension: + ds.find_field(title="day"), + ], + order_by=[ + ds.find_field(title="date_value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + # [ago] should have all NULL values because of the "bad" dimension [day] + for row_idx, row in enumerate(data_rows): + assert row[2] is None, f"Expected a None value in row {row_idx}, but got {row[2]}" + + # [ago_igdim] should act the same way as a regular AGO under regular circumstances + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=3, day_offset=1) + + def test_at_date(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Sales Sum Fixed": "AT_DATE([Sales Sum], [order_date], #2014-02-02#)", + "Sales Sum Nullable": 'AT_DATE([Sales Sum], [order_date], DATE_PARSE("2014-02-02"))', + "Sales Sum Trunc": 'AT_DATE([Sales Sum], [order_date], DATETRUNC([order_date], "month"))', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + 
ds.find_field(title="order_date"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Sales Sum Fixed"), + ds.find_field(title="Sales Sum Nullable"), + ds.find_field(title="Sales Sum Trunc"), + ], + order_by=[ + ds.find_field(title="category"), + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="category").filter(op=WhereClauseOperation.EQ, values=["Office Supplies"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_at_date_data( + data_rows=data_rows, + date_idx=1, + value_idx=2, + ago_idx=3, + ago_date_callable=lambda d: datetime.date(2014, 2, 2), + ) + check_at_date_data( + data_rows=data_rows, + date_idx=1, + value_idx=2, + ago_idx=4, + ago_date_callable=lambda d: datetime.date(2014, 2, 2), + ) + check_at_date_data( + data_rows=data_rows, date_idx=1, value_idx=2, ago_idx=5, ago_date_callable=lambda d: d.replace(day=1) + ) + + def test_at_date_with_measure_as_third_arg(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Profit Sum": "SUM([profit])", + "Sum At Date": "AT_DATE([Sales Sum], [order_date], #2014-02-02# + [Profit Sum]*0)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="order_date"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Sum At Date"), + ], + order_by=[ + ds.find_field(title="category"), + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="category").filter(op=WhereClauseOperation.EQ, values=["Office Supplies"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_at_date_data( + data_rows=data_rows, + date_idx=1, + value_idx=2, + ago_idx=3, + ago_date_callable=lambda d: datetime.date(2014, 2, 
2), + ) + + def test_ago_with_bfb(self, control_api, data_api, saved_connection_id, db): + day_offset = 3 + + db_table = make_table(db) + + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "sum": "SUM([int_value])", + "date_duplicate": "[date_value]", + "ago": f'AGO([sum], [date_value], "day", {day_offset})', + "ago_bfb": (f'AGO([sum], [date_value], "day", {day_offset} ' f"BEFORE FILTER BY [date_duplicate])"), + "ago_bfb_nested": ( + f'AGO(AGO([sum], [date_value], "day", 1), [date_value], "day", {day_offset - 1} ' + "BEFORE FILTER BY [date_duplicate])" + ), + }, + ) + + min_date_s, _ = get_range_values(data_api.get_value_range(dataset=ds, field=ds.find_field(title="date_value"))) + min_date = read_date(min_date_s) + gte_date_s = (min_date + datetime.timedelta(days=day_offset)).isoformat() + + def get_data_rows_with_filter( + filters: Optional[list[WhereClause]] = None, + ) -> list[list]: + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago"), + ds.find_field(title="ago_bfb"), + ds.find_field(title="ago_bfb_nested"), + ], + order_by=[ + ds.find_field(title="date_value"), + ], + filters=filters, + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + return get_data_rows(result_resp) + + # Check default BFB (main dimension) in AGO without explicit BFB + data_rows = get_data_rows_with_filter( + filters=[ + # Apply filter to main dimension - it should be BFBed by default + ds.find_field(title="date_value").filter( + # > min_date + {day_offset} days + op=WhereClauseOperation.GTE, + values=[gte_date_s], + ), + ] + ) + check_ago_data( + data_rows=data_rows, + date_idx=0, + value_idx=1, + ago_idx=2, + day_offset=day_offset, + allow_missing_date_values=True, + ) + # Now make sure there really are non-NULL values + for 
row_idx in range(1, day_offset): # Skip 0th row because because it has no AGO value + assert data_rows[row_idx][2] is not None, f"Expected a non-None value in row {row_idx}" + + # Now check the explicit BFB (with filter for non-main dimension) + data_rows = get_data_rows_with_filter( + filters=[ + # Filter has to be applied to a dimension other than the one in AGO + ds.find_field(title="date_duplicate").filter( + # > min_date + {day_offset} days + op=WhereClauseOperation.GTE, + values=[gte_date_s], + ), + ] + ) + check_ago_data( + data_rows=data_rows, + date_idx=0, + value_idx=1, + ago_idx=2, + day_offset=day_offset, + ) + # Omit the first 2 rows because their values are not None + check_ago_data( + data_rows=data_rows, + date_idx=0, + value_idx=1, + ago_idx=3, + day_offset=day_offset, + allow_missing_date_values=True, + ) + check_ago_data( + data_rows=data_rows, + date_idx=0, + value_idx=1, + ago_idx=4, + day_offset=day_offset, + allow_missing_date_values=True, + ) + # Now make sure there really are non-NULL values + for row_idx in range(1, day_offset): # Skip 0th row because because it has no AGO value + assert data_rows[row_idx][3] is not None, f"Expected a non-None value in row {row_idx}" + assert data_rows[row_idx][4] is not None, f"Expected a non-None value in row {row_idx}" + + def test_ago_with_corner_case_dimensions(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "SUM([sales])", + "Invalid Field": "[Whaaa?...]", + "Invalid AGO": "AGO([Measure], [Invalid Field])", + }, + exp_status=HTTPStatus.BAD_REQUEST, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Invalid Field"), + ds.find_field(title="Invalid AGO"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST + + def test_ago_with_different_measures(self, control_api, data_api, saved_dataset): + ds 
= add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure 1": "SUM([sales])", + "Measure 2": "COUNTD([sales])", + "Ago Measure 1": "AGO([Measure 1], [order_date])", + "Ago Measure 2": "AGO([Measure 2], [order_date])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Measure 1"), + ds.find_field(title="Measure 2"), + ds.find_field(title="Ago Measure 1"), + ds.find_field(title="Ago Measure 2"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=3, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=2, ago_idx=4, day_offset=1) diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_window_functions.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_window_functions.py new file mode 100644 index 000000000..c59d8334a --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/complex_queries/test_window_functions.py @@ -0,0 +1,784 @@ +from __future__ import annotations + +from collections import defaultdict +from http import HTTPStatus + +import pytest + +from dl_api_client.dsmaker.api.data_api import HttpDataApiResponse +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.dataset import add_formulas_to_dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_testing.connector.complex_queries import DefaultBasicWindowFunctionTestSuite +from dl_api_lib_testing.helpers.data_source import data_source_settings_from_table +from dl_api_lib_testing.helpers.multi_query import ( + MultiQueryInterceptor, + count_joins, +) +from dl_api_lib_tests.db.base import DefaultApiTestBase +from 
dl_constants.enums import ( + OrderDirection, + UserDataType, + WhereClauseOperation, +) +from dl_core_testing.database import ( + C, + make_table, +) +from dl_query_processing.compilation.primitives import CompiledMultiQueryBase +from dl_query_processing.enums import ExecutionLevel + + +class TestBasicLookupFunctions(DefaultApiTestBase, DefaultBasicWindowFunctionTestSuite): + def test_winfunc_lod_combination(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Win Func": "SUM(SUM([sales]) TOTAL)", + "Lod Func": "SUM(SUM([sales]) FIXED)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="region"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Win Func"), + ds.find_field(title="Lod Func"), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + total_value = sum(float(row[1]) for row in data_rows) + + for row in data_rows: + win_value = float(row[2]) + lod_value = float(row[3]) + assert pytest.approx(win_value) == total_value + assert pytest.approx(lod_value) == total_value + + def test_window_function_filter(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Rank of Sales": 'RANK_UNIQUE([Group Sales], "asc" TOTAL)', + }, + ) + + def get_result(filters: list, window_in_select: bool = True) -> HttpDataApiResponse: + select_fields = [ + ds.find_field(title="Group Sales"), + ds.find_field(title="Rank of Sales"), + ] + if not window_in_select: + # for testing queries with window functions only in the filter section + select_fields = select_fields[:-1] + + result_resp = data_api.get_result( + dataset=ds, + fields=select_fields + + [ + ds.find_field(title="order_date"), + 
ds.find_field(title="city"), + ], + filters=filters, + fail_ok=True, + ) + return result_resp + + result_resp = get_result( + filters=[ + ds.find_field(title="Rank of Sales").filter(WhereClauseOperation.LTE, [5]), + ] + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 5 + + # Combine with filters before window functions + result_resp = get_result( + filters=[ + # This one goes after + ds.find_field(title="Rank of Sales").filter(WhereClauseOperation.LTE, [5]), + # This one goes before + ds.find_field(title="city").filter(WhereClauseOperation.STARTSWITH, ["S"]), + ] + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 5 + + # Without window functions in SELECT + result_resp = get_result( + filters=[ + ds.find_field(title="Rank of Sales").filter(WhereClauseOperation.LTE, [5]), + ], + window_in_select=False, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 5 + + def test_window_function_filter_with_before_filter_by(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Rank of Sales BFB": 'RANK_UNIQUE([Group Sales], "asc" TOTAL BEFORE FILTER BY [city])', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="city"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Rank of Sales BFB"), + ], + filters=[ + ds.find_field(title="city").filter(WhereClauseOperation.STARTSWITH, ["S"]), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + max_rank = 0 + possible_ranks = list(range(10000)) + for row in data_rows: + assert 
row[1].startswith("S") + row_rank = int(row[3]) + max_rank = max(max_rank, row_rank) + possible_ranks.remove(row_rank) + # rank should have gaps because of filters, so the max rank will be much greater than the number of rows + assert max_rank > len(data_rows) + possible_ranks = [r for r in possible_ranks if r <= max_rank] + # ranks that were skipped should have remained in the list + assert possible_ranks # <- gaps + + def test_window_function_filter_with_before_filter_by_in_ordered_window_function( + self, control_api, data_api, saved_dataset + ): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "RSUM of Sales BFB": "RSUM([Group Sales] BEFORE FILTER BY [order_date])", + "MAVG of RSUM of Sales": "MAVG([RSUM of Sales BFB], 50)", + "MAVG of MAVG of RSUM of Sales": "MAVG([MAVG of RSUM of Sales], 50)", + "RSUM of Sales non-BFB": "RSUM([Group Sales])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Group Sales"), + ds.find_field(title="RSUM of Sales BFB"), + ds.find_field(title="order_date"), + ds.find_field(title="MAVG of MAVG of RSUM of Sales"), + ds.find_field(title="RSUM of Sales non-BFB"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="order_date").filter(WhereClauseOperation.GTE, ["2015-05-01"]), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + first_row = data_rows[0] + # `* 2` - to eliminate float errors + assert float(first_row[1]) > float(first_row[0]) * 2 # [RSUM of Sales BFB] > [Group Sales] * 2 + # Make sure the non-BFB field was calculated correctly too + assert pytest.approx(float(first_row[4])) == float(first_row[0]) # [RSUM of Sales non-BFB] == [Group Sales] + + def test_order_dependent_window_function(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + 
api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "RSUM ASC": 'RSUM([Group Sales], "asc" TOTAL)', + "RSUM DESC": 'RSUM([Group Sales], "desc" TOTAL)', + }, + ) + + def _get_data(direction: OrderDirection) -> list[list]: + order_field = ds.find_field(title="city") + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Group Sales"), + ds.find_field(title="RSUM ASC"), + ds.find_field(title="RSUM DESC"), + ], + order_by=[ + order_field if direction == OrderDirection.asc else order_field.desc, + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + return data_rows + + # ORDER BY City ASC + data_1 = _get_data(OrderDirection.asc) + city_values_1 = [row[0] for row in data_1] + sum_values_1 = [float(row[1]) for row in data_1] + rsum_asc_values_1 = [float(row[2]) for row in data_1] + rsum_desc_values_1 = [float(row[3]) for row in data_1] + assert city_values_1[0] == min(city_values_1) + assert city_values_1[-1] == max(city_values_1) + assert all( + pytest.approx(rsum_asc_values_1[i]) == rsum_asc_values_1[i - 1] + sum_values_1[i] + for i in range(1, len(data_1)) + ) + assert all( + pytest.approx(rsum_desc_values_1[i]) == rsum_desc_values_1[i - 1] - sum_values_1[i - 1] + for i in range(1, len(data_1)) + ) + + # ORDER BY City DESC + # (all of the patterns are the same except for the actual values of RSUM and the order of cities) + data_2 = _get_data(OrderDirection.desc) + city_values_2 = [row[0] for row in data_2] + sum_values_2 = [float(row[1]) for row in data_2] + rsum_asc_values_2 = [float(row[2]) for row in data_2] + rsum_desc_values_2 = [float(row[3]) for row in data_2] + assert city_values_2[0] == max(city_values_2) + assert city_values_2[-1] == min(city_values_2) + assert all( + pytest.approx(rsum_asc_values_2[i]) == rsum_asc_values_2[i - 1] + sum_values_2[i] + for i in range(1, 
len(data_2)) + ) + assert all( + pytest.approx(rsum_desc_values_2[i]) == rsum_desc_values_2[i - 1] - sum_values_2[i - 1] + for i in range(1, len(data_2)) + ) + + assert pytest.approx(rsum_asc_values_2) == rsum_desc_values_1[::-1] + assert pytest.approx(rsum_desc_values_2) == rsum_asc_values_1[::-1] + + def test_order_by_window_function(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Sales Rank": 'RANK_UNIQUE([Group Sales], "asc")', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Sales Rank"), + ], + order_by=[ + ds.find_field(title="Sales Rank"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + values = [int(row[2]) for row in data_rows] + assert values == list(range(1, len(values) + 1)) + + def test_nested_window_functions(self, control_api, data_api, saved_dataset): + scary_formula_tmpl = 'RANK_UNIQUE(10 * {repl}, "asc")' + scary_formula = scary_formula_tmpl + for i in range(10): + scary_formula = scary_formula_tmpl.replace("{repl}", scary_formula) + scary_formula = scary_formula.replace("{repl}", "[Group Sales]") + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Rank of Sales": 'RANK_UNIQUE([Group Sales], "asc" TOTAL)', + "Rank of Rank of Sales": 'RANK_UNIQUE([Rank of Sales], "desc" TOTAL)', + "RSUM of Rank of Sales": "RSUM([Rank of Sales] TOTAL ORDER BY [Rank of Sales])", + "10x Rank of Sales": scary_formula, + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Rank of Sales"), + ds.find_field(title="Rank of Rank of Sales"), + 
ds.find_field(title="RSUM of Rank of Sales"), + ds.find_field(title="10x Rank of Sales"), + ], + order_by=[ + ds.find_field(title="Rank of Sales"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + total_cnt = len(data_rows) + rsum = 0 + for i, row in enumerate(data_rows): + num = i + 1 + rsum += num + assert int(row[2]) == num + assert int(row[3]) == total_cnt - i + assert int(row[4]) == rsum + assert int(row[5]) == num + + def test_window_function_bfb_unselected_dimension_error(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Rank of Sales BFB": 'RANK_UNIQUE(SUM([sales]), "asc" TOTAL BEFORE FILTER BY [city])', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Rank of Sales BFB"), + ], + filters=[ + ds.find_field(title="city").filter(WhereClauseOperation.STARTSWITH, ["S"]), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + assert result_resp.json["code"] == "ERR.DS_API.FORMULA.VALIDATION.WIN_FUNC.BFB_UNSELECTED_DIMENSION" + assert "neither an aggregation nor a dimension in the query" in result_resp.json["message"] + + def test_order_by_field_that_depends_on_window_function(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Rank": "RANK_UNIQUE([Group Sales])", + "Stringified Rank": 'CONCAT("Rank ", [Rank])', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Rank"), + ], + order_by=[ + ds.find_field(title="Stringified Rank"), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) 
+ assert data_rows + + def test_dimensions_in_window_function_identical_to_dims_in_query(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "City Clone": "[city]", + "order_date Clone": "[order_date]", + "Group Sales": "SUM([sales])", + "RSum Among Clone": "RSUM([Group Sales] AMONG [City Clone])", + "RSum Within Clone": "RSUM([Group Sales] WITHIN [order_date Clone])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="order_date"), + ds.find_field(title="RSum Among Clone"), + ds.find_field(title="RSum Within Clone"), + ], + order_by=[ + ds.find_field(title="city"), + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_order_by_multilevel_window_function(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Multi Rank": 'RANK(RANK(RANK(RANK_UNIQUE([Group Sales], "asc"), "asc"), "asc"), "asc")', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="Group Sales"), + ], + order_by=[ + ds.find_field(title="Multi Rank"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_extra_dimensions_in_within(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Value": "SUM(SUM([sales]) WITHIN [category], [city])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="category"), + ds.find_field(title="Value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + 
by_cat: dict[str, set[str]] = defaultdict(set) + for row in data_rows: + by_cat[row[0]].add(row[1]) + + assert len(by_cat) == 3 + # Values should be equivalent to `SUM(SUM([sales]) WITHIN [Category])`, + # so all values within a category should be the same + for cat_values in by_cat.values(): + assert len(cat_values) == 1 + + def test_measure_in_within(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "My Measure": "COUNT()", + "Win Value": "SUM(SUM([sales]) WITHIN [My Measure])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="My Measure"), + ds.find_field(title="Win Value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + + # Make sure that all `Win Value` values are the same for matching `My Measure` values + by_my_measure = {row[2]: row[1] for row in data_rows} + for row in data_rows: + assert row[1] == by_my_measure[row[2]] + + def test_dim_complex_dimension_in_within(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure": "SUM([sales])", + "Complex Dim": 'DATETRUNC([order_date], "week")', + "Win Value": "SUM([Measure] WITHIN [Complex Dim])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Win Value"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + date_values = {row[0] for row in data_rows} + measure_values = {row[1] for row in data_rows} + assert len(date_values) > len(measure_values) > 10 # the last is an arbitrary nonzero number + + def test_nested_among(self, 
control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Nested Among": "SUM(RANK(SUM([sales]) AMONG [category]) AMONG [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="region"), + ds.find_field(title="category"), + ds.find_field(title="Nested Among"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + + for row in data_rows: + # 3 Categories in each Region => RANK = 1,2,3; SUM(RANK) = 6 + assert int(row[2]) == 6 + + def test_null_dimensions(self, control_api, data_api, saved_connection_id, db): + raw_data = [ + {"id": 1, "city": "New York", "category": "Office Supplies", "sales": 1}, + {"id": 2, "city": "New York", "category": "Furniture", "sales": 10}, + {"id": 3, "city": "New Rochelle", "category": "Office Supplies", "sales": 100}, + {"id": 4, "city": "New Rochelle", "category": "Furniture", "sales": 1000}, + {"id": 5, "city": None, "category": "Office Supplies", "sales": 10000}, + {"id": 6, "city": None, "category": "Furniture", "sales": 100000}, + ] + columns = [ + C("id", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["id"]), + C("city", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["city"]), + C("category", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["category"]), + C("sales", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["sales"]), + ] + db_table = make_table(db, columns=columns, rows=len(raw_data)) + + ds = Dataset() + ds.sources["source_1"] = ds.source( + connection_id=saved_connection_id, **data_source_settings_from_table(db_table) + ) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=ds, + formulas={ + "rank_within_cat": "RANK(SUM([sales]) WITHIN [category])", + "rank_within_city": 
"RANK(SUM([sales]) WITHIN [city])", + "max_id": "MAX([id])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="category"), + ds.find_field(title="rank_within_cat"), + ds.find_field(title="rank_within_city"), + ], + order_by=[ + ds.find_field(title="max_id"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + + city = [row[0] for row in data_rows] + rank_within_cat = [int(row[2]) for row in data_rows] + rank_within_city = [int(row[3]) for row in data_rows] + + assert city == ["New York", "New York", "New Rochelle", "New Rochelle", None, None] + assert rank_within_cat == [3, 3, 2, 2, 1, 1] + assert rank_within_city == [2, 1, 2, 1, 2, 1] + + def test_compeng_part_has_no_joins(self, control_api, data_api, saved_dataset, monkeypatch): + """ + Check that when selecting: + 1. dimension, + 2. aggregation and + 3. window function + in one simple request, there are no JOINs in compeng + """ + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales Sum": "SUM([sales])", + "Rank of Sales Sum": 'RANK_UNIQUE(SUM([sales]), "asc" TOTAL)', + }, + ) + + def intercept_query(multi_query: CompiledMultiQueryBase) -> None: + compeng_part = multi_query.for_level_type(ExecutionLevel.compeng) + assert compeng_part.query_count() > 1 + assert count_joins(compeng_part) == 0 + + interceptor = MultiQueryInterceptor(mpatch=monkeypatch, callback=intercept_query) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="region"), + ds.find_field(title="Sales Sum"), + ds.find_field(title="Rank of Sales Sum"), + ], + fail_ok=True, + ) + + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + assert interceptor.intercepted + + def test_wf_and_non_wf_in_filter(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + 
api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Rank": "RANK(SUM([sales]) TOTAL)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="city").filter(WhereClauseOperation.GT, ["A"]), + ds.find_field(title="Rank").filter(WhereClauseOperation.LT, ["7"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 6 + + def test_round_2_in_compeng(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Measure Non-Win": "ROUND(SUM([sales]) / 1000.0, 2)", + "Measure Win": "ROUND(MAX(SUM([sales]) TOTAL) / 1000.0, 2)", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Measure Non-Win"), + ds.find_field(title="Measure Win"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1 + + non_win = float(data_rows[0][0]) + win = float(data_rows[0][1]) + + assert win == pytest.approx(non_win) + + def test_subquery_column_name_conflict(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Sales RSUM BFB": ("RSUM(SUM([sales]) TOTAL BEFORE FILTER BY [order_date])"), + "Sales RSUM BFB MAVG BFB": ( + "MAVG(RSUM(SUM([sales]) TOTAL BEFORE FILTER BY [order_date]), 100 BEFORE FILTER BY [order_date])" + ), + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="Sales RSUM BFB MAVG BFB"), + ds.find_field(title="Group Sales"), + ], + order_by=[ + ds.find_field(title="order_date"), + ], + filters=[ + ds.find_field(title="order_date").filter( + 
op=WhereClauseOperation.EQ, + values=["2014-05-01"], + ), + ds.find_field(title="Sales RSUM BFB").filter( + op=WhereClauseOperation.LT, + values=["2000000"], + ), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + def test_bfb_with_only_winfunc_measure(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Sales SUM": "SUM([sales])", + "Sales RSUM BFB": "RSUM(SUM([sales]) TOTAL ORDER BY [order_date] BEFORE FILTER BY [order_date])", + }, + ) + + def get_data(measures: tuple[str, ...]) -> list[list[str]]: + result_resp = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="order_date"), *[ds.find_field(title=measure) for measure in measures]], + filters=[ + ds.find_field(title="category").filter(WhereClauseOperation.EQ, ["Office Supplies"]), + ds.find_field(title="order_date").filter(WhereClauseOperation.GT, ["2014-04-01"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) > 1 + return data_rows + + data_rows_no_sum = get_data(measures=("Sales RSUM BFB",)) + data_rows_with_sum = get_data(measures=("Sales RSUM BFB", "Sales SUM")) + assert len(data_rows_no_sum) == len(data_rows_with_sum) + for row_no_sum, row_with_sum in zip(data_rows_no_sum, data_rows_with_sum): + assert row_no_sum[0] == row_with_sum[0] # The dimension + assert row_no_sum[1] == row_with_sum[1] # The measure diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_errors.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_errors.py new file mode 100644 index 000000000..72a4aca67 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_errors.py @@ -0,0 +1,24 @@ +from dl_api_client.dsmaker.primitives import ResultField +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import CalcMode 
+ + +class TestResultErrors(DefaultApiTestBase): + def test_empty_query(self, saved_dataset, data_api): + result_resp = data_api.get_result(dataset=saved_dataset, fields=[], fail_ok=True) + assert result_resp.status_code == 400 + assert result_resp.bi_status_code == "ERR.DS_API.EMPTY_QUERY" + + def test_get_nonexisting_field(self, saved_dataset, data_api): + result_resp = data_api.get_result(dataset=saved_dataset, fields=[ResultField(title="unknown")], fail_ok=True) + assert result_resp.status_code == 400 + assert result_resp.bi_status_code == "ERR.DS_API.FIELD.NOT_FOUND" + + def test_calcmode_formula_without_formula_field(self, saved_dataset, data_api): + ds = saved_dataset + title = "Not a formula" + ds.result_schema[title] = ds.field(title=title, calc_mode=CalcMode.formula) + + result_resp = data_api.get_result(dataset=ds, fields=[ds.find_field(title)], fail_ok=True) + assert result_resp.status_code == 400 + assert result_resp.bi_status_code == "ERR.DS_API.FORMULA.PARSE.UNEXPECTED_EOF.EMPTY_FORMULA" diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_trees.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_trees.py index adf90ad4c..1d4a40017 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_trees.py +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_trees.py @@ -22,8 +22,8 @@ from dl_api_lib_testing.helpers.data_source import data_source_settings_from_table from dl_api_lib_tests.db.base import DefaultApiTestBase from dl_constants.enums import ( - BIType, FieldRole, + UserDataType, ) from dl_core_testing.database import ( C, @@ -70,10 +70,10 @@ def make_tree_dataset(db: Db, connection_id: str, control_api: SyncHttpDatasetApiV1) -> Dataset: columns = [ - C("id", BIType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["id"]), - C("dept", BIType.array_str, vg=lambda rn, **kwargs: TREE_DATA[rn]["dept"]), - C("salary", BIType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["salary"]), - C("is_person", 
BIType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["is_person"]), + C("id", UserDataType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["id"]), + C("dept", UserDataType.array_str, vg=lambda rn, **kwargs: TREE_DATA[rn]["dept"]), + C("salary", UserDataType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["salary"]), + C("is_person", UserDataType.integer, vg=lambda rn, **kwargs: TREE_DATA[rn]["is_person"]), ] db_table = make_table(db, columns=columns, rows=len(TREE_DATA)) ds = Dataset() diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_updates.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_updates.py new file mode 100644 index 000000000..8cbfedffa --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/result/test_updates.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +import uuid + +import pytest + +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_testing.data_api_base import DataApiTestParams +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import FieldType + + +class TestUpdates(DefaultApiTestBase): + @pytest.fixture(scope="function") + def data_api_test_params(self, sample_table) -> DataApiTestParams: + # This default is defined for the sample table + return DataApiTestParams( + two_dims=("category", "city"), + summable_field="sales", + range_field="sales", + distinct_field="city", + date_field="order_date", + ) + + def test_result_with_updates(self, saved_dataset, data_api, data_api_test_params): + ds = saved_dataset + id_1, id_2, id_3 = (str(uuid.uuid4()) for _ in range(3)) + result_resp = data_api.get_result( + dataset=ds, + updates=[ + ds.field( + id=id_1, + title="First", + formula=f"SUM([{data_api_test_params.summable_field}]) / 100", + type=FieldType.MEASURE, + ).add(), + # use an invalid field type for the second one to make sure it fixes itself + ds.field( + id=id_2, + title="Second", + 
formula=f"COUNTD([{data_api_test_params.distinct_field}])", + type=FieldType.DIMENSION, + ).add(), + ds.field(id=id_3, title="Third", formula="[First] / [Second]", type=FieldType.MEASURE).add(), + ], + fields=[ + ds.field(id=id_1), + ds.field(id=id_2), + ds.field(id=id_3), + ], + ) + assert result_resp.status_code == 200, result_resp.response_errors + result_data = result_resp.data + titles = [field.title for field in result_data["fields"]] + assert titles == ["First", "Second", "Third"] + + def test_get_result_add_update_field_without_avatar(self, saved_dataset, data_api): + ds = saved_dataset + ds.result_schema["Some Field"] = ds.field(formula="NOW()") + + result_resp = data_api.get_result( + dataset=ds, + updates=[ + ds.result_schema["Some Field"].update(calc_mode="direct"), + ], + fields=[ + ds.find_field(title="Some Field"), + ], + ) + assert result_resp.status_code == 200, result_resp.response_errors + data_rows = get_data_rows(result_resp) + assert data_rows diff --git a/lib/dl_api_lib/dl_api_lib_tests/db/data_api/test_errors.py b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/test_errors.py new file mode 100644 index 000000000..bba2110b3 --- /dev/null +++ b/lib/dl_api_lib/dl_api_lib_tests/db/data_api/test_errors.py @@ -0,0 +1,48 @@ +import pytest + +from dl_api_lib_testing.data_api_base import DataApiTestParams +from dl_api_lib_tests.db.base import DefaultApiTestBase +from dl_constants.enums import WhereClauseOperation + + +class TestDataApiErrors(DefaultApiTestBase): + @pytest.fixture(scope="function") + def data_api_test_params(self, sample_table) -> DataApiTestParams: + # This default is defined for the sample table + return DataApiTestParams( + two_dims=("category", "city"), + summable_field="sales", + range_field="sales", + distinct_field="city", + date_field="order_date", + ) + + def test_distinct_measure_filter_error(self, saved_dataset, data_api, data_api_test_params): + ds = saved_dataset + ds.result_schema["Measure"] = 
ds.field(formula=f"SUM([{data_api_test_params.summable_field}])") + + distinct_resp = data_api.get_distinct( + dataset=ds, + field=ds.find_field(title=data_api_test_params.distinct_field), + filters=[ + ds.find_field(title="Measure").filter(WhereClauseOperation.GT, [100]), + ], + fail_ok=True, + ) + assert distinct_resp.status_code == 400 + assert distinct_resp.bi_status_code == "ERR.DS_API.FILTER.MEASURE_UNSUPPORTED" + + def test_range_measure_filter_error(self, saved_dataset, data_api, data_api_test_params): + ds = saved_dataset + ds.result_schema["Measure"] = ds.field(formula=f"SUM([{data_api_test_params.summable_field}])") + + range_resp = data_api.get_value_range( + dataset=ds, + field=ds.find_field(title=data_api_test_params.range_field), + filters=[ + ds.find_field(title="Measure").filter(WhereClauseOperation.GT, [100]), + ], + fail_ok=True, + ) + assert range_resp.status_code == 400 + assert range_resp.bi_status_code == "ERR.DS_API.FILTER.MEASURE_UNSUPPORTED" diff --git a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_corner_cases.py b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_corner_cases.py index 888a02e20..3f29da9f4 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_corner_cases.py +++ b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_corner_cases.py @@ -14,10 +14,10 @@ PivotMeasureRoleSpec, ) from dl_constants.enums import ( - BIType, FieldType, PivotItemType, PivotRole, + UserDataType, ) import dl_query_processing.exc from dl_query_processing.legend.field_legend import ( @@ -41,19 +41,19 @@ def test_pivot_empty_data(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, 
title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -115,19 +115,19 @@ def test_pivot_duplicate_dimension_values(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -201,25 +201,25 @@ def test_pivot_only_row_dims_multiple_measures(): legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -348,25 +348,25 @@ def test_pivot_only_column_dims_multiple_measures(): legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), 
field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -490,7 +490,7 @@ def test_pivot_only_row_dims_no_measures(): legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), ] ) @@ -556,7 +556,7 @@ def test_pivot_only_column_dims_no_measures(): legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), ] ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_paginator.py b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_paginator.py index 2fa166e6f..97a0f0e8f 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_paginator.py +++ b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_paginator.py @@ -15,12 +15,12 @@ PivotMeasureRoleSpec, ) from dl_constants.enums import ( - BIType, FieldType, OrderDirection, PivotHeaderRole, PivotItemType, PivotRole, + UserDataType, ) from dl_query_processing.legend.field_legend import ( FieldObjSpec, @@ -52,31 +52,31 @@ def test_paginate(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + 
data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_sorter.py b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_sorter.py index 5ee535c05..3b459bd6a 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_sorter.py +++ b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_sorter.py @@ -19,12 +19,12 @@ PivotMeasureRoleSpec, ) from dl_constants.enums import ( - BIType, FieldType, OrderDirection, PivotHeaderRole, PivotItemType, PivotRole, + UserDataType, ) from dl_query_processing.legend.field_legend import ( FieldObjSpec, @@ -58,25 +58,25 @@ def test_measure_sort_basic(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -214,19 +214,19 @@ def test_measure_sort_no_rows_dimension(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), 
field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -328,31 +328,31 @@ def test_measure_sort_with_multiple_measures(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -534,31 +534,31 @@ def test_dimension_sort_with_multiple_measures(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] 
) @@ -729,19 +729,19 @@ def test_dimension_sort_mixed_case_strings(): legend_item_id=liid_thing, obj=FieldObjSpec(id=fid_thing, title="Thing"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_string, obj=FieldObjSpec(id=fid_string, title="String"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_measure, obj=FieldObjSpec(id=fid_measure, title="Measure"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -832,19 +832,19 @@ def test_dimension_sort_stringified_numbers(): legend_item_id=liid_thing, obj=FieldObjSpec(id=fid_thing, title="Thing"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_number, obj=FieldObjSpec(id=fid_number, title="Number"), field_type=FieldType.DIMENSION, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_measure, obj=FieldObjSpec(id=fid_measure, title="Measure"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_stream_modifers.py b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_stream_modifers.py index 0a99d6a6f..7bc32eb39 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_stream_modifers.py +++ b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_stream_modifers.py @@ -16,11 +16,11 @@ PivotMeasureRoleSpec, ) from dl_constants.enums import ( - BIType, FieldRole, FieldType, PivotItemType, PivotRole, + UserDataType, ) from dl_query_processing.legend.field_legend import ( FieldObjSpec, @@ -48,28 +48,28 @@ def test_data_cell_converter(): role_spec=RowRoleSpec(role=FieldRole.row), obj=FieldObjSpec(id="1", title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + 
data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, role_spec=RowRoleSpec(role=FieldRole.row), obj=FieldObjSpec(id="1", title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, role_spec=RowRoleSpec(role=FieldRole.row), obj=FieldObjSpec(id="1", title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.float, + data_type=UserDataType.float, ), LegendItem( legend_item_id=liid_profit, role_spec=RowRoleSpec(role=FieldRole.row), obj=FieldObjSpec(id="1", title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.float, + data_type=UserDataType.float, ), ], ) @@ -537,7 +537,7 @@ def test_data_cell_converter_with_dicts(): role_spec=RowRoleSpec(role=FieldRole.row), obj=FieldObjSpec(id="1", title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), ], ) diff --git a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_transformer.py b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_transformer.py index 0d359a694..06e106606 100644 --- a/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_transformer.py +++ b/lib/dl_api_lib/dl_api_lib_tests/unit/pivot/test_transformer.py @@ -14,10 +14,10 @@ PivotMeasureRoleSpec, ) from dl_constants.enums import ( - BIType, FieldType, PivotItemType, PivotRole, + UserDataType, ) from dl_query_processing.legend.field_legend import ( FieldObjSpec, @@ -46,19 +46,19 @@ def test_pivot_single_measure(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - 
data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -133,13 +133,13 @@ def test_pivot_no_measures(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), ] ) @@ -208,31 +208,31 @@ def test_pivot_multiple_measures(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -397,43 +397,43 @@ def test_pivot_multiple_measures_and_annotations(): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_mnames, obj=MeasureNameObjSpec(), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), 
LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_profit, obj=FieldObjSpec(id=fid_profit, title="Profit"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_customers, obj=FieldObjSpec(id=fid_customers, title="Customers"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), LegendItem( legend_item_id=liid_orders, obj=FieldObjSpec(id=fid_orders, title="Orders"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) @@ -651,19 +651,19 @@ def cat_gen(num): legend_item_id=liid_ctgry, obj=FieldObjSpec(id=fid_ctgry, title="Category"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_city, obj=FieldObjSpec(id=fid_city, title="City"), field_type=FieldType.DIMENSION, - data_type=BIType.string, + data_type=UserDataType.string, ), LegendItem( legend_item_id=liid_sales, obj=FieldObjSpec(id=fid_sales, title="Sales"), field_type=FieldType.MEASURE, - data_type=BIType.integer, + data_type=UserDataType.integer, ), ] ) diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/base.py b/lib/dl_api_lib_testing/dl_api_lib_testing/base.py index 6b978940f..eb85f299a 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/base.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/base.py @@ -5,7 +5,6 @@ ClassVar, Generator, Optional, - Type, ) from flask.app import Flask @@ -14,8 +13,12 @@ from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase -from dl_api_commons.base_models import TenantCommon +from dl_api_commons.base_models import ( + RequestContextInfo, + TenantCommon, +) from 
dl_api_lib.app.control_api.app import ControlApiAppFactory +from dl_api_lib.app_common_settings import ConnOptionsMutatorsFactory from dl_api_lib.app_settings import ( ControlApiAppSettings, ControlApiAppTestingsSettings, @@ -31,8 +34,13 @@ from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration from dl_configs.connectors_settings import ConnectorSettingsBase from dl_configs.rqe import RQEConfig -from dl_constants.enums import ConnectionType +from dl_constants.enums import ( + ConnectionType, + QueryProcessingMode, +) from dl_core.components.ids import FieldIdGeneratorType +from dl_core.united_storage_client import USAuthContextMaster +from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_core_testing.configuration import CoreTestEnvironmentConfigurationBase from dl_core_testing.flask_utils import ( FlaskTestClient, @@ -45,8 +53,8 @@ class ApiTestBase(abc.ABC): Base class defining the basic fixtures of bi-api tests """ - control_api_app_factory_cls: ClassVar[Type[ControlApiAppFactory]] = TestingControlApiAppFactory bi_compeng_pg_on: ClassVar[bool] = True + query_processing_mode: ClassVar[QueryProcessingMode] = QueryProcessingMode.basic @pytest.fixture(scope="function", autouse=True) def preload(self): @@ -115,6 +123,7 @@ def create_control_api_settings( REDIS_ARQ=redis_setting_maker.get_redis_settings_arq(), FILE_UPLOADER_BASE_URL="http://127.0.0.1:9999", # fake url FILE_UPLOADER_MASTER_TOKEN="qwerty", + QUERY_PROCESSING_MODE=cls.query_processing_mode, ) return settings @@ -129,17 +138,20 @@ def control_api_app_settings( rqe_config_subprocess=rqe_config_subprocess, ) + @pytest.fixture(scope="function") + def control_api_app_factory(self, control_api_app_settings: ControlApiAppSettings) -> ControlApiAppFactory: + return TestingControlApiAppFactory(settings=control_api_app_settings) + @pytest.fixture(scope="function") def control_api_app( self, environment_readiness: None, - control_api_app_settings: ControlApiAppSettings, + 
control_api_app_factory: ControlApiAppFactory, connectors_settings: dict[ConnectionType, ConnectorSettingsBase], ) -> Generator[Flask, None, None]: """Session-wide test `Flask` application.""" - control_app_factory = self.control_api_app_factory_cls(settings=control_api_app_settings) - app = control_app_factory.create_app( + app = control_api_app_factory.create_app( connectors_settings=connectors_settings, testing_app_settings=ControlApiAppTestingsSettings(fake_tenant=TenantCommon()), close_loop_after_request=False, @@ -172,3 +184,26 @@ def control_api_sync_client(self, client: FlaskClient) -> SyncHttpClientBase: @pytest.fixture(scope="function") def control_api(self, control_api_sync_client: SyncHttpClientBase) -> SyncHttpDatasetApiV1: return SyncHttpDatasetApiV1(client=control_api_sync_client) + + @pytest.fixture(scope="function") + def sync_us_manager( + self, + core_test_config: CoreTestEnvironmentConfigurationBase, + control_api_app_factory: ControlApiAppFactory, + connectors_settings: dict[ConnectionType, ConnectorSettingsBase], + control_api_app_settings: ControlApiAppSettings, + ) -> SyncUSManager: + bi_context = RequestContextInfo.create_empty() + us_config = core_test_config.get_us_config() + us_manager = SyncUSManager( + bi_context=bi_context, + services_registry=control_api_app_factory.get_sr_factory( + conn_opts_factory=ConnOptionsMutatorsFactory(), + connectors_settings=connectors_settings, + settings=control_api_app_settings, + ).make_service_registry(request_context_info=bi_context), + us_base_url=us_config.us_host, + us_auth_context=USAuthContextMaster(us_config.us_master_token), + crypto_keys_config=core_test_config.get_crypto_keys_config(), + ) + return us_manager diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/configuration.py b/lib/dl_api_lib_testing/dl_api_lib_testing/configuration.py index 655aeb5aa..48a9ed3ec 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/configuration.py +++ 
b/lib/dl_api_lib_testing/dl_api_lib_testing/configuration.py @@ -22,6 +22,9 @@ class ApiTestEnvironmentConfiguration: bi_compeng_pg_url: str = attr.ib(default="") + file_uploader_api_host: str = attr.ib(default="http://127.0.0.1") + file_uploader_api_port: int = attr.ib(default=9999) + redis_host: str = attr.ib(default="") redis_port: int = attr.ib(default=6379) redis_password: str = attr.ib(default="") diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/complex_queries.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/complex_queries.py new file mode 100644 index 000000000..60a999ade --- /dev/null +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/complex_queries.py @@ -0,0 +1,522 @@ +from collections import defaultdict +from http import HTTPStatus +from typing import Iterable + +import pytest + +from dl_api_client.dsmaker.primitives import ( + Dataset, + ResultField, +) +from dl_api_client.dsmaker.shortcuts.dataset import ( + add_formulas_to_dataset, + create_basic_dataset, +) +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_testing.data_api_base import DataApiTestBase +from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_api_lib_testing.helpers.data_source import data_source_settings_from_table +from dl_api_lib_testing.helpers.lookup_checkers import check_ago_data +from dl_constants.enums import ( + QueryProcessingMode, + UserDataType, + WhereClauseOperation, +) +from dl_core_testing.database import ( + C, + make_table, +) +from dl_core_testing.testcases.service_base import DbServiceFixtureTextClass +from dl_testing.regulated_test import ( + Feature, + for_features, +) + + +class DefaultBasicExtAggregationTestSuite(DataApiTestBase, DatasetTestBase, DbServiceFixtureTextClass): + def test_lod_fixed_single_dim_in_two_dim_query(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": 
"SUM([sales])", + "sales sum fx city": "SUM([sales] FIXED [city])", + "sales sum fx category": "SUM([sales] FIXED [category])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="category"), + ds.find_field(title="sales sum"), + ds.find_field(title="sales sum fx city"), + ds.find_field(title="sales sum fx category"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + sum_by_city = defaultdict(lambda: 0) + for row in data_rows: + sum_by_city[row[0]] += float(row[2]) + sum_by_category = defaultdict(lambda: 0) + for row in data_rows: + sum_by_category[row[1]] += float(row[2]) + + for row in data_rows: + assert float(row[3]) == pytest.approx(sum_by_city[row[0]]) + assert float(row[4]) == pytest.approx(sum_by_category[row[1]]) + + def test_null_dimensions(self, control_api, data_api, db, saved_connection_id): + connection_id = saved_connection_id + + raw_data = [ + {"id": 1, "city": "New York", "category": "Office Supplies", "sales": 1}, + {"id": 2, "city": "New York", "category": "Furniture", "sales": 10}, + {"id": 3, "city": "New Rochelle", "category": "Office Supplies", "sales": 100}, + {"id": 4, "city": "New Rochelle", "category": "Furniture", "sales": 1000}, + {"id": 5, "city": None, "category": "Office Supplies", "sales": 10000}, + {"id": 6, "city": None, "category": "Furniture", "sales": 100000}, + ] + columns = [ + C("id", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["id"]), + C("city", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["city"]), + C("category", UserDataType.string, vg=lambda rn, **kwargs: raw_data[rn]["category"]), + C("sales", UserDataType.integer, vg=lambda rn, **kwargs: raw_data[rn]["sales"]), + ] + db_table = make_table(db, columns=columns, rows=len(raw_data)) + + ds = Dataset() + ds.sources["source_1"] = ds.source(connection_id=connection_id, 
**data_source_settings_from_table(db_table)) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=ds, + formulas={ + "sum_lod": "SUM(SUM([sales] INCLUDE [category]))", + "max_id": "MAX([id])", + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="sum_lod"), + ], + order_by=[ + ds.find_field(title="max_id"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert data_rows + + city = [row[0] for row in data_rows] + sum_lod = [int(row[1]) for row in data_rows] + + assert city == ["New York", "New Rochelle", None] + assert sum_lod == [11, 1100, 110000] + + def test_total_lod(self, control_api, data_api, saved_dataset): + data_api = data_api + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "sales sum": "SUM([sales])", + "sales sum total": "SUM([sales] FIXED)", + }, + ) + + def get_total_value() -> float: + total_sum_result = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="sales sum")], + ) + data_rows = get_data_rows(total_sum_result) + return float(data_rows[0][0]) + + expected_total_value = get_total_value() + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="city"), + ds.find_field(title="sales sum"), + ds.find_field(title="sales sum total"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + total_sum = sum(float(row[1]) for row in data_rows) + + for row_idx, row in enumerate(data_rows): + assert ( + float(row[2]) == pytest.approx(total_sum) == pytest.approx(expected_total_value) + ), f"total sum doesn't match expected number in row {row_idx}" + + def test_total_lod_2( + self, + control_api, + data_api, + saved_connection_id, + db, + ): + db_table = 
make_table(db=db) + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "div 2": "DIV([int_value], 2)", + "div 3": "DIV([int_value], 3)", + "Agg 1": "SUM([int_value])", + "Agg 2": "SUM(SUM([int_value] INCLUDE [div 2]))", + "Agg 3": "SUM(SUM(SUM([int_value] INCLUDE [div 2]) INCLUDE [div 3]))", + }, + ) + + def get_single_row_data(field_names: Iterable[str]) -> tuple[int, ...]: + result_resp = data_api.get_result( + dataset=ds, + fail_ok=True, + fields=[ds.find_field(title=field_name) for field_name in field_names], + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 1, "There must be exactly 1 row of data" + return tuple(int(item) for item in data_rows[0]) + + value_1 = get_single_row_data(["Agg 1"])[0] + value_2 = get_single_row_data(["Agg 2"])[0] + assert value_2 == value_1 + value_3 = get_single_row_data(["Agg 3"])[0] + assert value_3 == value_1 + + def check_equality_of_totals(*field_names: str) -> None: + simultaneous_values = get_single_row_data(field_names) + assert len(set(simultaneous_values)) == 1 + assert next(iter(simultaneous_values)) == value_1 + + check_equality_of_totals("Agg 1", "Agg 2") + check_equality_of_totals("Agg 2", "Agg 3") + check_equality_of_totals("Agg 1", "Agg 3") + check_equality_of_totals("Agg 1", "Agg 2", "Agg 3") + + def test_lod_in_order_by( + self, + control_api, + data_api, + saved_connection_id, + db, + ): + db_table = make_table(db=db) + + data_api = data_api + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "Dimension": "[int_value] % 3", + "LOD Measure": "SUM([int_value]) / SUM([int_value] FIXED)", + }, + ) + + def get_data(order_by: list[ResultField]) -> list: + result_resp = data_api.get_result( + 
dataset=ds, + fields=[ + ds.find_field(title="Dimension"), + ds.find_field(title="LOD Measure"), + ], + order_by=order_by, + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + return get_data_rows(result_resp) + + data_rows = get_data(order_by=[]) + ordered_data_rows = get_data( + order_by=[ + ds.find_field(title="LOD Measure"), + ds.find_field(title="Dimension"), + ] + ) + + data_rows.sort(key=lambda row: (float(row[1]), row[0])) # (LOD Measure, City) + + assert ordered_data_rows == data_rows + + +class DefaultBasicLookupFunctionTestSuite(DataApiTestBase, DatasetTestBase, DbServiceFixtureTextClass): + def test_ago_any_db(self, saved_connection_id, control_api, data_api, db): + db_table = make_table(db=db) + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "sum": "SUM([int_value])", + "ago": 'AGO([sum], [date_value], "day", 2)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago"), + ], + order_by=[ + ds.find_field(title="date_value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=2) + + def test_triple_ago_any_db(self, saved_connection_id, control_api, data_api, db): + db_table = make_table(db) + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "sum": "SUM([int_value])", + "ago_1": 'AGO([sum], [date_value], "day", 1)', + "ago_2": 'AGO([sum], [date_value], "day", 2)', + "ago_3": 'AGO([sum], [date_value], "day", 3)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + 
ds.find_field(title="date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago_1"), + ds.find_field(title="ago_2"), + ds.find_field(title="ago_3"), + ], + order_by=[ + ds.find_field(title="date_value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=3, day_offset=2) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=4, day_offset=3) + + def test_ago_any_db_multisource(self, saved_connection_id, control_api, data_api, db): + connection_id = saved_connection_id + table_1 = make_table(db) + table_2 = make_table(db) + ds = Dataset() + ds.sources["source_1"] = ds.source( + connection_id=connection_id, + **data_source_settings_from_table(table=table_1), + ) + ds.sources["source_2"] = ds.source( + connection_id=connection_id, + **data_source_settings_from_table(table=table_2), + ) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + ds.source_avatars["avatar_2"] = ds.sources["source_2"].avatar() + ds.avatar_relations["rel_1"] = ( + ds.source_avatars["avatar_1"] + .join(ds.source_avatars["avatar_2"]) + .on(ds.col("string_value") == ds.col("string_value")) + ) + + ds.result_schema["date_1"] = ds.source_avatars["avatar_1"].field(source="date_value") + ds.result_schema["int_2"] = ds.source_avatars["avatar_2"].field(source="int_value") + ds.result_schema["sum"] = ds.field(formula="SUM([int_2])") + ds.result_schema["ago"] = ds.field(formula='AGO([sum], [date_1], "day", 2)') + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) + assert ds_resp.status_code == HTTPStatus.OK, ds_resp.response_errors + ds = ds_resp.dataset + ds = control_api.save_dataset(ds).dataset + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="date_1"), + 
ds.find_field(title="sum"), + ds.find_field(title="ago"), + ], + order_by=[ + ds.find_field(title="date_1"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=2) + + def test_nested_ago(self, saved_connection_id, control_api, data_api, db): + db_table = make_table(db) + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=db_table), + formulas={ + "sum": "SUM([int_value])", + "ago_1": 'AGO([sum], [date_value], "day", 1)', + "ago_2": 'AGO([ago_1], [date_value], "day", 1)', + "ago_3": 'AGO([ago_2], [date_value], "day", 1)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago_1"), + ds.find_field(title="ago_2"), + ds.find_field(title="ago_3"), + ], + order_by=[ + ds.find_field(title="date_value"), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=2, day_offset=1) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=3, day_offset=2) + check_ago_data(data_rows=data_rows, date_idx=0, value_idx=1, ago_idx=4, day_offset=3) + + def test_month_ago_for_shorter_month(self, db, saved_connection_id, control_api, data_api): + any_db_table_200 = make_table(db, rows=200) + + # FIXME + # if any_db.conn_type == CONNECTION_TYPE_ORACLE: + # # Oracle cannot add a month to 2021-01-31 (2021-02-31 doesn't exist) + # pytest.skip() + + ds = create_basic_dataset( + api_v1=control_api, + connection_id=saved_connection_id, + data_source_settings=data_source_settings_from_table(table=any_db_table_200), + formulas={ + "new_date_value": "#2021-01-01# + 
[int_value]", + "sum": "SUM([int_value])", + "ago": 'AGO([sum], [new_date_value], "month", 1)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="new_date_value"), + ds.find_field(title="sum"), + ds.find_field(title="ago"), + ], + filters=[ + ds.find_field(title="new_date_value").filter(op=WhereClauseOperation.EQ, values=["2021-02-28"]), + ], + fail_ok=True, + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + data_rows = get_data_rows(result_resp) + + # Check whether rows are duplicated + assert len(data_rows) == 1 + + +class DefaultBasicWindowFunctionTestSuite(DataApiTestBase, DatasetTestBase, DbServiceFixtureTextClass): + feature_window_functions = Feature("window_functions") + + @for_features(feature_window_functions) + def test_window_functions(self, control_api, data_api, saved_dataset): + ds = add_formulas_to_dataset( + api_v1=control_api, + dataset=saved_dataset, + formulas={ + "Group Sales": "SUM([sales])", + "Rank of Sales": 'RANK([Group Sales], "asc" TOTAL)', + "Unique Rank of Sales": 'RANK_UNIQUE([Group Sales], "asc" TOTAL)', + "Rank of City Sales for Date": 'RANK([Group Sales], "asc" AMONG [city])', + "Total Sales": "SUM([Group Sales] TOTAL)", + "Date Sales": "SUM([Group Sales] WITHIN [order_date])", + "City Sales": "SUM([Group Sales] AMONG [order_date])", + "Total RSUM": 'RSUM([Group Sales], "asc" TOTAL)', + }, + ) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="order_date"), + ds.find_field(title="city"), + ds.find_field(title="Group Sales"), + ds.find_field(title="Rank of Sales"), + ds.find_field(title="Unique Rank of Sales"), + ds.find_field(title="Rank of City Sales for Date"), + ds.find_field(title="Total Sales"), + ds.find_field(title="Date Sales"), + ds.find_field(title="City Sales"), + ds.find_field(title="Total RSUM"), + ], + order_by=[ + ds.find_field(title="order_date"), + ds.find_field(title="city"), + ], + fail_ok=True, + ) + assert 
result_resp.status_code == HTTPStatus.OK, result_resp.json + + data_rows = get_data_rows(result_resp) + cnt = len(data_rows) + + # TODO: More thorough tests + + # [Rank of Sales] values are a subset of the full range of row numbers + assert {row[3] for row in data_rows}.issubset({str(i) for i in range(1, cnt + 1)}) + + # There are as many [Unique Rank of Sales] values as there are rows + assert {row[4] for row in data_rows} == ({str(i) for i in range(1, cnt + 1)}) + + # [Rank of City Sales for Date] values are not greater than the number of [City] values + assert len({row[5] for row in data_rows}) <= len({row[1] for row in data_rows}) + + # all rows have the same [Total Sales] value + assert len({row[6] for row in data_rows}) == 1 + + # as many values of [Date Sales] as there are [order_date] values (or less - because there may be duplicates) + assert len({row[7] for row in data_rows}) <= len({row[0] for row in data_rows}) + + # as many values of [City Sales] as there are [City] values (or less - because there may be duplicates) + assert len({row[8] for row in data_rows}) <= len({row[1] for row in data_rows}) + + for i in range(1, len(data_rows)): + # RSUM = previous RSUM value + value of current arg + assert pytest.approx(float(data_rows[i][9])) == float(data_rows[i - 1][9]) + float(data_rows[i][2]) + + +class DefaultBasicComplexQueryTestSuite( + DefaultBasicExtAggregationTestSuite, + DefaultBasicLookupFunctionTestSuite, + DefaultBasicWindowFunctionTestSuite, +): + """Put them all together""" + + query_processing_mode = QueryProcessingMode.native_wf diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py index 3be406225..45243a8bd 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/connection_suite.py @@ -20,3 +20,26 @@ def test_test_connection(self, 
control_api_sync_client: SyncHttpClientBase, save data=json.dumps({}), ) assert resp.status_code == 200, resp.json + + def test_cache_ttl_sec_override( + self, control_api_sync_client: SyncHttpClientBase, saved_connection_id: str + ) -> None: + resp = control_api_sync_client.get( + url=f"/api/v1/connections/{saved_connection_id}", + ) + assert resp.status_code == 200, resp.json + assert resp.json["cache_ttl_sec"] is None, resp.json + + cache_ttl_override = 100500 + resp = control_api_sync_client.put( + url=f"/api/v1/connections/{saved_connection_id}", + content_type="application/json", + data=json.dumps({"cache_ttl_sec": cache_ttl_override}), + ) + assert resp.status_code == 200, resp.json + + resp = control_api_sync_client.get( + url=f"/api/v1/connections/{saved_connection_id}", + ) + assert resp.status_code == 200, resp.json + assert resp.json["cache_ttl_sec"] == cache_ttl_override, resp.json diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dashsql_suite.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dashsql_suite.py index b2ca09e4c..67a722db9 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dashsql_suite.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/dashsql_suite.py @@ -1,4 +1,5 @@ import abc +from typing import Optional from aiohttp.test_utils import TestClient import pytest @@ -8,12 +9,27 @@ class DefaultDashSQLTestSuite(DashSQLTestBase, RegulatedTestCase, metaclass=abc.ABCMeta): + @pytest.fixture(scope="class") + def dashsql_headers(self) -> Optional[dict[str, str]]: + return None + + @pytest.fixture(scope="class") + def dashsql_basic_query(self) -> str: + return "select 1, 2, 3" + @pytest.mark.asyncio - async def test_basic_select(self, data_api_lowlevel_aiohttp_client: TestClient, saved_connection_id: str) -> None: + async def test_basic_select( + self, + data_api_lowlevel_aiohttp_client: TestClient, + saved_connection_id: str, + dashsql_basic_query: str, + dashsql_headers: Optional[dict[str, str]], + 
) -> None: resp = await self.get_dashsql_response( data_api_aio=data_api_lowlevel_aiohttp_client, conn_id=saved_connection_id, - query="select 1, 2, 3", + query=dashsql_basic_query, + headers=dashsql_headers, ) data = await resp.json() assert data[1]["data"] == [1, 2, 3] diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/data_api_suites.py b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/data_api_suites.py index 8844ccac2..edd900b9b 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/connector/data_api_suites.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/connector/data_api_suites.py @@ -6,14 +6,17 @@ from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 -from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.primitives import ( + Dataset, + WhereClause, +) from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows from dl_api_lib_testing.data_api_base import ( DataApiTestParams, StandardizedDataApiTestBase, ) from dl_constants.enums import ( - BIType, + UserDataType, WhereClauseOperation, ) from dl_core_testing.database import ( @@ -65,16 +68,16 @@ def _test_contains( filter_op: WhereClauseOperation, ) -> None: columns = [ - C("int_value", BIType.integer, vg=lambda rn, **kwargs: rn), - C("array_int_value", BIType.array_int, vg=lambda rn, **kwargs: [i for i in reversed(range(rn))]), + C("int_value", UserDataType.integer, vg=lambda rn, **kwargs: rn), + C("array_int_value", UserDataType.array_int, vg=lambda rn, **kwargs: [i for i in reversed(range(rn))]), C( "array_str_value", - BIType.array_str, + UserDataType.array_str, vg=lambda rn, **kwargs: [str(i) if i != 5 else None for i in reversed(range(rn))], ), C( "array_float_value", - BIType.array_float, + UserDataType.array_float, vg=lambda rn, **kwargs: [i / 100.0 for i in reversed(range(rn))], ), ] @@ -163,15 +166,15 @@ def test_array_contains_field( is_numeric: bool, ) -> 
None: columns = [ - C("int_value", BIType.integer, vg=lambda rn, **kwargs: 3), - C("str_value", BIType.string, vg=lambda rn, **kwargs: "3"), - C("float_value", BIType.float, vg=lambda rn, **kwargs: 0.03), - C("none_value", BIType.float, vg=lambda rn, **kwargs: None), - C("array_int_value", BIType.array_int, vg=lambda rn, **kwargs: [i for i in reversed(range(rn))]), - C("array_str_value", BIType.array_str, vg=lambda rn, **kwargs: [str(i) for i in reversed(range(rn))]), + C("int_value", UserDataType.integer, vg=lambda rn, **kwargs: 3), + C("str_value", UserDataType.string, vg=lambda rn, **kwargs: "3"), + C("float_value", UserDataType.float, vg=lambda rn, **kwargs: 0.03), + C("none_value", UserDataType.float, vg=lambda rn, **kwargs: None), + C("array_int_value", UserDataType.array_int, vg=lambda rn, **kwargs: [i for i in reversed(range(rn))]), + C("array_str_value", UserDataType.array_str, vg=lambda rn, **kwargs: [str(i) for i in reversed(range(rn))]), C( "array_float_value", - BIType.array_float, + UserDataType.array_float, vg=lambda rn, **kwargs: [i / 100.0 if i != 5 else None for i in reversed(range(rn))], ), ] @@ -214,7 +217,7 @@ def test_dates( ds.result_schema[new_field_name] = ds.field( formula=f"IF [{data_api_test_params.date_field}] > DATE('2020-01-01') THEN 1 ELSE 2 END" ) - ds.result_schema[new_field_name].cast = BIType.float + ds.result_schema[new_field_name].cast = UserDataType.float result_resp = self.get_result(ds, data_api, field_names=(data_api_test_params.date_field, new_field_name)) assert result_resp.status_code == 200, result_resp.json @@ -232,6 +235,20 @@ def test_dates( assert result_resp.status_code == 200, result_resp.json assert get_data_rows(result_resp) + result_resp = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title=data_api_test_params.date_field)], + filters=[ + ds.find_field(title=data_api_test_params.date_field).filter( + op=WhereClauseOperation.BETWEEN, + values=["1990-01-01", "2023-10-02"], + ) + ], + fail_ok=True, + 
) + assert result_resp.status_code == 200, result_resp.json + assert get_data_rows(result_resp) + def test_get_result_with_formula_in_where( self, saved_dataset: Dataset, data_api_test_params: DataApiTestParams, data_api: SyncHttpDataApiV2 ) -> None: @@ -272,10 +289,28 @@ def test_get_result_with_string_filter_operations_for_numbers( data_rows = get_data_rows(result_resp) values: set[str] = {row[0] for row in data_rows} assert len(values) > 1 # we just need to make sure there are several different values - assert all("2" in value for value in values) + assert all("2" in value for value in values), values class DefaultConnectorDataGroupByFormulaTestSuite(StandardizedDataApiTestBase, RegulatedTestCase): + def test_ordered_result( + self, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + data_api: SyncHttpDataApiV2, + ) -> None: + ds = saved_dataset + grouped_resp = self.get_result_ordered( + ds, + data_api, + field_names=(data_api_test_params.two_dims[0], data_api_test_params.distinct_field), + order_by=(data_api_test_params.distinct_field,), + ) + grouped_rows = get_data_rows(grouped_resp) + + min_row_cnt = 5 # just an arbitrary number + assert len(grouped_rows) > min_row_cnt + def test_complex_result( self, saved_dataset: Dataset, @@ -283,14 +318,14 @@ def test_complex_result( data_api: SyncHttpDataApiV2, ) -> None: ds = saved_dataset - ds.result_schema["CityNameLength"] = ds.field(formula=f"LEN([{data_api_test_params.distinct_field}])") + ds.result_schema["LengthField"] = ds.field(formula=f"LEN([{data_api_test_params.distinct_field}])") grouped_resp = self.get_result_ordered( - ds, data_api, field_names=(data_api_test_params.two_dims[0], "CityNameLength"), order_by=("CityNameLength",) + ds, data_api, field_names=(data_api_test_params.two_dims[0], "LengthField"), order_by=("LengthField",) ) grouped_rows = get_data_rows(grouped_resp) - min_row_cnt = 10 # just an arbitrary number + min_row_cnt = 5 # just an arbitrary number assert 
len(grouped_rows) > min_row_cnt @@ -320,7 +355,29 @@ def test_basic_distinct( distinct_resp = self.get_distinct(ds, data_api, field_name=data_api_test_params.distinct_field) distinct_rows = get_data_rows(distinct_resp) - min_distinct_row_cnt = 10 # just an arbitrary number + min_distinct_row_cnt = 5 # just an arbitrary number + assert len(distinct_rows) > min_distinct_row_cnt + values = [row[0] for row in distinct_rows] + assert len(set(values)) == len(values), "Values are not unique" + + def test_distinct_with_nonexistent_filter( + self, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + data_api: SyncHttpDataApiV2, + ) -> None: + ds = saved_dataset + + distinct_resp = data_api.get_distinct( + dataset=ds, + field=ds.find_field(title=data_api_test_params.distinct_field), + filters=[WhereClause(column="idontexist", operation=WhereClauseOperation.EQ, values=[0])], + ignore_nonexistent_filters=True, + ) + assert distinct_resp.status_code == 200, distinct_resp.json + + distinct_rows = get_data_rows(distinct_resp) + min_distinct_row_cnt = 5 # just an arbitrary number assert len(distinct_rows) > min_distinct_row_cnt values = [row[0] for row in distinct_rows] assert len(set(values)) == len(values), "Values are not unique" @@ -334,7 +391,7 @@ def test_date_filter_distinct( data_api: SyncHttpDataApiV2, ) -> None: columns = [ - C(name="date_val", user_type=BIType.date, nullable=True), + C(name="date_val", user_type=UserDataType.date, nullable=True), ] data = [ {"date_val": datetime.date(2002, 1, 2)}, @@ -374,7 +431,7 @@ def test_date_filter_distinct( class DefaultConnectorDataPreviewTestSuite(StandardizedDataApiTestBase, RegulatedTestCase): - def test_basic_distinct( + def test_basic_preview( self, saved_dataset: Dataset, data_api: SyncHttpDataApiV2, diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/data_api_base.py b/lib/dl_api_lib_testing/dl_api_lib_testing/data_api_base.py index 4b4b7aa1f..77c215feb 100644 --- 
a/lib/dl_api_lib_testing/dl_api_lib_testing/data_api_base.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/data_api_base.py @@ -6,7 +6,6 @@ Generator, Iterable, NamedTuple, - Type, ) from aiohttp import web @@ -49,7 +48,6 @@ class DataApiTestParams(NamedTuple): class DataApiTestBase(ApiTestBase, metaclass=abc.ABCMeta): - data_api_app_factory_cls: ClassVar[Type[DataApiAppFactory]] = TestingDataApiAppFactory mutation_caches_on: ClassVar[bool] = True data_caches_on: ClassVar[bool] = True @@ -87,7 +85,7 @@ def create_data_api_settings( BI_COMPENG_PG_ON=cls.bi_compeng_pg_on, BI_COMPENG_PG_URL=bi_test_config.bi_compeng_pg_url, FIELD_ID_GENERATOR_TYPE=FieldIdGeneratorType.suffix, - FILE_UPLOADER_BASE_URL="http://127.0.0.1:9999", # fake url + FILE_UPLOADER_BASE_URL=f"{bi_test_config.file_uploader_api_host}:{bi_test_config.file_uploader_api_port}", FILE_UPLOADER_MASTER_TOKEN="qwerty", ) # type: ignore @@ -102,13 +100,16 @@ def data_api_app_settings( rqe_config_subprocess=rqe_config_subprocess, ) + @pytest.fixture(scope="function") + def data_api_app_factory(self, data_api_app_settings: DataApiAppSettings) -> DataApiAppFactory: + return TestingDataApiAppFactory(settings=data_api_app_settings) + @pytest.fixture(scope="function") def data_api_app( self, - data_api_app_settings: DataApiAppSettings, + data_api_app_factory: DataApiAppFactory, connectors_settings: dict[ConnectionType, ConnectorSettingsBase], ) -> web.Application: - data_api_app_factory = self.data_api_app_factory_cls(settings=data_api_app_settings) return data_api_app_factory.create_app( connectors_settings=connectors_settings, ) diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/dataset_base.py b/lib/dl_api_lib_testing/dl_api_lib_testing/dataset_base.py index f08932088..979334517 100644 --- a/lib/dl_api_lib_testing/dl_api_lib_testing/dataset_base.py +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/dataset_base.py @@ -2,13 +2,10 @@ from typing import Generator import pytest -import shortuuid from 
dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 from dl_api_client.dsmaker.primitives import Dataset from dl_api_lib_testing.connection_base import ConnectionTestBase -from dl_core.base_models import PathEntryLocation -from dl_core.us_manager.us_manager_sync import SyncUSManager class DatasetTestBase(ConnectionTestBase, metaclass=abc.ABCMeta): @@ -48,31 +45,3 @@ def saved_dataset( ) yield ds control_api.delete_dataset(dataset_id=ds.id, fail_ok=False) - - def test_invalid_dataset_id( - self, - control_api: SyncHttpDatasetApiV1, - saved_connection_id: str, - dataset_params: dict, - conn_default_sync_us_manager: SyncUSManager, - ) -> None: - usm = conn_default_sync_us_manager - us_client = usm._us_client - path = PathEntryLocation(shortuuid.uuid()) - dash = us_client.create_entry(scope="dash", key=path) - dash_id = dash["entryId"] - ds = self.make_basic_dataset( - control_api=control_api, - connection_id=saved_connection_id, - dataset_params=dataset_params, - ) - dataset_id = ds.id - - resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(dataset_id)) - assert resp.status_code == 200 - - resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(saved_connection_id)) - assert resp.status_code == 404 - - resp = control_api.client.get("/api/v1/datasets/{}/versions/draft".format(dash_id)) - assert resp.status_code == 404 diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/lookup_checkers.py b/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/lookup_checkers.py new file mode 100644 index 000000000..378f4f543 --- /dev/null +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/lookup_checkers.py @@ -0,0 +1,65 @@ +import datetime +from typing import ( + Any, + Callable, +) + + +def read_date(s: str) -> datetime.date: + try: + return datetime.date.fromisoformat(s) + except ValueError: + return datetime.datetime.fromisoformat(s).date() + + +def check_at_date_data( + data_rows: list[list[Any]], + date_idx: int, + 
value_idx: int, + ago_idx: int, + ago_date_callable: Callable[[datetime.date], datetime.date], + allow_missing_date_values: bool = False, +) -> None: + assert len(data_rows) > 0 + value_by_date = {read_date(row[date_idx]): row[value_idx] for row in data_rows} + rows_checked = 0 + + for row_idx, row in enumerate(data_rows): + cur_date = read_date(row[date_idx]) + ago_date = ago_date_callable(cur_date) + expected_ago_value = value_by_date.get(ago_date) + actual_ago_value = row[ago_idx] + + if expected_ago_value is None: + if allow_missing_date_values: + pass # Do not check in this case + else: + assert actual_ago_value is None + else: + assert actual_ago_value == expected_ago_value + + rows_checked += 1 + + # Make sure that rows were checked + assert rows_checked > 5 + + +def check_ago_data( + data_rows: list[list[Any]], + date_idx: int, + value_idx: int, + ago_idx: int, + day_offset: int, + allow_missing_date_values: bool = False, +) -> None: + def ago_date_callable(cur_date: datetime.date) -> datetime.date: # noqa + return cur_date - datetime.timedelta(days=day_offset) + + check_at_date_data( + data_rows=data_rows, + date_idx=date_idx, + value_idx=value_idx, + ago_idx=ago_idx, + ago_date_callable=ago_date_callable, + allow_missing_date_values=allow_missing_date_values, + ) diff --git a/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/multi_query.py b/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/multi_query.py new file mode 100644 index 000000000..bd55c0eb5 --- /dev/null +++ b/lib/dl_api_lib_testing/dl_api_lib_testing/helpers/multi_query.py @@ -0,0 +1,35 @@ +from typing import Callable + +import attr +from pytest import MonkeyPatch + +from dl_query_processing.compilation.primitives import CompiledMultiQueryBase +from dl_query_processing.translation.multi_level_translator import MultiLevelQueryTranslator + + +@attr.s +class MultiQueryInterceptor: + _mpatch: MonkeyPatch = attr.ib(kw_only=True) + _callback: Callable[[CompiledMultiQueryBase], None] = 
attr.ib(kw_only=True) + _intercepted: bool = attr.ib(init=False, default=False) + + @property + def intercepted(self) -> bool: + return self._intercepted + + def __attrs_post_init__(self) -> None: + self._prepare() + + def _prepare(self) -> None: + self._mpatch.setattr(MultiLevelQueryTranslator, "_log_query_complexity_stats", self._log_query_complexity_stats) + + def _log_query_complexity_stats(self, compiled_multi_query: CompiledMultiQueryBase) -> None: + self._callback(compiled_multi_query) + self._intercepted = True + + +def count_joins(multi_query: CompiledMultiQueryBase) -> int: + result = 0 + for query in multi_query.iter_queries(): + result += len(query.join_on) + return result diff --git a/lib/dl_compeng_pg/dl_compeng_pg/compeng_aiopg/exec_adapter_aiopg.py b/lib/dl_compeng_pg/dl_compeng_pg/compeng_aiopg/exec_adapter_aiopg.py index de4b25524..cb283c37b 100644 --- a/lib/dl_compeng_pg/dl_compeng_pg/compeng_aiopg/exec_adapter_aiopg.py +++ b/lib/dl_compeng_pg/dl_compeng_pg/compeng_aiopg/exec_adapter_aiopg.py @@ -16,7 +16,7 @@ from sqlalchemy.sql.selectable import Select from dl_compeng_pg.compeng_pg_base.exec_adapter_base import PostgreSQLExecAdapterAsync -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.data_processing.prepared_components.primitives import PreparedMultiFromInfo from dl_core.data_processing.streaming import ( AsyncChunked, @@ -50,7 +50,7 @@ async def _execute_and_fetch( self, *, query: Union[Select, str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], chunk_size: int, joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, query_id: str, @@ -80,7 +80,7 @@ async def insert_data_into_table( *, table_name: str, names: Sequence[str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], data: AsyncChunkedBase, ) -> None: """Insert data into a table.""" diff --git a/lib/dl_compeng_pg/dl_compeng_pg/compeng_asyncpg/exec_adapter_asyncpg.py 
b/lib/dl_compeng_pg/dl_compeng_pg/compeng_asyncpg/exec_adapter_asyncpg.py index c20c3c147..572b68eca 100644 --- a/lib/dl_compeng_pg/dl_compeng_pg/compeng_asyncpg/exec_adapter_asyncpg.py +++ b/lib/dl_compeng_pg/dl_compeng_pg/compeng_asyncpg/exec_adapter_asyncpg.py @@ -18,9 +18,7 @@ from sqlalchemy.dialects.postgresql import pypostgresql from dl_compeng_pg.compeng_pg_base.exec_adapter_base import PostgreSQLExecAdapterAsync -from dl_connector_postgresql.core.postgresql_base.error_transformer import make_async_pg_error_transformer -from dl_connector_postgresql.core.postgresql_base.utils import compile_pg_query -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.connectors.base.error_transformer import DbErrorTransformer from dl_core.data_processing.prepared_components.primitives import PreparedMultiFromInfo from dl_core.data_processing.streaming import ( @@ -29,6 +27,9 @@ ) from dl_sqlalchemy_postgres.asyncpg import DBAPIMock +from dl_connector_postgresql.core.postgresql_base.error_transformer import make_async_pg_error_transformer +from dl_connector_postgresql.core.postgresql_base.utils import compile_pg_query + if TYPE_CHECKING: from dl_constants.types import TBIDataValue @@ -86,7 +87,7 @@ async def _execute_and_fetch( # type: ignore # TODO: fix self, *, query: Union[str, sa.sql.selectable.Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], chunk_size: int, joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, query_id: str, @@ -119,7 +120,7 @@ async def insert_data_into_table( *, table_name: str, names: Sequence[str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], data: AsyncChunkedBase, ) -> None: """Insert data into a table.""" diff --git a/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/exec_adapter_base.py b/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/exec_adapter_base.py index 37df1236f..3bb50450e 100644 --- 
a/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/exec_adapter_base.py +++ b/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/exec_adapter_base.py @@ -16,12 +16,13 @@ from sqlalchemy.engine.default import DefaultDialect from sqlalchemy.sql.base import Executable -from dl_connector_postgresql.core.postgresql_base.type_transformer import PostgreSQLTypeTransformer -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.data_processing.processing.db_base.exec_adapter_base import ProcessorDbExecAdapterBase from dl_core.data_processing.streaming import AsyncChunkedBase from dl_core.db.sa_types import make_sa_type +from dl_connector_postgresql.core.postgresql_base.type_transformer import PostgreSQLTypeTransformer + LOGGER = logging.getLogger(__name__) @@ -54,7 +55,7 @@ async def _execute_ddl(self, query: Union[str, Executable]) -> None: """Execute a DDL statement""" await self._execute(query) - def _make_sa_table(self, table_name: str, names: Sequence[str], user_types: Sequence[BIType]) -> sa.Table: + def _make_sa_table(self, table_name: str, names: Sequence[str], user_types: Sequence[UserDataType]) -> sa.Table: assert len(names) == len(user_types) columns = [ sa.Column(name=name, type_=make_sa_type(native_type=self._tt.type_user_to_native(user_t=user_t))) @@ -67,7 +68,7 @@ async def create_table( *, table_name: str, names: Sequence[str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], ) -> sa.sql.selectable.TableClause: """Create table in database""" @@ -91,7 +92,7 @@ async def insert_data_into_table( *, table_name: str, names: Sequence[str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], data: AsyncChunkedBase, ) -> None: """,,,""" diff --git a/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/processor_base.py b/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/processor_base.py index 3635c91eb..25ef089fa 100644 --- a/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/processor_base.py +++ 
b/lib/dl_compeng_pg/dl_compeng_pg/compeng_pg_base/processor_base.py @@ -15,7 +15,7 @@ from dl_compeng_pg.compeng_pg_base.exec_adapter_base import PostgreSQLExecAdapterAsync from dl_compeng_pg.compeng_pg_base.op_executors import UploadOpExecutorAsync from dl_compeng_pg.compeng_pg_base.pool_base import BasePgPoolWrapper -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.data_processing.processing.context import OpExecutionContext from dl_core.data_processing.processing.db_base.op_executors import ( CalcOpExecutorAsync, @@ -58,7 +58,7 @@ async def end(self) -> None: async def ping(self) -> Optional[int]: assert self._pgex_adapter is not None - result = await self._pgex_adapter.scalar("select 1", user_type=BIType.integer) + result = await self._pgex_adapter.scalar("select 1", user_type=UserDataType.integer) assert result is None or isinstance(result, int) return result diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/api/api_schema/connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery/api/api_schema/connection.py index 5938cf31e..1b6513524 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/api/api_schema/connection.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/api/api_schema/connection.py @@ -7,6 +7,7 @@ ) from dl_api_connector.api_schema.connection_mixins import RawSQLLevelMixin from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_form/form_config.py b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_form/form_config.py index ede604df4..87a93d8d7 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_form/form_config.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_form/form_config.py @@ -21,9 +21,10 @@ import dl_api_connector.form_config.models.rows as C from 
dl_api_connector.form_config.models.shortcuts.rows import RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase +from dl_constants.enums import RawSQLLevel + from dl_connector_bigquery.api.connection_info import BigQueryConnectionInfoProvider from dl_connector_bigquery.api.i18n.localizer import Translatable -from dl_constants.enums import RawSQLLevel @unique diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_info.py b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_info.py index 9b43d0481..984a137cf 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_info.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_bigquery.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connector.py b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connector.py index a5f42e36a..9c362a1d4 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/api/connector.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/api/connector.py @@ -7,6 +7,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_bigquery.api.api_schema.connection import BigQueryConnectionSchema from dl_connector_bigquery.api.api_schema.source import ( BigQueryTableDataSourceSchema, diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/api/i18n/localizer.py b/lib/dl_connector_bigquery/dl_connector_bigquery/api/i18n/localizer.py index 8840c0a71..8a5e447de 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/api/i18n/localizer.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_bigquery as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_bigquery as 
package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/adapters.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/adapters.py index ad621c1ab..52bd8c9b4 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/adapters.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/adapters.py @@ -13,9 +13,6 @@ import sqlalchemy as sa import sqlalchemy_bigquery._types as bq_types -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY -from dl_connector_bigquery.core.error_transformer import big_query_db_error_transformer -from dl_connector_bigquery.core.target_dto import BigQueryConnTargetDTO from dl_core.connection_executors.adapters.adapters_base_sa_classic import ( BaseClassicAdapter, BaseConnLineConstructor, @@ -27,6 +24,10 @@ TableIdent, ) +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY +from dl_connector_bigquery.core.error_transformer import big_query_db_error_transformer +from dl_connector_bigquery.core.target_dto import BigQueryConnTargetDTO + class BigQueryConnLineConstructor(BaseConnLineConstructor[BigQueryConnTargetDTO]): def _get_dsn_params( diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/connection_executors.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/connection_executors.py index 8f4c33e89..cf080d696 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/connection_executors.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/connection_executors.py @@ -4,10 +4,11 @@ import attr +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + from dl_connector_bigquery.core.adapters import BigQueryDefaultAdapter from dl_connector_bigquery.core.dto import BigQueryConnDTO from dl_connector_bigquery.core.target_dto import BigQueryConnTargetDTO -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor 
@attr.s(cmp=False, hash=False) diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/connector.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/connector.py index 06620c525..c4d557c27 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/connector.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/connector.py @@ -1,3 +1,9 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) + from dl_connector_bigquery.core.adapters import BigQueryDefaultAdapter from dl_connector_bigquery.core.connection_executors import BigQueryAsyncConnExecutor from dl_connector_bigquery.core.constants import ( @@ -23,11 +29,6 @@ ) from dl_connector_bigquery.core.type_transformer import BigQueryTypeTransformer from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, - CoreSourceDefinition, -) class BigQueryCoreConnectionDefinition(CoreConnectionDefinition): diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/constants.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/constants.py index bc8b9a81b..a4affed53 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/constants.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/constants.py @@ -1,6 +1,6 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, SourceBackendType, ) @@ -9,5 +9,5 @@ CONNECTION_TYPE_BIGQUERY = ConnectionType.declare("bigquery") -SOURCE_TYPE_BIGQUERY_TABLE = CreateDSFrom.declare("BIGQUERY_TABLE") -SOURCE_TYPE_BIGQUERY_SUBSELECT = CreateDSFrom.declare("BIGQUERY_SUBSELECT") +SOURCE_TYPE_BIGQUERY_TABLE = DataSourceType.declare("BIGQUERY_TABLE") +SOURCE_TYPE_BIGQUERY_SUBSELECT = DataSourceType.declare("BIGQUERY_SUBSELECT") diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/data_source.py 
b/lib/dl_connector_bigquery/dl_connector_bigquery/core/data_source.py index 556bcb041..5a415de5c 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/data_source.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/data_source.py @@ -5,18 +5,7 @@ Optional, ) -from dl_connector_bigquery.core.constants import ( - CONNECTION_TYPE_BIGQUERY, - SOURCE_TYPE_BIGQUERY_SUBSELECT, - SOURCE_TYPE_BIGQUERY_TABLE, -) -from dl_connector_bigquery.core.data_source_spec import ( - BigQuerySubselectDataSourceSpec, - BigQueryTableDataSourceSpec, -) -from dl_connector_bigquery.core.query_compiler import BigQueryQueryCompiler -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.connection_models import ( TableDefinition, TableIdent, @@ -30,12 +19,24 @@ ) from dl_core.utils import sa_plain_text +from dl_connector_bigquery.core.constants import ( + CONNECTION_TYPE_BIGQUERY, + SOURCE_TYPE_BIGQUERY_SUBSELECT, + SOURCE_TYPE_BIGQUERY_TABLE, +) +from dl_connector_bigquery.core.data_source_spec import ( + BigQuerySubselectDataSourceSpec, + BigQueryTableDataSourceSpec, +) +from dl_connector_bigquery.core.query_compiler import BigQueryQueryCompiler +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery + class BigQueryDataSourceMixin(BaseSQLDataSource): conn_type = CONNECTION_TYPE_BIGQUERY @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in (SOURCE_TYPE_BIGQUERY_TABLE, SOURCE_TYPE_BIGQUERY_SUBSELECT) diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/dto.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/dto.py index 911fc0d89..c9f580813 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/dto.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/dto.py @@ -2,10 
+2,11 @@ import attr -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY from dl_core.connection_models.dto_defs import ConnDTO from dl_core.utils import secrepr +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY + @attr.s(frozen=True) class BigQueryConnDTO(ConnDTO): diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/sa_types.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/sa_types.py index 70e9fa5fe..1274ec158 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/sa_types.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/sa_types.py @@ -1,11 +1,12 @@ import sqlalchemy_bigquery._types as bq_types -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY as CT_BQ from dl_core.db.sa_types_base import ( make_native_type, simple_instantiator, ) +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY as CT_BQ + SQLALCHEMY_BIGQUERY_TYPES = { make_native_type(CT_BQ, bq_types.DATE): simple_instantiator(bq_types.DATE), diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/connection.py index 5820c2b72..0bcf6edaa 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/connection.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/connection.py @@ -1,12 +1,13 @@ from marshmallow import fields as ma_fields -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery from dl_core.us_manager.storage_schemas.connection import ( BaseConnectionDataStorageSchema, CacheableConnectionDataSchemaMixin, SubselectConnectionDataSchemaMixin, ) +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery + class BigQueryConnectionDataStorageSchema( BaseConnectionDataStorageSchema[ConnectionSQLBigQuery.DataModel], diff --git 
a/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/data_source_spec.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/data_source_spec.py index 62828da80..61000cf2e 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/data_source_spec.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/storage_schemas/data_source_spec.py @@ -1,15 +1,16 @@ import marshmallow.fields as ma_fields -from dl_connector_bigquery.core.data_source_spec import ( - BigQuerySubselectDataSourceSpec, - BigQueryTableDataSourceSpec, -) from dl_core.us_manager.storage_schemas.data_source_spec_base import ( BaseSQLDataSourceSpecStorageSchema, SubselectSQLDataSourceSpecStorageSchemaMixin, TableSQLDataSourceSpecStorageSchemaMixin, ) +from dl_connector_bigquery.core.data_source_spec import ( + BigQuerySubselectDataSourceSpec, + BigQueryTableDataSourceSpec, +) + class DatasetSQLDataSourceSpecStorageSchemaMixin(BaseSQLDataSourceSpecStorageSchema): # noqa dataset_name = ma_fields.String(required=True) diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/testing/connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/testing/connection.py index bfb48a0ce..251835659 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/testing/connection.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/testing/connection.py @@ -1,11 +1,12 @@ from typing import Any import uuid -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery from dl_constants.enums import RawSQLLevel from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery + def make_bigquery_saved_connection( sync_usm: SyncUSManager, diff --git 
a/lib/dl_connector_bigquery/dl_connector_bigquery/core/type_transformer.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/type_transformer.py index f7d8775de..bd745c6f8 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/type_transformer.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/type_transformer.py @@ -1,35 +1,36 @@ import sqlalchemy_bigquery._types as bq_types -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( TypeTransformer, make_native_type, ) +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY + class BigQueryTypeTransformer(TypeTransformer): conn_type = CONNECTION_TYPE_BIGQUERY native_to_user_map = { - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATE): BIType.date, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATETIME): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.STRING): BIType.string, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.BOOLEAN): BIType.boolean, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.INTEGER): BIType.integer, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.FLOAT): BIType.float, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.NUMERIC): BIType.float, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.INTEGER)): BIType.array_int, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.STRING)): BIType.array_str, - make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.FLOAT)): BIType.array_float, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATE): UserDataType.date, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATETIME): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.STRING): UserDataType.string, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.BOOLEAN): 
UserDataType.boolean, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.INTEGER): UserDataType.integer, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.FLOAT): UserDataType.float, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.NUMERIC): UserDataType.float, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.INTEGER)): UserDataType.array_int, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.STRING)): UserDataType.array_str, + make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.FLOAT)): UserDataType.array_float, } user_to_native_map = { - BIType.date: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATE), - BIType.genericdatetime: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATETIME), - BIType.string: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.STRING), - BIType.boolean: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.BOOLEAN), - BIType.integer: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.INTEGER), - BIType.float: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.FLOAT), - BIType.array_int: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.INTEGER)), - BIType.array_str: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.STRING)), - BIType.array_float: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.FLOAT)), + UserDataType.date: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATE), + UserDataType.genericdatetime: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.DATETIME), + UserDataType.string: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.STRING), + UserDataType.boolean: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.BOOLEAN), + UserDataType.integer: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.INTEGER), + UserDataType.float: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.FLOAT), + UserDataType.array_int: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.INTEGER)), + 
UserDataType.array_str: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.STRING)), + UserDataType.array_float: make_native_type(CONNECTION_TYPE_BIGQUERY, bq_types.ARRAY(bq_types.FLOAT)), } diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/core/us_connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery/core/us_connection.py index 7a2e7d6c8..7099493a7 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/core/us_connection.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/core/us_connection.py @@ -7,12 +7,6 @@ import attr -from dl_connector_bigquery.core.constants import ( - CONNECTION_TYPE_BIGQUERY, - SOURCE_TYPE_BIGQUERY_SUBSELECT, - SOURCE_TYPE_BIGQUERY_TABLE, -) -from dl_connector_bigquery.core.dto import BigQueryConnDTO from dl_core.base_models import ( ConnCacheableDataModelMixin, ConnectionDataModelBase, @@ -27,6 +21,13 @@ from dl_i18n.localizer_base import Localizer from dl_utils.utils import DataKey +from dl_connector_bigquery.core.constants import ( + CONNECTION_TYPE_BIGQUERY, + SOURCE_TYPE_BIGQUERY_SUBSELECT, + SOURCE_TYPE_BIGQUERY_TABLE, +) +from dl_connector_bigquery.core.dto import BigQueryConnDTO + class ConnectionSQLBigQuery(ConnectionSQL): conn_type = CONNECTION_TYPE_BIGQUERY diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/db_testing/connector.py b/lib/dl_connector_bigquery/dl_connector_bigquery/db_testing/connector.py index 01fd46a69..f31a1a830 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/db_testing/connector.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/db_testing/connector.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery.db_testing.engine_wrapper import BigQueryEngineWrapper from dl_db_testing.connectors.base.connector import DbTestingConnector +from dl_connector_bigquery.db_testing.engine_wrapper import BigQueryEngineWrapper + class BigQueryDbTestingConnector(DbTestingConnector): engine_wrapper_classes = (BigQueryEngineWrapper,) diff --git 
a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/connector.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/connector.py index 28d6aca66..c8974c371 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/connector.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/connector.py @@ -1,9 +1,10 @@ import sqlalchemy.sql.functions as sa_funcs from sqlalchemy_bigquery.base import BigQueryDialect as SABigQueryDialect +from dl_formula.connectors.base.connector import FormulaConnector + from dl_connector_bigquery.formula.constants import BigQueryDialect as BigQueryDialectNS from dl_connector_bigquery.formula.definitions.all import DEFINITIONS -from dl_formula.connectors.base.connector import FormulaConnector class BigQueryFormulaConnector(FormulaConnector): diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/conditional_blocks.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/conditional_blocks.py index a690a08df..5d9486e7a 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/conditional_blocks.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/conditional_blocks.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.conditional_blocks as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_aggregation.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_aggregation.py index 2501a0973..9b7df69b7 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_aggregation.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_aggregation.py @@ 
-1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_aggregation as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_datetime.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_datetime.py index 3880a94e2..1c77b138a 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_datetime.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_datetime.py @@ -1,7 +1,6 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_bigquery.formula.constants import BigQueryDialect as D import dl_formula.core.exc as exc from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common import raw_sql @@ -9,6 +8,8 @@ import dl_formula.definitions.functions_datetime as base from dl_formula.definitions.literals import un_literal +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_logical.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_logical.py index aade4fca9..d30142cb4 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_logical.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_logical.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_logical as base +from dl_connector_bigquery.formula.constants import 
BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_markup.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_markup.py index 92c524800..060665f5c 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_markup.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery.formula.constants import BigQueryDialect as D import dl_formula.definitions.functions_markup as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + DEFINITIONS_MARKUP = [ # + diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_math.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_math.py index b25c07c61..85c205d3e 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_math.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_math.py @@ -2,10 +2,11 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_math as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_string.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_string.py index a1cd8f53f..91eff7e52 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_string.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_string.py @@ -1,10 +1,11 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from 
dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_string as base from dl_formula.shortcuts import n +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make @@ -30,6 +31,9 @@ ] ), base.FuncContainsNonString.for_dialect(D.BIGQUERY), + # notcontains + base.FuncNotContainsNonConst.for_dialect(D.BIGQUERY), + base.FuncNotContainsNonString.for_dialect(D.BIGQUERY), # endswith base.FuncEndswithNonConst(variants=[V(D.BIGQUERY, sa.func.ENDS_WITH)]), base.FuncEndswithNonString.for_dialect(D.BIGQUERY), diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_type.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_type.py index aa581156e..a402a0552 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_type.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/functions_type.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_type as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_binary.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_binary.py index 7d89a40de..adadf9ae3 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_binary.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_binary.py @@ -1,12 +1,13 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common 
import raw_sql from dl_formula.definitions.common_datetime import DAY_SEC import dl_formula.definitions.operators_binary as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_ternary.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_ternary.py index 7006ec1bd..df7df225f 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_ternary.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_ternary.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery.formula.constants import BigQueryDialect as D import dl_formula.definitions.operators_ternary as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + DEFINITIONS_TERNARY = [ # between diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_unary.py b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_unary.py index e5286accd..da5610b88 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_unary.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery/formula/definitions/operators_unary.py @@ -1,7 +1,8 @@ -from dl_connector_bigquery.formula.constants import BigQueryDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.operators_unary as base +from dl_connector_bigquery.formula.constants import BigQueryDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/base.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/base.py index f4a370b48..485180b7a 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/base.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/base.py @@ -4,6 +4,7 @@ from 
dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import StandardizedDataApiTestBase from dl_api_lib_testing.dataset_base import DatasetTestBase + from dl_connector_bigquery.core.constants import ( CONNECTION_TYPE_BIGQUERY, SOURCE_TYPE_BIGQUERY_TABLE, diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_complex_queries.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_complex_queries.py new file mode 100644 index 000000000..a09f780fd --- /dev/null +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_complex_queries.py @@ -0,0 +1,12 @@ +from dl_api_lib_testing.connector.complex_queries import DefaultBasicComplexQueryTestSuite +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bigquery_tests.ext.api.base import BigQueryDataApiTestBase + + +class TestBigQueryBasicComplexQueries(BigQueryDataApiTestBase, DefaultBasicComplexQueryTestSuite): + test_params = RegulatedTestParams( + mark_features_skipped={ + DefaultBasicComplexQueryTestSuite.feature_window_functions: "Native window functions are not implemented" + } + ) diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_connection.py index c880af8a7..15f02e05b 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_connection.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_connection.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + from dl_connector_bigquery_tests.ext.api.base import BigQueryConnectionTestBase diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_data.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_data.py index 4be17cc6d..7da1716ff 100644 --- 
a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_data.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_data.py @@ -5,9 +5,10 @@ DefaultConnectorDataRangeTestSuite, DefaultConnectorDataResultTestSuite, ) -from dl_connector_bigquery_tests.ext.api.base import BigQueryDataApiTestBase from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_bigquery_tests.ext.api.base import BigQueryDataApiTestBase + class TestBigQueryDataResult(BigQueryDataApiTestBase, DefaultConnectorDataResultTestSuite): test_params = RegulatedTestParams( diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_dataset.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_dataset.py index b50dab35f..181a15c49 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_dataset.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/api/test_dataset.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + from dl_connector_bigquery_tests.ext.api.base import BigQueryDatasetTestBase diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/conftest.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/conftest.py index 1c4674d44..c153012c6 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/conftest.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/conftest.py @@ -3,13 +3,14 @@ import pytest from dl_api_lib_testing.initialization import initialize_api_lib_test +from dl_formula_testing.forced_literal import forced_literal_use +from dl_testing.env_params.generic import GenericEnvParamGetter + from dl_connector_bigquery.testing.secrets import ( BigQuerySecretReader, BigQuerySecretReaderBase, ) from dl_connector_bigquery_tests.ext.config import API_TEST_CONFIG -from dl_formula_testing.forced_literal import forced_literal_use -from dl_testing.env_params.generic 
import GenericEnvParamGetter pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/base.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/base.py index a6a94ca55..6f361c3c3 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/base.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/base.py @@ -3,11 +3,6 @@ import pytest -from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY -from dl_connector_bigquery.core.testing.connection import make_bigquery_saved_connection -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_connector_bigquery.db_testing.engine_wrapper import BigQueryDbEngineConfig -import dl_connector_bigquery_tests.ext.config as test_config from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_core_testing.database import ( C, @@ -18,6 +13,12 @@ from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE from dl_core_testing.testcases.connection import BaseConnectionTestClass +from dl_connector_bigquery.core.constants import CONNECTION_TYPE_BIGQUERY +from dl_connector_bigquery.core.testing.connection import make_bigquery_saved_connection +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery +from dl_connector_bigquery.db_testing.engine_wrapper import BigQueryDbEngineConfig +import dl_connector_bigquery_tests.ext.config as test_config + class BaseBigQueryTestClass(BaseConnectionTestClass[ConnectionSQLBigQuery]): conn_type = CONNECTION_TYPE_BIGQUERY diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection.py index f1807cbb7..621bce654 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection.py +++ 
b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection.py @@ -1,8 +1,9 @@ -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass from dl_core.us_connection_base import DataSourceTemplate from dl_core_testing.testcases.connection import DefaultConnectionTestClass +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery +from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass + class TestBigQueryConnection( BaseBigQueryTestClass, diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection_executor.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection_executor.py index d5af9d6af..fbfdfb2ab 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection_executor.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_connection_executor.py @@ -1,8 +1,6 @@ import pytest -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.connection_models.common_models import DBIdent from dl_core_testing.database import ( C, @@ -15,6 +13,9 @@ ) from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery +from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass + class BigQuerySyncAsyncConnectionExecutorCheckBase( BaseBigQueryTestClass, @@ -34,9 +35,9 @@ def db_table_columns(self, db: Db) -> list[C]: for col_spec in C.full_house() if col_spec.user_type not in ( - BIType.uuid, # UUID is not supported - BIType.datetime, - BIType.genericdatetime, # datetimes with fractional seconds are not supported # FIXME + UserDataType.uuid, # UUID 
is not supported + UserDataType.datetime, + UserDataType.genericdatetime, # datetimes with fractional seconds are not supported # FIXME ) ] diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_data_source.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_data_source.py index 075fb8731..9226ec848 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_data_source.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_data_source.py @@ -1,5 +1,12 @@ import pytest +from dl_constants.enums import ( + RawSQLLevel, + UserDataType, +) +from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE +from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass + from dl_connector_bigquery.core.constants import ( SOURCE_TYPE_BIGQUERY_SUBSELECT, SOURCE_TYPE_BIGQUERY_TABLE, @@ -14,12 +21,6 @@ ) from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass -from dl_constants.enums import ( - BIType, - RawSQLLevel, -) -from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE -from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass class TestBigQueryTableDataSource( @@ -41,7 +42,7 @@ def initial_data_source_spec(self, sample_table) -> BigQueryTableDataSourceSpec: ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) @@ -65,5 +66,5 @@ def initial_data_source_spec(self, sample_table) -> BigQuerySubselectDataSourceS ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) diff --git 
a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_dataset.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_dataset.py index 753ce8ae6..588f1b3e2 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_dataset.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/core/test_dataset.py @@ -1,12 +1,13 @@ import pytest -from dl_connector_bigquery.core.constants import SOURCE_TYPE_BIGQUERY_TABLE -from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery -from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass from dl_core_testing.database import DbTable from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_bigquery.core.constants import SOURCE_TYPE_BIGQUERY_TABLE +from dl_connector_bigquery.core.us_connection import ConnectionSQLBigQuery +from dl_connector_bigquery_tests.ext.core.base import BaseBigQueryTestClass + class TestBigQueryDataset(BaseBigQueryTestClass, DefaultDatasetTestSuite[ConnectionSQLBigQuery]): source_type = SOURCE_TYPE_BIGQUERY_TABLE diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/base.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/base.py index 8f7bc585a..14846e48c 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/base.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/base.py @@ -3,9 +3,10 @@ import pytest import sqlalchemy.exc as sa_exc +from dl_formula_testing.testcases.base import FormulaConnectorTestBase + from dl_connector_bigquery.db_testing.engine_wrapper import BigQueryDbEngineConfig from dl_connector_bigquery.formula.constants import BigQueryDialect as D -from dl_formula_testing.testcases.base import FormulaConnectorTestBase class BigQueryTestBase(FormulaConnectorTestBase): diff --git 
a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_conditional_blocks.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_conditional_blocks.py index 59322d123..8c7c42f6b 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_conditional_blocks.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_conditional_blocks.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.conditional_blocks import DefaultConditionalBlockFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestConditionalBlockBigQuery(BigQueryTestBase, DefaultConditionalBlockFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_aggregation.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_aggregation.py index 2f1360523..2869bbd8f 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_aggregation.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_aggregation.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_aggregation import DefaultMainAggFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestMainAggFunctionBigQuery(BigQueryTestBase, DefaultMainAggFunctionFormulaConnectorTestSuite): supports_countd_approx = True diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_datetime.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_datetime.py index 29edc1a90..4a16d7ee4 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_datetime.py 
+++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_datetime.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestDateTimeFunctionBigQuery(BigQueryTestBase, DefaultDateTimeFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_logical.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_logical.py index 359e48a41..7cda44f07 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_logical.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_logical.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_logical import DefaultLogicalFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestLogicalFunctionBigQuery(BigQueryTestBase, DefaultLogicalFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_markup.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_markup.py index fb4823a4e..983b130c2 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_markup.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_markup import DefaultMarkupFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class 
TestMarkupFunctionBigQuery(BigQueryTestBase, DefaultMarkupFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_math.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_math.py index a94e1420a..bfa809cf9 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_math.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_math.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_math import DefaultMathFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestMathFunctionBigQuery(BigQueryTestBase, DefaultMathFunctionFormulaConnectorTestSuite): supports_float_div = False diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_string.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_string.py index 7cd741b22..b03e30fc9 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_string.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_string.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_string import DefaultStringFunctionFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestStringFunctionBigQuery(BigQueryTestBase, DefaultStringFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_type_conversion.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_type_conversion.py index 478db9c51..bf05538cb 100644 --- 
a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_type_conversion.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_functions_type_conversion.py @@ -1,4 +1,3 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.functions_type_conversion import ( DefaultBoolTypeFunctionFormulaConnectorTestSuite, DefaultDateTypeFunctionFormulaConnectorTestSuite, @@ -10,6 +9,8 @@ DefaultStrTypeFunctionFormulaConnectorTestSuite, ) +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + # STR diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_operators.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_operators.py index d382d4d63..128d08946 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_operators.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/ext/formula/test_operators.py @@ -1,6 +1,7 @@ -from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite +from dl_connector_bigquery_tests.ext.formula.base import BigQueryTestBase + class TestOperatorBigQuery(BigQueryTestBase, DefaultOperatorFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/unit/test_connection_form.py b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/unit/test_connection_form.py index 672d1130e..7cfbe3f1a 100644 --- a/lib/dl_connector_bigquery/dl_connector_bigquery_tests/unit/test_connection_form.py +++ b/lib/dl_connector_bigquery/dl_connector_bigquery_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from dl_connector_bigquery.api.connection_form.form_config 
import BigQueryConnectionFormFactory from dl_connector_bigquery.api.i18n.localizer import CONFIGS as BI_CONNECTOR_BIGQUERY_CONFIGS diff --git a/lib/dl_connector_bitrix_gds/LICENSE b/lib/dl_connector_bitrix_gds/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_connector_bitrix_gds/README.md b/lib/dl_connector_bitrix_gds/README.md new file mode 100644 index 000000000..252b479fe --- /dev/null +++ b/lib/dl_connector_bitrix_gds/README.md @@ -0,0 +1 @@ +# dl_connector_bitrix_gds diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/api_schema/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/api_schema/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/api_schema/connection.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/api_schema/connection.py new file mode 100644 index 000000000..0dee4be87 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/api_schema/connection.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from urllib.parse import urlparse + +from marshmallow import fields as ma_fields +from marshmallow import validate as ma_validate + +from dl_api_connector.api_schema.connection_base import ( + 
class BitrixPortalValidator(ma_validate.Validator):
    """Marshmallow validator accepting only a bare Bitrix portal hostname.

    The value is checked by round-tripping it through ``urlparse``: if the
    input contains anything besides a plain hostname (scheme, port, path,
    userinfo, uppercase-only artifacts survive the lower-cased ``hostname``
    round trip differently), the comparison fails and validation errors out.
    """

    error = "Not a valid portal name"

    def __call__(self, portal: str) -> str:
        # Prefixing "//" makes urlparse treat the whole value as a netloc;
        # ``hostname`` is lower-cased and drops port/userinfo, so any extra
        # component makes it differ from the original input.
        hostname = urlparse(f"//{portal}").hostname
        if portal.lower() != hostname:
            raise ma_validate.ValidationError(self.error)
        return portal


class BitrixGDSConnectionSchema(ConnectionMetaMixin, ConnectionSchema):
    """API (de)serialization schema for a Bitrix GDS connection."""

    TARGET_CLS = BitrixGDSConnection

    # Portal hostname (e.g. "company.bitrix24.example"); editable after creation.
    portal = ma_fields.String(
        attribute="data.portal",
        required=True,
        allow_none=False,
        bi_extra=FieldExtra(editable=True),
        validate=BitrixPortalValidator(),
    )
    # Access token handled via the shared secret-string field helper.
    token = secret_string_field(attribute="data.token")
    cache_ttl_sec = cache_ttl_field(attribute="data.cache_ttl_sec")
@unique
class BitrixGDSFieldName(FormFieldName):
    """Form field names specific to the Bitrix GDS connector."""

    portal = "portal"


class BitrixGDSConnectionFormFactory(ConnectionFormFactory):
    """Builds the connection form configuration for Bitrix GDS connections."""

    def get_form_config(
        self,
        connector_settings: Optional[ConnectorSettingsBase],
        tenant: Optional[TenantDef],
    ) -> ConnectionForm:
        row_constructor = RowConstructor(localizer=self._localizer)
        creating = self.mode == ConnectionFormMode.create

        # Portal hostname input.
        portal_row = C.CustomizableRow(
            items=[
                C.LabelRowItem(text=self._localizer.translate(Translatable("label_portal"))),
                C.InputRowItem(name=BitrixGDSFieldName.portal),
            ]
        )

        # Token input: empty by default on create; masked placeholder on edit.
        token_row = C.CustomizableRow(
            items=[
                C.LabelRowItem(text=self._localizer.translate(Translatable("label_token"))),
                C.InputRowItem(
                    name=CommonFieldName.token,
                    width="l",
                    control_props=C.InputRowItem.Props(type="password"),
                    default_value="" if creating else None,
                    fake_value=None if creating else "******",
                ),
            ]
        )

        rows: list[FormRow] = [portal_row, token_row]
        if creating:
            rows.append(row_constructor.auto_create_dash_row())

        # The token is only mandatory when the connection is first created.
        edit_api_schema = FormActionApiSchema(
            items=[
                FormFieldApiSchema(name=BitrixGDSFieldName.portal, required=True),
                FormFieldApiSchema(name=CommonFieldName.token, required=creating),
            ]
        )
        create_api_schema = FormActionApiSchema(
            items=[*edit_api_schema.items, *self._get_top_level_create_api_schema_items()]
        )
        check_api_schema = FormActionApiSchema(
            items=[*edit_api_schema.items, *self._get_top_level_check_api_schema_items()]
        )

        return ConnectionForm(
            title=BitrixGDSConnectionInfoProvider.get_title(self._localizer),
            template_name="bitrix24",
            rows=rows,
            api_schema=FormApiSchema(
                create=create_api_schema if creating else None,
                edit=edit_api_schema if self.mode == ConnectionFormMode.edit else None,
                check=check_api_schema,
            ),
        )


class BitrixGDSConnectionInfoProvider(ConnectionInfoProvider):
    """Supplies the translatable display title for the Bitrix connector."""

    title_translatable = Translatable("label_connector-bitrix")
BitrixGDSConnectionInfoProvider +from dl_connector_bitrix_gds.api.filter_compiler import BitrixGDSFilterFormulaCompiler +from dl_connector_bitrix_gds.api.i18n.localizer import CONFIGS +from dl_connector_bitrix_gds.api.multi_query import BitrixGDSMultiQueryMutatorFactory +from dl_connector_bitrix_gds.core.connector import ( + BitrixGDSCoreConnectionDefinition, + BitrixGDSCoreConnector, + BitrixGDSCoreSourceDefinition, +) +from dl_connector_bitrix_gds.formula.constants import DIALECT_NAME_BITRIX + + +class BitrixGDSApiSourceDefinition(ApiSourceDefinition): + core_source_def_cls = BitrixGDSCoreSourceDefinition + api_schema_cls = SQLDataSourceSchema + template_api_schema_cls = SQLDataSourceTemplateSchema + + +class BitrixGDSApiConnectionDefinition(ApiConnectionDefinition): + core_conn_def_cls = BitrixGDSCoreConnectionDefinition + api_generic_schema_cls = BitrixGDSConnectionSchema + form_factory_cls = BitrixGDSConnectionFormFactory + info_provider_cls = BitrixGDSConnectionInfoProvider + + +class BitrixGDSApiConnector(ApiConnector): + core_connector_cls = BitrixGDSCoreConnector + formula_dialect_name = DIALECT_NAME_BITRIX + multi_query_mutation_factories = ( + MQMFactorySettingItem( + query_proc_mode=QueryProcessingMode.basic, + factory_cls=BitrixGDSMultiQueryMutatorFactory, + ), + ) + connection_definitions = (BitrixGDSApiConnectionDefinition,) + source_definitions = (BitrixGDSApiSourceDefinition,) + is_forkable = False + is_compeng_executable = True + filter_formula_compiler_cls = BitrixGDSFilterFormulaCompiler + translation_configs = frozenset(CONFIGS) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/filter_compiler.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/filter_compiler.py new file mode 100644 index 000000000..89baa8773 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/filter_compiler.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from 
dl_query_processing.compilation.filter_compiler import MainFilterFormulaCompiler + + +if TYPE_CHECKING: + from dl_query_processing.compilation.filter_compiler import FilterParams + + +class BitrixGDSFilterFormulaCompiler(MainFilterFormulaCompiler): + """connector-specific customizations point""" + + def _mangle_date_filter(self, filter_params: FilterParams) -> FilterParams: + return filter_params # Disable the datetime mangling diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/i18n/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/i18n/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/i18n/localizer.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/i18n/localizer.py new file mode 100644 index 000000000..b69266ea1 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/i18n/localizer.py @@ -0,0 +1,28 @@ +import os + +import attr + +from dl_i18n.localizer_base import Translatable as BaseTranslatable +from dl_i18n.localizer_base import TranslationConfig + +import dl_connector_bitrix_gds as package + + +DOMAIN = f"{package.__name__}" +CONFIGS = [ + TranslationConfig( + path=os.path.relpath(os.path.join(os.path.dirname(__file__), "../../locales")), + domain=DOMAIN, + locale="en", + ), + TranslationConfig( + path=os.path.relpath(os.path.join(os.path.dirname(__file__), "../../locales")), + domain=DOMAIN, + locale="ru", + ), +] + + +@attr.s +class Translatable(BaseTranslatable): + domain: str = attr.ib(default=DOMAIN) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/multi_query.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/multi_query.py new file mode 100644 index 000000000..15c2fa4c9 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/api/multi_query.py @@ -0,0 +1,48 @@ +from typing import ClassVar + +import attr + +from dl_constants.enums import UserDataType +from 
dl_core.fields import ResultSchema +import dl_formula.core.nodes as formula_nodes +from dl_query_processing.compilation.primitives import CompiledFormulaInfo +from dl_query_processing.enums import ExecutionLevel +from dl_query_processing.multi_query.factory import MultiQueryMutatorFactoryBase +from dl_query_processing.multi_query.mutators.base import MultiQueryMutatorBase +from dl_query_processing.multi_query.mutators.splitter_based import SplitterMultiQueryMutator +from dl_query_processing.multi_query.splitters.prefiltered import PrefilteredFieldMultiQuerySplitter +from dl_query_processing.multi_query.splitters.query_fork import QueryForkQuerySplitter + + +@attr.s +class BitrixGDSMultiQuerySplitter(PrefilteredFieldMultiQuerySplitter): + expr_names: ClassVar[set[str]] = {"between", ">", ">=", "<", "<=", "=="} + data_types: ClassVar[set[UserDataType]] = {UserDataType.datetime, UserDataType.date, UserDataType.genericdatetime} + + result_schema: ResultSchema = attr.ib(kw_only=True) + + def is_pre_filter(self, formula: CompiledFormulaInfo) -> bool: + assert formula.original_field_id is not None + expr = formula.formula_obj.expr + if not isinstance(expr, formula_nodes.OperationCall): + return False + field = self.result_schema.by_guid(formula.original_field_id) + if field.data_type in self.data_types and expr.name in self.expr_names: + # FIXME: Refactor this + return True + return False + + +class BitrixGDSMultiQueryMutatorFactory(MultiQueryMutatorFactoryBase): + def get_mutators(self) -> list[MultiQueryMutatorBase]: + return [ + SplitterMultiQueryMutator( + splitters=[ + BitrixGDSMultiQuerySplitter( + crop_to_level_type=ExecutionLevel.compeng, + result_schema=self.result_schema, + ), + QueryForkQuerySplitter(), + ], + ) + ] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/adapter.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/adapter.py new file mode 100644 index 000000000..102078301 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/adapter.py @@ -0,0 +1,405 @@ +from __future__ import annotations + +import datetime +import json +import logging +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Optional, + Union, +) +import uuid + +import attr +from redis_cache_lock.types import TClientACM +from redis_cache_lock.utils import wrap_generate_func +import sqlalchemy as sa +from sqlalchemy.sql.elements import TypeCoerce + +from dl_app_tools.profiling_base import generic_profiler_async +from dl_constants.enums import ConnectionType +from dl_core.aio.web_app_services.redis import RedisConnParams +from dl_core.connection_executors.adapters.async_adapters_aiohttp import AiohttpDBAdapter +from dl_core.connection_executors.adapters.async_adapters_base import AsyncRawExecutionResult +from dl_core.connection_executors.models.db_adapter_data import ( + DBAdapterQuery, + RawColumnInfo, + RawSchemaInfo, +) +from dl_core.connection_models import ( + DBIdent, + SchemaIdent, + TableDefinition, + TableIdent, +) +from dl_core.connectors.base.error_handling import ETBasedExceptionMaker +from dl_core.data_processing.cache.engine import RedisCacheLockWrapped +from dl_core.db.native_type import CommonNativeType +from dl_core.exc import DatabaseQueryError + +from dl_connector_bitrix_gds.core.caches import ( + bitrix_cache_deserializer, + bitrix_cache_serializer, + build_local_key_rep, + get_redis_cli_acm_from_params, +) +from dl_connector_bitrix_gds.core.constants import ( + CONNECTION_TYPE_BITRIX24, + DEFAULT_DB, +) +from dl_connector_bitrix_gds.core.error_transformer import bitrix_error_transformer +from dl_connector_bitrix_gds.core.tables import ( + BITRIX_TABLES_MAP, + CRM_DYNAMIC_ITEMS_TABLE, + SMART_PROCESS_TABLE_PREFIX, +) + + +if 
TYPE_CHECKING: + from sqlalchemy.sql.elements import Label + + from dl_connector_bitrix_gds.core.tables import BitrixGDSTable + from dl_connector_bitrix_gds.core.target_dto import BitrixGDSConnTargetDTO + +LOGGER = logging.getLogger(__name__) + + +@attr.s(frozen=True, kw_only=True) +class BitrixRequestPayload: + portal: str = attr.ib() + table: str = attr.ib() + json_body: dict = attr.ib() + flatten_body: dict = attr.ib() + + +def extract_select_column_name(column: Label) -> str: + element = column.element + if isinstance(element, TypeCoerce): + element = element.typed_expression + return element.name + + +class BitrixGDSDefaultAdapter(AiohttpDBAdapter, ETBasedExceptionMaker): + conn_type: ClassVar[ConnectionType] = CONNECTION_TYPE_BITRIX24 + _target_dto: BitrixGDSConnTargetDTO = attr.ib() + _redis_cli_acm: Optional[TClientACM] = attr.ib(init=False) + + table: Optional[BitrixGDSTable] = None + + _error_transformer = bitrix_error_transformer + + EXTRA_EXC_CLS = (json.JSONDecodeError,) + + def __attrs_post_init__(self) -> None: + super().__attrs_post_init__() + redis_conn_params: Optional[RedisConnParams] + if self._target_dto.redis_conn_params is not None: + redis_conn_params = RedisConnParams(**self._target_dto.redis_conn_params) + else: + redis_conn_params = None + self._redis_cli_acm = get_redis_cli_acm_from_params(redis_conn_params) + + @generic_profiler_async("db-query-cached") # type: ignore # TODO: fix + async def _run_query_cached(self, dba_query: DBAdapterQuery) -> Any: + async def wrap_run_query() -> Any: + result = await self._run_query(dba_query) + return result + + payload = self._build_request_payload(dba_query) + local_key_rep = build_local_key_rep(payload.portal, payload.table, payload.flatten_body) + + assert self._redis_cli_acm is not None + rcl = RedisCacheLockWrapped( + key=local_key_rep.key_parts_hash, + client_acm=self._redis_cli_acm, + resource_tag="bic_conn_bitrix_query_cache", + lock_ttl_sec=60, + 
data_ttl_sec=self._target_dto.redis_caches_ttl or 600, + ) + + result_b, result = await rcl.generate_with_lock( + generate_func=wrap_generate_func( + func=wrap_run_query, + serialize=bitrix_cache_serializer, + ), + ) + if result is None: + LOGGER.info("Result found in cache: %s", local_key_rep.key_parts_hash) + result = bitrix_cache_deserializer(result_b) + + return result + + @generic_profiler_async("db-query") # type: ignore # TODO: fix + async def _run_query(self, dba_query: DBAdapterQuery) -> Any: + query_text = self.compile_query_for_execution(dba_query.query) + payload = self._build_request_payload(dba_query) + + api_url = f"https://{self._target_dto.portal}/bitrix/tools/biconnector/pbi.php" + request_id = self._req_ctx_info.request_id or str(uuid.uuid4()) + LOGGER.info( + "Sending query to Bitrix:\nrequest_id: %s\nurl: %s\nquery: %s\nparams: %s", + request_id, + api_url, + query_text, + json.dumps({k: (v if k != "key" else "...") for k, v in payload.json_body.items()}), + ) + + with self.handle_execution_error(query_text): + resp = await self._session.post( + url=api_url, + params={ + "table": payload.table, + "consumer": "datalens", + "request_id": request_id, + }, + json=payload.json_body, + ) + + if resp.status != 200: + body = await resp.text() + raise DatabaseQueryError(db_message=body, query=query_text) + + resp_body = await resp.json() + + return resp_body + + def _parse_response_body_data(self, body: list, selected_columns: Optional[list] = None) -> dict: + if not len(body): + raise ValueError("empty response") + cols = body[0] + rows = body[1:] + + assert self.table is not None + columns_type = self.table.get_columns_type() + if selected_columns is None: + selected_columns = cols + + try: + normalized_data = dict( + cols=[dict(id=col, label=col, type=columns_type.get(col, "string")) for col in selected_columns], + rows=[[dict(zip(cols, row))[col] for col in selected_columns] for row in rows], + ) + except (KeyError, TypeError, ValueError): + raise 
ValueError("unexpected data structure") + + return normalized_data + + def _table_schema(self, table: str) -> BitrixGDSTable: + if table.startswith(SMART_PROCESS_TABLE_PREFIX): + return CRM_DYNAMIC_ITEMS_TABLE + else: + return BITRIX_TABLES_MAP[table] + + def _build_request_payload(self, dba_query: DBAdapterQuery) -> BitrixRequestPayload: + table = self._extract_table_name(dba_query.query) + if self.table is None: + self.table = self._table_schema(table) + json_body, flatten_body = self.generate_body(dba_query) + payload = BitrixRequestPayload( + table=table, + portal=self._target_dto.portal, + json_body=json_body, + flatten_body=flatten_body, + ) + return payload + + def _extract_table_name(self, query: Union[sa.sql.Select, str]) -> str: + assert isinstance(query, sa.sql.Select) + froms = query.froms[0] + if isinstance(froms, sa.sql.Subquery) and hasattr(froms, "element"): + froms = froms.element.froms[0] + assert isinstance(froms, sa.sql.TableClause) + return froms.name + + def _parse_response_body(self, body: Any, dba_query: DBAdapterQuery) -> dict: + assert isinstance(dba_query.query, sa.sql.Select) + selected_columns_values = dba_query.query.selected_columns.values() + selected_columns: Optional[list[str]] = None + if "*" not in set(column.name for column in selected_columns_values): + selected_columns = [extract_select_column_name(column) for column in selected_columns_values] + # 'table."COLUMN_NAME"' -> 'COLUMN_NAME' + selected_columns = [col.split(".")[-1].replace('"', "").replace("`", "") for col in selected_columns] + + try: + if not isinstance(body, list): + raise TypeError("Unexpected response format") + return self._parse_response_body_data(body, selected_columns=selected_columns) + except (ValueError, TypeError) as err: + LOGGER.debug("Unexpected API response") + raise DatabaseQueryError( + message=f"Unexpected API response body: {err.args[0]}", + db_message="", + query=dba_query.debug_compiled_query, + orig=None, + details={}, + ) + + 
@generic_profiler_async("db-full") # type: ignore # TODO: fix + async def execute(self, query: DBAdapterQuery) -> AsyncRawExecutionResult: + with self.wrap_execute_excs(query=query, stage="request"): + if self._redis_cli_acm is not None: + resp_body = await self._run_query_cached(query) + else: + resp_body = await self._run_query(query) + + rd = self._parse_response_body(resp_body, query) + + async def chunk_gen(chunk_size=query.chunk_size or self._default_chunk_size): # type: ignore # TODO: fix + data = rd["rows"] + while data: + chunk = data[:chunk_size] + data = data[chunk_size:] + yield chunk + + return AsyncRawExecutionResult( + raw_cursor_info=dict(cols=rd["cols"]), + raw_chunk_generator=chunk_gen(), + ) + + async def get_db_version(self, db_ident: DBIdent) -> Optional[str]: + return None # Not Applicable + + async def get_schema_names(self, db_ident: DBIdent) -> list[str]: + raise NotImplementedError() + + async def get_tables(self, schema_ident: SchemaIdent) -> list[TableIdent]: + known_general_tables = BITRIX_TABLES_MAP.keys() + user_tables = await self._get_user_tables(schema_ident) + tables = [ + table_name + for table_name in user_tables + if table_name in known_general_tables or table_name.startswith(SMART_PROCESS_TABLE_PREFIX) + ] + return [ + TableIdent( + db_name=schema_ident.db_name, + schema_name=schema_ident.schema_name, + table_name=table_name, + ) + for table_name in tables + ] + + async def _get_user_tables(self, schema_ident: SchemaIdent) -> list[str]: + body: dict[str, Any] = { + "key": self._target_dto.token, + } + api_url: str = f"https://{self._target_dto.portal}/bitrix/tools/biconnector/gds.php?show_tables" + resp = await self._session.post( + url=api_url, + json=body, + ) + tables: list[str] = [table[0] for table in await resp.json()] + return tables + + @generic_profiler_async("db-table-info") # type: ignore # TODO: fix + async def get_table_info( + self, table_def: Optional[TableDefinition] = None, fetch_idx_info: bool = False + ) -> 
RawSchemaInfo: + assert isinstance(table_def, TableIdent) + table_name = table_def.table_name + + assert table_name in [table.table_name for table in await self.get_tables(SchemaIdent(DEFAULT_DB, None))] + bitrix_table = self._table_schema(table_name) + columns_type = bitrix_table.get_columns_type() + query = sa.select(["*"]).select_from(sa.table(table_name)) + if bitrix_table.daterange_col_name is not None: + query = query.where( + sa.column(bitrix_table.daterange_col_name) == "2000-01-01", + ) + query_obj = DBAdapterQuery( + query=query, + ) + res = await self.execute(query_obj) + res_cols = res.raw_cursor_info["cols"] + + return RawSchemaInfo( + columns=tuple( + RawColumnInfo( + name=col["id"], + title=col["id"], + nullable=True, + native_type=CommonNativeType( + conn_type=self.conn_type, + name=columns_type.get(col["id"], "string"), + nullable=True, + ), + ) + for col in res_cols + ) + ) + + async def is_table_exists(self, table_ident: TableIdent) -> bool: + return table_ident in await self.get_tables(SchemaIdent(DEFAULT_DB, None)) + # db_name and schema_name from SchemaIdent is used in method get_tables. 
+ # Later method get_tables is used in get_parameter_combination, which sets + # db_name as 'default' and doesn't actually use schema_name + + async def test(self) -> None: + tables = await self.get_tables(SchemaIdent(DEFAULT_DB, None)) + table_name = tables[0].table_name + table = self._table_schema(table_name) + query_obj = DBAdapterQuery( + query=sa.select(["*"]) + .select_from(sa.table(table_name)) + .where( + sa.column(table.daterange_col_name) == "2000-01-01", + ), + ) + await self.execute(query_obj) + + def generate_body(self, dba_query: DBAdapterQuery) -> tuple[dict[str, Any], dict[str, str]]: + assert self.table is not None + body: dict[str, Any] = { + "key": self._target_dto.token, + "dateRange": {}, + } + + def date_converter(value: Any) -> Any: + if isinstance(value, (datetime.datetime, datetime.date)): + return value.isoformat() + return value + + def build_date_range(body: dict[str, Any], clause: sa.sql.expression.BinaryExpression) -> dict[str, Any]: + label: str = "" + if isinstance(clause.left, sa.sql.elements.ColumnClause): + label = clause.left.name + elif isinstance(clause.left, sa.sql.elements.Cast): + label = str(clause.left.anon_label) + col_name = label.split(".")[-1].replace('"', "").replace("`", "") + body["configParams"] = {"timeFilterColumn": col_name} + op = clause.operator.__name__ + if op == "eq": + body["dateRange"]["startDate"] = date_converter(clause.right.effective_value) + body["dateRange"]["endDate"] = date_converter(clause.right.effective_value) + elif op == "between_op": + body["dateRange"]["startDate"] = date_converter(clause.right.clauses[0].effective_value) + body["dateRange"]["endDate"] = date_converter(clause.right.clauses[1].effective_value) + # a bold assumption + elif op in ("gt", "ge"): + body["dateRange"]["startDate"] = date_converter(clause.right.effective_value) + elif op in ("lt", "le"): + body["dateRange"]["endDate"] = date_converter(clause.right.effective_value) + return body + + assert 
isinstance(dba_query.query, sa.sql.Select) + sa_whereclause = dba_query.query.whereclause + if sa_whereclause is not None: + if isinstance(sa_whereclause, sa.sql.expression.BooleanClauseList): + for clause in sa_whereclause.clauses: + body = build_date_range(body, clause) + # getting just first datetime filtration + break + elif isinstance(sa_whereclause, sa.sql.expression.BinaryExpression): + clause = sa_whereclause + body = build_date_range(body, clause) + + flatten_body = { + "key": body["key"], + "startDate": body["dateRange"].get("startDate"), + "endDate": body["dateRange"].get("endDate"), + "timeFilterColumn": body.get("configParams", dict()).get("timeFilterColumn"), + } + + return body, flatten_body diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/caches.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/caches.py new file mode 100644 index 000000000..058e876df --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/caches.py @@ -0,0 +1,67 @@ +from contextlib import asynccontextmanager +import gzip +import logging +from typing import ( + Any, + AsyncGenerator, + Optional, +) + +import attr +import redis.asyncio +from redis_cache_lock.types import TClientACM + +from dl_app_tools.profiling_base import GenericProfiler +from dl_constants.types import TJSONExt +from dl_core.aio.web_app_services.redis import RedisConnParams +from dl_core.data_processing.cache.primitives import LocalKeyRepresentation +from dl_core.serialization import ( + common_dumps, + common_loads, +) + + +LOGGER = logging.getLogger(__name__) + + +def build_local_key_rep(portal: str, table: str, body: dict) -> LocalKeyRepresentation: + local_key_rep = LocalKeyRepresentation() + local_key_rep = local_key_rep.extend(part_type="portal", part_content=portal) + local_key_rep = local_key_rep.extend(part_type="table", part_content=table) + local_key_rep = local_key_rep.extend(part_type="body", part_content=frozenset(body.items())) + + return 
local_key_rep + + +def make_simple_cli_acm(conn_params: RedisConnParams) -> TClientACM: + @asynccontextmanager + async def cli_acm(**_: Any) -> AsyncGenerator[redis.asyncio.Redis, None]: + rcli: redis.asyncio.Redis = redis.asyncio.Redis(**attr.asdict(conn_params)) + try: + yield rcli + finally: + await rcli.connection_pool.disconnect() + + return cli_acm + + +def get_redis_cli_acm_from_params(redis_conn_params: Optional[RedisConnParams]) -> Optional[TClientACM]: + if redis_conn_params is None: + return None + return make_simple_cli_acm(conn_params=redis_conn_params) + + +def bitrix_cache_serializer(data: TJSONExt) -> bytes: + with GenericProfiler("qcache-serialize"): + serialized_result_data = common_dumps(data) + with GenericProfiler("qcache-compress"): + result_data = gzip.compress(serialized_result_data) + return result_data + + +def bitrix_cache_deserializer(data_repr: bytes) -> TJSONExt: + with GenericProfiler("qcache-decompress"): + encoded_data = gzip.decompress(data_repr) + with GenericProfiler("qcache-deserialize"): + data = common_loads(encoded_data) + return data diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connection_executors.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connection_executors.py new file mode 100644 index 000000000..17c0785b4 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connection_executors.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Optional, + Sequence, +) + +import attr + +from dl_core.aio.web_app_services.redis import RedisConnParams +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + +from dl_connector_bitrix_gds.core.adapter import BitrixGDSDefaultAdapter +from dl_connector_bitrix_gds.core.target_dto import BitrixGDSConnTargetDTO + + +if TYPE_CHECKING: + from dl_connector_bitrix_gds.core.dto import BitrixGDSConnDTO + + +@attr.s(cmp=False, hash=False) +class 
BitrixGDSAsyncAdapterConnExecutor(DefaultSqlAlchemyConnExecutor[BitrixGDSDefaultAdapter]): + TARGET_ADAPTER_CLS = BitrixGDSDefaultAdapter + + _conn_dto: BitrixGDSConnDTO = attr.ib() + + async def _make_target_conn_dto_pool(self) -> Sequence[BitrixGDSConnTargetDTO]: + redis_conn_params: Optional[RedisConnParams] = None + caches_ttl: Optional[int] = None + + assert self._services_registry is not None + rqe_caches_setting = self._services_registry.get_rqe_caches_settings() + if rqe_caches_setting is not None: + assert rqe_caches_setting.redis_settings is not None + redis_conn_params = RedisConnParams( + host=rqe_caches_setting.redis_settings.HOSTS[0], + port=rqe_caches_setting.redis_settings.PORT, + db=rqe_caches_setting.redis_settings.DB, + password=rqe_caches_setting.redis_settings.PASSWORD, + ssl=rqe_caches_setting.redis_settings.SSL, + ) + caches_ttl = rqe_caches_setting.caches_ttl + + conn_params: Optional[dict] + if isinstance(redis_conn_params, RedisConnParams): + conn_params = attr.asdict(redis_conn_params) + else: + conn_params = None + return [ + BitrixGDSConnTargetDTO( + conn_id=self._conn_dto.conn_id, + pass_db_messages_to_user=self._conn_options.pass_db_messages_to_user, + pass_db_query_to_user=self._conn_options.pass_db_query_to_user, + portal=self._conn_dto.portal, + token=self._conn_dto.token, + max_execution_time=self._conn_options.max_execution_time, # type: ignore # TODO: fix + total_timeout=self._conn_options.total_timeout, # type: ignore # TODO: fix + connect_timeout=self._conn_options.connect_timeout, # type: ignore # TODO: fix + redis_conn_params=conn_params, + redis_caches_ttl=caches_ttl, + ) + ] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connector.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connector.py new file mode 100644 index 000000000..34730b74c --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/connector.py @@ -0,0 +1,43 @@ +from dl_core.connectors.base.connector 
import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) +from dl_core.data_source_spec.sql import StandardSQLDataSourceSpec +from dl_core.us_manager.storage_schemas.data_source_spec_base import SQLDataSourceSpecStorageSchema + +from dl_connector_bitrix_gds.core.adapter import BitrixGDSDefaultAdapter +from dl_connector_bitrix_gds.core.connection_executors import BitrixGDSAsyncAdapterConnExecutor +from dl_connector_bitrix_gds.core.constants import ( + BACKEND_TYPE_BITRIX_GDS, + CONNECTION_TYPE_BITRIX24, + SOURCE_TYPE_BITRIX_GDS, +) +from dl_connector_bitrix_gds.core.data_source import BitrixGDSDataSource +from dl_connector_bitrix_gds.core.storage_schemas.connection import BitrixGDSConnectionDataStorageSchema +from dl_connector_bitrix_gds.core.type_transformer import BitrixGDSTypeTransformer +from dl_connector_bitrix_gds.core.us_connection import BitrixGDSConnection + + +class BitrixGDSCoreConnectionDefinition(CoreConnectionDefinition): + conn_type = CONNECTION_TYPE_BITRIX24 + connection_cls = BitrixGDSConnection + us_storage_schema_cls = BitrixGDSConnectionDataStorageSchema + type_transformer_cls = BitrixGDSTypeTransformer + sync_conn_executor_cls = BitrixGDSAsyncAdapterConnExecutor + async_conn_executor_cls = BitrixGDSAsyncAdapterConnExecutor + dialect_string = "bi_bitrix" + + +class BitrixGDSCoreSourceDefinition(CoreSourceDefinition): + source_type = SOURCE_TYPE_BITRIX_GDS + source_cls = BitrixGDSDataSource + source_spec_cls = StandardSQLDataSourceSpec + us_storage_schema_cls = SQLDataSourceSpecStorageSchema + + +class BitrixGDSCoreConnector(CoreConnector): + backend_type = BACKEND_TYPE_BITRIX_GDS + connection_definitions = (BitrixGDSCoreConnectionDefinition,) + source_definitions = (BitrixGDSCoreSourceDefinition,) + rqe_adapter_classes = frozenset({BitrixGDSDefaultAdapter}) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/constants.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/constants.py new file mode 
100644 index 000000000..65ea6434e --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/constants.py @@ -0,0 +1,38 @@ +from enum import ( + Enum, + unique, +) + +from dl_constants.enums import ( + ConnectionType, + DataSourceType, + SourceBackendType, +) + + +BACKEND_TYPE_BITRIX_GDS = SourceBackendType.declare("BITRIX_GDS") +CONNECTION_TYPE_BITRIX24 = ConnectionType.declare("bitrix24") +SOURCE_TYPE_BITRIX_GDS = DataSourceType.declare("BITRIX_GDS") + +DEFAULT_DB = "default" + + +@unique +class BitrixGDSTableType(Enum): + crm_deal = "crm_deal" + crm_lead = "crm_lead" + crm_company = "crm_company" + crm_contact = "crm_contact" + crm_deal_stage_history = "crm_deal_stage_history" + crm_lead_status_history = "crm_lead_status_history" + socialnetwork_group = "socialnetwork_group" + telephony_call = "telephony_call" + crm_activity = "crm_activity" + crm_lead_uf = "crm_lead_uf" + crm_deal_uf = "crm_deal_uf" + crm_lead_product_row = "crm_lead_product_row" + crm_deal_product_row = "crm_deal_product_row" + crm_dynamic_items = "crm_dynamic_items" + user = "user" + crm_company_uf = "crm_company_uf" + crm_contact_uf = "crm_contact_uf" diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/data_source.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/data_source.py new file mode 100644 index 000000000..b8b02c330 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/data_source.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import ( + ClassVar, + Optional, +) + +from dl_constants.enums import ( + DataSourceType, + JoinType, +) +from dl_core.connection_models import ( + TableDefinition, + TableIdent, +) +from dl_core.data_source.sql import PseudoSQLDataSource + +from dl_connector_bitrix_gds.core.constants import ( + CONNECTION_TYPE_BITRIX24, + SOURCE_TYPE_BITRIX_GDS, +) + + +class BitrixGDSDataSource(PseudoSQLDataSource): + supported_join_types: ClassVar[frozenset[JoinType]] = 
frozenset() + conn_type = CONNECTION_TYPE_BITRIX24 + + @property + def db_version(self) -> Optional[str]: + return None + + @classmethod + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: + return source_type == SOURCE_TYPE_BITRIX_GDS + + def get_table_definition(self) -> TableDefinition: + assert self.table_name is not None + return TableIdent(db_name=self.db_name, schema_name=None, table_name=self.table_name) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/dto.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/dto.py new file mode 100644 index 000000000..435389dce --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/dto.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +import attr + +from dl_core.connection_models.dto_defs import ConnDTO +from dl_core.utils import secrepr + +from dl_connector_bitrix_gds.core.constants import CONNECTION_TYPE_BITRIX24 + + +@attr.s(frozen=True) +class BitrixGDSConnDTO(ConnDTO): + conn_type = CONNECTION_TYPE_BITRIX24 + + portal: str = attr.ib(kw_only=True) + token: str = attr.ib(kw_only=True, repr=secrepr) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/error_transformer.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/error_transformer.py new file mode 100644 index 000000000..380f7bdbe --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/error_transformer.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +import json + +import dl_core.connectors.base.error_transformer as error_transformer +from dl_core.connectors.base.error_transformer import DbErrorTransformer +from dl_core.connectors.base.error_transformer import ErrorTransformerRule as Rule +from dl_core.connectors.base.error_transformer import orig_exc_is +import dl_core.exc as exc + + +bitrix_error_transformer: DbErrorTransformer = error_transformer.make_default_transformer_with_custom_rules( + Rule( + 
when=orig_exc_is(orig_exc_cls=json.JSONDecodeError), + then_raise=exc.SourceResponseError, + ) +) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/storage_schemas/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/storage_schemas/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/storage_schemas/connection.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/storage_schemas/connection.py new file mode 100644 index 000000000..e1255f658 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/storage_schemas/connection.py @@ -0,0 +1,18 @@ +from marshmallow import fields as ma_fields + +from dl_core.us_manager.storage_schemas.connection import ( + CacheableConnectionDataSchemaMixin, + ConnectionBaseDataStorageSchema, +) + +from dl_connector_bitrix_gds.core.us_connection import BitrixGDSConnection + + +class BitrixGDSConnectionDataStorageSchema( + CacheableConnectionDataSchemaMixin, + ConnectionBaseDataStorageSchema[BitrixGDSConnection.DataModel], +): + TARGET_CLS = BitrixGDSConnection.DataModel + + portal = ma_fields.String(required=True, allow_none=False) + token = ma_fields.String(required=True, allow_none=False) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/tables.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/tables.py new file mode 100644 index 000000000..4e8063482 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/tables.py @@ -0,0 +1,620 @@ +from __future__ import annotations + +from typing import ( + Iterable, + Optional, +) + +import attr + +from dl_connector_bitrix_gds.core.constants import BitrixGDSTableType + + +CRM_DEAL = [ + {"name": "ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "CREATED_BY_ID", "type": "integer"}, + {"name": "CREATED_BY_NAME", "type": 
"string"}, + {"name": "CREATED_BY", "type": "string"}, + {"name": "MODIFY_BY_ID", "type": "integer"}, + {"name": "MODIFIED_BY_NAME", "type": "string"}, + {"name": "MODIFIED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_DEPARTMENT", "type": "string"}, + {"name": "OPENED", "type": "string"}, + {"name": "LEAD_ID", "type": "integer"}, + {"name": "COMPANY_ID", "type": "integer"}, + {"name": "COMPANY_NAME", "type": "string"}, + {"name": "COMPANY", "type": "string"}, + {"name": "CONTACT_ID", "type": "string"}, + {"name": "CONTACT_NAME", "type": "string"}, + {"name": "CONTACT", "type": "string"}, + {"name": "TITLE", "type": "string"}, + {"name": "CRM_PRODUCT", "type": "string"}, + {"name": "CRM_PRODUCT_COUNT", "type": "string"}, + {"name": "CATEGORY_ID", "type": "string"}, + {"name": "CATEGORY_NAME", "type": "string"}, + {"name": "CATEGORY", "type": "string"}, + {"name": "STAGE_ID", "type": "string"}, + {"name": "STAGE_NAME", "type": "string"}, + {"name": "STAGE", "type": "string"}, + {"name": "STAGE_SEMANTIC_ID", "type": "string"}, + {"name": "STAGE_SEMANTIC", "type": "string"}, + {"name": "IS_NEW", "type": "string"}, + {"name": "IS_RECURRING", "type": "string"}, + {"name": "IS_RETURN_CUSTOMER", "type": "string"}, + {"name": "CLOSED", "type": "string"}, + {"name": "TYPE_ID", "type": "string"}, + {"name": "OPPORTUNITY", "type": "string"}, + {"name": "IS_MANUAL_OPPORTUNITY", "type": "string"}, + {"name": "TAX_VALUE", "type": "string"}, + {"name": "CURRENCY_ID", "type": "string"}, + {"name": "OPPORTUNITY_ACCOUNT", "type": "string"}, + {"name": "TAX_VALUE_ACCOUNT", "type": "string"}, + {"name": "ACCOUNT_CURRENCY_ID", "type": "string"}, + {"name": "PROBABILITY", "type": "string"}, + {"name": "COMMENTS", "type": "string"}, + {"name": "BEGINDATE", "type": "datetime"}, + {"name": "CLOSEDATE", "type": "datetime"}, + {"name": 
"LOCATION_ID", "type": "string"}, + {"name": "SOURCE_ID", "type": "string"}, + {"name": "SOURCE_NAME", "type": "string"}, + {"name": "SOURCE", "type": "string"}, + {"name": "SOURCE_DESCRIPTION", "type": "string"}, + {"name": "ORIGINATOR_ID", "type": "string"}, + {"name": "ORIGIN_ID", "type": "string"}, + {"name": "ADDITIONAL_INFO", "type": "string"}, + {"name": "UTM_SOURCE", "type": "string"}, + {"name": "UTM_MEDIUM", "type": "string"}, + {"name": "UTM_CAMPAIGN", "type": "string"}, + {"name": "UTM_CONTENT", "type": "string"}, + {"name": "UTM_TERM", "type": "string"}, + {"name": "BANK_DETAIL_ID", "type": "string"}, +] + + +CRM_LEAD = [ + {"name": "ID", "type": "integer"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "CREATED_BY_ID", "type": "integer"}, + {"name": "CREATED_BY", "type": "string"}, + {"name": "MODIFY_BY_ID", "type": "integer"}, + {"name": "MODIFIED_BY_NAME", "type": "string"}, + {"name": "MODIFIED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_DEPARTMENT", "type": "string"}, + {"name": "OPENED", "type": "string"}, + {"name": "COMPANY_ID", "type": "integer"}, + {"name": "COMPANY_NAME", "type": "string"}, + {"name": "COMPANY", "type": "string"}, + {"name": "CONTACT_ID", "type": "string"}, + {"name": "CONTACT_NAME", "type": "string"}, + {"name": "CONTACT", "type": "string"}, + {"name": "STATUS_ID", "type": "string"}, + {"name": "STATUS_NAME", "type": "string"}, + {"name": "STATUS", "type": "string"}, + {"name": "STATUS_DESCRIPTION", "type": "string"}, + {"name": "STATUS_SEMANTIC_ID", "type": "string"}, + {"name": "STATUS_SEMANTIC", "type": "string"}, + {"name": "CRM_PRODUCT_ID", "type": "string"}, + {"name": "CRM_PRODUCT_NAME", "type": "string"}, + {"name": "CRM_PRODUCT", "type": "string"}, + {"name": "OPPORTUNITY", "type": "string"}, + {"name": 
"CURRENCY_ID", "type": "string"}, + {"name": "SOURCE_ID", "type": "string"}, + {"name": "SOURCE_NAME", "type": "string"}, + {"name": "SOURCE", "type": "string"}, + {"name": "SOURCE_DESCRIPTION", "type": "string"}, + {"name": "TITLE", "type": "string"}, + {"name": "FULL_NAME", "type": "string"}, + {"name": "NAME", "type": "string"}, + {"name": "LAST_NAME", "type": "string"}, + {"name": "SECOND_NAME", "type": "string"}, + {"name": "COMPANY_TITLE", "type": "string"}, + {"name": "POST", "type": "string"}, + {"name": "ADDRESS_1", "type": "string"}, + {"name": "ADDRESS_2", "type": "string"}, + {"name": "ADDRESS_CITY", "type": "string"}, + {"name": "ADDRESS_POSTAL_CODE", "type": "string"}, + {"name": "ADDRESS_REGION", "type": "string"}, + {"name": "ADDRESS_PROVINCE", "type": "string"}, + {"name": "ADDRESS_COUNTRY", "type": "string"}, + {"name": "ADDRESS_COUNTRY_CODE", "type": "string"}, + {"name": "COMMENTS", "type": "string"}, + {"name": "ORIGINATOR_ID", "type": "string"}, + {"name": "ORIGIN_ID", "type": "string"}, + {"name": "DATE_CLOSED", "type": "datetime"}, + {"name": "BIRTHDATE", "type": "datetime"}, + {"name": "HONORIFIC", "type": "string"}, + {"name": "UTM_SOURCE", "type": "string"}, + {"name": "UTM_MEDIUM", "type": "string"}, + {"name": "UTM_CAMPAIGN", "type": "string"}, + {"name": "UTM_CONTENT", "type": "string"}, + {"name": "UTM_TERM", "type": "string"}, + {"name": "PHONE", "type": "string"}, + {"name": "WEB", "type": "string"}, + {"name": "EMAIL", "type": "string"}, + {"name": "IM", "type": "string"}, +] + + +CRM_COMPANY = [ + {"name": "ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "CREATED_BY_ID", "type": "integer"}, + {"name": "CREATED_BY_NAME", "type": "string"}, + {"name": "CREATED_BY", "type": "string"}, + {"name": "MODIFY_BY_ID", "type": "integer"}, + {"name": "MODIFIED_BY_NAME", "type": "string"}, + {"name": "MODIFIED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_ID", 
"type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "OPENED", "type": "string"}, + {"name": "TITLE", "type": "string"}, + {"name": "ADDRESS_1", "type": "string"}, + {"name": "ADDRESS_2", "type": "string"}, + {"name": "ADDRESS_CITY", "type": "string"}, + {"name": "ADDRESS_POSTAL_CODE", "type": "string"}, + {"name": "ADDRESS_REGION", "type": "string"}, + {"name": "ADDRESS_PROVINCE", "type": "string"}, + {"name": "ADDRESS_COUNTRY", "type": "string"}, + {"name": "ADDRESS_COUNTRY_CODE", "type": "string"}, + {"name": "BANKING_DETAILS", "type": "string"}, + {"name": "COMMENTS", "type": "string"}, + {"name": "COMPANY_TYPE_ID", "type": "string"}, + {"name": "COMPANY_TYPE_NAME", "type": "string"}, + {"name": "COMPANY_TYPE", "type": "string"}, + {"name": "INDUSTRY_ID", "type": "string"}, + {"name": "INDUSTRY_NAME", "type": "string"}, + {"name": "INDUSTRY", "type": "string"}, + {"name": "REVENUE", "type": "float"}, + {"name": "CURRENCY_ID", "type": "string"}, + {"name": "EMPLOYEES", "type": "string"}, + {"name": "LEAD_ID", "type": "integer"}, + {"name": "ORIGINATOR_ID", "type": "string"}, + {"name": "ORIGIN_ID", "type": "string"}, + {"name": "ORIGIN_VERSION", "type": "string"}, + {"name": "IS_MY_COMPANY", "type": "string"}, + {"name": "UTM_SOURCE", "type": "string"}, + {"name": "UTM_MEDIUM", "type": "string"}, + {"name": "UTM_CAMPAIGN", "type": "string"}, + {"name": "UTM_CONTENT", "type": "string"}, + {"name": "UTM_TERM", "type": "string"}, + {"name": "PHONE", "type": "string"}, + {"name": "WEB", "type": "string"}, + {"name": "EMAIL", "type": "string"}, + {"name": "IM", "type": "string"}, +] + + +CRM_CONTACT = [ + {"name": "ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "CREATED_BY_ID", "type": "integer"}, + {"name": "CREATED_BY_NAME", "type": "string"}, + {"name": "CREATED_BY", "type": "string"}, + {"name": 
"MODIFY_BY_ID", "type": "integer"}, + {"name": "MODIFIED_BY_NAME", "type": "string"}, + {"name": "MODIFIED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "OPENED", "type": "string"}, + {"name": "COMPANY_ID", "type": "integer"}, + {"name": "SOURCE_ID", "type": "string"}, + {"name": "SOURCE_DESCRIPTION", "type": "string"}, + {"name": "NAME", "type": "string"}, + {"name": "LAST_NAME", "type": "string"}, + {"name": "SECOND_NAME", "type": "string"}, + {"name": "POST", "type": "string"}, + {"name": "ADDRESS_1", "type": "string"}, + {"name": "ADDRESS_2", "type": "string"}, + {"name": "ADDRESS_CITY", "type": "string"}, + {"name": "ADDRESS_POSTAL_CODE", "type": "string"}, + {"name": "ADDRESS_REGION", "type": "string"}, + {"name": "ADDRESS_PROVINCE", "type": "string"}, + {"name": "ADDRESS_COUNTRY", "type": "string"}, + {"name": "ADDRESS_COUNTRY_CODE", "type": "string"}, + {"name": "COMMENTS", "type": "string"}, + {"name": "LEAD_ID", "type": "integer"}, + {"name": "EXPORT", "type": "string"}, + {"name": "TYPE_ID", "type": "string"}, + {"name": "ORIGINATOR_ID", "type": "string"}, + {"name": "ORIGIN_ID", "type": "string"}, + {"name": "ORIGIN_VERSION", "type": "string"}, + {"name": "BIRTHDATE", "type": "date"}, + {"name": "HONORIFIC", "type": "string"}, + {"name": "FACE_ID", "type": "string"}, + {"name": "PHONE", "type": "string"}, + {"name": "WEB", "type": "string"}, + {"name": "EMAIL", "type": "string"}, + {"name": "IM", "type": "string"}, + {"name": "UTM_SOURCE", "type": "string"}, + {"name": "UTM_MEDIUM", "type": "string"}, + {"name": "UTM_CAMPAIGN", "type": "string"}, + {"name": "UTM_CONTENT", "type": "string"}, + {"name": "UTM_TERM", "type": "string"}, +] + + +CRM_DEAL_STAGE_HISTORY = [ + {"name": "ID", "type": "integer"}, + {"name": "TYPE_ID", "type": "integer"}, + {"name": "DEAL_ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": 
"datetime"}, + {"name": "START_DATE", "type": "string"}, + {"name": "END_DATE", "type": "string"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_DEPARTMENT", "type": "string"}, + {"name": "STAGE_SEMANTIC_ID", "type": "string"}, + {"name": "STAGE_SEMANTIC", "type": "string"}, + {"name": "STAGE_ID", "type": "string"}, + {"name": "STAGE_NAME", "type": "string"}, + {"name": "STAGE", "type": "string"}, +] + + +CRM_LEAD_STATUS_HISTORY = [ + {"name": "ID", "type": "integer"}, + {"name": "TYPE_ID", "type": "integer"}, + {"name": "LEAD_ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_DEPARTMENT", "type": "string"}, + {"name": "STATUS_SEMANTIC_ID", "type": "string"}, + {"name": "STATUS_SEMANTIC", "type": "string"}, + {"name": "STATUS_ID", "type": "string"}, + {"name": "STATUS_NAME", "type": "string"}, + {"name": "STATUS", "type": "string"}, +] + + +SOCIALNETWORK_GROUP = [ + {"name": "ID", "type": "integer"}, + {"name": "SITE_ID", "type": "string"}, + {"name": "NAME", "type": "string"}, + {"name": "DESCRIPTION", "type": "string"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "OPENED", "type": "string"}, + {"name": "SUBJECT", "type": "string"}, + {"name": "OWNER_ID", "type": "integer"}, + {"name": "OWNER_NAME", "type": "string"}, + {"name": "OWNER", "type": "string"}, + {"name": "KEYWORDS", "type": "string"}, + {"name": "NUMBER_OF_MEMBERS", "type": "integer"}, + {"name": "DATE_ACTIVITY", "type": "datetime"}, + {"name": "CLOSED", "type": "string"}, + {"name": "PROJECT", "type": "string"}, +] + + +TELEPHONY_CALL = [ + {"name": "CALL_ID", "type": "string"}, + {"name": "PORTAL_USER_ID", "type": "integer"}, + 
{"name": "PORTAL_USER", "type": "string"}, + {"name": "PORTAL_USER_DEPARTMENT", "type": "string"}, + {"name": "PORTAL_NUMBER", "type": "string"}, + {"name": "PHONE_NUMBER", "type": "string"}, + {"name": "CALL_TYPE", "type": "integer"}, + {"name": "CALL_DURATION", "type": "integer"}, + {"name": "CALL_START_TIME", "type": "datetime"}, + {"name": "CALL_STATUS_CODE", "type": "string"}, + {"name": "CALL_STATUS_REASON", "type": "string"}, + {"name": "RECORD_FILE_ID", "type": "integer"}, + {"name": "CALL_VOTE", "type": "integer"}, + {"name": "COST", "type": "float"}, + {"name": "COST_CURRENCY", "type": "string"}, + {"name": "CRM_ENTITY_TYPE", "type": "string"}, + {"name": "CRM_ENTITY_ID", "type": "integer"}, + {"name": "CRM_ACTIVITY_ID", "type": "integer"}, + {"name": "REST_APP_ID", "type": "integer"}, + {"name": "REST_APP_NAME", "type": "string"}, + {"name": "TRANSCRIPT_PENDING", "type": "string"}, + {"name": "TRANSCRIPT_ID", "type": "integer"}, + {"name": "REDIAL_ATTEMPT", "type": "integer"}, + {"name": "COMMENT", "type": "string"}, +] + + +CRM_ACTIVITY = [ + {"name": "ID", "type": "integer"}, + {"name": "TYPE_ID", "type": "integer"}, + {"name": "TYPE_NAME", "type": "string"}, + {"name": "OWNER_ID", "type": "integer"}, + {"name": "OWNER_TYPE_ID", "type": "integer"}, + {"name": "OWNER_TYPE_NAME", "type": "string"}, + {"name": "ASSOCIATED_ENTITY_ID", "type": "integer"}, + {"name": "SUBJECT", "type": "string"}, + {"name": "COMPLETED", "type": "string"}, + {"name": "STATUS_ID", "type": "integer"}, + {"name": "STATUS_NAME", "type": "string"}, + {"name": "RESPONSIBLE_ID", "type": "integer"}, + {"name": "RESPONSIBLE_NAME", "type": "string"}, + {"name": "RESPONSIBLE", "type": "string"}, + {"name": "PRIORITY_ID", "type": "integer"}, + {"name": "PRIORITY_NAME", "type": "string"}, + {"name": "DESCRIPTION", "type": "string"}, + {"name": "DESCRIPTION_TYPE_ID", "type": "integer"}, + {"name": "DESCRIPTION_TYPE_NAME", "type": "string"}, + {"name": "DIRECTION_ID", "type": "integer"}, + 
{"name": "DIRECTION_NAME", "type": "string"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "START_TIME", "type": "datetime"}, + {"name": "END_TIME", "type": "datetime"}, + {"name": "DEADLINE", "type": "datetime"}, + {"name": "ORIGINATOR_ID", "type": "string"}, + {"name": "ORIGIN_ID", "type": "string"}, + {"name": "AUTHOR_ID", "type": "integer"}, + {"name": "AUTHOR_NAME", "type": "string"}, + {"name": "AUTHOR", "type": "string"}, + {"name": "EDITOR_ID", "type": "integer"}, + {"name": "EDITOR_NAME", "type": "string"}, + {"name": "EDITOR", "type": "string"}, +] + + +CRM_LEAD_UF = [ + {"name": "LEAD_ID", "type": "integer"}, + {"name": "DATE_MODIFY", "type": "datetime"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "DATE_CLOSED", "type": "datetime"}, +] + + +CRM_DEAL_UF = [ + {"name": "DEAL_ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, + {"name": "CLOSEDATE", "type": "datetime"}, +] + + +CRM_LEAD_PRODUCT_ROW = [ + {"name": "ID", "type": "integer"}, + {"name": "LEAD_ID", "type": "integer"}, + {"name": "LEAD_DATE_MODIFY", "type": "datetime"}, + {"name": "LEAD_DATE_CREATE", "type": "datetime"}, + {"name": "LEAD_DATE_CLOSED", "type": "datetime"}, + {"name": "PRODUCT", "type": "string"}, + {"name": "PRODUCT_ID", "type": "integer"}, + {"name": "PRODUCT_NAME", "type": "string"}, + {"name": "PRICE", "type": "string"}, + {"name": "PRICE_EXCLUSIVE", "type": "string"}, + {"name": "PRICE_NETTO", "type": "string"}, + {"name": "PRICE_BRUTTO", "type": "string"}, + {"name": "QUANTITY", "type": "string"}, + {"name": "DISCOUNT_TYPE", "type": "string"}, + {"name": "DISCOUNT_TYPE_ID", "type": "integer"}, + {"name": "DISCOUNT_TYPE_NAME", "type": "string"}, + {"name": "DISCOUNT_RATE", "type": "string"}, + {"name": "DISCOUNT_SUM", "type": "string"}, + {"name": "TAX_RATE", "type": "string"}, + {"name": "TAX_INCLUDED", "type": "string"}, + {"name": "CUSTOMIZED", "type": "string"}, + {"name": 
"MEASURE", "type": "string"}, + {"name": "MEASURE_CODE", "type": "integer"}, + {"name": "MEASURE_NAME", "type": "string"}, + {"name": "SORT", "type": "integer"}, +] + + +CRM_DEAL_PRODUCT_ROW = [ + {"name": "ID", "type": "integer"}, + {"name": "DEAL_ID", "type": "integer"}, + {"name": "DEAL_DATE_CREATE", "type": "datetime"}, + {"name": "DEAL_CLOSEDATE", "type": "datetime"}, + {"name": "PRODUCT", "type": "string"}, + {"name": "PRODUCT_ID", "type": "integer"}, + {"name": "PRODUCT_NAME", "type": "string"}, + {"name": "PRICE", "type": "string"}, + {"name": "PRICE_EXCLUSIVE", "type": "string"}, + {"name": "PRICE_NETTO", "type": "string"}, + {"name": "PRICE_BRUTTO", "type": "string"}, + {"name": "QUANTITY", "type": "string"}, + {"name": "DISCOUNT_TYPE", "type": "string"}, + {"name": "DISCOUNT_TYPE_ID", "type": "integer"}, + {"name": "DISCOUNT_TYPE_NAME", "type": "string"}, + {"name": "DISCOUNT_RATE", "type": "string"}, + {"name": "DISCOUNT_SUM", "type": "string"}, + {"name": "TAX_RATE", "type": "string"}, + {"name": "TAX_INCLUDED", "type": "string"}, + {"name": "CUSTOMIZED", "type": "string"}, + {"name": "MEASURE", "type": "string"}, + {"name": "MEASURE_CODE", "type": "integer"}, + {"name": "MEASURE_NAME", "type": "string"}, + {"name": "SORT", "type": "integer"}, +] + + +CRM_DYNAMIC_ITEMS = [ + {"name": "ID", "type": "integer"}, + {"name": "XML_ID", "type": "string"}, + {"name": "TITLE", "type": "string"}, + {"name": "CREATED_BY", "type": "string"}, + {"name": "CREATED_BY_ID", "type": "integer"}, + {"name": "CREATED_BY_NAME", "type": "string"}, + {"name": "UPDATED_BY", "type": "string"}, + {"name": "UPDATED_BY_ID", "type": "integer"}, + {"name": "UPDATED_BY_NAME", "type": "string"}, + {"name": "MOVED_BY", "type": "string"}, + {"name": "MOVED_BY_ID", "type": "integer"}, + {"name": "MOVED_BY_NAME", "type": "string"}, + {"name": "CREATED_TIME", "type": "datetime"}, + {"name": "UPDATED_TIME", "type": "datetime"}, + {"name": "MOVED_TIME", "type": "datetime"}, + {"name": 
"CATEGORY", "type": "string"}, + {"name": "CATEGORY_ID", "type": "integer"}, + {"name": "CATEGORY_NAME", "type": "string"}, + {"name": "OPENED", "type": "string"}, + {"name": "STAGE", "type": "string"}, + {"name": "STAGE_ID", "type": "string"}, + {"name": "STAGE_NAME", "type": "string"}, + {"name": "PREVIOUS_STAGE_ID", "type": "string"}, + {"name": "BEGINDATE", "type": "datetime"}, + {"name": "CLOSEDATE", "type": "datetime"}, + {"name": "COMPANY", "type": "string"}, + {"name": "COMPANY_ID", "type": "integer"}, + {"name": "COMPANY_NAME", "type": "string"}, + {"name": "CONTACT", "type": "string"}, + {"name": "CONTACT_ID", "type": "integer"}, + {"name": "CONTACT_NAME", "type": "string"}, + {"name": "OPPORTUNITY", "type": "string"}, + {"name": "IS_MANUAL_OPPORTUNITY", "type": "string"}, + {"name": "TAX_VALUE", "type": "string"}, + {"name": "CURRENCY_ID", "type": "string"}, + {"name": "OPPORTUNITY_ACCOUNT", "type": "string"}, + {"name": "TAX_VALUE_ACCOUNT", "type": "string"}, + {"name": "ACCOUNT_CURRENCY_ID", "type": "string"}, + {"name": "MYCOMPANY", "type": "string"}, + {"name": "MYCOMPANY_ID", "type": "integer"}, + {"name": "MYCOMPANY_NAME", "type": "string"}, + {"name": "SOURCE", "type": "string"}, + {"name": "SOURCE_ID", "type": "string"}, + {"name": "SOURCE_NAME", "type": "string"}, + {"name": "SOURCE_DESCRIPTION", "type": "string"}, + {"name": "ASSIGNED_BY", "type": "string"}, + {"name": "ASSIGNED_BY_ID", "type": "integer"}, + {"name": "ASSIGNED_BY_NAME", "type": "string"}, + {"name": "WEBFORM_ID", "type": "integer"}, +] + +USER = [ + {"name": "ID", "type": "integer"}, + {"name": "ACTIVE", "type": "string"}, + {"name": "NAME", "type": "string"}, + {"name": "DEPARTMENT", "type": "string"}, +] + +CRM_COMPANY_UF = [ + {"name": "COMPANY_ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, +] + +CRM_CONTACT_UF = [ + {"name": "CONTACT_ID", "type": "integer"}, + {"name": "DATE_CREATE", "type": "datetime"}, +] + + +@attr.s +class BitrixGDSColumn: + 
name: str = attr.ib() + type: str = attr.ib() + + +@attr.s +class BitrixGDSTable: + type: BitrixGDSTableType = attr.ib() + schema: Iterable[BitrixGDSColumn] = attr.ib() + daterange_col_name: Optional[str] = attr.ib(default=None) + + def get_columns(self) -> list[str]: + return [column.name for column in self.schema] + + def get_columns_type(self) -> dict[str, str]: + return {column.name: column.type for column in self.schema} + + +BITRIX_TABLES_MAP = { + "crm_deal": BitrixGDSTable( + type=BitrixGDSTableType.crm_deal, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_DEAL], + ), + "crm_lead": BitrixGDSTable( + type=BitrixGDSTableType.crm_lead, + daterange_col_name="DATE_MODIFY", + schema=[BitrixGDSColumn(**column) for column in CRM_LEAD], + ), + "crm_company": BitrixGDSTable( + type=BitrixGDSTableType.crm_company, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_COMPANY], + ), + "crm_contact": BitrixGDSTable( + type=BitrixGDSTableType.crm_contact, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_CONTACT], + ), + "crm_deal_stage_history": BitrixGDSTable( + type=BitrixGDSTableType.crm_deal_stage_history, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_DEAL_STAGE_HISTORY], + ), + "crm_lead_status_history": BitrixGDSTable( + type=BitrixGDSTableType.crm_lead_status_history, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_LEAD_STATUS_HISTORY], + ), + "socialnetwork_group": BitrixGDSTable( + type=BitrixGDSTableType.socialnetwork_group, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in SOCIALNETWORK_GROUP], + ), + "telephony_call": BitrixGDSTable( + type=BitrixGDSTableType.telephony_call, + daterange_col_name="CALL_START_TIME", + schema=[BitrixGDSColumn(**column) for column in TELEPHONY_CALL], + ), + "crm_activity": BitrixGDSTable( + 
type=BitrixGDSTableType.crm_activity, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_ACTIVITY], + ), + "crm_lead_uf": BitrixGDSTable( + type=BitrixGDSTableType.crm_lead_uf, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_LEAD_UF], + ), + "crm_deal_uf": BitrixGDSTable( + type=BitrixGDSTableType.crm_deal_uf, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_DEAL_UF], + ), + "crm_lead_product_row": BitrixGDSTable( + type=BitrixGDSTableType.crm_lead_product_row, + daterange_col_name="LEAD_DATE_MODIFY", + schema=[BitrixGDSColumn(**column) for column in CRM_LEAD_PRODUCT_ROW], + ), + "crm_deal_product_row": BitrixGDSTable( + type=BitrixGDSTableType.crm_deal_product_row, + daterange_col_name="DEAL_DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_DEAL_PRODUCT_ROW], + ), + "user": BitrixGDSTable( + type=BitrixGDSTableType.user, + schema=[BitrixGDSColumn(**column) for column in USER], + ), + "crm_company_uf": BitrixGDSTable( + type=BitrixGDSTableType.crm_company_uf, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_COMPANY_UF], + ), + "crm_contact_uf": BitrixGDSTable( + type=BitrixGDSTableType.crm_contact_uf, + daterange_col_name="DATE_CREATE", + schema=[BitrixGDSColumn(**column) for column in CRM_CONTACT_UF], + ), +} + +CRM_DYNAMIC_ITEMS_TABLE = BitrixGDSTable( + type=BitrixGDSTableType.crm_dynamic_items, + daterange_col_name="CREATED_TIME", + schema=[BitrixGDSColumn(**column) for column in CRM_DYNAMIC_ITEMS], +) + +SMART_PROCESS_TABLE_PREFIX = "crm_dynamic_items_" diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/target_dto.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/target_dto.py new file mode 100644 index 000000000..f63c65308 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/target_dto.py @@ -0,0 +1,30 @@ +from typing import 
Optional + +import attr + +from dl_core.connection_executors.models.connection_target_dto_base import ConnTargetDTO +from dl_core.utils import secrepr + + +def hide_pass(value: Optional[dict]) -> str: + if value is None: + return repr(value) + if not value: + return repr(value) + return repr({k: v for k, v in value.items() if k != "password"}) + + +@attr.s(frozen=True) +class BitrixGDSConnTargetDTO(ConnTargetDTO): + portal: str = attr.ib(kw_only=True) + token: str = attr.ib(kw_only=True, repr=secrepr) + + max_execution_time: Optional[int] = attr.ib() + connect_timeout: Optional[int] = attr.ib() + total_timeout: Optional[int] = attr.ib() + + redis_conn_params: Optional[dict] = attr.ib(repr=hide_pass) + redis_caches_ttl: Optional[int] = attr.ib() + + def get_effective_host(self) -> Optional[str]: + return None # Not Applicable diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/type_transformer.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/type_transformer.py new file mode 100644 index 000000000..6b87c2992 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/type_transformer.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from dl_constants.enums import UserDataType +from dl_core.db.conversion_base import ( + TypeTransformer, + make_native_type, +) + +from dl_connector_bitrix_gds.core.constants import CONNECTION_TYPE_BITRIX24 + + +class BitrixGDSTypeTransformer(TypeTransformer): + conn_type = CONNECTION_TYPE_BITRIX24 + native_to_user_map = { + make_native_type(CONNECTION_TYPE_BITRIX24, "integer"): UserDataType.integer, + make_native_type(CONNECTION_TYPE_BITRIX24, "float"): UserDataType.float, + make_native_type(CONNECTION_TYPE_BITRIX24, "string"): UserDataType.string, + make_native_type(CONNECTION_TYPE_BITRIX24, "date"): UserDataType.date, + make_native_type(CONNECTION_TYPE_BITRIX24, "datetime"): UserDataType.genericdatetime, + } + user_to_native_map = dict( + [(bi_type, native_type) for native_type, 
bi_type in native_to_user_map.items()] + + [ + (UserDataType.datetime, make_native_type(CONNECTION_TYPE_BITRIX24, "datetime")), + ] + ) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py new file mode 100644 index 000000000..7e3c2d236 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/core/us_connection.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import ( + Callable, + ClassVar, + Optional, +) + +import attr + +from dl_core.base_models import ConnCacheableDataModelMixin +from dl_core.connection_executors.sync_base import SyncConnExecutorBase +from dl_core.connection_models.conn_options import ConnectOptions +from dl_core.us_connection_base import ( + ConnectionBase, + DataSourceTemplate, + ExecutorBasedMixin, +) +from dl_core.utils import secrepr +from dl_utils.utils import DataKey + +from dl_connector_bitrix_gds.core.constants import ( + DEFAULT_DB, + SOURCE_TYPE_BITRIX_GDS, +) +from dl_connector_bitrix_gds.core.dto import BitrixGDSConnDTO + + +@attr.s(frozen=True, hash=True) +class BitrixGDSConnectOptions(ConnectOptions): + max_execution_time: Optional[int] = attr.ib(default=None) + connect_timeout: Optional[int] = attr.ib(default=None) + total_timeout: Optional[int] = attr.ib(default=None) + + +class BitrixGDSConnection(ExecutorBasedMixin, ConnectionBase): + allow_cache: ClassVar[bool] = True + + @attr.s(kw_only=True) + class DataModel(ConnCacheableDataModelMixin, ConnectionBase.DataModel): + portal: str = attr.ib() + token: str = attr.ib(repr=secrepr) + + @classmethod + def get_secret_keys(cls) -> set[DataKey]: + return { + *super().get_secret_keys(), + DataKey(parts=("token",)), + } + + @property + def cache_ttl_sec_override(self) -> Optional[int]: + return self.data.cache_ttl_sec + + def get_conn_options(self) -> BitrixGDSConnectOptions: + return 
super().get_conn_options().to_subclass(BitrixGDSConnectOptions) + + def get_conn_dto(self) -> BitrixGDSConnDTO: + return BitrixGDSConnDTO( + conn_id=self.uuid, + portal=self.data.portal, + token=self.data.token, + ) + + def get_parameter_combinations( + self, + conn_executor_factory: Callable[[ConnectionBase], SyncConnExecutorBase], + ) -> list[dict]: + return [ + dict(db_name=DEFAULT_DB, table_name=item.table_name) + for item in self.get_tables( + conn_executor_factory=conn_executor_factory, + db_name=DEFAULT_DB, + schema_name=None, + ) + ] + + def get_data_source_templates( + self, + conn_executor_factory: Callable[[ConnectionBase], SyncConnExecutorBase], + ) -> list[DataSourceTemplate]: + return [ + DataSourceTemplate( + title=parameters["table_name"], + group=[], + source_type=SOURCE_TYPE_BITRIX_GDS, + connection_id=self.uuid, # type: ignore # TODO: fix + parameters=parameters, + ) + for parameters in self.get_parameter_combinations(conn_executor_factory=conn_executor_factory) + ] + + @property + def allow_public_usage(self) -> bool: + return True diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/connector.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/connector.py new file mode 100644 index 000000000..f571e8020 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/connector.py @@ -0,0 +1,13 @@ +from dl_formula.connectors.base.connector import FormulaConnector +from dl_sqlalchemy_bitrix.base import BitrixDialect as SABitrixDialect + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as BitrixDialectNS +from dl_connector_bitrix_gds.formula.definitions.all import DEFINITIONS + + +class BitrixGDSFormulaConnector(FormulaConnector): + dialect_ns_cls = BitrixDialectNS + dialects = 
BitrixDialectNS.BITRIX + default_dialect = BitrixDialectNS.BITRIX + op_definitions = DEFINITIONS + sa_dialect = SABitrixDialect() diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/constants.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/constants.py new file mode 100644 index 000000000..e07f1d80d --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/constants.py @@ -0,0 +1,12 @@ +from dl_formula.core.dialect import ( + DialectName, + DialectNamespace, + simple_combo, +) + + +DIALECT_NAME_BITRIX = DialectName.declare("BITRIX") + + +class BitrixDialect(DialectNamespace): + BITRIX = simple_combo(name=DIALECT_NAME_BITRIX) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/all.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/all.py new file mode 100644 index 000000000..7ff8c9a1b --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/all.py @@ -0,0 +1,16 @@ +from dl_connector_bitrix_gds.formula.definitions.functions_datetime import DEFINITIONS_DATETIME +from dl_connector_bitrix_gds.formula.definitions.functions_markup import DEFINITIONS_MARKUP +from dl_connector_bitrix_gds.formula.definitions.functions_string import DEFINITIONS_STRING +from dl_connector_bitrix_gds.formula.definitions.functions_type import DEFINITIONS_TYPE +from dl_connector_bitrix_gds.formula.definitions.operators_binary import DEFINITIONS_BINARY +from dl_connector_bitrix_gds.formula.definitions.operators_ternary import DEFINITIONS_TERNARY + + +DEFINITIONS = [ + *DEFINITIONS_DATETIME, + *DEFINITIONS_MARKUP, + *DEFINITIONS_STRING, + *DEFINITIONS_TYPE, + *DEFINITIONS_BINARY, + *DEFINITIONS_TERNARY, +] diff --git 
a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_datetime.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_datetime.py new file mode 100644 index 000000000..260c473cd --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_datetime.py @@ -0,0 +1,11 @@ +import dl_formula.definitions.functions_datetime as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_DATETIME = [ + # dateadd + base.FuncDateadd1.for_dialect(D.BITRIX), + base.FuncDateadd2Unit.for_dialect(D.BITRIX), + base.FuncDateadd2Number.for_dialect(D.BITRIX), +] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_markup.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_markup.py new file mode 100644 index 000000000..2afc93268 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_markup.py @@ -0,0 +1,10 @@ +import dl_formula.definitions.functions_markup as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_MARKUP = [ + # __str + base.FuncInternalStrConst.for_dialect(D.BITRIX), + base.FuncInternalStr.for_dialect(D.BITRIX), +] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_string.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_string.py new file mode 100644 index 000000000..8a12c81cc --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_string.py @@ -0,0 +1,9 @@ +import dl_formula.definitions.functions_string as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_STRING = [ + # concat + base.ConcatMultiStrConst.for_dialect(D.BITRIX), +] diff --git 
a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_type.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_type.py new file mode 100644 index 000000000..1e50819fd --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/functions_type.py @@ -0,0 +1,18 @@ +import dl_formula.definitions.functions_type as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_TYPE = [ + # date + base.FuncDate1FromDatetime.for_dialect(D.BITRIX), + base.FuncDate1FromString.for_dialect(D.BITRIX), + # datetime + base.FuncDatetime1FromDatetime.for_dialect(D.BITRIX), + base.FuncDatetime1FromString.for_dialect(D.BITRIX), + # datetimetz + base.FuncDatetimeTZConst.for_dialect(D.BITRIX), + # genericdatetime + base.FuncGenericDatetime1FromDatetime.for_dialect(D.BITRIX), + base.FuncGenericDatetime1FromString.for_dialect(D.BITRIX), +] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_binary.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_binary.py new file mode 100644 index 000000000..45ee99341 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_binary.py @@ -0,0 +1,25 @@ +import dl_formula.definitions.operators_binary as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_BINARY = [ + # != + base.BinaryNotEqual.for_dialect(D.BITRIX), + # < + base.BinaryLessThan.for_dialect(D.BITRIX), + # <= + base.BinaryLessThanOrEqual.for_dialect(D.BITRIX), + # == + base.BinaryEqual.for_dialect(D.BITRIX), + # > + base.BinaryGreaterThan.for_dialect(D.BITRIX), + # >= + base.BinaryGreaterThanOrEqual.for_dialect(D.BITRIX), + # _!= + base.BinaryNotEqualInternal.for_dialect(D.BITRIX), + # _== + base.BinaryEqualInternal.for_dialect(D.BITRIX), + # _dneq + 
base.BinaryEqualDenullified.for_dialect(D.BITRIX), +] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_ternary.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_ternary.py new file mode 100644 index 000000000..7cdf906ff --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/formula/definitions/operators_ternary.py @@ -0,0 +1,9 @@ +import dl_formula.definitions.operators_ternary as base + +from dl_connector_bitrix_gds.formula.constants import BitrixDialect as D + + +DEFINITIONS_TERNARY = [ + # between + base.TernaryBetween.for_dialect(D.BITRIX), +] diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.mo b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.mo new file mode 100644 index 000000000..dc811010b Binary files /dev/null and b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.mo differ diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.po b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.po new file mode 100644 index 000000000..4cd451992 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/en/LC_MESSAGES/dl_connector_bitrix_gds.po @@ -0,0 +1,19 @@ +# Copyright (c) 2023 YANDEX LLC +# This file is distributed under the same license as the DataLens package. 
+msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: datalens-opensource@yandex-team.ru\n" +"POT-Creation-Date: 2023-09-22 08:05+0000\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "label_connector-bitrix" +msgstr "Bitrix24" + +msgid "label_token" +msgstr "Token" + +msgid "label_portal" +msgstr "Portal" diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.mo b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.mo new file mode 100644 index 000000000..3e2d73a86 Binary files /dev/null and b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.mo differ diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.po b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.po new file mode 100644 index 000000000..bd7c0db6d --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/locales/ru/LC_MESSAGES/dl_connector_bitrix_gds.po @@ -0,0 +1,19 @@ +# Copyright (c) 2023 YANDEX LLC +# This file is distributed under the same license as the DataLens package. 
+msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: datalens-opensource@yandex-team.ru\n" +"POT-Creation-Date: 2023-09-22 08:05+0000\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "label_connector-bitrix" +msgstr "Битрикс24" + +msgid "label_token" +msgstr "Токен" + +msgid "label_portal" +msgstr "Портал" diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/py.typed b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/conftest.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/conftest.py new file mode 100644 index 000000000..24e2d3dac --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/base.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/base.py new file mode 100644 index 000000000..399f84924 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/base.py @@ -0,0 +1,112 @@ +import pytest + +from 
dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_api_lib_testing.connection_base import ConnectionTestBase +from dl_api_lib_testing.data_api_base import ( + DataApiTestParams, + StandardizedDataApiTestBase, +) +from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_core_testing.database import ( + CoreDbConfig, + Db, +) +from dl_core_testing.engine_wrapper import TestingEngineWrapper + +from dl_connector_bitrix_gds.core.constants import ( + CONNECTION_TYPE_BITRIX24, + SOURCE_TYPE_BITRIX_GDS, +) +from dl_connector_bitrix_gds_tests.ext.config import ( + API_TEST_CONFIG, + BITRIX_PORTALS, + DB_NAME, + SMART_TABLE_NAME, + TABLE_NAME, +) + + +class BitrixConnectionTestBase(ConnectionTestBase): + conn_type = CONNECTION_TYPE_BITRIX24 + + @pytest.fixture(scope="class") + def db_url(self) -> str: + return "" + + @pytest.fixture(scope="class") + def db(self, db_config: CoreDbConfig) -> Db: + engine_wrapper = TestingEngineWrapper(config=db_config.engine_config) + return Db(config=db_config, engine_wrapper=engine_wrapper) + + @pytest.fixture(scope="class") + def bi_test_config(self) -> ApiTestEnvironmentConfiguration: + return API_TEST_CONFIG + + @pytest.fixture(scope="class") + def connection_params(self, bitrix_token: str) -> dict: + return dict( + portal=BITRIX_PORTALS["default"], + token=bitrix_token, + ) + + +class BitrixDatalensConnectionTestBase(BitrixConnectionTestBase): + @pytest.fixture(scope="class") + def connection_params(self, bitrix_datalens_token: str) -> dict: + return dict( + portal=BITRIX_PORTALS["datalens"], + token=bitrix_datalens_token, + ) + + +class BitrixInvalidConnectionTestBase(BitrixConnectionTestBase): + @pytest.fixture(scope="class") + def connection_params(self, bitrix_token: str) -> dict: + return dict( + portal=BITRIX_PORTALS["invalid"], + token=bitrix_token, + ) + + +class BitrixDatasetTestBase(BitrixConnectionTestBase, DatasetTestBase): + @pytest.fixture(scope="class") + def dataset_params(self) 
-> dict: + return dict( + source_type=SOURCE_TYPE_BITRIX_GDS.name, + title=TABLE_NAME, + parameters=dict( + db_name=DB_NAME, + table_name=TABLE_NAME, + ), + ) + + +class BitrixSmartTablesDatasetTestBase(BitrixDatalensConnectionTestBase, DatasetTestBase): + @pytest.fixture(scope="class") + def dataset_params(self) -> dict: + return dict( + source_type=SOURCE_TYPE_BITRIX_GDS.name, + title=SMART_TABLE_NAME, + parameters=dict( + db_name=DB_NAME, + table_name=SMART_TABLE_NAME, + ), + ) + + +class BitrixDataApiTestBase(BitrixDatasetTestBase, StandardizedDataApiTestBase): + mutation_caches_on = False + + @pytest.fixture(scope="class") + def data_api_test_params(self) -> DataApiTestParams: + return DataApiTestParams( + two_dims=("ASSIGNED_BY_NAME", "ID"), + summable_field="ID", + range_field="ID", + distinct_field="ASSIGNED_BY_NAME", + date_field="DATE_CREATE", + ) + + +class BitrixSmartTablesDataApiTestBase(BitrixSmartTablesDatasetTestBase, StandardizedDataApiTestBase): + mutation_caches_on = False diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_connection.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_connection.py new file mode 100644 index 000000000..79945b683 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_connection.py @@ -0,0 +1,55 @@ +import json + +from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase +from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + +from dl_connector_bitrix_gds_tests.ext.api.base import ( + BitrixConnectionTestBase, + BitrixDatalensConnectionTestBase, + BitrixInvalidConnectionTestBase, +) +from dl_connector_bitrix_gds_tests.ext.config import BITRIX_PORTALS + + +class TestBitrixConnection(BitrixConnectionTestBase, DefaultConnectorConnectionTestSuite): + def test_portal_override(self, control_api_sync_client: SyncHttpClientBase, saved_connection_id: str) -> None: + resp = 
control_api_sync_client.get( + url=f"/api/v1/connections/{saved_connection_id}", + ) + assert resp.status_code == 200, resp.json + assert resp.json["portal"] == BITRIX_PORTALS["default"], resp.json + + new_portal = BITRIX_PORTALS["datalens"] + resp = control_api_sync_client.put( + url=f"/api/v1/connections/{saved_connection_id}", + content_type="application/json", + data=json.dumps({"portal": new_portal}), + ) + assert resp.status_code == 200, resp.json + + resp = control_api_sync_client.get( + url=f"/api/v1/connections/{saved_connection_id}", + ) + assert resp.status_code == 200, resp.json + assert resp.json["portal"] == BITRIX_PORTALS["datalens"], resp.json + + resp = control_api_sync_client.put( + url=f"/api/v1/connections/{saved_connection_id}", + content_type="application/json", + data=json.dumps({"portal": BITRIX_PORTALS["default"]}), + ) + assert resp.status_code == 200, resp.json + + +class TestBitrixDatalensConnection(BitrixDatalensConnectionTestBase, DefaultConnectorConnectionTestSuite): + pass + + +class TestBitrixInvalidConnection(BitrixInvalidConnectionTestBase, DefaultConnectorConnectionTestSuite): + def test_test_connection(self, control_api_sync_client: SyncHttpClientBase, saved_connection_id: str) -> None: + resp = control_api_sync_client.post( + f"/api/v1/connections/test_connection/{saved_connection_id}", + content_type="application/json", + data=json.dumps({}), + ) + assert resp.status_code == 400, resp.json diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_data.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_data.py new file mode 100644 index 000000000..223f9abff --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_data.py @@ -0,0 +1,69 @@ +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from 
dl_api_lib_testing.connector.data_api_suites import ( + DefaultConnectorDataDistinctTestSuite, + DefaultConnectorDataGroupByFormulaTestSuite, + DefaultConnectorDataPreviewTestSuite, + DefaultConnectorDataRangeTestSuite, + DefaultConnectorDataResultTestSuite, +) +from dl_constants.enums import WhereClauseOperation +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bitrix_gds_tests.ext.api.base import ( + BitrixDataApiTestBase, + BitrixSmartTablesDataApiTestBase, +) + + +class TestBitrixDataResult(BitrixDataApiTestBase, DefaultConnectorDataResultTestSuite): + test_params = RegulatedTestParams( + mark_features_skipped={ + DefaultConnectorDataResultTestSuite.array_support: "Bitrix doesn't support arrays", + } + ) + + +class TestBitrixDataGroupBy(BitrixDataApiTestBase, DefaultConnectorDataGroupByFormulaTestSuite): + pass + + +class TestBitrixDataRange(BitrixDataApiTestBase, DefaultConnectorDataRangeTestSuite): + pass + + +class TestBitrixDataDistinct(BitrixDataApiTestBase, DefaultConnectorDataDistinctTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorDataDistinctTestSuite.test_date_filter_distinct: "Can't create a new table in bitrix tests", + } + ) + + +class TestBitrixDataPreview(BitrixDataApiTestBase, DefaultConnectorDataPreviewTestSuite): + pass + + +class TestBitrixSmartTablesData(BitrixSmartTablesDataApiTestBase): + def test_bitrix_string_to_date( + self, + saved_dataset: Dataset, + data_api: SyncHttpDataApiV2, + ) -> None: + ds = saved_dataset + ds.result_schema["Date from string user_field"] = ds.field(formula=f"DATE([UF_CRM_5_1694020695771])") + self.get_preview(ds, data_api) + + result_resp = data_api.get_result( + dataset=ds, + fields=[ds.find_field(title="Date from string user_field")], + filters=[ + ds.find_field(title="Date from string user_field").filter( + op=WhereClauseOperation.BETWEEN, + values=["2023-09-05", "2023-09-06"], + ) + ], + ) + assert result_resp.status_code == 200, 
result_resp.json + assert get_data_rows(result_resp) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_dataset.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_dataset.py new file mode 100644 index 000000000..3fbe1d650 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_dataset.py @@ -0,0 +1,25 @@ +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + +from dl_connector_bitrix_gds_tests.ext.api.base import ( + BitrixDatasetTestBase, + BitrixSmartTablesDatasetTestBase, +) + + +class TestBitrixDataset(BitrixDatasetTestBase, DefaultConnectorDatasetTestSuite): + def check_basic_dataset(self, ds: Dataset) -> None: + assert ds.id + assert len(ds.result_schema) + + field_names = {field.title for field in ds.result_schema} + assert {"ID", "DATE_CREATE", "DATE_MODIFY", "ASSIGNED_BY_NAME"}.issubset(field_names) + + +class TestBitrixSmartTablesDataset(BitrixSmartTablesDatasetTestBase, DefaultConnectorDatasetTestSuite): + def check_basic_dataset(self, ds: Dataset) -> None: + assert ds.id + assert len(ds.result_schema) + + field_names = {field.title for field in ds.result_schema} + assert {"ID", "UF_CRM_5_1694020695771", "ASSIGNED_BY_NAME"}.issubset(field_names) diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_sources.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_sources.py new file mode 100644 index 000000000..4b8f84922 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/api/test_sources.py @@ -0,0 +1,75 @@ +import abc + +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 +from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase +from dl_api_lib_testing.data_api_base import DataApiTestBase +from 
dl_api_lib_testing.dataset_base import DatasetTestBase + +from dl_connector_bitrix_gds.core.constants import SOURCE_TYPE_BITRIX_GDS +from dl_connector_bitrix_gds_tests.ext.api.base import ( + BitrixDatasetTestBase, + BitrixSmartTablesDatasetTestBase, +) +from dl_connector_bitrix_gds_tests.ext.config import DB_NAME + + +class BitrixSourcesTestBase(DatasetTestBase, DataApiTestBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def do_check_source(self, source: dict) -> bool: + """Should return true if and only if we want to test this source in this class""" + pass + + def test_sources( + self, + control_api_sync_client: SyncHttpClientBase, + control_api: SyncHttpDatasetApiV1, + saved_connection_id: str, + data_api: SyncHttpDataApiV2, + ): + conn_id = saved_connection_id + + sources_resp = control_api_sync_client.get(f"/api/v1/connections/{conn_id}/info/sources") + assert sources_resp.status_code == 200, sources_resp.json + + sources_checked = 0 + for source in sources_resp.json["sources"]: + if not self.do_check_source(source): + continue + + dataset_params = dict( + source_type=SOURCE_TYPE_BITRIX_GDS.name, + title=source["title"], + parameters=dict( + db_name=DB_NAME, + table_name=source["title"], + ), + ) + ds = self.make_basic_dataset( + control_api=control_api, + connection_id=saved_connection_id, + dataset_params=dataset_params, + ) + preview_resp = data_api.get_preview(dataset=ds) + assert preview_resp.status_code == 200, preview_resp.response_errors + + sources_checked += 1 + + assert sources_checked > 1 + + +class TestBitrixSources(BitrixDatasetTestBase, BitrixSourcesTestBase): + def do_check_source(self, source: dict) -> bool: + title = source["title"] + return title not in [ + "telephony_call", + "crm_lead_uf", + "crm_deal_uf", + "crm_company_uf", + "crm_contact_uf", + ] and not title.startswith("crm_dynamic_items_") + + +class TestBitrixSmartTablesSources(BitrixSmartTablesDatasetTestBase, BitrixSourcesTestBase): + def do_check_source(self, source: dict) 
-> bool: + return source["title"].startswith("crm_dynamic_items_") diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/config.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/config.py new file mode 100644 index 000000000..8525583f2 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/config.py @@ -0,0 +1,31 @@ +from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_core_testing.configuration import DefaultCoreTestConfiguration +from dl_testing.containers import get_test_container_hostport + + +# Infra settings +CORE_TEST_CONFIG = DefaultCoreTestConfiguration( + host_us_http=get_test_container_hostport("us", fallback_port=51911).host, + port_us_http=get_test_container_hostport("us", fallback_port=51911).port, + host_us_pg=get_test_container_hostport("pg-us", fallback_port=51910).host, + port_us_pg_5432=get_test_container_hostport("pg-us", fallback_port=51910).port, + us_master_token="AC1ofiek8coB", + core_connector_ep_names=["bitrix_gds", "postgresql"], +) + +COMPENG_URL = f'postgresql://datalens:qwerty@{get_test_container_hostport("db-postgres-13", fallback_port=52301).as_pair()}/test_data' +API_TEST_CONFIG = ApiTestEnvironmentConfiguration( + api_connector_ep_names=["bitrix_gds", "postgresql"], + core_test_config=CORE_TEST_CONFIG, + ext_query_executer_secret_key="_some_test_secret_key_", + bi_compeng_pg_url=COMPENG_URL, +) + +BITRIX_PORTALS = dict( + default="gds.office.bitrix.ru", + datalens="datalens.bitrix24.ru", + invalid="some_portal", +) +DB_NAME = "default" +TABLE_NAME = "crm_deal" +SMART_TABLE_NAME = "crm_dynamic_items_180" diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/conftest.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/conftest.py new file mode 100644 index 000000000..8d2cab937 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/conftest.py @@ -0,0 +1,28 @@ +import os + 
+import pytest + +from dl_api_lib_testing.initialization import initialize_api_lib_test +from dl_testing.env_params.generic import GenericEnvParamGetter + +from dl_connector_bitrix_gds_tests.ext.config import API_TEST_CONFIG + + +def pytest_configure(config): # noqa + initialize_api_lib_test(pytest_config=config, api_test_config=API_TEST_CONFIG) + + +@pytest.fixture(scope="session") +def env_param_getter() -> GenericEnvParamGetter: + filepath = os.path.join(os.path.dirname(__file__), "params.yml") + return GenericEnvParamGetter.from_yaml_file(filepath) + + +@pytest.fixture(scope="session") +def bitrix_token(env_param_getter): + return env_param_getter.get_str_value("BITRIX_TOKEN") + + +@pytest.fixture(scope="session") +def bitrix_datalens_token(env_param_getter): + return env_param_getter.get_str_value("BITRIX_DATALENS_TOKEN") diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/params.yml b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/params.yml new file mode 100644 index 000000000..70c80ad27 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/ext/params.yml @@ -0,0 +1,3 @@ +params: + BITRIX_TOKEN: {getter: $osenv, key: BITRIX_TOKEN} + BITRIX_DATALENS_TOKEN: { getter: $osenv, key: BITRIX_DATALENS_TOKEN } diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/__init__.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/conftest.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/test_connection_form.py b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/test_connection_form.py new file mode 100644 index 000000000..cb7d690e1 --- /dev/null +++ 
b/lib/dl_connector_bitrix_gds/dl_connector_bitrix_gds_tests/unit/test_connection_form.py @@ -0,0 +1,10 @@ +from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS +from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + +from dl_connector_bitrix_gds.api.connection_form.form_config import BitrixGDSConnectionFormFactory +from dl_connector_bitrix_gds.api.i18n.localizer import CONFIGS as BI_CONNECTOR_BITRIX_GDS_CONFIGS + + +class TestBitrixGDSConnectionForm(ConnectionFormTestBase): + CONN_FORM_FACTORY_CLS = BitrixGDSConnectionFormFactory + TRANSLATION_CONFIGS = BI_API_CONNECTOR_CONFIGS + BI_CONNECTOR_BITRIX_GDS_CONFIGS diff --git a/lib/dl_connector_bitrix_gds/docker-compose.yml b/lib/dl_connector_bitrix_gds/docker-compose.yml new file mode 100644 index 000000000..a7eed0010 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/docker-compose.yml @@ -0,0 +1,42 @@ +version: '3.7' + +x-constants: + US_MASTER_TOKEN: &c-us-master-token "AC1ofiek8coB" + +services: + # COMPENG + db-postgres-13: + build: + context: docker-compose + dockerfile: Dockerfile.db-postgres-13 + environment: + POSTGRES_DB: "test_data" + POSTGRES_USER: "datalens" + POSTGRES_PASSWORD: "qwerty" + ports: + - "52302:5432" + + # INFRA + pg-us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.pg-us + environment: + POSTGRES_DB: us-db-ci_purgeable + POSTGRES_USER: us + POSTGRES_PASSWORD: us + ports: + - "51910:5432" + + us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.us + depends_on: + - pg-us + environment: + POSTGRES_DSN_LIST: "postgres://us:us@pg-us:5432/us-db-ci_purgeable" + AUTH_POLICY: "required" + MASTER_TOKEN: *c-us-master-token + ports: + - "51911:80" diff --git a/lib/dl_connector_bitrix_gds/docker-compose/Dockerfile.db-postgres-13 b/lib/dl_connector_bitrix_gds/docker-compose/Dockerfile.db-postgres-13 new file mode 100644 index 000000000..bfe2cc998 --- /dev/null +++ 
b/lib/dl_connector_bitrix_gds/docker-compose/Dockerfile.db-postgres-13 @@ -0,0 +1,6 @@ +# FROM postgres:13-alpine +FROM postgres:13-alpine@sha256:b9f66c57932510574fb17bccd175776535cec9abcfe7ba306315af2f0b7bfbb4 + +COPY db-postgres/data /common-data + +COPY db-postgres/initdb.d/* /docker-entrypoint-initdb.d/ diff --git a/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/data/sample.csv b/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/data/sample.csv new file mode 100644 index 000000000..dc4bc81bb --- /dev/null +++ b/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/data/sample.csv @@ -0,0 +1,1000 @@ +"Office Supplies","Houston","United States","DP-13000","Darren Powers",0.2,"2014-01-03","CA-2014-103800",77095,"OFF-PA-10000174","Message Book. Wirebound. Four 5 1/2"" X 4"" Forms/Pg.. 200 Dupl. Sets/Book",5.5512,2,"Central",7981,16.448,"Consumer","2014-01-07","Standard Class","Texas","Paper" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.8,"2014-01-04","CA-2014-112326",60540,"OFF-BI-10004094","GBC Standard Plastic Binding Systems Combs",-5.487,2,"Central",742,3.54,"Home Office","2014-01-08","Standard Class","Illinois","Binders" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.2,"2014-01-04","CA-2014-112326",60540,"OFF-LA-10003223","Avery 508",4.2717,3,"Central",740,11.783999,"Home Office","2014-01-08","Standard Class","Illinois","Labels" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.2,"2014-01-04","CA-2014-112326",60540,"OFF-ST-10002743","SAFCO Boltless Steel Shelving",-64.774796,3,"Central",741,272.73602,"Home Office","2014-01-08","Standard Class","Illinois","Storage" +"Office Supplies","Philadelphia","United States","MB-18085","Mick Brown",0.2,"2014-01-05","CA-2014-141817",19143,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 
4/Pack",4.8840003,3,"East",1760,19.536001,"Consumer","2014-01-12","Standard Class","Pennsylvania","Art" +"Furniture","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",746.40784,9,"South",7475,2573.82,"Home Office","2014-01-10","Standard Class","Kentucky","Chairs" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-AR-10001662","Rogers Handheld Barrel Pencil Sharpener",1.4796,2,"South",7477,5.48,"Home Office","2014-01-10","Standard Class","Kentucky","Art" +"Office Supplies","Athens","United States","JO-15145","Jack O'Briant",0,"2014-01-06","CA-2014-106054",30605,"OFF-AR-10002399","Dixon Prang Watercolor Pencils. 10-Color Set with Brush",5.2398,3,"South",7181,12.78,"Corporate","2014-01-07","First Class","Georgia","Art" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-BI-10004632","Ibico Hi-Tech Manual Binding System",274.491,2,"South",7476,609.98004,"Home Office","2014-01-10","Standard Class","Kentucky","Binders" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-FA-10001883","Alliance Super-Size Bands. 
Assorted Sizes",0.3112,4,"South",7480,31.12,"Home Office","2014-01-10","Standard Class","Kentucky","Fasteners" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-PA-10000955","Southworth 25% Cotton Granite Paper & Envelopes",3.0084,1,"South",7481,6.54,"Home Office","2014-01-10","Standard Class","Kentucky","Paper" +"Office Supplies","Los Angeles","United States","LS-17230","Lycoris Saunders",0,"2014-01-06","CA-2014-130813",90049,"OFF-PA-10002005","Xerox 225",9.331201,3,"West",5328,19.44,"Consumer","2014-01-08","Second Class","California","Paper" +"Technology","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"TEC-PH-10004539","Wireless Extenders zBoost YX545 SOHO Signal Booster",204.1092,4,"South",7479,755.96,"Home Office","2014-01-10","Standard Class","Kentucky","Phones" +"Technology","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"TEC-PH-10004977","GE 30524EE4",113.674194,2,"South",7478,391.97998,"Home Office","2014-01-10","Standard Class","Kentucky","Phones" +"Furniture","Huntsville","United States","VS-21820","Vivek Sundaresam",0.6,"2014-01-07","CA-2014-105417",77340,"FUR-FU-10004864","Howard Miller 14-1/2"" Diameter Chrome Round Wall Clock",-53.7096,3,"Central",7661,76.728,"Consumer","2014-01-12","Standard Class","Texas","Furnishings" +"Office Supplies","Huntsville","United States","VS-21820","Vivek Sundaresam",0.8,"2014-01-07","CA-2014-105417",77340,"OFF-BI-10003708","Acco Four Pocket Poly Ring Binder with Label Holder. Smoke. 
1""",-18.2525,7,"Central",7662,10.429999,"Consumer","2014-01-12","Standard Class","Texas","Binders" +"Office Supplies","Laredo","United States","MS-17830","Melanie Seite",0.2,"2014-01-09","CA-2014-135405",78041,"OFF-AR-10004078","Newell 312",1.168,2,"Central",593,9.344,"Consumer","2014-01-13","Standard Class","Texas","Art" +"Technology","Laredo","United States","MS-17830","Melanie Seite",0.2,"2014-01-09","CA-2014-135405",78041,"TEC-AC-10001266","Memorex Micro Travel Drive 8 GB",9.75,3,"Central",594,31.2,"Consumer","2014-01-13","Standard Class","Texas","Accessories" +"Furniture","Springfield","United States","AJ-10780","Anthony Jacobs",0,"2014-01-10","CA-2014-149020",22153,"FUR-FU-10000965","Howard Miller 11-1/2"" Diameter Ridgewood Wall Clock",21.2954,1,"South",867,51.940002,"Corporate","2014-01-15","Standard Class","Virginia","Furnishings" +"Office Supplies","Springfield","United States","AJ-10780","Anthony Jacobs",0,"2014-01-10","CA-2014-149020",22153,"OFF-LA-10004272","Avery 482",1.3583,1,"South",866,2.8899999,"Corporate","2014-01-15","Standard Class","Virginia","Labels" +"Furniture","Dover","United States","SV-20365","Seth Vernon",0,"2014-01-11","CA-2014-130092",19901,"FUR-FU-10000010","DAX Value U-Channel Document Frames. Easel Back",3.0814,2,"East",717,9.94,"Consumer","2014-01-14","First Class","Delaware","Furnishings" +"Furniture","San Francisco","United States","BD-11605","Brian Dahlen",0.15,"2014-01-13","CA-2014-157147",94109,"FUR-BO-10003034","O'Sullivan Elevations Bookcase. 
Cherry Finish",3.9294002,3,"West",4939,333.999,"Consumer","2014-01-18","Standard Class","California","Bookcases" +"Furniture","Mount Pleasant","United States","ND-18370","Natalie DeCherney",0,"2014-01-13","CA-2014-109232",29464,"FUR-CH-10000422","Global Highback Leather Tilter in Burgundy",87.3504,6,"South",2979,545.94,"Consumer","2014-01-16","Second Class","South Carolina","Chairs" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-01-13","CA-2014-157147",94109,"OFF-AR-10003514","4009 Highlighters by Sanford",6.567,5,"West",4940,19.9,"Consumer","2014-01-18","Standard Class","California","Art" +"Office Supplies","Newark","United States","MM-17920","Michael Moore",0.7,"2014-01-13","CA-2014-118192",43055,"OFF-BI-10003476","Avery Metallic Poly Binders",-2.5212,2,"East",9630,3.438,"Consumer","2014-01-18","Standard Class","Ohio","Binders" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-BI-10004187","3-ring staple pack",2.7072,3,"South",767,5.64,"Corporate","2014-01-15","Second Class","Louisiana","Binders" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-EN-10001532","Brown Kraft Recycled Envelopes",25.47,3,"South",765,50.940002,"Corporate","2014-01-15","Second Class","Louisiana","Envelopes" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-EN-10001990","Staple envelope",5.3392,2,"South",764,11.360001,"Corporate","2014-01-15","Second Class","Louisiana","Envelopes" +"Office Supplies","Newark","United States","MM-17920","Michael Moore",0.2,"2014-01-13","CA-2014-118192",43055,"OFF-PA-10002947","Xerox 1923",13.0928,7,"East",9629,37.408,"Consumer","2014-01-18","Standard Class","Ohio","Paper" +"Office Supplies","Bossier City","United States","CS-12250","Chris 
Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-ST-10000025","Fellowes Stor/Drawer Steel Plus Storage Drawers",34.3548,6,"South",768,572.58,"Corporate","2014-01-15","Second Class","Louisiana","Storage" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-01-13","CA-2014-157147",94109,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",238.65302,5,"West",4938,1325.8501,"Consumer","2014-01-18","Standard Class","California","Storage" +"Technology","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"TEC-AC-10003174","Plantronics S12 Corded Telephone Headset System",258.696,6,"South",766,646.74,"Corporate","2014-01-15","Second Class","Louisiana","Accessories" +"Furniture","Philadelphia","United States","BS-11590","Brendan Sweed",0.5,"2014-01-14","CA-2014-149524",19140,"FUR-BO-10003433","Sauder Cornerstone Collection Library",-53.285603,4,"East",6475,61.960003,"Corporate","2014-01-15","First Class","Pennsylvania","Bookcases" +"Technology","Roswell","United States","EH-13990","Erica Hackney",0,"2014-01-15","CA-2014-103366",30076,"TEC-AC-10003628","Logitech 910-002974 M325 Wireless Mouse for Web Scrolling",65.978004,5,"South",1914,149.95,"Consumer","2014-01-17","First Class","Georgia","Accessories" +"Furniture","Philadelphia","United States","DL-13315","Delfina Latchford",0.2,"2014-01-16","CA-2014-115791",19134,"FUR-FU-10001095","DAX Black Cherry Wood-Tone Poster Frame",28.5984,6,"East",971,127.104,"Consumer","2014-01-18","Second Class","Pennsylvania","Furnishings" +"Office Supplies","Philadelphia","United States","DL-13315","Delfina Latchford",0.7,"2014-01-16","CA-2014-115791",19134,"OFF-BI-10001575","GBC Linen Binding Covers",-13.631201,2,"East",973,18.588,"Consumer","2014-01-18","Second Class","Pennsylvania","Binders" +"Office Supplies","Philadelphia","United States","DL-13315","Delfina Latchford",0.2,"2014-01-16","CA-2014-115791",19134,"OFF-LA-10001074","Round Specialty Laser 
Printer Labels",10.149301,3,"East",974,30.072,"Consumer","2014-01-18","Second Class","Pennsylvania","Labels" +"Technology","Philadelphia","United States","DL-13315","Delfina Latchford",0.4,"2014-01-16","CA-2014-115791",19134,"TEC-PH-10004614","AT&T 841000 Phone",-31.05,3,"East",972,124.2,"Consumer","2014-01-18","Second Class","Pennsylvania","Phones" +"Office Supplies","Springfield","United States","DW-13195","David Wiener",0.2,"2014-01-18","CA-2014-123477",97477,"OFF-AP-10000692","Fellowes Mighty 8 Compact Surge Protector",6.4864,4,"West",5365,64.864,"Corporate","2014-01-21","Second Class","Oregon","Appliances" +"Furniture","Scottsdale","United States","TS-21340","Toby Swindell",0.7,"2014-01-19","CA-2014-146591",85254,"FUR-BO-10001972","O'Sullivan 4-Shelf Bookcase in Odessa Pine",-320.597,5,"West",5466,181.47,"Consumer","2014-01-20","First Class","Arizona","Bookcases" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.7,"2014-01-19","CA-2014-146591",85254,"OFF-BI-10003676","GBC Standard Recycled Report Covers. Clear Plastic Sheets",-23.716002,10,"West",5463,32.34,"Consumer","2014-01-20","First Class","Arizona","Binders" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.2,"2014-01-19","CA-2014-146591",85254,"OFF-EN-10002504","Tyvek Top-Opening Peel & Seel Envelopes. Plain White",36.692997,5,"West",5465,108.71999,"Consumer","2014-01-20","First Class","Arizona","Envelopes" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.2,"2014-01-19","CA-2014-146591",85254,"OFF-PA-10000659","TOPS Carbonless Receipt Book. 
Four 2-3/4 x 7-1/4 Money Receipts per Page",19.622402,4,"West",5464,56.064003,"Consumer","2014-01-20","First Class","Arizona","Paper" +"Furniture","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"FUR-CH-10002331","Hon 4700 Series Mobuis Mid-Back Task Chairs with Adjustable Arms",224.2674,3,"South",1127,1067.9401,"Consumer","2014-01-26","Standard Class","Arkansas","Chairs" +"Furniture","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"FUR-FU-10002268","Ultra Door Push Plate",4.8609,3,"Central",6333,14.73,"Consumer","2014-01-26","Standard Class","Michigan","Furnishings" +"Furniture","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"FUR-FU-10002918","Eldon ClusterMat Chair Mat with Cordless Antistatic Protection",30.023401,3,"Central",6328,272.94,"Consumer","2014-01-26","Standard Class","Michigan","Furnishings" +"Furniture","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"FUR-FU-10003194","Eldon Expressions Desk Accessory. Wood Pencil Holder. Oak",11.58,4,"South",1124,38.6,"Consumer","2014-01-26","Standard Class","Arkansas","Furnishings" +"Furniture","Los Angeles","United States","MV-17485","Mark Van Huff",0,"2014-01-20","CA-2014-148614",90049,"FUR-FU-10003194","Eldon Expressions Desk Accessory. Wood Pencil Holder. Oak",5.79,2,"West",5738,19.3,"Consumer","2014-01-25","Standard Class","California","Furnishings" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0.1,"2014-01-20","CA-2014-167927",48185,"OFF-AP-10002311","Holmes Replacement Filter for HEPA Air Cleaner. Very Large Room. 
HEPA Filter",93.581604,4,"Central",6330,247.71599,"Consumer","2014-01-26","Standard Class","Michigan","Appliances" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-AR-10002375","Newell 351",6.6584,7,"South",1123,22.96,"Consumer","2014-01-26","Standard Class","Arkansas","Art" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-AR-10003811","Newell 327",1.7901001,3,"South",1125,6.63,"Consumer","2014-01-26","Standard Class","Arkansas","Art" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-AR-10004456","Panasonic KP-4ABK Battery-Operated Pencil Sharpener",12.736799,3,"Central",6332,43.920002,"Consumer","2014-01-26","Standard Class","Michigan","Art" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-BI-10000605","Acco Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". 
Executive Red",8.9535,5,"Central",6329,19.05,"Consumer","2014-01-26","Standard Class","Michigan","Binders" +"Office Supplies","Smyrna","United States","MM-18280","Muhammed MacIntyre",0.7,"2014-01-20","US-2014-147774",37167,"OFF-BI-10003091","GBC DocuBind TL200 Manual Binding Machine",-51.5154,1,"South",7121,67.19399,"Corporate","2014-01-26","Standard Class","Tennessee","Binders" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-BI-10004364","Storex Dura Pro Binders",13.365001,5,"Central",6334,29.7,"Consumer","2014-01-26","Standard Class","Michigan","Binders" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-EN-10001539","Staple envelope",10.9698,3,"South",1126,23.34,"Consumer","2014-01-26","Standard Class","Arkansas","Envelopes" +"Office Supplies","Los Angeles","United States","MV-17485","Mark Van Huff",0,"2014-01-20","CA-2014-148614",90049,"OFF-PA-10002893","Wirebound Service Call Books. 
5 1/2"" x 4""",9.2928,2,"West",5737,19.359999,"Consumer","2014-01-25","Standard Class","California","Paper" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-ST-10000760","Eldon Fold 'N Roll Cart System",4.0542,1,"Central",6327,13.98,"Consumer","2014-01-26","Standard Class","Michigan","Storage" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-ST-10003123","Fellowes Bases and Tops For Staxonsteel/High-Stak Systems",15.979199,2,"Central",6331,66.58,"Consumer","2014-01-26","Standard Class","Michigan","Storage" +"Technology","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"TEC-PH-10001061","Apple iPhone 5C",181.9818,7,"South",1122,699.93005,"Consumer","2014-01-26","Standard Class","Arkansas","Phones" +"Furniture","Miami","United States","TB-21400","Tom Boeckenhauer",0.2,"2014-01-21","CA-2014-110422",33180,"FUR-FU-10001889","Ultra Door Pull Handle",4.1028,3,"South",9990,25.248001,"Consumer","2014-01-23","Second Class","Florida","Furnishings" +"Office Supplies","Lafayette","United States","SG-20605","Speros Goranitis",0,"2014-01-23","CA-2014-146997",47905,"OFF-FA-10003467","Alliance Big Bands Rubber Bands. 12/Pack",0,3,"Central",9887,5.94,"Consumer","2014-01-27","Standard Class","Indiana","Fasteners" +"Office Supplies","Las Vegas","United States","IM-15055","Ionia McGrath",0,"2014-01-23","CA-2014-102645",89115,"OFF-PA-10001804","Xerox 195",19.238401,6,"West",9156,40.08,"Consumer","2014-01-28","Standard Class","Nevada","Paper" +"Furniture","Rapid City","United States","CA-11965","Carol Adams",0,"2014-01-26","CA-2014-167997",57701,"FUR-BO-10004409","Safco Value Mate Series Steel Bookcases. Baked Enamel Finish on Steel. 
Gray",39.748802,2,"Central",8151,141.95999,"Corporate","2014-01-29","First Class","South Dakota","Bookcases" +"Furniture","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"FUR-FU-10001847","Eldon Image Series Black Desk Accessories",4.4712,3,"South",2584,12.42,"Home Office","2014-01-31","Standard Class","Virginia","Furnishings" +"Furniture","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"FUR-FU-10004587","GE General Use Halogen Bulbs. 100 Watts. 1 Bulb per Pack",30.7818,3,"South",2579,62.82,"Home Office","2014-01-31","Standard Class","Virginia","Furnishings" +"Office Supplies","Rapid City","United States","CA-11965","Carol Adams",0,"2014-01-26","CA-2014-167997",57701,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",5.0196,2,"Central",8150,10.68,"Corporate","2014-01-29","First Class","South Dakota","Binders" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-PA-10000380","REDIFORM Incoming/Outgoing Call Register. 11"" X 8 1/2"". 100 Messages",8.34,2,"South",2582,16.68,"Home Office","2014-01-31","Standard Class","Virginia","Paper" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-PA-10003072","Eureka Recycled Copy Paper 8 1/2"" x 11"". Ream",9.331201,3,"South",2581,19.44,"Home Office","2014-01-31","Standard Class","Virginia","Paper" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-ST-10004337","SAFCO Commercial Wire Shelving. 
72h",0,8,"South",2580,489.91998,"Home Office","2014-01-31","Standard Class","Virginia","Storage" +"Technology","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"TEC-PH-10002103","Jabra SPEAK 410",52.6344,2,"South",2585,187.98,"Home Office","2014-01-31","Standard Class","Virginia","Phones" +"Technology","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"TEC-PH-10004833","Macally Suction Cup Mount",0,13,"South",2583,155.35,"Home Office","2014-01-31","Standard Class","Virginia","Phones" +"Furniture","San Diego","United States","EJ-13720","Ed Jacobs",0.2,"2014-01-27","US-2014-117163",92037,"FUR-TA-10003469","Balt Split Level Computer Training Table",-16.65,3,"West",3796,333,"Consumer","2014-02-02","Standard Class","California","Tables" +"Office Supplies","San Diego","United States","EJ-13720","Ed Jacobs",0,"2014-01-27","US-2014-117163",92037,"OFF-AR-10003179","Dixon Ticonderoga Core-Lock Colored Pencils",12.025201,4,"West",3797,36.440002,"Consumer","2014-02-02","Standard Class","California","Art" +"Office Supplies","San Diego","United States","EJ-13720","Ed Jacobs",0,"2014-01-27","US-2014-117163",92037,"OFF-ST-10003692","Recycled Steel Personal File for Hanging File Folders",14.3075,1,"West",3795,57.23,"Consumer","2014-02-02","Standard Class","California","Storage" +"Office Supplies","New York City","United States","JC-15340","Jasper Cacioppo",0.2,"2014-01-28","CA-2014-100328",10024,"OFF-BI-10000343","Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". 
Light Blue",1.3256999,1,"East",3084,3.928,"Consumer","2014-02-03","Standard Class","New York","Binders" +"Office Supplies","Detroit","United States","MV-18190","Mike Vittorini",0,"2014-01-30","CA-2014-134103",48234,"OFF-PA-10001204","Xerox 1972",4.752,2,"Central",6388,10.56,"Consumer","2014-02-04","Standard Class","Michigan","Paper" +"Office Supplies","Detroit","United States","MV-18190","Mike Vittorini",0,"2014-01-30","CA-2014-134103",48234,"OFF-ST-10000991","Space Solutions HD Industrial Steel Shelving.",6.8982,2,"Central",6389,229.93999,"Consumer","2014-02-04","Standard Class","Michigan","Storage" +"Furniture","Mission Viejo","United States","LC-17050","Liz Carlisle",0.15,"2014-01-31","CA-2014-115161",92691,"FUR-BO-10003966","Sauder Facets Collection Library. Sky Alder Finish",3.4196002,2,"West",3366,290.66602,"Consumer","2014-02-02","First Class","California","Bookcases" +"Technology","Green Bay","United States","BD-11500","Bradley Drucker",0,"2014-02-01","CA-2014-140795",54302,"TEC-AC-10001432","Enermax Aurora Lite Keyboard",206.316,6,"Central",541,468.9,"Consumer","2014-02-03","First Class","Wisconsin","Accessories" +"Office Supplies","Saint Petersburg","United States","EB-13930","Eric Barreto",0.7,"2014-02-02","CA-2014-123400",33710,"OFF-BI-10000666","Surelock Post Binders",-12.224,2,"South",9138,18.336,"Consumer","2014-02-09","Standard Class","Florida","Binders" +"Office Supplies","San Diego","United States","CD-12790","Cynthia Delaney",0,"2014-02-02","CA-2014-139857",92037,"OFF-FA-10001843","Staples",5.8045006,5,"West",1704,12.35,"Home Office","2014-02-06","Standard Class","California","Fasteners" +"Technology","Saint Petersburg","United States","EB-13930","Eric Barreto",0.2,"2014-02-02","CA-2014-123400",33710,"TEC-PH-10002890","AT&T 17929 Lendline Telephone",13.572,5,"South",9139,180.95999,"Consumer","2014-02-09","Standard Class","Florida","Phones" +"Office Supplies","Seattle","United States","TB-21400","Tom 
Boeckenhauer",0.2,"2014-02-03","CA-2014-111059",98105,"OFF-BI-10002827","Avery Durable Poly Binders",4.3134003,3,"West",1519,13.271999,"Consumer","2014-02-06","Second Class","Washington","Binders" +"Office Supplies","Seattle","United States","TB-21400","Tom Boeckenhauer",0.2,"2014-02-03","CA-2014-111059",98105,"OFF-BI-10004593","Ibico Laser Imprintable Binding System Covers",27.248001,2,"West",1518,83.840004,"Consumer","2014-02-06","Second Class","Washington","Binders" +"Office Supplies","Escondido","United States","MH-17440","Mark Haberlin",0.2,"2014-02-04","CA-2014-104808",92025,"OFF-BI-10003676","GBC Standard Recycled Report Covers. Clear Plastic Sheets",6.0368004,2,"West",8585,17.248001,"Corporate","2014-02-08","Second Class","California","Binders" +"Office Supplies","San Diego","United States","DB-13270","Deborah Brumfield",0.2,"2014-02-04","CA-2014-107181",92024,"OFF-BI-10004230","GBC Recycled Grain Textured Covers",29.013601,3,"West",1544,82.895996,"Home Office","2014-02-08","Standard Class","California","Binders" +"Office Supplies","San Diego","United States","DB-13270","Deborah Brumfield",0,"2014-02-04","CA-2014-107181",92024,"OFF-PA-10000350","Message Book. Standard Line ""While You Were Out"". 5 1/2"" X 4"". 200 Sets/Book",16.0928,4,"West",1545,34.24,"Home Office","2014-02-08","Standard Class","California","Paper" +"Office Supplies","Romeoville","United States","DL-13315","Delfina Latchford",0.8,"2014-02-06","CA-2014-108182",60441,"OFF-BI-10001196","Avery Flip-Chart Easel Binder. 
Black",-14.7708,2,"Central",8981,8.952,"Consumer","2014-02-10","Second Class","Illinois","Binders" +"Office Supplies","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"OFF-LA-10002787","Avery 480",7.2,4,"South",1998,15,"Corporate","2014-02-09","First Class","Virginia","Labels" +"Technology","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"TEC-PH-10001615","AT&T CL82213",42.0355,5,"South",2000,144.95,"Corporate","2014-02-09","First Class","Virginia","Phones" +"Technology","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"TEC-PH-10003645","Aastra 57i VoIP phone",42.018597,1,"South",1999,161.61,"Corporate","2014-02-09","First Class","Virginia","Phones" +"Office Supplies","New York City","United States","CS-12250","Chris Selesnick",0,"2014-02-07","US-2014-169390",10024,"OFF-ST-10001558","Acco Perma 4000 Stacking Storage Drawers",9.7439995,4,"East",8736,64.96,"Corporate","2014-02-10","Second Class","New York","Storage" +"Technology","Linden","United States","CK-12760","Cyma Kinney",0,"2014-02-07","CA-2014-107755",7036,"TEC-AC-10000710","Maxell DVD-RAM Discs",49.6048,7,"East",850,115.36,"Corporate","2014-02-12","Standard Class","New Jersey","Accessories" +"Furniture","North Las Vegas","United States","NM-18445","Nathan Mautz",0,"2014-02-08","CA-2014-125759",89031,"FUR-FU-10002111","Master Caster Door Stop. Large Brown",5.5328,2,"West",9267,14.56,"Home Office","2014-02-09","First Class","Nevada","Furnishings" +"Furniture","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"FUR-CH-10001146","Global Value Mid-Back Manager's Chair. Gray",15.2225,1,"Central",9255,60.89,"Consumer","2014-02-15","Second Class","Missouri","Chairs" +"Furniture","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"FUR-FU-10002298","Rubbermaid ClusterMat Chairmats. Mat Size- 66"" x 60"". 
Lip 20"" x 11"" -90 Degree Angle",53.2704,3,"Central",9258,332.94,"Consumer","2014-02-15","Second Class","Missouri","Furnishings" +"Furniture","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"FUR-TA-10003715","Hon 2111 Invitation Series Corner Table",75.3732,6,"South",5632,1256.22,"Consumer","2014-02-15","Standard Class","Virginia","Tables" +"Office Supplies","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",8.2062,2,"South",5633,17.46,"Consumer","2014-02-15","Standard Class","Virginia","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-BI-10004654","VariCap6 Expandable Binder",24.393,3,"Central",9259,51.9,"Consumer","2014-02-15","Second Class","Missouri","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-BI-10004728","Wilson Jones Turn Tabs Binder Tool for Ring Binders",4.4344006,2,"Central",9257,9.64,"Consumer","2014-02-15","Second Class","Missouri","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-LA-10004853","Avery 483",6.8724003,3,"Central",9256,14.94,"Consumer","2014-02-15","Second Class","Missouri","Labels" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-ST-10002583","Fellowes Neat Ideas Storage Cubes",2.5983999,2,"Central",9260,64.96,"Consumer","2014-02-15","Second Class","Missouri","Storage" +"Technology","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"TEC-AC-10001432","Enermax Aurora Lite Keyboard",103.158005,3,"South",5631,234.45,"Consumer","2014-02-15","Standard Class","Virginia","Accessories" +"Furniture","Concord","United 
States","SC-20095","Sanjit Chand",0.2,"2014-02-12","US-2014-110674",94521,"FUR-CH-10000225","Global Geo Office Task Chair. Gray",-24.294,2,"West",457,129.568,"Consumer","2014-02-18","Standard Class","California","Chairs" +"Office Supplies","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"OFF-AP-10001293","Belkin 8 Outlet Surge Protector",22.9488,2,"West",9763,81.96,"Corporate","2014-02-18","Standard Class","Washington","Appliances" +"Office Supplies","Houston","United States","ST-20530","Shui Tom",0.2,"2014-02-14","CA-2014-107706",77095,"OFF-PA-10000466","Memo Book. 100 Message Capacity. 5 3/8” x 11”",6.066,3,"Central",5467,16.176,"Consumer","2014-02-19","Second Class","Texas","Paper" +"Office Supplies","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"OFF-SU-10000157","Compact Automatic Electric Letter Opener",4.7724,2,"West",9764,238.62001,"Corporate","2014-02-18","Standard Class","Washington","Supplies" +"Technology","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",86.389206,3,"West",9762,239.97,"Corporate","2014-02-18","Standard Class","Washington","Accessories" +"Office Supplies","Seattle","United States","BF-11020","Barry Französisch",0.2,"2014-02-15","CA-2014-165568",98105,"OFF-BI-10001031","Pressboard Data Binders by Wilson Jones",7.2089996,5,"West",7438,21.359999,"Corporate","2014-02-19","Standard Class","Washington","Binders" +"Office Supplies","Dallas","United States","MN-17935","Michael Nguyen",0.8,"2014-02-16","CA-2014-122567",75220,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",-13.929999,2,"Central",1334,7.96,"Consumer","2014-02-21","Standard Class","Texas","Appliances" +"Office Supplies","Dallas","United States","MN-17935","Michael Nguyen",0.8,"2014-02-16","CA-2014-122567",75220,"OFF-BI-10002012","Wilson Jones Easy Flow II Sheet Lifters",-1.728,3,"Central",1333,1.08,"Consumer","2014-02-21","Standard Class","Texas","Binders" +"Office Supplies","Chicago","United States","DL-13315","Delfina Latchford",0.2,"2014-02-17","CA-2014-154165",60653,"OFF-AR-10003631","Staples in misc. colors",8.808801,14,"Central",9516,54.208,"Consumer","2014-02-24","Standard Class","Illinois","Art" +"Furniture","Lubbock","United States","TB-21595","Troy Blackwell",0.6,"2014-02-18","US-2014-165589",79424,"FUR-FU-10002396","DAX Copper Panel Document Frame. 5 x 7 Size",-11.3220005,5,"Central",6082,25.16,"Consumer","2014-02-18","Same Day","Texas","Furnishings" +"Office Supplies","Arlington","United States","AB-10015","Aaron Bergman",0.2,"2014-02-18","CA-2014-152905",76017,"OFF-ST-10000321","Akro Stacking Bins",-2.5247998,2,"Central",8223,12.624001,"Consumer","2014-02-24","Standard Class","Texas","Storage" +"Furniture","Richmond","United States","LC-16930","Linda Cazamias",0,"2014-02-20","CA-2014-109491",47374,"FUR-FU-10000221","Master Caster Door Stop. 
Brown",6.9088,4,"Central",4522,20.32,"Corporate","2014-02-26","Standard Class","Indiana","Furnishings" +"Office Supplies","Los Angeles","United States","SR-20740","Steven Roelle",0,"2014-02-20","CA-2014-164903",90049,"OFF-PA-10003363","Xerox 204",6.2208,2,"West",8504,12.96,"Home Office","2014-02-24","Standard Class","California","Paper" +"Technology","Richmond","United States","LC-16930","Linda Cazamias",0,"2014-02-20","CA-2014-109491",47374,"TEC-AC-10001284","Enermax Briskie RF Wireless Keyboard and Mouse Combo",22.4316,3,"Central",4521,62.309998,"Corporate","2014-02-26","Standard Class","Indiana","Accessories" +"Office Supplies","Woodstock","United States","TM-21010","Tamara Manning",0.8,"2014-02-21","CA-2014-165540",60098,"OFF-BI-10004094","GBC Standard Plastic Binding Systems Combs",-13.7175,5,"Central",2883,8.85,"Consumer","2014-02-25","Standard Class","Illinois","Binders" +"Office Supplies","Moreno Valley","United States","SA-20830","Sue Ann Reed",0,"2014-02-22","CA-2014-133354",92553,"OFF-PA-10001800","Xerox 220",9.331201,3,"West",7611,19.44,"Consumer","2014-02-24","First Class","California","Paper" +"Office Supplies","El Paso","United States","MG-17875","Michael Grace",0.8,"2014-02-23","CA-2014-103744",79907,"OFF-BI-10000320","GBC Plastic Binding Combs",-6.8634,3,"Central",6531,4.428,"Home Office","2014-02-27","Standard Class","Texas","Binders" +"Office Supplies","El Paso","United States","MG-17875","Michael Grace",0.2,"2014-02-23","CA-2014-103744",79907,"OFF-LA-10004425","Staple-on labels",2.3409,3,"Central",6530,6.9360003,"Home Office","2014-02-27","Standard Class","Texas","Labels" +"Office Supplies","Medford","United States","JH-15430","Jennifer Halladay",0.2,"2014-02-24","US-2014-137680",97504,"OFF-PA-10000069","TOPS 4 x 6 Fluorescent Color Memo Sheets. 
500 Sheets per Pack",7.6868997,3,"West",5513,22.776001,"Consumer","2014-03-02","Standard Class","Oregon","Paper" +"Office Supplies","Medford","United States","JH-15430","Jennifer Halladay",0.2,"2014-02-24","US-2014-137680",97504,"OFF-PA-10000174","Message Book. Wirebound. Four 5 1/2"" X 4"" Forms/Pg.. 200 Dupl. Sets/Book",11.1024,4,"West",5512,32.896,"Consumer","2014-03-02","Standard Class","Oregon","Paper" +"Office Supplies","Columbus","United States","JS-16030","Joy Smith",0.2,"2014-02-27","CA-2014-156545",43229,"OFF-AR-10003560","Zebra Zazzle Fluorescent Highlighters",3.4048,4,"East",9464,19.456,"Consumer","2014-03-03","First Class","Ohio","Art" +"Furniture","Elmhurst","United States","VF-21715","Vicky Freymann",0.3,"2014-03-01","CA-2014-113880",60126,"FUR-CH-10000863","Novimex Swivel Fabric Task Chair",-172.1172,6,"Central",6548,634.11597,"Home Office","2014-03-05","Standard Class","Illinois","Chairs" +"Furniture","El Paso","United States","SC-20380","Shahid Collister",0.3,"2014-03-01","CA-2014-131009",79907,"FUR-CH-10001270","Harbour Creations Steel Folding Chair",0,6,"Central",7949,362.25,"Consumer","2014-03-05","Standard Class","Texas","Chairs" +"Furniture","Seattle","United States","DB-13060","Dave Brooks",0.2,"2014-03-01","CA-2014-104269",98115,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",51.476402,2,"West",158,457.568,"Consumer","2014-03-06","Second Class","Washington","Chairs" +"Furniture","El Paso","United States","SC-20380","Shahid Collister",0.6,"2014-03-01","CA-2014-131009",79907,"FUR-FU-10001095","DAX Black Cherry Wood-Tone Poster Frame",-34.953598,6,"Central",7950,63.552,"Consumer","2014-03-05","Standard Class","Texas","Furnishings" +"Furniture","Houston","United States","GW-14605","Giulietta Weimer",0.3,"2014-03-01","CA-2014-168312",77036,"FUR-TA-10001866","Bevis Round Conference Room Tables and Bases",-43.0296,3,"Central",8311,376.509,"Consumer","2014-03-07","Standard Class","Texas","Tables" +"Office Supplies","El Paso","United 
States","SC-20380","Shahid Collister",0.2,"2014-03-01","CA-2014-131009",79907,"OFF-FA-10004395","Plymouth Boxed Rubber Bands by Plymouth",-3.5325,5,"Central",7948,18.84,"Consumer","2014-03-05","Standard Class","Texas","Fasteners" +"Office Supplies","Elmhurst","United States","VF-21715","Vicky Freymann",0.2,"2014-03-01","CA-2014-113880",60126,"OFF-PA-10003036","Black Print Carbonless 8 1/2"" x 8 1/4"" Rapid Memo Book",5.6784,3,"Central",6549,17.472,"Home Office","2014-03-05","Standard Class","Illinois","Paper" +"Office Supplies","El Paso","United States","SC-20380","Shahid Collister",0.2,"2014-03-01","CA-2014-131009",79907,"OFF-ST-10001469","Fellowes Bankers Box Recycled Super Stor/Drawer",-22.6716,3,"Central",7951,129.552,"Consumer","2014-03-05","Standard Class","Texas","Storage" +"Office Supplies","Houston","United States","GW-14605","Giulietta Weimer",0.2,"2014-03-01","CA-2014-168312",77036,"OFF-ST-10003692","Recycled Steel Personal File for Hanging File Folders",8.584499,3,"Central",8310,137.352,"Consumer","2014-03-07","Standard Class","Texas","Storage" +"Technology","New York City","United States","HR-14770","Hallie Redmond",0,"2014-03-01","US-2014-143707",10035,"TEC-PH-10003655","Sannysis Cute Owl Design Soft Skin Case Cover for Samsung Galaxy S4",1.6038,3,"East",5714,5.94,"Home Office","2014-03-05","Standard Class","New York","Phones" +"Office Supplies","New York City","United States","KN-16705","Kristina Nunn",0,"2014-03-02","CA-2014-107524",10009,"OFF-EN-10001990","Staple envelope",5.3392,2,"East",6058,11.360001,"Home Office","2014-03-08","Standard Class","New York","Envelopes" +"Office Supplies","Philadelphia","United States","NH-18610","Nicole Hansen",0.2,"2014-03-02","CA-2014-111157",19120,"OFF-PA-10000327","Xerox 1971",1.07,1,"East",9946,3.424,"Corporate","2014-03-06","Standard Class","Pennsylvania","Paper" +"Office Supplies","New York City","United States","KN-16705","Kristina Nunn",0,"2014-03-02","CA-2014-107524",10009,"OFF-PA-10000587","Array 
Parchment Paper. Assorted Colors",17.472,5,"East",6059,36.4,"Home Office","2014-03-08","Standard Class","New York","Paper" +"Technology","Philadelphia","United States","NH-18610","Nicole Hansen",0.2,"2014-03-02","CA-2014-111157",19120,"TEC-AC-10004353","Hypercom P1300 Pinpad",32.129997,3,"East",9947,151.2,"Corporate","2014-03-06","Standard Class","Pennsylvania","Accessories" +"Furniture","Columbus","United States","JS-15595","Jill Stevenson",0.5,"2014-03-03","US-2014-127978",43229,"FUR-BO-10001972","O'Sullivan 4-Shelf Bookcase in Odessa Pine",-199.617,5,"East",7248,302.44998,"Corporate","2014-03-08","Standard Class","Ohio","Bookcases" +"Furniture","San Diego","United States","EJ-14155","Eva Jacobs",0.2,"2014-03-03","CA-2014-105648",92037,"FUR-TA-10002958","Bevis Oval Conference Table. Walnut",-23.4882,3,"West",8712,626.352,"Consumer","2014-03-07","Standard Class","California","Tables" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.8,"2014-03-03","CA-2014-130421",77095,"OFF-AP-10002534","3.6 Cubic Foot Counter Height Office Refrigerator",-459.6072,3,"Central",7349,176.772,"Consumer","2014-03-07","Standard Class","Texas","Appliances" +"Office Supplies","Wilmington","United States","AJ-10945","Ashley Jarboe",0.2,"2014-03-03","CA-2014-155852",28403,"OFF-AR-10003560","Zebra Zazzle Fluorescent Highlighters",3.4048,4,"South",556,19.456,"Consumer","2014-03-07","Second Class","North Carolina","Art" +"Office Supplies","New York City","United States","AP-10720","Anne Pryor",0.2,"2014-03-03","CA-2014-127964",10035,"OFF-BI-10003429","Cardinal HOLDit! 
Binder Insert Strips.Extra Strips",9.1785,5,"East",1165,25.32,"Home Office","2014-03-08","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","AP-10720","Anne Pryor",0.2,"2014-03-03","CA-2014-127964",10035,"OFF-BI-10004593","Ibico Laser Imprintable Binding System Covers",40.871998,3,"East",1164,125.759995,"Home Office","2014-03-08","Standard Class","New York","Binders" +"Office Supplies","Columbus","United States","JS-15595","Jill Stevenson",0.2,"2014-03-03","US-2014-127978",43229,"OFF-LA-10000305","Avery 495",4.9140005,3,"East",7247,15.120001,"Corporate","2014-03-08","Standard Class","Ohio","Labels" +"Office Supplies","Columbus","United States","JS-15595","Jill Stevenson",0.2,"2014-03-03","US-2014-127978",43229,"OFF-ST-10002486","Eldon Shelf Savers Cubes and Bins",-10.051201,8,"East",7249,44.671997,"Corporate","2014-03-08","Standard Class","Ohio","Storage" +"Technology","New York City","United States","AP-10720","Anne Pryor",0,"2014-03-03","CA-2014-127964",10035,"TEC-PH-10004700","PowerGen Dual USB Car Charger",4.5954003,1,"East",1163,9.99,"Home Office","2014-03-08","Standard Class","New York","Phones" +"Office Supplies","Margate","United States","SC-20095","Sanjit Chand",0.2,"2014-03-04","CA-2014-117016",33063,"OFF-AR-10001374","BIC Brite Liner Highlighters. 
Chisel Tip",2.3328,3,"South",4422,15.552,"Consumer","2014-03-09","Standard Class","Florida","Art" +"Office Supplies","Columbia","United States","CL-12565","Clay Ludtke",0,"2014-03-04","CA-2014-116239",29203,"OFF-ST-10001370","Sensible Storage WireTech Storage Systems",17.745,5,"South",1565,354.9,"Consumer","2014-03-04","Same Day","South Carolina","Storage" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-AR-10000588","Newell 345",15.4752,3,"East",8497,59.52,"Consumer","2014-03-08","Second Class","New York","Art" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0.2,"2014-03-05","CA-2014-169061",10701,"OFF-BI-10001617","GBC Wire Binding Combs",16.7508,6,"East",8500,49.631996,"Consumer","2014-03-08","Second Class","New York","Binders" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-PA-10001878","Xerox 1891",45.975403,2,"East",8501,97.82,"Consumer","2014-03-08","Second Class","New York","Paper" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-SU-10000381","Acme Forged Steel Scissors with Black Enamel Handles",5.3998003,2,"East",8499,18.62,"Consumer","2014-03-08","Second Class","New York","Supplies" +"Technology","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"TEC-AC-10002001","Logitech Wireless Gaming Headset G930",177.5889,3,"East",8498,479.97,"Consumer","2014-03-08","Second Class","New York","Accessories" +"Furniture","Seattle","United States","CM-12715","Craig Molinari",0.2,"2014-03-07","CA-2014-104563",98103,"FUR-CH-10002780","Office Star - Task Chair with Contemporary Loop Arms",21.8352,6,"West",9643,436.704,"Corporate","2014-03-12","Standard Class","Washington","Chairs" +"Furniture","Seattle","United States","AB-10015","Aaron Bergman",0.2,"2014-03-07","CA-2014-156587",98103,"FUR-CH-10004477","Global Push Button 
Manager's Chair. Indigo",5.4801,1,"West",4962,48.711998,"Consumer","2014-03-08","First Class","Washington","Chairs" +"Furniture","Seattle","United States","CM-12715","Craig Molinari",0.2,"2014-03-07","CA-2014-104563",98103,"FUR-CH-10004495","Global Leather and Oak Executive Chair. Black",54.1764,2,"West",9644,481.568,"Corporate","2014-03-12","Standard Class","Washington","Chairs" +"Office Supplies","Seattle","United States","CM-12715","Craig Molinari",0,"2014-03-07","CA-2014-104563",98103,"OFF-AR-10000390","Newell Chalk Holder",9.499,5,"West",9641,20.65,"Corporate","2014-03-12","Standard Class","Washington","Art" +"Office Supplies","Seattle","United States","AB-10015","Aaron Bergman",0,"2014-03-07","CA-2014-156587",98103,"OFF-AR-10001427","Newell 330",4.6644,3,"West",4963,17.94,"Consumer","2014-03-08","First Class","Washington","Art" +"Office Supplies","Des Moines","United States","NP-18685","Nora Pelletier",0.2,"2014-03-07","US-2014-131982",98198,"OFF-BI-10004224","Catalog Binders with Expanding Posts",33.64,2,"West",5254,107.648,"Home Office","2014-03-11","Second Class","Washington","Binders" +"Office Supplies","Seattle","United States","CM-12715","Craig Molinari",0,"2014-03-07","CA-2014-104563",98103,"OFF-ST-10000934","Contico 72""H Heavy-Duty Storage System",0,5,"West",9642,204.9,"Corporate","2014-03-12","Standard Class","Washington","Storage" +"Office Supplies","Seattle","United States","AB-10015","Aaron Bergman",0,"2014-03-07","CA-2014-156587",98103,"OFF-ST-10002344","Carina 42""Hx23 3/4""W Media Storage Unit",4.8588004,3,"West",4964,242.93999,"Consumer","2014-03-08","First Class","Washington","Storage" +"Office Supplies","Denver","United States","KT-16480","Kean Thornton",0.2,"2014-03-10","US-2014-140116",80219,"OFF-AR-10001044","BOSTON Ranger #55 Pencil Sharpener. 
Black",9.3564,4,"West",2102,83.168,"Consumer","2014-03-17","Standard Class","Colorado","Art" +"Office Supplies","Royal Oak","United States","SW-20275","Scott Williamson",0,"2014-03-10","US-2014-100279",48073,"OFF-PA-10002259","Geographics Note Cards. Blank. White. 8 1/2"" x 11""",10.742399,2,"Central",1172,22.38,"Consumer","2014-03-14","Standard Class","Michigan","Paper" +"Office Supplies","Denver","United States","KT-16480","Kean Thornton",0.2,"2014-03-10","US-2014-140116",80219,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",-15.9102,3,"West",2101,636.408,"Consumer","2014-03-17","Standard Class","Colorado","Storage" +"Furniture","Columbus","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-03-11","CA-2014-166884",43229,"FUR-FU-10003981","Eldon Wave Desk Accessories",2.2879999,5,"East",1542,8.320001,"Consumer","2014-03-16","Second Class","Ohio","Furnishings" +"Office Supplies","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"OFF-AP-10002578","Fellowes Premier Superior Surge Suppressor. 10-Outlet. 
With Phone and Remote",38.157597,3,"South",7015,146.76,"Corporate","2014-03-13","Second Class","Kentucky","Appliances" +"Office Supplies","Roseville","United States","RB-19435","Richard Bierner",0,"2014-03-11","US-2014-103338",95661,"OFF-AR-10001770","Economy #2 Pencils",2.0747998,3,"West",8300,7.98,"Consumer","2014-03-15","Standard Class","California","Art" +"Office Supplies","Columbus","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-03-11","CA-2014-166884",43229,"OFF-FA-10001561","Stockwell Push Pins",1.7004,6,"East",1543,10.464,"Consumer","2014-03-16","Second Class","Ohio","Fasteners" +"Office Supplies","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"OFF-PA-10000213","Xerox 198",7.0218,3,"South",7018,14.94,"Corporate","2014-03-13","Second Class","Kentucky","Paper" +"Office Supplies","New York City","United States","KA-16525","Kelly Andreada",0,"2014-03-11","CA-2014-158337",10024,"OFF-PA-10002137","Southworth 100% Résumé Paper. 24lb.",49.014,14,"East",3442,108.92,"Consumer","2014-03-14","Second Class","New York","Paper" +"Technology","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"TEC-AC-10000710","Maxell DVD-RAM Discs",14.1728,2,"South",7016,32.960003,"Corporate","2014-03-13","Second Class","Kentucky","Accessories" +"Technology","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"TEC-PH-10000984","Panasonic KX-TG9471B",164.6316,3,"South",7017,587.97003,"Corporate","2014-03-13","Second Class","Kentucky","Phones" +"Furniture","Chesapeake","United States","CK-12325","Christine Kargatis",0,"2014-03-14","US-2014-125521",23320,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",284.97998,4,"South",2841,1139.92,"Home Office","2014-03-19","Standard Class","Virginia","Chairs" +"Office Supplies","Huntington Beach","United States","DK-13225","Dean 
Katz",0,"2014-03-14","CA-2014-157623",92646,"OFF-AR-10003723","Avery Hi-Liter Fluorescent Desk Style Markers",1.2506,1,"West",1892,3.3799999,"Corporate","2014-03-18","Standard Class","California","Art" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0.2,"2014-03-14","CA-2014-114510",84321,"OFF-BI-10001617","GBC Wire Binding Combs",11.167201,4,"West",4677,33.088,"Consumer","2014-03-19","Standard Class","Utah","Binders" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0.2,"2014-03-14","CA-2014-114510",84321,"OFF-BI-10003007","Premium Transparent Presentation Covers. No Pattern/Clear. 8 1/2"" x 11""",20.1656,2,"West",4681,62.048,"Consumer","2014-03-19","Standard Class","Utah","Binders" +"Office Supplies","Jacksonville","United States","NF-18475","Neil Französisch",0.2,"2014-03-14","CA-2014-100293",32216,"OFF-PA-10000176","Xerox 1887",31.8696,6,"South",9515,91.056,"Home Office","2014-03-18","Standard Class","Florida","Paper" +"Office Supplies","Huntington Beach","United States","DK-13225","Dean Katz",0,"2014-03-14","CA-2014-157623",92646,"OFF-PA-10001204","Xerox 1972",4.752,2,"West",1891,10.56,"Corporate","2014-03-18","Standard Class","California","Paper" +"Office Supplies","Chicago","United States","RB-19465","Rick Bensley",0.2,"2014-03-14","CA-2014-152618",60653,"OFF-PA-10001215","Xerox 1963",2.6399999,2,"Central",9407,8.448,"Home Office","2014-03-17","First Class","Illinois","Paper" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"OFF-ST-10000736","Carina Double Wide Media Storage Towers in Natural & Black",3.2392,1,"West",4678,80.98,"Consumer","2014-03-19","Standard Class","Utah","Storage" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"OFF-ST-10003221","Staple magnet",5.7671995,2,"West",4680,21.359999,"Consumer","2014-03-19","Standard Class","Utah","Storage" +"Technology","Logan","United 
States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"TEC-AC-10004877","Imation 30456 USB Flash Drive 8GB",6.624,12,"West",4679,82.8,"Consumer","2014-03-19","Standard Class","Utah","Accessories" +"Technology","Chicago","United States","RB-19465","Rick Bensley",0.3,"2014-03-14","CA-2014-152618",60653,"TEC-MA-10003626","Hewlett-Packard Deskjet 6540 Color Inkjet Printer",156.047,2,"Central",9406,574.91003,"Home Office","2014-03-17","First Class","Illinois","Machines" +"Furniture","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"FUR-FU-10001756","Eldon Expressions Desk Accessory. Wood Photo Frame. Mahogany",5.1408,3,"South",604,45.696,"Consumer","2014-03-19","Standard Class","Florida","Furnishings" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.7,"2014-03-15","CA-2014-149958",33614,"OFF-BI-10000773","Insertable Tab Post Binder Dividers",-5.5338,3,"South",605,7.218,"Consumer","2014-03-19","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.7,"2014-03-15","CA-2014-149958",33614,"OFF-BI-10001543","GBC VeloBinder Manual Binding System",-31.6712,4,"South",606,43.188,"Consumer","2014-03-19","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"OFF-PA-10002120","Xerox 1889",47.815197,3,"South",607,131.90399,"Consumer","2014-03-19","Standard Class","Florida","Paper" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"OFF-ST-10001490","Hot File 7-Pocket. 
Floor Stand",17.847,1,"South",603,142.776,"Consumer","2014-03-19","Standard Class","Florida","Storage" +"Technology","Raleigh","United States","KN-16705","Kristina Nunn",0.2,"2014-03-16","CA-2014-157609",27604,"TEC-PH-10002415","Polycom VoiceStation 500 Conference phone",29.494999,2,"South",3453,471.91998,"Home Office","2014-03-21","Second Class","North Carolina","Phones" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10001950","Balt Solid Wood Round Tables",-553.6476,4,"East",2359,1071.5759,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10003473","Bretford Rectangular Conference Table Tops",-447.5947,7,"East",2358,1579.746,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10004175","Hon 30"" x 60"" Table with Locking Drawer",-122.7816,3,"East",2360,613.908,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-AP-10001154","Bionaire Personal Warm Mist Humidifier/Vaporizer",36.5742,2,"Central",1346,93.78,"Consumer","2014-03-24","Standard Class","Minnesota","Appliances" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-AP-10003971","Belkin 6 Outlet Metallic Surge Strip",8.4942,3,"South",7432,32.67,"Corporate","2014-03-21","Standard Class","Mississippi","Appliances" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-AR-10003732","Newell 333",0.3336,2,"Central",5101,4.448,"Consumer","2014-03-17","Same Day","Texas","Art" +"Office Supplies","Lakeville","United 
States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-AR-10003829","Newell 35",5.7071996,6,"Central",1348,19.68,"Consumer","2014-03-24","Standard Class","Minnesota","Art" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-BI-10000136","Avery Non-Stick Heavy Duty View Round Locking Ring Binders",17.222403,6,"Central",1350,35.88,"Consumer","2014-03-24","Standard Class","Minnesota","Binders" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-BI-10000309","GBC Twin Loop Wire Binding Elements. 9/16"" Spine. Black",14.9156,2,"South",7429,30.44,"Corporate","2014-03-21","Standard Class","Mississippi","Binders" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-BI-10000605","Acco Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". Executive Red",5.3721004,3,"South",7428,11.429999,"Corporate","2014-03-21","Standard Class","Mississippi","Binders" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",25.098,10,"Central",1349,53.4,"Consumer","2014-03-24","Standard Class","Minnesota","Binders" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-PA-10000466","Memo Book. 100 Message Capacity. 
5 3/8” x 11”",23.59,7,"Central",1347,47.18,"Consumer","2014-03-24","Standard Class","Minnesota","Paper" +"Office Supplies","New York City","United States","CP-12340","Christine Phan",0,"2014-03-17","US-2014-148838",10024,"OFF-PA-10000919","Xerox 1918",75.969604,4,"East",2362,155.04,"Corporate","2014-03-21","Standard Class","New York","Paper" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-PA-10002195","Xerox 1966",1.8792,1,"Central",5102,5.184,"Consumer","2014-03-17","Same Day","Texas","Paper" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-PA-10002365","Xerox 1967",5.4431996,3,"Central",5103,15.552,"Consumer","2014-03-17","Same Day","Texas","Paper" +"Office Supplies","New York City","United States","CP-12340","Christine Phan",0,"2014-03-17","US-2014-148838",10024,"OFF-PA-10002960","Xerox 1926",16.3842,7,"East",2361,34.86,"Corporate","2014-03-21","Standard Class","New York","Paper" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-PA-10003063","EcoTones Memo Sheets",7.68,4,"South",7431,16,"Corporate","2014-03-21","Standard Class","Mississippi","Paper" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-PA-10003349","Xerox 1957",6.3504004,2,"South",7430,12.96,"Corporate","2014-03-21","Standard Class","Mississippi","Paper" +"Office Supplies","Philadelphia","United States","QJ-19255","Quincy Jones",0.2,"2014-03-17","CA-2014-127859",19134,"OFF-PA-10003641","Xerox 1909",41.152798,6,"East",8011,126.62399,"Corporate","2014-03-20","Second Class","Pennsylvania","Paper" +"Furniture","San Francisco","United States","EK-13795","Eileen Kiefer",0.15,"2014-03-18","CA-2014-111871",94110,"FUR-BO-10004218","Bush Heritage Pine Collection 5-Shelf Bookcase. Albany Pine Finish. 
*Special Order",70.49,10,"West",6152,1198.3301,"Home Office","2014-03-21","Second Class","California","Bookcases" +"Furniture","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"FUR-FU-10001986","Dana Fluorescent Magnifying Lamp. White. 36""",15.294,3,"South",2703,122.352005,"Home Office","2014-03-23","Standard Class","Florida","Furnishings" +"Furniture","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"FUR-FU-10004270","Executive Impressions 13"" Clairmont Wall Clock",8.076599,2,"South",2701,30.768,"Home Office","2014-03-23","Standard Class","Florida","Furnishings" +"Furniture","Burbank","United States","SC-20050","Sample Company A",0,"2014-03-18","US-2014-131275",91505,"FUR-FU-10004597","Eldon Cleatmat Chair Mats for Medium Pile Carpets",14.429999,2,"West",7328,111,"Home Office","2014-03-24","Standard Class","California","Furnishings" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-AR-10003183","Avery Fluorescent Highlighter Four-Color Set",1.002,3,"South",2700,8.016,"Home Office","2014-03-23","Standard Class","Florida","Art" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-FA-10003112","Staples",5.9175005,3,"South",2702,18.936,"Home Office","2014-03-23","Standard Class","Florida","Fasteners" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-PA-10001804","Xerox 195",7.4816,4,"South",2699,21.376,"Home Office","2014-03-23","Standard Class","Florida","Paper" +"Office Supplies","Burbank","United States","SC-20050","Sample Company A",0,"2014-03-18","US-2014-131275",91505,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",334.11423,7,"West",7330,1856.19,"Home Office","2014-03-24","Standard Class","California","Storage" +"Technology","Burbank","United 
States","SC-20050","Sample Company A",0.2,"2014-03-18","US-2014-131275",91505,"TEC-MA-10001148","Swingline SM12-08 MicroCut Jam Free Shredder",415.9896,4,"West",7329,1279.9681,"Home Office","2014-03-24","Standard Class","California","Machines" +"Technology","Jacksonville","United States","SM-20320","Sean Miller",0.5,"2014-03-18","CA-2014-145317",32216,"TEC-MA-10002412","Cisco TelePresence System EX90 Videoconferencing Unit",-1811.0784,6,"South",2698,22638.48,"Home Office","2014-03-23","Standard Class","Florida","Machines" +"Technology","Jacksonville","United States","SM-20320","Sean Miller",0.5,"2014-03-18","CA-2014-145317",32216,"TEC-MA-10003626","Hewlett-Packard Deskjet 6540 Color Inkjet Printer",-16.426,4,"South",2697,821.3,"Home Office","2014-03-23","Standard Class","Florida","Machines" +"Furniture","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"FUR-FU-10002508","Document Clip Frames",5.5044003,3,"South",6166,20.016,"Corporate","2014-03-21","First Class","Florida","Furnishings" +"Furniture","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"FUR-FU-10003981","Eldon Wave Desk Accessories",1.3728,3,"South",6165,4.992,"Corporate","2014-03-21","First Class","Florida","Furnishings" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.7,"2014-03-19","CA-2014-103989",33801,"OFF-BI-10001196","Avery Flip-Chart Easel Binder. 
Black",-25.737001,5,"South",6164,33.57,"Corporate","2014-03-21","First Class","Florida","Binders" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"OFF-LA-10000443","Avery 501",3.9852002,4,"South",6162,11.808001,"Corporate","2014-03-21","First Class","Florida","Labels" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"OFF-ST-10000918","Crate-A-Files",1.9619999,3,"South",6163,26.16,"Corporate","2014-03-21","First Class","Florida","Storage" +"Technology","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"TEC-AC-10002647","Logitech Wireless Boombox Speaker - portable - wireless. wired",53.2,2,"South",6167,170.23999,"Corporate","2014-03-21","First Class","Florida","Accessories" +"Technology","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"TEC-PH-10004667","Cisco 8x8 Inc. 6753i IP Business Phone System",28.3479,3,"South",6161,323.976,"Corporate","2014-03-21","First Class","Florida","Phones" +"Furniture","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"FUR-CH-10000422","Global Highback Leather Tilter in Burgundy",-10.9188,3,"South",8346,218.376,"Consumer","2014-03-25","Standard Class","Tennessee","Chairs" +"Furniture","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"FUR-FU-10001473","DAX Wood Document Frame",6.5904,3,"South",8345,32.952,"Consumer","2014-03-25","Standard Class","Tennessee","Furnishings" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-AR-10001573","American Pencil",2.7028,4,"South",7587,9.320001,"Corporate","2014-03-25","Standard Class","Virginia","Art" +"Office Supplies","Hamilton","United States","CB-12025","Cassandra 
Brandow",0.2,"2014-03-21","CA-2014-140004",45011,"OFF-AR-10004027","Binney & Smith inkTank Erasable Desk Highlighter. Chisel Tip. Yellow. 12/Box",1.5876,3,"East",197,6.048,"Consumer","2014-03-25","Standard Class","Ohio","Art" +"Office Supplies","Hamilton","United States","CB-12025","Cassandra Brandow",0.2,"2014-03-21","CA-2014-140004",45011,"OFF-AR-10004685","Binney & Smith Crayola Metallic Colored Pencils. 8-Color Set",1.2038001,2,"East",196,7.408,"Consumer","2014-03-25","Standard Class","Ohio","Art" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-BI-10001597","Wilson Jones Ledger-Size. Piano-Hinge Binder. 2"". Blue",59.011196,3,"South",7588,122.94,"Corporate","2014-03-25","Standard Class","Virginia","Binders" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-LA-10004409","Avery 492",6.624,5,"South",7585,14.4,"Corporate","2014-03-25","Standard Class","Virginia","Labels" +"Office Supplies","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"OFF-LA-10004544","Avery 505",22.2,5,"South",8344,59.2,"Consumer","2014-03-25","Standard Class","Tennessee","Labels" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-PA-10000675","Xerox 1919",60.255302,3,"South",7586,122.97,"Corporate","2014-03-25","Standard Class","Virginia","Paper" +"Office Supplies","Asheville","United States","TS-21205","Thomas Seio",0.2,"2014-03-21","US-2014-107405",28806,"OFF-ST-10002301","Tennsco Commercial Shelving",-3.8646,1,"South",5461,16.272001,"Corporate","2014-03-25","Standard Class","North Carolina","Storage" +"Technology","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"TEC-PH-10000730","Samsung Galaxy S4 Active",909.9818,7,"South",7584,3499.93,"Corporate","2014-03-25","Standard 
Class","Virginia","Phones" +"Furniture","Tucson","United States","BF-11275","Beth Fritzler",0.2,"2014-03-22","CA-2014-148040",85705,"FUR-CH-10001482","Office Star - Mesh Screen back chair with Vinyl seat",-35.3646,3,"West",2872,314.352,"Corporate","2014-03-26","Standard Class","Arizona","Chairs" +"Office Supplies","Portage","United States","VM-21835","Vivian Mathis",0,"2014-03-22","US-2014-129609",46368,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 4/Pack",6.512,2,"Central",4341,16.28,"Consumer","2014-03-22","Same Day","Indiana","Art" +"Office Supplies","Greensboro","United States","CC-12685","Craig Carroll",0.7,"2014-03-22","US-2014-158057",27405,"OFF-BI-10002735","GBC Prestige Therm-A-Bind Covers",-39.4565,5,"South",1769,51.465004,"Consumer","2014-03-26","Standard Class","North Carolina","Binders" +"Office Supplies","Greensboro","United States","CC-12685","Craig Carroll",0.7,"2014-03-22","US-2014-158057",27405,"OFF-BI-10004410","C-Line Peel & Stick Add-On Filing Pockets. 8-3/4 x 5-1/8. 
10/Pack",-5.8604,4,"South",1768,7.644,"Consumer","2014-03-26","Standard Class","North Carolina","Binders" +"Office Supplies","Tucson","United States","BF-11275","Beth Fritzler",0.2,"2014-03-22","CA-2014-148040",85705,"OFF-PA-10002581","Xerox 1951",23.235,3,"West",2871,74.352005,"Corporate","2014-03-26","Standard Class","Arizona","Paper" +"Office Supplies","Delray Beach","United States","BG-11740","Bruce Geld",0.2,"2014-03-23","CA-2014-164749",33445,"OFF-LA-10004484","Avery 476",3.2214,3,"South",6591,9.912,"Consumer","2014-03-26","First Class","Florida","Labels" +"Office Supplies","Los Angeles","United States","PO-18865","Patrick O'Donnell",0,"2014-03-23","CA-2014-120838",90036,"OFF-ST-10000585","Economy Rollaway Files",85.904,2,"West",3555,330.4,"Consumer","2014-03-26","Second Class","California","Storage" +"Technology","Los Angeles","United States","PO-18865","Patrick O'Donnell",0.2,"2014-03-23","CA-2014-120838",90036,"TEC-PH-10003484","Ooma Telo VoIP Home Phone System",37.797,6,"West",3556,604.752,"Consumer","2014-03-26","Second Class","California","Phones" +"Furniture","Fresno","United States","MS-17710","Maurice Satty",0,"2014-03-24","CA-2014-143637",93727,"FUR-FU-10002813","DAX Contemporary Wood Frame with Silver Metal Mat. Desktop. 11 x 14 Size",14.5728,2,"West",7802,40.480003,"Consumer","2014-03-29","Second Class","California","Furnishings" +"Office Supplies","New York City","United States","CD-11920","Carlos Daly",0,"2014-03-24","CA-2014-147235",10024,"OFF-PA-10004948","Xerox 190",11.703,5,"East",4480,24.9,"Consumer","2014-03-28","Standard Class","New York","Paper" +"Furniture","New York City","United States","AZ-10750","Annie Zypern",0.1,"2014-03-25","CA-2014-148586",10009,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 
19w x 6d x 43h",65.20639,7,"East",2433,366.786,"Consumer","2014-04-01","Standard Class","New York","Chairs" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10000034","BIC Brite Liner Grip Highlighters. Assorted. 5/Pack",9.9216,6,"West",9310,25.44,"Consumer","2014-03-30","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10003338","Eberhard Faber 3 1/2"" Golf Pencils",3.72,2,"West",9308,14.88,"Consumer","2014-03-30","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10003829","Newell 35",1.9023999,2,"West",9307,6.56,"Consumer","2014-03-30","Standard Class","California","Art" +"Technology","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"TEC-AC-10002558","Imation Swivel Flash Drive USB flash drive - 8 GB",15.918,4,"West",9309,45.480003,"Consumer","2014-03-30","Standard Class","California","Accessories" +"Office Supplies","San Francisco","United States","DK-12835","Damala Kotsonis",0,"2014-03-26","CA-2014-141838",94122,"OFF-AR-10004272","Newell 308",0.84000003,2,"West",9052,3.36,"Corporate","2014-03-31","Second Class","California","Art" +"Office Supplies","San Francisco","United States","DK-12835","Damala Kotsonis",0.2,"2014-03-26","CA-2014-141838",94122,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",9.4284,4,"West",9053,27.936,"Corporate","2014-03-31","Second Class","California","Binders" +"Office Supplies","Pomona","United States","CS-12505","Cindy Stewart",0,"2014-03-26","CA-2014-100860",91767,"OFF-LA-10001982","Smead Alpha-Z Color-Coded Name Labels First Letter Starter Set",9,5,"West",9660,18.75,"Consumer","2014-03-30","Second Class","California","Labels" +"Technology","Los 
Angeles","United States","JD-15895","Jonathan Doherty",0,"2014-03-26","CA-2014-138436",90036,"TEC-AC-10002323","SanDisk Ultra 32 GB MicroSDHC Class 10 Memory Card",8.619,3,"West",4245,66.3,"Corporate","2014-03-30","Standard Class","California","Accessories" +"Technology","San Francisco","United States","DK-12835","Damala Kotsonis",0.2,"2014-03-26","CA-2014-141838",94122,"TEC-PH-10004100","Griffin GC17055 Auxiliary Audio Cable",2.8783998,2,"West",9054,28.784,"Corporate","2014-03-31","Second Class","California","Phones" +"Furniture","Springfield","United States","MP-18175","Mike Pelletier",0.4,"2014-03-28","US-2014-117380",45503,"FUR-TA-10000198","Chromcraft Bull-Nose Wood Oval Conference Tables & Bases",-143.25479,1,"East",9900,330.58798,"Home Office","2014-04-03","Standard Class","Ohio","Tables" +"Furniture","Des Moines","United States","CV-12295","Christina VanderZanden",0,"2014-03-28","US-2014-137869",50315,"FUR-TA-10003954","Hon 94000 Series Round Tables",106.624794,4,"Central",7841,1184.72,"Consumer","2014-04-02","Standard Class","Iowa","Tables" +"Office Supplies","Des Moines","United States","CV-12295","Christina VanderZanden",0,"2014-03-28","US-2014-137869",50315,"OFF-EN-10001509","Poly String Tie Envelopes",2.8764,3,"Central",7840,6.12,"Consumer","2014-04-02","Standard Class","Iowa","Envelopes" +"Technology","Albuquerque","United States","DR-12940","Daniel Raglin",0.2,"2014-03-28","CA-2014-100881",87105,"TEC-PH-10003273","AT&T TR1909W",22.6782,3,"West",8464,302.376,"Home Office","2014-04-01","Standard Class","New Mexico","Phones" +"Furniture","Plano","United States","MG-18145","Mike Gockenbach",0.3,"2014-03-29","CA-2014-137274",75023,"FUR-TA-10001889","Bush Advantage Collection Racetrack Conference Table",-152.71559,3,"Central",7306,890.84094,"Consumer","2014-04-02","Standard Class","Texas","Tables" +"Furniture","San Francisco","United States","GA-14725","Guy Armstrong",0.15,"2014-03-30","CA-2014-131247",94110,"FUR-BO-10001337","O'Sullivan Living Dimensions 
2-Shelf Bookcases",-12.098001,2,"West",6139,205.666,"Consumer","2014-04-04","Standard Class","California","Bookcases" +"Furniture","Brownsville","United States","MP-17470","Mark Packer",0.3,"2014-03-30","CA-2014-162089",78521,"FUR-CH-10002304","Global Stack Chair without Arms. Black",-9.093,7,"Central",7632,127.302,"Home Office","2014-04-01","First Class","Texas","Chairs" +"Office Supplies","New York City","United States","SG-20890","Susan Gilcrest",0,"2014-03-30","US-2014-105151",10009,"OFF-AR-10001231","Sanford EarthWrite Recycled Pencils. Medium Soft. #2",2.94,5,"East",5954,10.5,"Corporate","2014-03-31","First Class","New York","Art" +"Office Supplies","Long Beach","United States","KM-16720","Kunst Miller",0,"2014-03-30","CA-2014-169033",11561,"OFF-AR-10001915","Peel-Off China Markers",20.852999,5,"East",2897,49.649998,"Consumer","2014-04-03","Standard Class","New York","Art" +"Office Supplies","Brownsville","United States","MP-17470","Mark Packer",0.2,"2014-03-30","CA-2014-162089",78521,"OFF-EN-10002230","Airmail Envelopes",113.305504,5,"Central",7630,335.72,"Home Office","2014-04-01","First Class","Texas","Envelopes" +"Office Supplies","Seattle","United States","Dl-13600","Dorris liebe",0,"2014-03-30","CA-2014-133424",98105,"OFF-LA-10002312","Avery 490",22.2,3,"West",5285,44.4,"Corporate","2014-04-04","Standard Class","Washington","Labels" +"Office Supplies","Apple Valley","United States","NC-18340","Nat Carroll",0,"2014-03-30","US-2014-113124",55124,"OFF-ST-10001511","Space Solutions Commercial Steel Shelving",6.465,2,"Central",8431,129.3,"Consumer","2014-04-05","Standard Class","Minnesota","Storage" +"Office Supplies","Seattle","United States","Dl-13600","Dorris liebe",0,"2014-03-30","CA-2014-133424",98105,"OFF-ST-10002957","Sterilite Show Offs Storage Containers",0,3,"West",5284,15.84,"Corporate","2014-04-04","Standard Class","Washington","Storage" +"Technology","Brownsville","United States","MP-17470","Mark 
Packer",0.2,"2014-03-30","CA-2014-162089",78521,"TEC-PH-10001819","Innergie mMini Combo Duo USB Travel Charging Kit",88.1804,7,"Central",7631,251.94398,"Home Office","2014-04-01","First Class","Texas","Phones" +"Furniture","Tampa","United States","TG-21640","Trudy Glocke",0.2,"2014-03-31","CA-2014-130428",33614,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",98.4802,7,"South",9566,1125.488,"Consumer","2014-03-31","Same Day","Florida","Chairs" +"Office Supplies","San Francisco","United States","AA-10315","Alex Avila",0,"2014-03-31","CA-2014-128055",94122,"OFF-AP-10002765","Fellowes Advanced Computer Series Surge Protectors",14.834399,2,"West",2231,52.980003,"Consumer","2014-04-05","Standard Class","California","Appliances" +"Office Supplies","Tampa","United States","TG-21640","Trudy Glocke",0.2,"2014-03-31","CA-2014-130428",33614,"OFF-AR-10004027","Binney & Smith inkTank Erasable Desk Highlighter. Chisel Tip. Yellow. 12/Box",1.0583999,2,"South",9568,4.032,"Consumer","2014-03-31","Same Day","Florida","Art" +"Office Supplies","Miami","United States","KB-16240","Karen Bern",0.7,"2014-03-31","CA-2014-101770",33180,"OFF-BI-10001097","Avery Hole Reinforcements",-1.3083,1,"South",5480,1.8689998,"Corporate","2014-04-04","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","TG-21640","Trudy Glocke",0.7,"2014-03-31","CA-2014-130428",33614,"OFF-BI-10001636","Ibico Plastic and Wire Spiral Binding Combs",-10.116,5,"South",9567,12.645,"Consumer","2014-03-31","Same Day","Florida","Binders" +"Office Supplies","Chicago","United States","DD-13570","Dorothy Dickinson",0.8,"2014-03-31","US-2014-104759",60610,"OFF-BI-10002071","Fellowes Black Plastic Comb Bindings",-13.827801,7,"Central",5049,8.134,"Consumer","2014-04-04","Standard Class","Illinois","Binders" +"Office Supplies","Philadelphia","United States","JO-15280","Jas O'Carroll",0.7,"2014-03-31","CA-2014-112403",19120,"OFF-BI-10003529","Avery Round Ring 
Poly Binders",-0.59639996,1,"East",4712,0.852,"Consumer","2014-03-31","Same Day","Pennsylvania","Binders" +"Office Supplies","San Francisco","United States","AA-10315","Alex Avila",0.2,"2014-03-31","CA-2014-128055",94122,"OFF-BI-10004390","GBC DocuBind 200 Manual Binding Machine",252.588,2,"West",2230,673.568,"Consumer","2014-04-05","Standard Class","California","Binders" +"Technology","Chicago","United States","DD-13570","Dorothy Dickinson",0.2,"2014-03-31","US-2014-104759",60610,"TEC-AC-10004901","Kensington SlimBlade Notebook Wireless Mouse with Nano Receiver ",13.9972,2,"Central",5050,79.984,"Consumer","2014-04-04","Standard Class","Illinois","Accessories" +"Office Supplies","Vallejo","United States","KM-16720","Kunst Miller",0.2,"2014-04-01","US-2014-157021",94591,"OFF-BI-10000042","Pressboard Data Binder. Crimson. 12"" X 8 1/2""",5.5536,4,"West",1374,17.088,"Consumer","2014-04-06","Second Class","California","Binders" +"Office Supplies","Revere","United States","KH-16330","Katharine Harms",0,"2014-04-01","CA-2014-138359",2151,"OFF-BI-10000145","Zipper Ring Binder Pockets",3.0576,2,"East",7955,6.24,"Corporate","2014-04-06","Standard Class","Massachusetts","Binders" +"Office Supplies","Vallejo","United States","KM-16720","Kunst Miller",0,"2014-04-01","US-2014-157021",94591,"OFF-LA-10002312","Avery 490",14.8,2,"West",1373,29.6,"Consumer","2014-04-06","Second Class","California","Labels" +"Office Supplies","Revere","United States","KH-16330","Katharine Harms",0,"2014-04-01","CA-2014-138359",2151,"OFF-ST-10000636","Rogers Profile Extra Capacity Storage Tub",2.6783998,4,"East",7954,66.96,"Corporate","2014-04-06","Standard Class","Massachusetts","Storage" +"Furniture","Virginia Beach","United States","AH-10690","Anna Häberlin",0,"2014-04-02","CA-2014-160276",23464,"FUR-FU-10003192","Luxo Adjustable Task Clamp Lamp",46.1968,2,"South",8920,177.68001,"Corporate","2014-04-08","Standard Class","Virginia","Furnishings" +"Office Supplies","Athens","United 
States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"OFF-AP-10003842","Euro-Pro Shark Turbo Vacuum",40.274002,5,"South",5685,154.9,"Consumer","2014-04-08","Standard Class","Georgia","Appliances" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.2,"2014-04-02","US-2014-157847",77095,"OFF-PA-10001593","Xerox 1947",10.465,7,"Central",8828,33.488003,"Consumer","2014-04-06","Second Class","Texas","Paper" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.2,"2014-04-02","US-2014-157847",77095,"OFF-PA-10002986","Xerox 1898",9.352,5,"Central",8827,26.720001,"Consumer","2014-04-06","Second Class","Texas","Paper" +"Office Supplies","Athens","United States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"OFF-PA-10004248","Xerox 1990",7.128,3,"South",5683,15.84,"Consumer","2014-04-08","Standard Class","Georgia","Paper" +"Technology","Athens","United States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"TEC-PH-10001128","Motorola Droid Maxx",293.98038,7,"South",5684,1049.93,"Consumer","2014-04-08","Standard Class","Georgia","Phones" +"Office Supplies","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"OFF-EN-10001415","Staple envelope",5.58,2,"West",9884,11.160001,"Corporate","2014-04-08","Standard Class","California","Envelopes" +"Technology","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",57.5928,2,"West",9886,159.98,"Corporate","2014-04-08","Standard Class","California","Accessories" +"Technology","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"TEC-AC-10001284","Enermax Briskie RF Wireless Keyboard and Mouse Combo",22.4316,3,"West",9885,62.309998,"Corporate","2014-04-08","Standard Class","California","Accessories" +"Furniture","Detroit","United States","MS-17710","Maurice 
Satty",0,"2014-04-04","CA-2014-133228",48205,"FUR-FU-10004020","Advantus Panel Wall Acrylic Frame",2.3521,1,"Central",3955,5.4700003,"Consumer","2014-04-09","Standard Class","Michigan","Furnishings" +"Office Supplies","Detroit","United States","MS-17710","Maurice Satty",0,"2014-04-04","CA-2014-133228",48205,"OFF-AR-10001955","Newell 319",23.807999,4,"Central",3956,79.36,"Consumer","2014-04-09","Standard Class","Michigan","Art" +"Office Supplies","Los Angeles","United States","MZ-17515","Mary Zewe",0.2,"2014-04-04","US-2014-128685",90008,"OFF-BI-10004140","Avery Non-Stick Binders",2.2450001,2,"West",4845,7.184,"Corporate","2014-04-05","First Class","California","Binders" +"Office Supplies","San Francisco","United States","PK-18910","Paul Knutson",0,"2014-04-04","CA-2014-105172",94109,"OFF-LA-10001641","Avery 518",9.072,6,"West",1561,18.9,"Home Office","2014-04-09","Standard Class","California","Labels" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-PA-10002195","Xerox 1966",6.3504004,2,"South",5124,12.96,"Corporate","2014-04-08","Standard Class","Louisiana","Paper" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-PA-10003797","Xerox 209",9.331201,3,"South",5123,19.44,"Corporate","2014-04-08","Standard Class","Louisiana","Paper" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-ST-10004180","Safco Commercial Shelving",9.302,5,"South",5121,232.55,"Corporate","2014-04-08","Standard Class","Louisiana","Storage" +"Technology","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"TEC-AC-10002926","Logitech Wireless Marathon Mouse M705",42.9914,2,"South",5122,99.98,"Corporate","2014-04-08","Standard Class","Louisiana","Accessories" +"Office Supplies","Alexandria","United States","GM-14695","Greg 
Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-AR-10000127","Newell 321",6.6584,7,"South",3280,22.96,"Corporate","2014-04-09","Second Class","Virginia","Art" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-AR-10002952","Stanley Contemporary Battery Pencil Sharpeners",7.4760003,2,"Central",9507,26.7,"Consumer","2014-04-07","Second Class","Michigan","Art" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-AR-10004685","Binney & Smith Crayola Metallic Colored Pencils. 8-Color Set",4.5836997,3,"Central",9509,13.89,"Consumer","2014-04-07","Second Class","Michigan","Art" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-AR-10004757","Crayola Colored Pencils",7.5768003,7,"South",3277,22.96,"Corporate","2014-04-09","Second Class","Virginia","Art" +"Office Supplies","Richmond","United States","RP-19855","Roy Phan",0,"2014-04-05","US-2014-157231",40475,"OFF-BI-10002852","Ibico Standard Transparent Covers",56.5264,7,"South",9912,115.36,"Corporate","2014-04-09","Standard Class","Kentucky","Binders" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-BI-10004209","Fellowes Twister Kit. Gray/Clear. 3/pkg",18.09,5,"Central",9508,40.2,"Consumer","2014-04-07","Second Class","Michigan","Binders" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-PA-10003349","Xerox 1957",6.3504004,2,"South",3279,12.96,"Corporate","2014-04-09","Second Class","Virginia","Paper" +"Office Supplies","New York City","United States","TH-21550","Tracy Hopkins",0,"2014-04-05","CA-2014-113887",10035,"OFF-PA-10004071","Eaton Premium Continuous-Feed Paper. 25% Cotton. Letter Size. White. 
1000 Shts/Box",26.630402,1,"East",711,55.480003,"Home Office","2014-04-07","First Class","New York","Paper" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-ST-10000991","Space Solutions HD Industrial Steel Shelving.",20.694601,6,"Central",9510,689.82,"Consumer","2014-04-07","Second Class","Michigan","Storage" +"Office Supplies","Decatur","United States","JG-15805","John Grady",0.2,"2014-04-05","CA-2014-151001",62521,"OFF-ST-10001031","Adjustable Personal File Tote",3.9072,4,"Central",3880,52.096,"Corporate","2014-04-07","First Class","Illinois","Storage" +"Office Supplies","Decatur","United States","JG-15805","John Grady",0.2,"2014-04-05","CA-2014-151001",62521,"OFF-ST-10003455","Tenex File Box. Personal Filing Tote with Lid. Black",3.7224002,4,"Central",3879,49.631996,"Corporate","2014-04-07","First Class","Illinois","Storage" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-SU-10002881","Martin Yale Chadless Opener Electric Letter Opener",83.281,5,"South",3281,4164.05,"Corporate","2014-04-09","Second Class","Virginia","Supplies" +"Technology","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"TEC-PH-10001615","AT&T CL82213",8.4071,1,"South",3278,28.99,"Corporate","2014-04-09","Second Class","Virginia","Phones" +"Furniture","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"FUR-FU-10000747","Tenex B1-RE Series Chair Mats for Low Pile Carpets",15.633201,2,"West",6016,91.96,"Corporate","2014-04-12","Standard Class","California","Furnishings" +"Furniture","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"FUR-FU-10001918","C-Line Cubicle Keepers Polyproplyene Holder With Velcro Backings",12.9129,7,"West",6017,33.109997,"Corporate","2014-04-12","Standard Class","California","Furnishings" 
+"Furniture","Seattle","United States","SC-20260","Scott Cohen",0,"2014-04-06","CA-2014-169803",98115,"FUR-TA-10000688","Chromcraft Bull-Nose Wood Round Conference Table Top. Wood Base",111.1035,3,"West",5036,653.55,"Corporate","2014-04-12","Standard Class","Washington","Tables" +"Furniture","Philadelphia","United States","SD-20485","Shirley Daniels",0.4,"2014-04-06","US-2014-118486",19143,"FUR-TA-10001039","KI Adjustable-Height Table",-36.111595,3,"East",1406,154.76399,"Home Office","2014-04-08","First Class","Pennsylvania","Tables" +"Office Supplies","Los Angeles","United States","FH-14275","Frank Hawley",0,"2014-04-06","CA-2014-153808",90004,"OFF-AR-10001725","Boston Home & Office Model 2000 Electric Pencil Sharpeners",18.447,3,"West",8839,70.950005,"Corporate","2014-04-10","Second Class","California","Art" +"Office Supplies","Los Angeles","United States","FH-14275","Frank Hawley",0.2,"2014-04-06","CA-2014-153808",90004,"OFF-BI-10002794","Avery Trapezoid Ring Binder. 3"" Capacity. Black. 
1040 sheets",23.7684,2,"West",8840,65.568,"Corporate","2014-04-10","Second Class","California","Binders" +"Office Supplies","Lancaster","United States","GP-14740","Guy Phonely",0.7,"2014-04-06","CA-2014-136742",17602,"OFF-BI-10003719","Large Capacity Hanging Post Binders",-35.928,6,"East",869,44.91,"Corporate","2014-04-10","Standard Class","Pennsylvania","Binders" +"Office Supplies","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"OFF-PA-10001977","Xerox 194",26.630402,1,"West",6019,55.480003,"Corporate","2014-04-12","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"OFF-PA-10003309","Xerox 211",9.331201,3,"West",6018,19.44,"Corporate","2014-04-12","Standard Class","California","Paper" +"Office Supplies","Philadelphia","United States","SD-20485","Shirley Daniels",0.2,"2014-04-06","US-2014-118486",19143,"OFF-SU-10004498","Martin-Yale Premier Letter Opener",-2.1896,1,"East",1405,10.304,"Home Office","2014-04-08","First Class","Pennsylvania","Supplies" +"Technology","Seattle","United States","SC-20260","Scott Cohen",0,"2014-04-06","CA-2014-169803",98115,"TEC-AC-10003441","Kingston Digital DataTraveler 32GB USB 2.0",2.034,2,"West",5037,33.9,"Corporate","2014-04-12","Standard Class","Washington","Accessories" +"Technology","Los Angeles","United States","FH-14275","Frank Hawley",0,"2014-04-06","CA-2014-153808",90004,"TEC-AC-10004171","Razer Kraken 7.1 Surround Sound Over Ear USB Gaming Headset",131.98679,3,"West",8841,299.97,"Corporate","2014-04-10","Second Class","California","Accessories" +"Technology","Philadelphia","United States","SD-20485","Shirley Daniels",0.2,"2014-04-06","US-2014-118486",19143,"TEC-AC-10004659","Imation Secure+ Hardware Encrypted USB 2.0 Flash Drive; 16GB",21.897,2,"East",1407,116.784,"Home Office","2014-04-08","First Class","Pennsylvania","Accessories" +"Furniture","Mobile","United 
States","PJ-19015","Pauline Johnson",0,"2014-04-07","CA-2014-124023",36608,"FUR-FU-10004415","Stacking Tray. Side-Loading. Legal. Smoke",2.7776,2,"South",3482,8.96,"Consumer","2014-04-10","First Class","Alabama","Furnishings" +"Office Supplies","Columbia","United States","RS-19765","Roland Schwarz",0,"2014-04-07","CA-2014-128846",29203,"OFF-PA-10000100","Xerox 1945",60.255302,3,"South",4178,122.97,"Corporate","2014-04-12","Standard Class","South Carolina","Paper" +"Office Supplies","Marietta","United States","VM-21685","Valerie Mitchum",0,"2014-04-07","CA-2014-165806",30062,"OFF-PA-10003441","Xerox 226",27.9936,9,"South",6253,58.32,"Home Office","2014-04-07","Same Day","Georgia","Paper" +"Technology","Columbia","United States","RS-19765","Roland Schwarz",0,"2014-04-07","CA-2014-128846",29203,"TEC-PH-10003273","AT&T TR1909W",163.787,5,"South",4177,629.95,"Corporate","2014-04-12","Standard Class","South Carolina","Phones" +"Technology","Marietta","United States","VM-21685","Valerie Mitchum",0,"2014-04-07","CA-2014-165806",30062,"TEC-PH-10004922","RCA Visys Integrated PBX 8-Line Router",50.2425,3,"South",6254,200.97,"Home Office","2014-04-07","Same Day","Georgia","Phones" +"Furniture","Decatur","United States","RA-19885","Ruben Ausman",0,"2014-04-08","US-2014-118997",35601,"FUR-TA-10001086","SAFCO PlanMaster Boards. 60w x 37-1/2d. 
White Melamine",316.13922,8,"South",5927,1215.92,"Corporate","2014-04-12","Standard Class","Alabama","Tables" +"Furniture","Concord","United States","NM-18445","Nathan Mautz",0.2,"2014-04-08","CA-2014-150581",94521,"FUR-TA-10003748","Bevis 36 x 72 Conference Tables",2.4898,1,"West",7047,99.591995,"Home Office","2014-04-12","Standard Class","California","Tables" +"Furniture","Toledo","United States","JS-15880","John Stevenson",0.4,"2014-04-08","CA-2014-121769",43615,"FUR-TA-10004442","Riverside Furniture Stanwyck Manor Table Series",-94.6605,1,"East",7410,172.11,"Consumer","2014-04-12","Standard Class","Ohio","Tables" +"Office Supplies","Glendale","United States","JM-15655","Jim Mitchum",0.2,"2014-04-08","CA-2014-100363",85301,"OFF-FA-10000611","Binder Clips by OIC",0.8288,2,"West",3836,2.3679998,"Corporate","2014-04-15","Standard Class","Arizona","Fasteners" +"Office Supplies","Chandler","United States","PC-19000","Pauline Chand",0.2,"2014-04-08","US-2014-163797",85224,"OFF-FA-10001883","Alliance Super-Size Bands. 
Assorted Sizes",-11.825601,8,"West",6381,49.792,"Home Office","2014-04-13","Standard Class","Arizona","Fasteners" +"Office Supplies","Glendale","United States","JM-15655","Jim Mitchum",0.2,"2014-04-08","CA-2014-100363",85301,"OFF-PA-10004733","Things To Do Today Spiral Book",6.8904004,3,"West",3837,19.008,"Corporate","2014-04-15","Standard Class","Arizona","Paper" +"Technology","Concord","United States","NM-18445","Nathan Mautz",0,"2014-04-08","CA-2014-150581",94521,"TEC-AC-10001908","Logitech Wireless Headset h800",139.986,4,"West",7048,399.96,"Home Office","2014-04-12","Standard Class","California","Accessories" +"Office Supplies","Lewiston","United States","SE-20110","Sanjit Engle",0.2,"2014-04-11","US-2014-121734",83501,"OFF-BI-10004817","GBC Personal VeloBind Strips",3.3544,1,"West",4578,9.584,"Consumer","2014-04-16","Standard Class","Idaho","Binders" +"Office Supplies","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"OFF-FA-10001332","Acco Banker's Clasps. 
5 3/4""-Long",2.3328,3,"South",3774,6.912,"Consumer","2014-04-13","Second Class","Florida","Fasteners" +"Office Supplies","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"OFF-PA-10000029","Xerox 224",3.6287997,2,"South",3776,10.368001,"Consumer","2014-04-13","Second Class","Florida","Paper" +"Office Supplies","Great Falls","United States","MM-18055","Michelle Moray",0,"2014-04-11","CA-2014-110555",59405,"OFF-ST-10000876","Eldon Simplefile Box Office",24.3824,7,"West",8358,87.08,"Consumer","2014-04-18","Standard Class","Montana","Storage" +"Technology","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"TEC-AC-10001874","Logitech Wireless Anywhere Mouse MX for PC and Mac",41.993,7,"South",3777,335.944,"Consumer","2014-04-13","Second Class","Florida","Accessories" +"Technology","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"TEC-AC-10002001","Logitech Wireless Gaming Headset G930",81.594894,3,"South",3775,383.976,"Consumer","2014-04-13","Second Class","Florida","Accessories" +"Technology","Great Falls","United States","MM-18055","Michelle Moray",0,"2014-04-11","CA-2014-110555",59405,"TEC-AC-10003399","Memorex Mini Travel Drive 64 GB USB 2.0 Flash Drive",91.3248,6,"West",8360,217.43999,"Consumer","2014-04-18","Standard Class","Montana","Accessories" +"Technology","Great Falls","United States","MM-18055","Michelle Moray",0.2,"2014-04-11","CA-2014-110555",59405,"TEC-PH-10000586","AT&T SB67148 SynJ",9.2386,2,"West",8359,105.584,"Consumer","2014-04-18","Standard Class","Montana","Phones" +"Technology","Austin","United States","GM-14455","Gary Mitchum",0.2,"2014-04-11","CA-2014-138940",78745,"TEC-PH-10001835","Jawbone JAMBOX Wireless Bluetooth Speaker",265.4232,6,"Central",4862,758.352,"Home Office","2014-04-16","Second Class","Texas","Phones" +"Furniture","Redondo Beach","United States","KN-16450","Kean 
Nguyen",0.15,"2014-04-12","CA-2014-154599",90278,"FUR-BO-10001337","O'Sullivan Living Dimensions 2-Shelf Bookcases",-18.147001,3,"West",2797,308.499,"Corporate","2014-04-17","Standard Class","California","Bookcases" +"Office Supplies","Lodi","United States","EH-13765","Edward Hooks",0,"2014-04-12","CA-2014-142979",95240,"OFF-AR-10002987","Prismacolor Color Pencil Set",16.268799,2,"West",9850,39.68,"Corporate","2014-04-18","Standard Class","California","Art" +"Office Supplies","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"OFF-BI-10002764","Recycled Pressboard Report Cover with Reinforced Top Hinge",6.5569,7,"West",2796,18.088,"Corporate","2014-04-17","Standard Class","California","Binders" +"Office Supplies","Bloomington","United States","SV-20785","Stewart Visinsky",0,"2014-04-12","US-2014-158365",47401,"OFF-PA-10000289","Xerox 213",15.552,5,"Central",9098,32.4,"Consumer","2014-04-17","Standard Class","Indiana","Paper" +"Technology","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"TEC-PH-10000576","AT&T 1080 Corded phone",38.357197,4,"West",2795,438.36798,"Corporate","2014-04-17","Standard Class","California","Phones" +"Technology","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"TEC-PH-10001557","Pyle PMP37LED",94.0702,14,"West",2794,1075.088,"Corporate","2014-04-17","Standard Class","California","Phones" +"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.2,"2014-04-13","CA-2014-122336",19140,"OFF-AR-10000122","Newell 314",1.116,4,"East",341,17.855999,"Corporate","2014-04-17","Second Class","Pennsylvania","Art" +"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.7,"2014-04-13","CA-2014-122336",19140,"OFF-BI-10003656","Fellowes PB200 Plastic Comb Binding Machine",-407.976,10,"East",342,509.97,"Corporate","2014-04-17","Second Class","Pennsylvania","Binders" 
+"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.2,"2014-04-13","CA-2014-122336",19140,"OFF-FA-10002780","Staples",10.072399,13,"East",343,30.992,"Corporate","2014-04-17","Second Class","Pennsylvania","Fasteners" +"Office Supplies","Baltimore","United States","BF-11170","Ben Ferrer",0,"2014-04-13","CA-2014-167724",21215,"OFF-LA-10002368","Avery 479",3.6017997,3,"East",6708,7.8300004,"Home Office","2014-04-18","Standard Class","Maryland","Labels" +"Technology","Philadelphia","United States","JD-15895","Jonathan Doherty",0.4,"2014-04-13","CA-2014-122336",19140,"TEC-PH-10000702","Square Credit Card Reader. 4 1/2"" x 4 1/2"" x 1"". White",8.391601,12,"East",344,71.928,"Corporate","2014-04-17","Second Class","Pennsylvania","Phones" +"Furniture","Los Angeles","United States","PS-18970","Paul Stevenson",0,"2014-04-15","US-2014-120740",90049,"FUR-FU-10004091","Howard Miller 13"" Diameter Goldtone Round Wall Clock",76.981606,4,"West",7658,187.76,"Home Office","2014-04-15","Same Day","California","Furnishings" +"Office Supplies","Los Angeles","United States","PS-18970","Paul Stevenson",0,"2014-04-15","US-2014-120740",90049,"OFF-AP-10000240","Belkin F9G930V10-GRY 9 Outlet Surge",31.0184,2,"West",7657,106.96,"Home Office","2014-04-15","Same Day","California","Appliances" +"Office Supplies","Wilmington","United States","SP-20650","Stephanie Phelps",0.2,"2014-04-16","CA-2014-155208",28403,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 
4/Pack",9.768001,6,"South",404,39.072,"Corporate","2014-04-20","Standard Class","North Carolina","Art" +"Furniture","Houston","United States","KM-16720","Kunst Miller",0.3,"2014-04-18","CA-2014-100678",77095,"FUR-CH-10002602","DMI Arturo Collection Mission-style Design Wood Chair",-18.117601,3,"Central",6570,317.05798,"Consumer","2014-04-22","Standard Class","Texas","Chairs" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-AR-10000657","Binney & Smith inkTank Desk Highlighter. Chisel Tip. Yellow. 12/Box",3.5475,5,"West",3535,10.75,"Consumer","2014-04-23","Standard Class","California","Art" +"Office Supplies","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"OFF-AR-10001868","Prang Dustless Chalk Sticks",1.008,2,"Central",6569,2.6879997,"Consumer","2014-04-22","Standard Class","Texas","Art" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-AR-10002375","Newell 351",3.8047998,4,"West",3534,13.120001,"Consumer","2014-04-23","Standard Class","California","Art" +"Office Supplies","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"OFF-EN-10000056","Cameo Buff Policy Envelopes",50.406303,3,"Central",6571,149.352,"Consumer","2014-04-22","Standard Class","Texas","Envelopes" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-FA-10000134","Advantus Push Pins. 
Aluminum Head",3.6022,2,"West",3536,11.620001,"Consumer","2014-04-23","Standard Class","California","Fasteners" +"Technology","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"TEC-AC-10000474","Kensington Expert Mouse Optical USB Trackball for PC or Mac",28.497,3,"Central",6572,227.976,"Consumer","2014-04-22","Standard Class","Texas","Accessories" +"Technology","Los Angeles","United States","JL-15835","John Lee",0.2,"2014-04-18","CA-2014-110849",90049,"TEC-MA-10002859","Ativa MDM8000 8-Sheet Micro-Cut Shredder",97.1892,4,"West",3533,287.968,"Consumer","2014-04-23","Standard Class","California","Machines" +"Furniture","San Jose","United States","NF-18595","Nicole Fjeld",0,"2014-04-19","CA-2014-169460",95123,"FUR-FU-10004017","Executive Impressions 13"" Chairman Wall Clock",26.649002,3,"West",4570,76.14,"Home Office","2014-04-21","Second Class","California","Furnishings" +"Office Supplies","Arlington","United States","AG-10390","Allen Goldenen",0,"2014-04-19","CA-2014-158470",22204,"OFF-BI-10003638","GBC Durable Plastic Covers",26.703001,3,"South",7843,58.05,"Consumer","2014-04-23","Second Class","Virginia","Binders" +"Office Supplies","Arlington","United States","AG-10390","Allen Goldenen",0,"2014-04-19","CA-2014-158470",22204,"OFF-PA-10001569","Xerox 232",34.2144,11,"South",7844,71.28,"Consumer","2014-04-23","Second Class","Virginia","Paper" +"Furniture","Los Angeles","United States","BP-11230","Benjamin Patterson",0,"2014-04-20","CA-2014-101462",90045,"FUR-FU-10000409","GE 4 Foot Flourescent Tube. 
40 Watt",27.5632,4,"West",6850,59.920002,"Consumer","2014-04-25","Standard Class","California","Furnishings" +"Furniture","Houston","United States","SV-20365","Seth Vernon",0.3,"2014-04-20","CA-2014-134572",77070,"FUR-TA-10001705","Bush Advantage Collection Round Conference Table",-95.67,5,"Central",5076,744.1,"Consumer","2014-04-22","Second Class","Texas","Tables" +"Furniture","Houston","United States","SV-20365","Seth Vernon",0.3,"2014-04-20","CA-2014-134572",77070,"FUR-TA-10004442","Riverside Furniture Stanwyck Manor Table Series",-131.951,2,"Central",5078,401.59,"Consumer","2014-04-22","Second Class","Texas","Tables" +"Office Supplies","Houston","United States","SV-20365","Seth Vernon",0.2,"2014-04-20","CA-2014-134572",77070,"OFF-ST-10004634","Personal Folder Holder. Ebony",5.605,5,"Central",5077,44.84,"Consumer","2014-04-22","Second Class","Texas","Storage" +"Office Supplies","Los Angeles","United States","AA-10375","Allen Armold",0.2,"2014-04-21","CA-2014-158064",90008,"OFF-BI-10002976","ACCOHIDE Binder by Acco",5.5755005,5,"West",1173,16.52,"Consumer","2014-04-25","Standard Class","California","Binders" +"Office Supplies","Henderson","United States","DB-13555","Dorothy Badders",0,"2014-04-21","CA-2014-136336",42420,"OFF-ST-10002574","SAFCO Commercial Wire Shelving. Black",0,6,"South",9075,828.83997,"Corporate","2014-04-26","Standard Class","Kentucky","Storage" +"Office Supplies","Troy","United States","AH-10030","Aaron Hawkins",0.2,"2014-04-22","CA-2014-122070",12180,"OFF-BI-10004970","ACCOHIDE 3-Ring Binder. Blue. 
1""",3.3453,3,"East",7165,9.912,"Corporate","2014-04-24","Second Class","New York","Binders" +"Office Supplies","Troy","United States","AH-10030","Aaron Hawkins",0,"2014-04-22","CA-2014-122070",12180,"OFF-EN-10004773","Staple envelope",121.441605,8,"East",7164,247.84,"Corporate","2014-04-24","Second Class","New York","Envelopes" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.3,"2014-04-23","US-2014-150119",43229,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",-12.058801,2,"East",3406,281.372,"Home Office","2014-04-27","Standard Class","Ohio","Chairs" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.3,"2014-04-23","US-2014-150119",43229,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",-12.058801,2,"East",3407,281.372,"Home Office","2014-04-27","Standard Class","Ohio","Chairs" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.2,"2014-04-23","US-2014-150119",43229,"FUR-FU-10002191","G.E. 
Halogen Desk Lamp Bulbs",7.8176003,4,"East",3409,22.336,"Home Office","2014-04-27","Standard Class","Ohio","Furnishings" +"Office Supplies","Columbus","United States","LB-16795","Laurel Beltran",0.7,"2014-04-23","US-2014-150119",43229,"OFF-BI-10000145","Zipper Ring Binder Pockets",-5.2415996,8,"East",3408,7.488,"Home Office","2014-04-27","Standard Class","Ohio","Binders" +"Office Supplies","Philadelphia","United States","TS-21160","Theresa Swint",0.7,"2014-04-23","US-2014-112564",19134,"OFF-BI-10004876","Wilson Jones Suede Grain Vinyl Binders",-1.7514,3,"East",1449,2.502,"Corporate","2014-04-24","First Class","Pennsylvania","Binders" +"Office Supplies","San Francisco","United States","NM-18445","Nathan Mautz",0,"2014-04-23","CA-2014-111857",94109,"OFF-PA-10001878","Xerox 1891",22.9877,1,"West",9290,48.91,"Home Office","2014-04-26","Second Class","California","Paper" +"Furniture","San Gabriel","United States","BT-11305","Beth Thompson",0.2,"2014-04-25","CA-2014-166954",91776,"FUR-CH-10003973","GuestStacker Chair with Chrome Finish Legs",148.704,5,"West",4378,1487.04,"Home Office","2014-04-30","Standard Class","California","Chairs" +"Furniture","San Gabriel","United States","BT-11305","Beth Thompson",0,"2014-04-25","CA-2014-166954",91776,"FUR-FU-10003708","Tenex Traditional Chairmats for Medium Pile Carpet. Standard Lip. 
36"" x 48""",63.6825,5,"West",4376,303.25,"Home Office","2014-04-30","Standard Class","California","Furnishings" +"Office Supplies","San Gabriel","United States","BT-11305","Beth Thompson",0,"2014-04-25","CA-2014-166954",91776,"OFF-AP-10001391","Kensington 6 Outlet MasterPiece HOMEOFFICE Power Control Center",78.508804,3,"West",4377,270.72,"Home Office","2014-04-30","Standard Class","California","Appliances" +"Office Supplies","Houston","United States","DA-13450","Dianna Arnett",0.2,"2014-04-25","US-2014-157406",77095,"OFF-AR-10002221","12 Colored Short Pencils",0.546,3,"Central",1750,6.24,"Home Office","2014-04-29","Standard Class","Texas","Art" +"Office Supplies","Houston","United States","DA-13450","Dianna Arnett",0.2,"2014-04-25","US-2014-157406",77095,"OFF-PA-10003543","Xerox 1985",3.6287997,2,"Central",1749,10.368001,"Home Office","2014-04-29","Standard Class","Texas","Paper" +"Technology","Wilmington","United States","LC-16885","Lena Creighton",0.2,"2014-04-25","CA-2014-110100",28403,"TEC-PH-10004531","AT&T CL2909",37.797,3,"South",2714,302.376,"Consumer","2014-04-29","Standard Class","North Carolina","Phones" +"Furniture","Los Angeles","United States","AG-10300","Aleksandra Gannaway",0.2,"2014-04-26","CA-2014-147298",90049,"FUR-CH-10004886","Bevis Steel Folding Chairs",23.028,3,"West",2615,230.28,"Corporate","2014-05-03","Standard Class","California","Chairs" +"Office Supplies","Los Angeles","United States","MH-17290","Marc Harrigan",0,"2014-04-26","CA-2014-116785",90036,"OFF-AR-10003504","Newell 347",6.206,5,"West",7197,21.4,"Home Office","2014-04-30","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","AG-10300","Aleksandra Gannaway",0.2,"2014-04-26","CA-2014-147298",90049,"OFF-BI-10001525","Acco Pressboard Covers with Storage Hooks. 14 7/8"" x 11"". 
Executive Red",5.715,6,"West",2616,18.288,"Corporate","2014-05-03","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","MH-17290","Marc Harrigan",0,"2014-04-26","CA-2014-116785",90036,"OFF-LA-10000305","Avery 495",5.796,2,"West",7198,12.6,"Home Office","2014-04-30","Standard Class","California","Labels" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-AR-10004752","Blackstonian Pencils",1.4952,2,"East",1776,5.34,"Corporate","2014-05-03","Standard Class","Maryland","Art" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-BI-10001628","Acco Data Flex Cable Posts For Top & Bottom Load Binders. 6"" Capacity",9.387,2,"East",1774,20.859999,"Corporate","2014-05-03","Standard Class","Maryland","Binders" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-LA-10000452","Avery 488",1.512,1,"East",1777,3.1499999,"Corporate","2014-05-03","Standard Class","Maryland","Labels" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-LA-10001613","Avery File Folder Labels",2.5056002,3,"East",3609,6.912,"Corporate","2014-05-03","Standard Class","Ohio","Labels" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-ST-10000877","Recycled Steel Personal File for Standard File Folders",129.3786,9,"East",1775,497.61002,"Corporate","2014-05-03","Standard Class","Maryland","Storage" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-ST-10001128","Carina Mini System Audio Rack. 
Model AR050B",-37.733196,2,"East",3611,177.568,"Corporate","2014-05-03","Standard Class","Ohio","Storage" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-ST-10002214","X-Rack File for Hanging Folders",2.0322,3,"East",3610,27.096,"Corporate","2014-05-03","Standard Class","Ohio","Storage" +"Technology","Jamestown","United States","DS-13180","David Smith",0,"2014-04-28","CA-2014-132983",14701,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",57.5928,2,"East",8921,159.98,"Corporate","2014-05-03","Standard Class","New York","Accessories" +"Technology","San Francisco","United States","AI-10855","Arianne Irving",0.2,"2014-04-28","CA-2014-131387",94122,"TEC-PH-10001459","Samsung Galaxy Mega 6.3",125.997,5,"West",8452,1679.9601,"Consumer","2014-04-30","First Class","California","Phones" +"Furniture","Memphis","United States","LA-16780","Laura Armstrong",0.2,"2014-04-29","CA-2014-107811",38109,"FUR-CH-10001394","Global Leather Executive Chair",70.198,2,"South",6375,561.58405,"Corporate","2014-05-03","Standard Class","Tennessee","Chairs" +"Furniture","Lake Charles","United States","HG-14845","Harry Greene",0,"2014-04-29","CA-2014-142727",70601,"FUR-CH-10002304","Global Stack Chair without Arms. 
Black",12.99,2,"South",2891,51.960003,"Consumer","2014-05-01","Second Class","Louisiana","Chairs" +"Furniture","Rochester","United States","LL-16840","Lauren Leatherbury",0,"2014-04-29","CA-2014-134061",14609,"FUR-FU-10001424","Dax Clear Box Frame",5.9364004,2,"East",885,17.46,"Consumer","2014-05-04","Standard Class","New York","Furnishings" +"Office Supplies","Lake Charles","United States","HG-14845","Harry Greene",0,"2014-04-29","CA-2014-142727",70601,"OFF-BI-10000136","Avery Non-Stick Heavy Duty View Round Locking Ring Binders",8.611201,3,"South",2892,17.94,"Consumer","2014-05-01","Second Class","Louisiana","Binders" +"Office Supplies","Memphis","United States","LA-16780","Laura Armstrong",0.2,"2014-04-29","CA-2014-107811",38109,"OFF-ST-10000798","2300 Heavy-Duty Transfer File Systems by Perma",-1.249,5,"South",6376,99.92,"Corporate","2014-05-03","Standard Class","Tennessee","Storage" +"Technology","Dover","United States","KH-16360","Katherine Hughes",0,"2014-04-29","CA-2014-118304",19901,"TEC-PH-10000376","Square Credit Card Reader",5.1948004,2,"East",8267,19.98,"Consumer","2014-05-04","Standard Class","Delaware","Phones" +"Office Supplies","Louisville","United States","JM-16195","Justin MacKendrick",0,"2014-04-30","CA-2014-160094",40214,"OFF-AR-10004010","Hunt Boston Vacuum Mount KS Pencil Sharpener",45.487003,5,"South",8636,174.95,"Consumer","2014-05-02","First Class","Kentucky","Art" +"Office Supplies","Louisville","United States","JM-16195","Justin MacKendrick",0,"2014-04-30","CA-2014-160094",40214,"OFF-ST-10000585","Economy Rollaway Files",214.76,5,"South",8637,826,"Consumer","2014-05-02","First Class","Kentucky","Storage" +"Technology","Jackson","United States","TM-21010","Tamara Manning",0,"2014-04-30","CA-2014-156006",39212,"TEC-AC-10002550","Maxell 4.7GB DVD-RW 3/Pack",16.248602,3,"South",5005,47.79,"Consumer","2014-05-02","Second Class","Mississippi","Accessories" +"Office Supplies","Columbus","United States","AR-10510","Andrew 
Roberts",0.2,"2014-05-02","CA-2014-115056",43229,"OFF-AP-10003971","Belkin 6 Outlet Metallic Surge Strip",1.9602,3,"East",5825,26.136002,"Consumer","2014-05-02","Same Day","Ohio","Appliances" +"Technology","Lakeland","United States","CS-11860","Cari Schnelling",0.2,"2014-05-02","CA-2014-124618",33801,"TEC-CO-10004202","Brother DCP1000 Digital 3 in 1 Multifunction Machine",89.997,2,"South",7037,479.98398,"Consumer","2014-05-04","Second Class","Florida","Copiers" +"Office Supplies","Yonkers","United States","NC-18415","Nathan Cano",0.2,"2014-05-03","CA-2014-135755",10701,"OFF-BI-10003981","Avery Durable Plastic 1"" Binders",3.9498,3,"East",3863,10.896,"Consumer","2014-05-10","Standard Class","New York","Binders" +"Office Supplies","Yonkers","United States","NC-18415","Nathan Cano",0.2,"2014-05-03","CA-2014-135755",10701,"OFF-BI-10004828","GBC Poly Designer Binding Covers",14.563801,3,"East",3862,40.176,"Consumer","2014-05-10","Standard Class","New York","Binders" +"Office Supplies","Appleton","United States","JS-15940","Joni Sundaresam",0,"2014-05-03","CA-2014-130274",54915,"OFF-LA-10002195","Avery 481",10.348801,7,"Central",1553,21.56,"Home Office","2014-05-05","First Class","Wisconsin","Labels" +"Furniture","Middletown","United States","AA-10480","Andrew Allen",0,"2014-05-04","CA-2014-155271",6457,"FUR-FU-10001473","DAX Wood Document Frame",9.8856,2,"East",1460,27.46,"Consumer","2014-05-04","Same Day","Connecticut","Furnishings" +"Furniture","Seattle","United States","BS-11365","Bill Shonely",0,"2014-05-04","US-2014-148194",98105,"FUR-FU-10001852","Eldon Regeneration Recycled Desk Accessories. 
Smoke",3.8976,7,"West",7999,12.18,"Corporate","2014-05-07","First Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","BS-11365","Bill Shonely",0,"2014-05-04","US-2014-148194",98105,"OFF-AP-10000696","Holmes Odor Grabber",19.0344,4,"West",8000,57.68,"Corporate","2014-05-07","First Class","Washington","Appliances" +"Office Supplies","Houston","United States","NC-18340","Nat Carroll",0.2,"2014-05-04","CA-2014-111899",77036,"OFF-AR-10001725","Boston Home & Office Model 2000 Electric Pencil Sharpeners",2.8379998,2,"Central",7296,37.84,"Consumer","2014-05-05","First Class","Texas","Art" +"Office Supplies","Jackson","United States","PM-18940","Paul MacIntyre",0,"2014-05-04","CA-2014-117709",49201,"OFF-BI-10001294","Fellowes Binding Cases",21.06,4,"Central",1166,46.8,"Consumer","2014-05-08","Standard Class","Michigan","Binders" +"Office Supplies","Houston","United States","NC-18340","Nat Carroll",0.2,"2014-05-04","CA-2014-111899",77036,"OFF-FA-10000840","OIC Thumb-Tacks",1.8468,6,"Central",7297,5.472,"Consumer","2014-05-05","First Class","Texas","Fasteners" +"Furniture","San Antonio","United States","EB-13870","Emily Burns",0.3,"2014-05-05","CA-2014-110219",78207,"FUR-CH-10001146","Global Value Mid-Back Manager's Chair. 
Gray",-9.1335,3,"Central",4560,127.869,"Consumer","2014-05-08","First Class","Texas","Chairs" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-AR-10003651","Newell 350",1.9023999,2,"South",4213,6.56,"Consumer","2014-05-09","Second Class","Kentucky","Art" +"Office Supplies","Arlington","United States","GD-14590","Giulietta Dortch",0,"2014-05-05","CA-2014-111934",22204,"OFF-BI-10004364","Storex Dura Pro Binders",5.346,2,"South",8596,11.88,"Corporate","2014-05-07","First Class","Virginia","Binders" +"Office Supplies","Arlington","United States","GD-14590","Giulietta Dortch",0,"2014-05-05","CA-2014-111934",22204,"OFF-PA-10000474","Easy-staple paper",16.6568,1,"South",8597,35.440002,"Corporate","2014-05-07","First Class","Virginia","Paper" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-PA-10001363","Xerox 1933",11.5432005,2,"South",4214,24.56,"Consumer","2014-05-09","Second Class","Kentucky","Paper" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-PA-10003016","Adams ""While You Were Out"" Message Pads",4.239,3,"South",4212,9.42,"Consumer","2014-05-09","Second Class","Kentucky","Paper" +"Office Supplies","Freeport","United States","KH-16330","Katharine Harms",0.2,"2014-05-05","CA-2014-160738",61032,"OFF-ST-10003442","Eldon Portable Mobile Manager",3.9592001,2,"Central",4656,45.248,"Corporate","2014-05-10","Standard Class","Illinois","Storage" +"Office Supplies","Los Angeles","United States","SN-20710","Steve Nguyen",0,"2014-05-06","US-2014-154879",90004,"OFF-AR-10001897","Model L Table or Wall-Mount Pencil Sharpener",30.223202,6,"West",4411,107.94,"Home Office","2014-05-11","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","HP-14815","Harold Pawlan",0.2,"2014-05-06","CA-2014-121664",90049,"OFF-BI-10003684","Wilson Jones Legal Size Ring 
Binders",52.776,8,"West",1335,140.736,"Home Office","2014-05-10","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","SN-20710","Steve Nguyen",0,"2014-05-06","US-2014-154879",90004,"OFF-LA-10004425","Staple-on labels",2.7166002,2,"West",4410,5.7799997,"Home Office","2014-05-11","Standard Class","California","Labels" +"Furniture","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"FUR-CH-10004675","Lifetime Advantage Folding Chairs. 4/Carton",244.24959,4,"East",6566,872.32,"Consumer","2014-05-12","Standard Class","Massachusetts","Chairs" +"Furniture","Lawrence","United States","CT-11995","Carol Triggs",0.3,"2014-05-07","CA-2014-166086",1841,"FUR-TA-10003469","Balt Split Level Computer Training Table",-38.85,2,"East",6564,194.25,"Consumer","2014-05-12","Standard Class","Massachusetts","Tables" +"Office Supplies","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"OFF-AR-10002578","Newell 335",2.5056002,3,"East",6565,8.64,"Consumer","2014-05-12","Standard Class","Massachusetts","Art" +"Office Supplies","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"OFF-BI-10001078","Acco PRESSTEX Data Binder with Storage Hooks. Dark Blue. 
14 7/8"" X 11""",7.9086,3,"East",6563,16.140001,"Consumer","2014-05-12","Standard Class","Massachusetts","Binders" +"Technology","Philadelphia","United States","KT-16480","Kean Thornton",0.4,"2014-05-08","CA-2014-116666",19134,"TEC-CO-10001449","Hewlett Packard LaserJet 3310 Copier",239.99599,5,"East",8800,1799.97,"Consumer","2014-05-10","First Class","Pennsylvania","Copiers" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-LA-10001641","Avery 518",4.5360003,3,"South",3539,9.45,"Corporate","2014-05-09","Same Day","Kentucky","Labels" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-LA-10003121","Avery 506",9.499,5,"South",3540,20.65,"Corporate","2014-05-09","Same Day","Kentucky","Labels" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-PA-10002689","Weyerhaeuser First Choice Laser/Copy Paper (20Lb. 
and 88 Bright)",21.7728,7,"South",3541,45.36,"Corporate","2014-05-09","Same Day","Kentucky","Paper" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-ST-10000107","Fellowes Super Stor/Drawer",14.985,3,"South",3538,83.25,"Corporate","2014-05-09","Same Day","Kentucky","Storage" +"Technology","San Francisco","United States","PG-18820","Patrick Gardner",0,"2014-05-09","US-2014-102071",94110,"TEC-AC-10000109","Sony Micro Vault Click 16 GB USB 2.0 Flash Drive",40.312798,3,"West",826,167.97,"Consumer","2014-05-15","Standard Class","California","Accessories" +"Technology","San Francisco","United States","PG-18820","Patrick Gardner",0,"2014-05-09","US-2014-102071",94110,"TEC-AC-10003441","Kingston Digital DataTraveler 32GB USB 2.0",4.068,4,"West",825,67.8,"Consumer","2014-05-15","Standard Class","California","Accessories" +"Furniture","Philadelphia","United States","AF-10885","Art Foster",0.5,"2014-05-10","CA-2014-114181",19134,"FUR-BO-10004467","Bestar Classic Bookcase",-216.9783,7,"East",7555,349.965,"Consumer","2014-05-14","Second Class","Pennsylvania","Bookcases" +"Furniture","San Jose","United States","GM-14680","Greg Matthias",0.2,"2014-05-10","CA-2014-103310",95123,"FUR-CH-10002320","Hon Pagoda Stacking Chairs",32.098,1,"West",9200,256.784,"Consumer","2014-05-15","Standard Class","California","Chairs" +"Office Supplies","Philadelphia","United States","AF-10885","Art Foster",0.2,"2014-05-10","CA-2014-114181",19134,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",5.301,5,"East",7556,22.32,"Consumer","2014-05-14","Second Class","Pennsylvania","Art" +"Office Supplies","Kent","United States","AI-10855","Arianne Irving",0,"2014-05-10","CA-2014-159709",98031,"OFF-EN-10001434","Strathmore #10 Envelopes. 
Ultimate White",77.4837,3,"West",5197,158.13,"Consumer","2014-05-15","Standard Class","Washington","Envelopes" +"Office Supplies","San Jose","United States","GM-14680","Greg Matthias",0,"2014-05-10","CA-2014-103310",95123,"OFF-PA-10004353","Southworth 25% Cotton Premium Laser Paper and Envelopes",19.1808,2,"West",9197,39.960003,"Consumer","2014-05-15","Standard Class","California","Paper" +"Office Supplies","San Jose","United States","GM-14680","Greg Matthias",0,"2014-05-10","CA-2014-103310",95123,"OFF-SU-10004737","Acme Design Stainless Steel Bent Scissors",11.0808,6,"West",9199,41.04,"Consumer","2014-05-15","Standard Class","California","Supplies" +"Technology","San Jose","United States","GM-14680","Greg Matthias",0.2,"2014-05-10","CA-2014-103310",95123,"TEC-PH-10001817","Wilson Electronics DB Pro Signal Booster",125.3,5,"West",9198,1432,"Consumer","2014-05-15","Standard Class","California","Phones" +"Technology","Kent","United States","AI-10855","Arianne Irving",0.2,"2014-05-10","CA-2014-159709",98031,"TEC-PH-10003988","LF Elite 3D Dazzle Designer Hard Case Cover. Lf Stylus Pen and Wiper For Apple Iphone 5c Mini Lite",4.36,5,"West",5198,43.6,"Consumer","2014-05-15","Standard Class","Washington","Phones" +"Furniture","Huntsville","United States","VW-21775","Victoria Wilson",0.3,"2014-05-11","CA-2014-152100",77340,"FUR-CH-10000015","Hon Multipurpose Stacking Arm Chairs",-69.312004,8,"Central",9429,1212.9601,"Corporate","2014-05-16","Standard Class","Texas","Chairs" +"Furniture","Fort Worth","United States","PG-18895","Paul Gonzalez",0.6,"2014-05-11","CA-2014-103849",76106,"FUR-FU-10000723","Deflect-o EconoMat Studded. 
No Bevel Mat for Low Pile Carpeting",-84.29279,4,"Central",546,66.112,"Consumer","2014-05-16","Standard Class","Texas","Furnishings" +"Office Supplies","Watertown","United States","BG-11695","Brooke Gillingham",0,"2014-05-11","CA-2014-127523",13601,"OFF-AP-10004249","Staple holder",9.695701,3,"East",4125,35.91,"Corporate","2014-05-18","Standard Class","New York","Appliances" +"Office Supplies","Chicago","United States","HD-14785","Harold Dahlen",0.8,"2014-05-11","CA-2014-119172",60610,"OFF-BI-10002026","Avery Arch Ring Binders",-172.557,9,"Central",9591,104.58,"Home Office","2014-05-15","Standard Class","Illinois","Binders" +"Office Supplies","Chicago","United States","HD-14785","Harold Dahlen",0.2,"2014-05-11","CA-2014-119172",60610,"OFF-PA-10003036","Black Print Carbonless 8 1/2"" x 8 1/4"" Rapid Memo Book",5.6784,3,"Central",9590,17.472,"Home Office","2014-05-15","Standard Class","Illinois","Paper" +"Technology","Houston","United States","RM-19375","Raymond Messe",0.2,"2014-05-11","CA-2014-139017",77095,"TEC-AC-10001013","Logitech ClearChat Comfort/USB Headset H390",7.6154003,2,"Central",1759,46.864002,"Consumer","2014-05-17","Standard Class","Texas","Accessories" +"Technology","Fort Worth","United States","PG-18895","Paul Gonzalez",0.2,"2014-05-11","CA-2014-103849",76106,"TEC-AC-10001465","SanDisk Cruzer 64 GB USB Flash Drive",7.264,2,"Central",544,58.111996,"Consumer","2014-05-16","Standard Class","Texas","Accessories" +"Technology","Fort Worth","United States","PG-18895","Paul Gonzalez",0.2,"2014-05-11","CA-2014-103849",76106,"TEC-PH-10002597","Xblue XB-1670-86 X16 Small Office Telephone - Titanium",6.2995,1,"Central",545,100.79199,"Consumer","2014-05-16","Standard Class","Texas","Phones" +"Furniture","Milwaukee","United States","HL-15040","Hunter Lopez",0,"2014-05-12","CA-2014-127159",53209,"FUR-FU-10000010","DAX Value U-Channel Document Frames. 
Easel Back",10.7849,7,"Central",1658,34.79,"Consumer","2014-05-15","First Class","Wisconsin","Furnishings" +"Furniture","Franklin","United States","KT-16480","Kean Thornton",0.3,"2014-05-12","CA-2014-155887",2038,"FUR-TA-10002228","Bevis Traditional Conference Table Top. Plinth Base",-130.01039,3,"East",2747,700.05597,"Consumer","2014-05-17","Standard Class","Massachusetts","Tables" +"Furniture","Los Angeles","United States","AH-10030","Aaron Hawkins",0.2,"2014-05-13","CA-2014-113768",90004,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 19w x 6d x 43h",20.9592,6,"West",5355,279.456,"Corporate","2014-05-19","Standard Class","California","Chairs" +"Office Supplies","Hialeah","United States","EH-14005","Erica Hernandez",0.7,"2014-05-13","CA-2014-140165",33012,"OFF-BI-10004519","GBC DocuBind P100 Manual Binding Machine",-331.96,8,"South",1888,398.352,"Home Office","2014-05-15","First Class","Florida","Binders" +"Office Supplies","Hialeah","United States","EH-14005","Erica Hernandez",0.2,"2014-05-13","CA-2014-140165",33012,"OFF-FA-10002815","Staples",2.3976,2,"South",1887,7.104,"Home Office","2014-05-15","First Class","Florida","Fasteners" +"Office Supplies","Los Angeles","United States","AH-10030","Aaron Hawkins",0,"2014-05-13","CA-2014-113768",90004,"OFF-PA-10003063","EcoTones Memo Sheets",3.84,2,"West",5356,8,"Corporate","2014-05-19","Standard Class","California","Paper" +"Office Supplies","West Jordan","United States","AG-10270","Alejandro Grove",0,"2014-05-13","CA-2014-167164",84084,"OFF-ST-10000107","Fellowes Super Stor/Drawer",9.99,2,"West",18,55.5,"Consumer","2014-05-15","Second Class","Utah","Storage" +"Technology","Oakland","United States","NP-18325","Naresj Patel",0,"2014-05-13","CA-2014-114433",94601,"TEC-AC-10002800","Plantronics Audio 478 Stereo USB Headset",52.489502,3,"West",3217,149.97,"Consumer","2014-05-17","Standard Class","California","Accessories" +"Furniture","Saint Petersburg","United States","AJ-10795","Anthony 
Johnson",0.2,"2014-05-14","CA-2014-106810",33710,"FUR-FU-10004306","Electrix Halogen Magnifier Lamp",23.316,2,"South",769,310.87997,"Corporate","2014-05-20","Standard Class","Florida","Furnishings" +"Furniture","Los Angeles","United States","PW-19030","Pauline Webber",0.2,"2014-05-16","CA-2014-125150",90036,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 19w x 6d x 43h",17.466,5,"West",7825,232.88,"Corporate","2014-05-23","Standard Class","California","Chairs" +"Technology","San Francisco","United States","CC-12475","Cindy Chapman",0,"2014-05-16","US-2014-163146",94122,"TEC-AC-10002217","Imation Clip USB flash drive - 8 GB",3.3839998,3,"West",8536,56.4,"Consumer","2014-05-20","Standard Class","California","Accessories" +"Office Supplies","Eau Claire","United States","PA-19060","Pete Armstrong",0,"2014-05-17","CA-2014-148761",54703,"OFF-BI-10000666","Surelock Post Binders",45.84,3,"Central",3006,91.68,"Home Office","2014-05-21","Standard Class","Wisconsin","Binders" +"Furniture","Akron","United States","TT-21070","Ted Trevino",0.2,"2014-05-18","CA-2014-164224",44312,"FUR-FU-10000308","Deflect-o Glass Clear Studded Chair Mats",3.7308,3,"East",3201,149.232,"Consumer","2014-05-20","Second Class","Ohio","Furnishings" +"Office Supplies","Akron","United States","TT-21070","Ted Trevino",0.2,"2014-05-18","CA-2014-164224",44312,"OFF-PA-10001526","Xerox 1949",5.7768,4,"East",3202,15.935999,"Consumer","2014-05-20","Second Class","Ohio","Paper" +"Office Supplies","Dallas","United States","GA-14515","George Ashbrook",0.2,"2014-05-18","CA-2014-133963",75220,"OFF-PA-10001526","Xerox 1949",1.4441999,1,"Central",9833,3.984,"Consumer","2014-05-22","Second Class","Texas","Paper" +"Technology","Cleveland","United States","BS-11755","Bruce Stewart",0.4,"2014-05-18","CA-2014-103373",44105,"TEC-PH-10002885","Apple iPhone 5",-168.9558,2,"East",1830,779.796,"Consumer","2014-05-24","Standard Class","Ohio","Phones" +"Office Supplies","Midland","United States","JH-15820","John 
Huston",0,"2014-05-19","US-2014-107699",48640,"OFF-BI-10001249","Avery Heavy-Duty EZD View Binder with Locking Rings",26.4132,9,"Central",4143,57.420002,"Consumer","2014-05-23","Standard Class","Michigan","Binders" +"Office Supplies","Virginia Beach","United States","TD-20995","Tamara Dahlen",0,"2014-05-19","CA-2014-130155",23464,"OFF-SU-10004737","Acme Design Stainless Steel Bent Scissors",9.233999,5,"South",9603,34.2,"Consumer","2014-05-22","First Class","Virginia","Supplies" +"Furniture","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"FUR-FU-10000076","24-Hour Round Wall Clock",60.139797,7,"East",2132,139.86,"Corporate","2014-05-22","Second Class","New Hampshire","Furnishings" +"Furniture","San Marcos","United States","MC-17590","Matt Collister",0.6,"2014-05-20","CA-2014-130673",78666,"FUR-FU-10003489","Contemporary Borderless Frame",-5.9409,3,"Central",8581,10.332,"Corporate","2014-05-22","Second Class","Texas","Furnishings" +"Office Supplies","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"OFF-AP-10000124","Acco 6 Outlet Guardian Basic Surge Suppressor",9.3184,4,"East",2130,33.280003,"Corporate","2014-05-22","Second Class","New Hampshire","Appliances" +"Office Supplies","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"OFF-AR-10004511","Sanford Colorific Scented Colored Pencils. 
12/Pack",11.9412,9,"East",2131,38.52,"Corporate","2014-05-22","Second Class","New Hampshire","Art" +"Office Supplies","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"OFF-PA-10000289","Xerox 213",3.6287997,2,"Central",8582,10.368001,"Corporate","2014-05-22","Second Class","Texas","Paper" +"Office Supplies","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"OFF-ST-10000636","Rogers Profile Extra Capacity Storage Tub",-13.392,5,"Central",8584,66.96,"Corporate","2014-05-22","Second Class","Texas","Storage" +"Technology","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"TEC-AC-10004227","SanDisk Ultra 16 GB MicroSDHC Class 10 Memory Card",-3.6371999,2,"Central",8583,20.784,"Corporate","2014-05-22","Second Class","Texas","Accessories" +"Furniture","Houston","United States","KH-16360","Katherine Hughes",0.3,"2014-05-21","CA-2014-127166",77070,"FUR-CH-10003396","Global Deluxe Steno Chair",-29.252401,2,"Central",9793,107.771996,"Consumer","2014-05-23","Second Class","Texas","Chairs" +"Office Supplies","Lawrence","United States","FG-14260","Frank Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-AP-10001293","Belkin 8 Outlet Surge Protector",68.8464,6,"East",5879,245.88,"Home Office","2014-05-26","Standard Class","Massachusetts","Appliances" +"Office Supplies","San Diego","United States","BT-11530","Bradley Talbott",0,"2014-05-21","CA-2014-104780",92037,"OFF-AR-10003514","4009 Highlighters by Sanford",10.5072,8,"West",3079,31.84,"Home Office","2014-05-25","Standard Class","California","Art" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.8,"2014-05-21","CA-2014-127166",77070,"OFF-BI-10000977","Ibico Plastic Spiral Binding Combs",-31.008,3,"Central",9795,18.240002,"Consumer","2014-05-23","Second Class","Texas","Binders" +"Office Supplies","Lawrence","United States","FG-14260","Frank 
Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-BI-10001071","GBC ProClick Punch Binding System",219.4514,7,"East",5877,447.86,"Home Office","2014-05-26","Standard Class","Massachusetts","Binders" +"Office Supplies","Virginia Beach","United States","MW-18235","Mitch Willingham",0,"2014-05-21","CA-2014-117639",23464,"OFF-BI-10003925","Fellowes PB300 Plastic Comb Binding Machine",1276.487,7,"South",995,2715.93,"Corporate","2014-05-25","Standard Class","Virginia","Binders" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.2,"2014-05-21","CA-2014-127166",77070,"OFF-EN-10003134","Staple envelope",21.024,6,"Central",9792,56.064003,"Consumer","2014-05-23","Second Class","Texas","Envelopes" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.2,"2014-05-21","CA-2014-127166",77070,"OFF-PA-10001560","Adams Telephone Message Books. 5 1/4” x 11”",1.6308,1,"Central",9794,4.8320003,"Consumer","2014-05-23","Second Class","Texas","Paper" +"Office Supplies","Lawrence","United States","FG-14260","Frank Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-PA-10003022","Xerox 1992",8.7906,3,"East",5878,17.94,"Home Office","2014-05-26","Standard Class","Massachusetts","Paper" +"Technology","Virginia Beach","United States","MW-18235","Mitch Willingham",0,"2014-05-21","CA-2014-117639",23464,"TEC-PH-10001530","Plantronics Voyager Pro Legend",173.0316,3,"South",996,617.97003,"Corporate","2014-05-25","Standard Class","Virginia","Phones" +"Technology","Decatur","United States","EH-14185","Evan Henry",0,"2014-05-22","CA-2014-143840",35601,"TEC-PH-10002660","Nortel Networks T7316 E Nt8 B27",33.995003,2,"South",3268,135.98,"Consumer","2014-05-29","Standard Class","Alabama","Phones" +"Technology","Decatur","United States","EH-14185","Evan Henry",0,"2014-05-22","CA-2014-143840",35601,"TEC-PH-10003171","Plantronics Encore H101 Dual Earpieces Headset",12.5859995,1,"South",3269,44.95,"Consumer","2014-05-29","Standard Class","Alabama","Phones" 
+"Office Supplies","New York City","United States","GT-14710","Greg Tran",0,"2014-05-23","US-2014-135881",10035,"OFF-AP-10002118","1.7 Cubic Foot Compact ""Cube"" Office Refrigerators",56.203197,1,"East",7013,208.16,"Consumer","2014-05-27","Standard Class","New York","Appliances" +"Office Supplies","Philadelphia","United States","AR-10510","Andrew Roberts",0.2,"2014-05-23","US-2014-105767",19134,"OFF-AR-10001246","Newell 317",2.3813999,9,"East",609,21.168,"Consumer","2014-05-27","Standard Class","Pennsylvania","Art" +"Office Supplies","New York City","United States","GT-14710","Greg Tran",0.2,"2014-05-23","US-2014-135881",10035,"OFF-BI-10000829","Avery Non-Stick Binders",5.8370004,5,"East",7011,17.96,"Consumer","2014-05-27","Standard Class","New York","Binders" +"Office Supplies","Philadelphia","United States","AR-10510","Andrew Roberts",0.7,"2014-05-23","US-2014-105767",19134,"OFF-BI-10000848","Angle-D Ring Binders",-2.6256,2,"East",608,3.282,"Consumer","2014-05-27","Standard Class","Pennsylvania","Binders" +"Office Supplies","Bellevue","United States","EH-13990","Erica Hackney",0,"2014-05-23","CA-2014-138513",98006,"OFF-PA-10003177","Xerox 1999",6.2208,2,"West",6628,12.96,"Consumer","2014-05-27","Standard Class","Washington","Paper" +"Office Supplies","New York City","United States","GT-14710","Greg Tran",0,"2014-05-23","US-2014-135881",10035,"OFF-SU-10003002","Letter Slitter",0.15120001,2,"East",7012,5.04,"Consumer","2014-05-27","Standard Class","New York","Supplies" +"Technology","Philadelphia","United States","AR-10510","Andrew Roberts",0.4,"2014-05-23","US-2014-105767",19134,"TEC-PH-10003092","Motorola L804",-10.117801,2,"East",610,55.188,"Consumer","2014-05-27","Standard Class","Pennsylvania","Phones" +"Office Supplies","Baltimore","United States","RP-19390","Resi Pölking",0,"2014-05-24","CA-2014-143413",21215,"OFF-PA-10002319","Xerox 1944",56.9772,3,"East",2704,116.28,"Consumer","2014-05-30","Standard Class","Maryland","Paper" +"Furniture","Chicago","United 
States","JL-15235","Janet Lee",0.6,"2014-05-25","US-2014-130379",60623,"FUR-FU-10002553","Electrix Incandescent Magnifying Lamp. Black",-24.189001,2,"Central",1820,29.32,"Consumer","2014-05-29","Standard Class","Illinois","Furnishings" +"Office Supplies","Chicago","United States","JL-15235","Janet Lee",0.8,"2014-05-25","US-2014-130379",60623,"OFF-AP-10001394","Harmony Air Purifier",-166.32,2,"Central",1819,75.6,"Consumer","2014-05-29","Standard Class","Illinois","Appliances" +"Office Supplies","New York City","United States","BW-11065","Barry Weirich",0,"2014-05-25","CA-2014-100391",10035,"OFF-PA-10001471","Strathmore Photo Frame Cards",6.7251997,2,"East",9441,14.620001,"Consumer","2014-05-29","Standard Class","New York","Paper" +"Furniture","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.15,"2014-05-26","CA-2014-156349",90008,"FUR-BO-10000362","Sauder Inglewood Library Bookcases",27.356798,2,"West",1611,290.66602,"Corporate","2014-05-30","Standard Class","California","Bookcases" +"Furniture","Los Angeles","United States","HF-14995","Herbert Flentye",0.2,"2014-05-26","CA-2014-158029",90008,"FUR-CH-10000988","Hon Olson Stacker Stools",22.529602,2,"West",2752,225.29599,"Consumer","2014-05-30","Standard Class","California","Chairs" +"Furniture","Chicago","United States","MM-18055","Michelle Moray",0.3,"2014-05-26","CA-2014-144029",60623,"FUR-CH-10003981","Global Commerce Series Low-Back Swivel/Tilt Chairs",-5.1396,2,"Central",6741,359.77203,"Consumer","2014-05-31","Standard Class","Illinois","Chairs" +"Office Supplies","Chicago","United States","MM-18055","Michelle Moray",0.2,"2014-05-26","CA-2014-144029",60623,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",3.1806,3,"Central",6742,13.392,"Consumer","2014-05-31","Standard Class","Illinois","Art" +"Office Supplies","Murray","United States","Dp-13240","Dean percer",0,"2014-05-26","CA-2014-129574",84107,"OFF-PA-10002893","Wirebound Service Call Books. 
5 1/2"" x 4""",23.232002,5,"West",2761,48.4,"Home Office","2014-05-29","First Class","Utah","Paper" +"Office Supplies","Chicago","United States","MM-18055","Michelle Moray",0.2,"2014-05-26","CA-2014-144029",60623,"OFF-ST-10001837","SAFCO Mobile Desk Side File. Wire Frame",7.6968,3,"Central",6740,102.62399,"Consumer","2014-05-31","Standard Class","Illinois","Storage" +"Technology","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.2,"2014-05-26","CA-2014-156349",90008,"TEC-PH-10000441","VTech DS6151",20.1584,2,"West",1612,201.584,"Corporate","2014-05-30","Standard Class","California","Phones" +"Technology","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.2,"2014-05-26","CA-2014-156349",90008,"TEC-PH-10002726","netTALK DUO VoIP Telephone Service",31.494,2,"West",1613,83.984,"Corporate","2014-05-30","Standard Class","California","Phones" +"Furniture","San Diego","United States","MH-17785","Maya Herman",0.2,"2014-05-27","CA-2014-124429",92105,"FUR-TA-10002607","KI Conference Tables",-28.355999,10,"West",747,567.12,"Corporate","2014-05-27","Same Day","California","Tables" +"Office Supplies","Chicago","United States","LE-16810","Laurel Elliston",0.8,"2014-05-27","US-2014-117058",60653,"OFF-BI-10004139","Fellowes Presentation Covers for Comb Binding Machines",-30.554998,6,"Central",1326,17.46,"Consumer","2014-05-30","First Class","Illinois","Binders" +"Office Supplies","San Diego","United States","MH-17785","Maya Herman",0,"2014-05-27","CA-2014-124429",92105,"OFF-ST-10001809","Fellowes Officeware Wire Shelving",7.1864,4,"West",748,359.31998,"Corporate","2014-05-27","Same Day","California","Storage" +"Technology","San Francisco","United States","LC-16885","Lena Creighton",0,"2014-05-27","CA-2014-139192",94109,"TEC-AC-10001606","Logitech Wireless Performance Mouse MX for PC and Mac",37.9962,1,"West",1050,99.99,"Consumer","2014-06-01","Second Class","California","Accessories" +"Technology","San Francisco","United States","LC-16885","Lena 
Creighton",0.2,"2014-05-27","CA-2014-139192",94109,"TEC-PH-10000486","Plantronics HL10 Handset Lifter",125.269196,12,"West",1049,1113.504,"Consumer","2014-06-01","Second Class","California","Phones" +"Office Supplies","Seattle","United States","MM-17260","Magdelene Morse",0.2,"2014-05-28","CA-2014-108861",98105,"OFF-BI-10003876","Green Canvas Binder for 8-1/2"" x 14"" Sheets",51.36,4,"West",8439,136.95999,"Consumer","2014-06-01","Standard Class","Washington","Binders" +"Technology","Seattle","United States","PJ-18835","Patrick Jones",0,"2014-05-28","CA-2014-135993",98115,"TEC-AC-10004877","Imation 30456 USB Flash Drive 8GB",2.208,4,"West",5677,27.6,"Corporate","2014-06-02","Standard Class","Washington","Accessories" +"Technology","Seattle","United States","PJ-18835","Patrick Jones",0.2,"2014-05-28","CA-2014-135993",98115,"TEC-PH-10001552","I Need's 3d Hello Kitty Hybrid Silicone Case Cover for HTC One X 4g with 3d Hello Kitty Stylus Pen Green/pink",5.7408,6,"West",5676,57.408,"Corporate","2014-06-02","Standard Class","Washington","Phones" +"Furniture","Buffalo Grove","United States","SS-20410","Shahid Shariari",0.5,"2014-05-30","CA-2014-145800",60089,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",-184.8366,3,"Central",5751,355.455,"Consumer","2014-06-05","Standard Class","Illinois","Tables" +"Office Supplies","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",77.8635,3,"East",7859,235.95,"Corporate","2014-06-01","First Class","New York","Appliances" +"Office Supplies","New York City","United States","JG-15160","James Galang",0.2,"2014-05-30","CA-2014-105872",10024,"OFF-BI-10003684","Wilson Jones Legal Size Ring Binders",26.387999,4,"East",7737,70.368004,"Consumer","2014-06-06","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","LW-16825","Laurel 
Workman",0.2,"2014-05-30","CA-2014-103429",10024,"OFF-BI-10004233","GBC Pre-Punched Binding Paper. Plastic. White. 8-1/2"" x 11""",8.954399,2,"East",7857,25.584,"Corporate","2014-06-01","First Class","New York","Binders" +"Office Supplies","Richmond","United States","EB-13840","Ellis Ballard",0,"2014-05-30","CA-2014-146885",23223,"OFF-PA-10001622","Ampad Poly Cover Wirebound Steno Book. 6"" x 9"" Assorted Colors. Gregg Ruled",6.1289997,3,"South",4647,13.620001,"Corporate","2014-06-05","Standard Class","Virginia","Paper" +"Office Supplies","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"OFF-PA-10001712","Xerox 1948",17.982,4,"East",7860,39.960003,"Corporate","2014-06-01","First Class","New York","Paper" +"Technology","Chicago","United States","MC-17425","Mark Cousins",0.2,"2014-05-30","CA-2014-140473",60623,"TEC-CO-10004202","Brother DCP1000 Digital 3 in 1 Multifunction Machine",134.9955,3,"Central",8553,719.976,"Corporate","2014-06-03","Standard Class","Illinois","Copiers" +"Technology","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"TEC-PH-10003505","Geemarc AmpliPOWER60",134.56,5,"East",7858,464,"Corporate","2014-06-01","First Class","New York","Phones" +"Technology","Jackson","United States","JK-15625","Jim Karlsson",0,"2014-05-31","CA-2014-166051",39212,"TEC-PH-10002538","Grandstream GXP1160 VoIP phone",32.981705,3,"South",7979,113.729996,"Consumer","2014-06-05","Standard Class","Mississippi","Phones" +"Technology","Jackson","United States","JK-15625","Jim Karlsson",0,"2014-05-31","CA-2014-166051",39212,"TEC-PH-10002680","Samsung Galaxy Note 3",197.991,3,"South",7978,659.97003,"Consumer","2014-06-05","Standard Class","Mississippi","Phones" +"Furniture","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",580.5394,7,"Central",245,2001.8601,"Home 
Office","2014-06-06","Second Class","Minnesota","Chairs" +"Furniture","Little Rock","United States","LT-17110","Liz Thompson",0,"2014-06-01","US-2014-165659",72209,"FUR-FU-10001935","3M Hangers With Command Adhesive",9.102,6,"South",2201,22.2,"Consumer","2014-06-06","Standard Class","Arkansas","Furnishings" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-AP-10002945","Honeywell Enviracaire Portable HEPA Air Cleaner for 17' x 22' Room",496.0725,5,"Central",248,1503.25,"Home Office","2014-06-06","Second Class","Minnesota","Appliances" +"Office Supplies","Detroit","United States","QJ-19255","Quincy Jones",0.1,"2014-06-01","US-2014-157070",48234,"OFF-AP-10004859","Acco 6 Outlet Guardian Premium Surge Suppressor",12.375999,5,"Central",9151,65.52,"Corporate","2014-06-06","Standard Class","Michigan","Appliances" +"Office Supplies","Lakewood","United States","CR-12625","Corey Roper",0,"2014-06-01","CA-2014-111003",8701,"OFF-AR-10002135","Boston Heavy-Duty Trimline Electric Pencil Sharpeners",83.868004,6,"East",308,289.2,"Home Office","2014-06-06","Standard Class","New Jersey","Art" +"Office Supplies","Lakewood","United States","CR-12625","Corey Roper",0,"2014-06-01","CA-2014-111003",8701,"OFF-BI-10001072","GBC Clear Cover. 8-1/2 x 11. unpunched. 
25 covers per pack",20.9208,3,"East",307,45.480003,"Home Office","2014-06-06","Standard Class","New Jersey","Binders" +"Office Supplies","Detroit","United States","QJ-19255","Quincy Jones",0,"2014-06-01","US-2014-157070",48234,"OFF-BI-10001765","Wilson Jones Heavy-Duty Casebound Ring Binders with Metal Hinges",66.508804,4,"Central",9150,138.56,"Corporate","2014-06-06","Standard Class","Michigan","Binders" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-PA-10000061","Xerox 205",12.4416,4,"Central",249,25.92,"Home Office","2014-06-06","Second Class","Minnesota","Paper" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-PA-10004082","Adams Telephone Message Book w/Frequently-Called Numbers Space. 400 Messages per Book",23.94,6,"Central",247,47.88,"Home Office","2014-06-06","Second Class","Minnesota","Paper" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-ST-10002276","Safco Steel Mobile File Cart",41.68,2,"Central",246,166.72,"Home Office","2014-06-06","Second Class","Minnesota","Storage" +"Technology","Little Rock","United States","LT-17110","Liz Thompson",0,"2014-06-01","US-2014-165659",72209,"TEC-PH-10002563","Adtran 1202752G1",229.3018,7,"South",2202,881.93005,"Consumer","2014-06-06","Standard Class","Arkansas","Phones" +"Furniture","Orem","United States","CK-12325","Christine Kargatis",0,"2014-06-02","CA-2014-104472",84057,"FUR-FU-10000246","Aluminum Document Frame",21.996,6,"West",720,73.32,"Home Office","2014-06-07","Standard Class","Utah","Furnishings" +"Office Supplies","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"OFF-AR-10004511","Sanford Colorific Scented Colored Pencils. 
12/Pack",2.6536,2,"South",3888,8.56,"Consumer","2014-06-06","Standard Class","Georgia","Art" +"Office Supplies","Orem","United States","CK-12325","Christine Kargatis",0.2,"2014-06-02","CA-2014-104472",84057,"OFF-BI-10001658","GBC Standard Therm-A-Bind Covers",19.437601,3,"West",719,59.808,"Home Office","2014-06-07","Standard Class","Utah","Binders" +"Office Supplies","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"OFF-ST-10001490","Hot File 7-Pocket. Floor Stand",107.082,2,"South",3890,356.94,"Consumer","2014-06-06","Standard Class","Georgia","Storage" +"Technology","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"TEC-PH-10001425","Mophie Juice Pack Helium for iPhone",67.1916,3,"South",3889,239.97,"Consumer","2014-06-06","Standard Class","Georgia","Phones" +"Furniture","Decatur","United States","RE-19450","Richard Eichhorn",0.6,"2014-06-03","CA-2014-163867",62521,"FUR-FU-10001475","Contract Clock. 14"". 
Brown",-40.003597,7,"Central",9845,61.544003,"Consumer","2014-06-06","First Class","Illinois","Furnishings" +"Furniture","Seattle","United States","SC-20725","Steven Cartwright",0,"2014-06-03","CA-2014-135657",98115,"FUR-TA-10004086","KI Adjustable-Height Table",113.4936,6,"West",1700,515.88,"Consumer","2014-06-07","Second Class","Washington","Tables" +"Office Supplies","Midland","United States","RB-19795","Ross Baird",0,"2014-06-03","CA-2014-141299",48640,"OFF-EN-10004459","Security-Tint Envelopes",7.4872,2,"Central",6323,15.28,"Home Office","2014-06-07","Second Class","Michigan","Envelopes" +"Office Supplies","Decatur","United States","RE-19450","Richard Eichhorn",0.2,"2014-06-03","CA-2014-163867",62521,"OFF-LA-10001771","Avery 513",5.1791997,4,"Central",9844,15.935999,"Consumer","2014-06-06","First Class","Illinois","Labels" +"Office Supplies","Decatur","United States","RE-19450","Richard Eichhorn",0.2,"2014-06-03","CA-2014-163867",62521,"OFF-ST-10000877","Recycled Steel Personal File for Standard File Folders",9.9522,3,"Central",9846,132.696,"Consumer","2014-06-06","First Class","Illinois","Storage" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0.2,"2014-06-04","CA-2014-151946",10035,"FUR-BO-10003272","O'Sullivan Living Dimensions 5-Shelf Bookcases",-44.196,2,"East",8610,353.568,"Consumer","2014-06-09","Standard Class","New York","Bookcases" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"FUR-FU-10002191","G.E. 
Halogen Desk Lamp Bulbs",6.7008,2,"East",8611,13.96,"Consumer","2014-06-09","Standard Class","New York","Furnishings" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"FUR-FU-10002878","Seth Thomas 14"" Day/Date Wall Clock",21.0752,2,"East",8608,56.960003,"Consumer","2014-06-09","Standard Class","New York","Furnishings" +"Office Supplies","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"OFF-AP-10001626","Commercial WindTunnel Clean Air Upright Vacuum. Replacement Belts. Filtration Bags",4.0456,4,"East",8609,15.56,"Consumer","2014-06-09","Standard Class","New York","Appliances" +"Office Supplies","Columbus","United States","MP-17470","Mark Packer",0.2,"2014-06-04","CA-2014-147914",43229,"OFF-PA-10001685","Easy-staple paper",5.8812,2,"East",1875,16.224,"Home Office","2014-06-09","Standard Class","Ohio","Paper" +"Furniture","Long Beach","United States","GT-14635","Grant Thornton",0.4,"2014-06-06","CA-2014-159520",11561,"FUR-TA-10003238","Chromcraft Bull-Nose Wood 48"" x 96"" Rectangular Conference Tables",-347.1174,3,"East",2142,991.76404,"Corporate","2014-06-11","Standard Class","New York","Tables" +"Office Supplies","Rochester","United States","BM-11785","Bryan Mills",0,"2014-06-06","CA-2014-133270",14609,"OFF-AR-10002656","Sanford Liquid Accent Highlighters",4.9431996,2,"East",1746,13.360001,"Consumer","2014-06-09","First Class","New York","Art" +"Office Supplies","Jacksonville","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-06","CA-2014-169257",32216,"OFF-BI-10002557","Presstex Flexible Ring Binders",-0.90999997,1,"South",6598,1.3649999,"Consumer","2014-06-12","Standard Class","Florida","Binders" +"Office Supplies","Chicago","United States","SB-20170","Sarah Bern",0.8,"2014-06-06","US-2014-161305",60623,"OFF-BI-10002794","Avery Trapezoid Ring Binder. 3"" Capacity. Black. 
1040 sheets",-38.111397,3,"Central",6301,24.588,"Consumer","2014-06-12","Standard Class","Illinois","Binders" +"Office Supplies","Long Beach","United States","GT-14635","Grant Thornton",0.2,"2014-06-06","CA-2014-159520",11561,"OFF-BI-10003982","Wilson Jones Century Plastic Molded Ring Binders",50.4711,9,"East",2140,149.54399,"Corporate","2014-06-11","Standard Class","New York","Binders" +"Office Supplies","Chicago","United States","SB-20170","Sarah Bern",0.2,"2014-06-06","US-2014-161305",60623,"OFF-EN-10000461","#10- 4 1/8"" x 9 1/2"" Recycled Envelopes",4.7195997,2,"Central",6302,13.983999,"Consumer","2014-06-12","Standard Class","Illinois","Envelopes" +"Office Supplies","Houston","United States","VT-21700","Valerie Takahito",0.2,"2014-06-06","CA-2014-151897",77070,"OFF-LA-10001074","Round Specialty Laser Printer Labels",33.830997,10,"Central",6473,100.24,"Home Office","2014-06-10","Standard Class","Texas","Labels" +"Office Supplies","Jacksonville","United States","SV-20785","Stewart Visinsky",0.2,"2014-06-06","CA-2014-169257",32216,"OFF-PA-10002319","Xerox 1944",22.480799,2,"South",6599,62.016,"Consumer","2014-06-12","Standard Class","Florida","Paper" +"Office Supplies","Long Beach","United States","GT-14635","Grant Thornton",0,"2014-06-06","CA-2014-159520",11561,"OFF-SU-10001664","Acme Office Executive Series Stainless Steel Trimmers",4.4564004,2,"East",2141,17.140001,"Corporate","2014-06-11","Standard Class","New York","Supplies" +"Furniture","Aurora","United States","NR-18550","Nick Radford",0.5,"2014-06-07","CA-2014-106229",60505,"FUR-TA-10002041","Bevis Round Conference Table Top. 
X-Base",-209.7693,3,"Central",4722,268.935,"Consumer","2014-06-11","Second Class","Illinois","Tables" +"Office Supplies","Peoria","United States","BP-11095","Bart Pistole",0.8,"2014-06-07","US-2014-134971",61604,"OFF-BI-10003982","Wilson Jones Century Plastic Molded Ring Binders",-20.5623,3,"Central",394,12.462,"Corporate","2014-06-10","Second Class","Illinois","Binders" +"Furniture","Bristol","United States","KE-16420","Katrina Edelman",0.2,"2014-06-08","US-2014-109162",37620,"FUR-CH-10002647","Situations Contoured Folding Chairs. 4/Set",10.647,3,"South",3842,170.352,"Corporate","2014-06-12","Standard Class","Tennessee","Chairs" +"Furniture","Seattle","United States","CS-11950","Carlos Soltero",0.2,"2014-06-08","US-2014-141257",98115,"FUR-CH-10002758","Hon Deluxe Fabric Upholstered Stacking Chairs. Squared Back",73.19399,3,"West",2750,585.552,"Consumer","2014-06-14","Standard Class","Washington","Chairs" +"Office Supplies","Long Beach","United States","RD-19480","Rick Duston",0.2,"2014-06-08","CA-2014-108147",11561,"OFF-BI-10003876","Green Canvas Binder for 8-1/2"" x 14"" Sheets",25.68,2,"East",9367,68.48,"Consumer","2014-06-13","Standard Class","New York","Binders" +"Office Supplies","Long Beach","United States","RD-19480","Rick Duston",0,"2014-06-08","CA-2014-108147",11561,"OFF-ST-10003470","Tennsco Snap-Together Open Shelving Units. Starter Sets and Add-On Units",83.844,6,"East",9368,1676.88,"Consumer","2014-06-13","Standard Class","New York","Storage" +"Furniture","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"FUR-FU-10001487","Eldon Expressions Wood and Plastic Desk Accessories. 
Cherry Wood",14.1694,7,"West",6,48.86,"Consumer","2014-06-14","Standard Class","California","Furnishings" +"Furniture","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"FUR-FU-10002685","Executive Impressions 13-1/2"" Indoor/Outdoor Wall Clock",14.212,2,"West",4957,37.4,"Corporate","2014-06-13","Second Class","Nevada","Furnishings" +"Furniture","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",85.309204,9,"West",11,1706.184,"Consumer","2014-06-14","Standard Class","California","Tables" +"Furniture","Harrisonburg","United States","FO-14305","Frank Olsen",0,"2014-06-09","CA-2014-132612",22801,"FUR-TA-10004534","Bevis 44 x 96 Conference Tables",245.02101,7,"South",1812,1441.3,"Consumer","2014-06-11","Second Class","Virginia","Tables" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"OFF-AP-10002892","Belkin F5C206VTEL 6 Outlet Surge",34.47,5,"West",10,114.9,"Consumer","2014-06-14","Standard Class","California","Appliances" +"Office Supplies","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"OFF-AR-10001953","Boston 1645 Deluxe Heavier-Duty Electric Pencil Sharpener",6.1572,2,"Central",491,70.368004,"Home Office","2014-06-13","Second Class","Texas","Art" +"Office Supplies","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"OFF-AR-10002335","DIXON Oriole Pencils",4.6956,7,"West",4955,18.06,"Corporate","2014-06-13","Second Class","Nevada","Art" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"OFF-AR-10002833","Newell 322",1.9655999,4,"West",7,7.2799997,"Consumer","2014-06-14","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CM-12445","Chuck 
Magee",0,"2014-06-09","CA-2014-133851",94122,"OFF-AR-10003752","Deluxe Chalkboard Eraser Cleaner",10.626,2,"West",862,23.1,"Consumer","2014-06-16","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"OFF-BI-10003910","DXL Angle-View Binders with Locking Rings by Samsill",5.7825,3,"West",9,18.504,"Consumer","2014-06-14","Standard Class","California","Binders" +"Office Supplies","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"OFF-PA-10003641","Xerox 1909",36.404404,3,"West",4956,79.14,"Corporate","2014-06-13","Second Class","Nevada","Paper" +"Office Supplies","San Francisco","United States","CM-12445","Chuck Magee",0,"2014-06-09","CA-2014-133851",94122,"OFF-SU-10001225","Staple remover",0.1472,2,"West",861,7.36,"Consumer","2014-06-16","Standard Class","California","Supplies" +"Technology","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"TEC-AC-10000303","Logitech M510 Wireless Mouse",10.3974,2,"Central",490,63.984005,"Home Office","2014-06-13","Second Class","Texas","Accessories" +"Technology","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"TEC-PH-10000376","Square Credit Card Reader",0.5994,1,"Central",489,7.992,"Home Office","2014-06-13","Second Class","Texas","Phones" +"Technology","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"TEC-PH-10002033","Konftel 250 Conference phone - Charcoal black",68.356804,4,"West",12,911.4241,"Consumer","2014-06-14","Standard Class","California","Phones" +"Technology","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"TEC-PH-10002275","Mitel 5320 IP Phone VoIP phone",90.715195,6,"West",8,907.152,"Consumer","2014-06-14","Standard Class","California","Phones" +"Office Supplies","Detroit","United 
States","HK-14890","Heather Kirkland",0,"2014-06-10","CA-2014-144281",48234,"OFF-LA-10003930","Dot Matrix Printer Tape Reel Labels. White. 5000/Box",240.85951,5,"Central",9697,491.55,"Corporate","2014-06-15","Second Class","Michigan","Labels" +"Office Supplies","Los Angeles","United States","FM-14215","Filia McAdams",0,"2014-06-13","CA-2014-114643",90032,"OFF-AR-10003631","Staples in misc. colors",4.7915998,3,"West",1856,14.52,"Corporate","2014-06-17","Standard Class","California","Art" +"Furniture","Detroit","United States","SR-20425","Sharelle Roach",0,"2014-06-14","CA-2014-140487",48234,"FUR-BO-10000711","Hon Metal Bookcases. Gray",57.4938,3,"Central",5221,212.93999,"Home Office","2014-06-20","Standard Class","Michigan","Bookcases" +"Furniture","San Antonio","United States","KL-16555","Kelly Lampkin",0.3,"2014-06-15","US-2014-141215",78207,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",-56.996002,4,"Central",917,797.94403,"Corporate","2014-06-21","Standard Class","Texas","Chairs" +"Furniture","San Antonio","United States","KL-16555","Kelly Lampkin",0.3,"2014-06-15","US-2014-141215",78207,"FUR-TA-10001520","Lesro Sheffield Collection Coffee Table. End Table. Center Table. Corner Table",-18.5562,2,"Central",916,99.91801,"Corporate","2014-06-21","Standard Class","Texas","Tables" +"Office Supplies","San Antonio","United States","KL-16555","Kelly Lampkin",0.8,"2014-06-15","US-2014-141215",78207,"OFF-BI-10002706","Avery Premier Heavy-Duty Binder with Round Locking Rings",-14.5656,3,"Central",918,8.568001,"Corporate","2014-06-21","Standard Class","Texas","Binders" +"Office Supplies","El Paso","United States","PS-18760","Pamela Stobb",0.2,"2014-06-15","CA-2014-126963",79907,"OFF-PA-10001952","Xerox 1902",11.8768,2,"Central",6938,36.544003,"Consumer","2014-06-15","Same Day","Texas","Paper" +"Furniture","Mishawaka","United States","SC-20575","Sonia Cooley",0,"2014-06-16","CA-2014-136644",46544,"FUR-CH-10000225","Global Geo Office Task Chair. 
Gray",32.392,8,"Central",2496,647.83997,"Consumer","2014-06-22","Standard Class","Indiana","Chairs" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0,"2014-06-16","CA-2014-113929",11550,"OFF-AR-10003772","Boston 16750 Black Compact Battery Pencil Sharpener",10.5,4,"East",2913,35,"Consumer","2014-06-21","Standard Class","New York","Art" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-06-16","CA-2014-113929",11550,"OFF-BI-10002852","Ibico Standard Transparent Covers",14.3376,3,"East",2914,39.552,"Consumer","2014-06-21","Standard Class","New York","Binders" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0,"2014-06-16","CA-2014-113929",11550,"OFF-EN-10003286","Staple envelope",19.457998,5,"East",2912,41.4,"Consumer","2014-06-21","Standard Class","New York","Envelopes" +"Furniture","Seattle","United States","GH-14425","Gary Hwang",0,"2014-06-17","CA-2014-144414",98105,"FUR-FU-10003981","Eldon Wave Desk Accessories",2.6207998,3,"West",6816,6.24,"Consumer","2014-06-21","Standard Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","GH-14425","Gary Hwang",0.2,"2014-06-17","CA-2014-144414",98105,"OFF-BI-10004995","GBC DocuBind P400 Electric Binding System",1061.5721,3,"West",6818,3266.3762,"Consumer","2014-06-21","Standard Class","Washington","Binders" +"Office Supplies","Seattle","United States","GH-14425","Gary Hwang",0,"2014-06-17","CA-2014-144414",98105,"OFF-FA-10000624","OIC Binder Clips",8.95,5,"West",6817,17.9,"Consumer","2014-06-21","Standard Class","Washington","Fasteners" +"Office Supplies","Chicago","United States","JF-15415","Jennifer Ferguson",0.2,"2014-06-17","CA-2014-156342",60653,"OFF-PA-10001725","Xerox 1892",22.480799,2,"Central",9897,62.016,"Consumer","2014-06-20","Second Class","Illinois","Paper" +"Technology","Newark","United States","DB-13660","Duane Benoit",0,"2014-06-17","CA-2014-104402",19711,"TEC-MA-10000423","Texas 
Instruments TI-34 Scientific Calculator",31.0059,3,"East",6324,65.97,"Consumer","2014-06-23","Standard Class","Delaware","Machines" +"Technology","Los Angeles","United States","MC-18100","Mick Crebagga",0.2,"2014-06-18","CA-2014-123855",90036,"TEC-PH-10000215","Plantronics Cordless Phone Headset with In-line Volume - M214C",12.232499,5,"West",9765,139.8,"Consumer","2014-06-23","Standard Class","California","Phones" +"Furniture","Plano","United States","SC-20020","Sam Craven",0.32000002,"2014-06-20","CA-2014-166863",75023,"FUR-BO-10001608","Hon Metal Bookcases. Black",-19.8744,4,"Central",3508,193.0656,"Consumer","2014-06-24","Standard Class","Texas","Bookcases" +"Office Supplies","Lawton","United States","Co-12640","Corey-Lock",0,"2014-06-20","US-2014-112949",73505,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",155.727,6,"Central",8146,471.9,"Consumer","2014-06-27","Standard Class","Oklahoma","Appliances" +"Office Supplies","Lawton","United States","Co-12640","Corey-Lock",0,"2014-06-20","US-2014-112949",73505,"OFF-AR-10003469","Nontoxic Chalk",1.6896,2,"Central",8147,3.52,"Consumer","2014-06-27","Standard Class","Oklahoma","Art" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.8,"2014-06-20","CA-2014-166863",75023,"OFF-BI-10000756","Storex DuraTech Recycled Plastic Frosted Binders",-5.088,4,"Central",3507,3.392,"Consumer","2014-06-24","Standard Class","Texas","Binders" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-PA-10000587","Array Parchment Paper. 
Assorted Colors",4.0768003,2,"Central",3510,11.648001,"Consumer","2014-06-24","Standard Class","Texas","Paper" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-PA-10001166","Xerox 2",5.4431996,3,"Central",3509,15.552,"Consumer","2014-06-24","Standard Class","Texas","Paper" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-ST-10004123","Safco Industrial Wire Shelving System",-127.372,7,"Central",3512,509.48798,"Consumer","2014-06-24","Standard Class","Texas","Storage" +"Technology","Plano","United States","SC-20020","Sam Craven",0.4,"2014-06-20","CA-2014-166863",75023,"TEC-MA-10001972","Okidata C331dn Printer",-97.71999,2,"Central",3511,418.8,"Consumer","2014-06-24","Standard Class","Texas","Machines" +"Technology","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"TEC-PH-10000369","HTC One Mini",20.1584,2,"Central",3506,201.584,"Consumer","2014-06-24","Standard Class","Texas","Phones" +"Furniture","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"FUR-FU-10000550","Stacking Trays by OIC",0.6474001,1,"East",2998,3.984,"Consumer","2014-06-25","Standard Class","Pennsylvania","Furnishings" +"Furniture","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"FUR-FU-10004071","Luxo Professional Magnifying Clamp-On Fluorescent Lamps",14.5614,1,"South",690,104.01,"Consumer","2014-06-23","Second Class","Virginia","Furnishings" +"Office Supplies","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",26.864998,3,"East",4414,59.7,"Consumer","2014-06-24","First Class","New York","Appliances" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"OFF-AP-10003860","Fellowes Advanced 8 Outlet Surge Suppressor with Phone/Fax Protection",3.8864,2,"East",3001,44.416,"Consumer","2014-06-25","Standard Class","Pennsylvania","Appliances" +"Office Supplies","Lakewood","United States","DB-13060","Dave Brooks",0,"2014-06-21","CA-2014-128146",8701,"OFF-AR-10001919","OIC #2 Pencils. Medium Soft",1.0904,2,"East",1182,3.76,"Consumer","2014-06-25","Standard Class","New Jersey","Art" +"Office Supplies","Meriden","United States","RM-19375","Raymond Messe",0,"2014-06-21","CA-2014-141278",6450,"OFF-AR-10003056","Newell 341",6.206,5,"East",1836,21.4,"Consumer","2014-06-24","First Class","Connecticut","Art" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.7,"2014-06-21","CA-2014-138317",19120,"OFF-BI-10000069","GBC Prepunched Paper. 19-Hole. for Binding Systems. 
24-lb",-7.2048,2,"East",3002,9.006,"Consumer","2014-06-25","Standard Class","Pennsylvania","Binders" +"Office Supplies","Pueblo","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-21","US-2014-160780",81001,"OFF-BI-10001116","Wilson Jones 1"" Hanging DublLock Ring Binders",-8.131201,7,"West",2678,11.088,"Consumer","2014-06-21","Same Day","Colorado","Binders" +"Office Supplies","Pueblo","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-21","US-2014-160780",81001,"OFF-BI-10002931","Avery Trapezoid Extra Heavy Duty 4"" Binders",-16.776001,2,"West",2679,25.164,"Consumer","2014-06-21","Same Day","Colorado","Binders" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"OFF-EN-10001539","Staple envelope",8.402399,4,"East",2997,24.896,"Consumer","2014-06-25","Standard Class","Pennsylvania","Envelopes" +"Office Supplies","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"OFF-PA-10003883","Message Book. Phone. Wirebound Standard Line Memo. 
2 3/4"" X 5""",9.039,3,"East",4412,19.65,"Consumer","2014-06-24","First Class","New York","Paper" +"Office Supplies","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"OFF-ST-10002444","Recycled Eldon Regeneration Jumbo File",10.315201,3,"South",692,36.84,"Consumer","2014-06-23","Second Class","Virginia","Storage" +"Technology","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"TEC-AC-10003628","Logitech 910-002974 M325 Wireless Mouse for Web Scrolling",28.790401,4,"East",2999,95.968,"Consumer","2014-06-25","Standard Class","Pennsylvania","Accessories" +"Technology","Philadelphia","United States","NW-18400","Natalie Webber",0.7,"2014-06-21","CA-2014-138317",19120,"TEC-MA-10004521","Epson Perfection V600 Photo Scanner",-172.4925,3,"East",3000,206.991,"Consumer","2014-06-25","Standard Class","Pennsylvania","Machines" +"Technology","Long Beach","United States","JG-15160","James Galang",0,"2014-06-21","CA-2014-141796",11561,"TEC-PH-10001578","Polycom SoundStation2 EX Conference phone",352.3065,3,"East",5150,1214.8501,"Consumer","2014-06-21","Same Day","New York","Phones" +"Technology","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"TEC-PH-10002033","Konftel 250 Conference phone - Charcoal black",74.0532,1,"South",691,284.81998,"Consumer","2014-06-23","Second Class","Virginia","Phones" +"Technology","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"TEC-PH-10003963","GE 2-Jack Phone Line Splitter",160.67221,3,"East",4413,617.97003,"Consumer","2014-06-24","First Class","New York","Phones" +"Technology","Lakewood","United States","DB-13060","Dave Brooks",0,"2014-06-21","CA-2014-128146",8701,"TEC-PH-10004539","Wireless Extenders zBoost YX545 SOHO Signal Booster",357.1911,7,"East",1181,1322.93,"Consumer","2014-06-25","Standard Class","New Jersey","Phones" +"Furniture","Chester","United 
States","AA-10645","Anna Andreadi",0.3,"2014-06-22","CA-2014-154963",19013,"FUR-CH-10000454","Hon Deluxe Fabric Upholstered Stacking Chairs. Rounded Back",0,5,"East",4023,853.93005,"Consumer","2014-06-27","Standard Class","Pennsylvania","Chairs" +"Furniture","Chester","United States","AA-10645","Anna Andreadi",0.3,"2014-06-22","CA-2014-154963",19013,"FUR-CH-10004698","Padded Folding Chairs. Black. 4/Carton",-4.8588004,3,"East",4021,170.058,"Consumer","2014-06-27","Standard Class","Pennsylvania","Chairs" +"Office Supplies","Phoenix","United States","TB-21280","Toby Braunhardt",0.7,"2014-06-22","CA-2014-133389",85023,"OFF-BI-10001553","SpineVue Locking Slant-D Ring Binders by Cardinal",-6.0324,3,"West",9001,8.226,"Consumer","2014-06-22","Same Day","Arizona","Binders" +"Office Supplies","Minneapolis","United States","DV-13465","Dianna Vittorini",0,"2014-06-22","CA-2014-124646",55407,"OFF-ST-10001097","Office Impressions Heavy Duty Welded Shelving & Multimedia Storage Drawers",0,3,"Central",2157,501.81,"Consumer","2014-06-24","First Class","Minnesota","Storage" +"Office Supplies","Minneapolis","United States","DV-13465","Dianna Vittorini",0,"2014-06-22","CA-2014-124646",55407,"OFF-ST-10001469","Fellowes Bankers Box Recycled Super Stor/Drawer",9.716399,3,"Central",2158,161.93999,"Consumer","2014-06-24","First Class","Minnesota","Storage" +"Technology","Louisville","United States","JE-15745","Joel Eaton",0.2,"2014-06-22","CA-2014-142048",80027,"TEC-AC-10004114","KeyTronic 6101 Series - Keyboard - Black",56.5662,6,"West",422,196.752,"Consumer","2014-06-25","First Class","Colorado","Accessories" +"Technology","Chester","United States","AA-10645","Anna Andreadi",0.4,"2014-06-22","CA-2014-154963",19013,"TEC-PH-10004093","Panasonic Kx-TS550",-15.1767,3,"East",4022,82.782,"Consumer","2014-06-27","Standard Class","Pennsylvania","Phones" +"Office Supplies","Concord","United States","DL-13330","Denise Leinenbach",0.2,"2014-06-23","US-2014-130358",28027,"OFF-AR-10002766","Prang 
Drawing Pencil Set",1.7514,9,"South",4064,20.016,"Consumer","2014-06-26","First Class","North Carolina","Art" +"Office Supplies","Concord","United States","DL-13330","Denise Leinenbach",0.2,"2014-06-23","US-2014-130358",28027,"OFF-SU-10002522","Acme Kleen Earth Office Shears",0.3492,1,"South",4065,3.1039999,"Consumer","2014-06-26","First Class","North Carolina","Supplies" +"Technology","Philadelphia","United States","BS-11665","Brian Stugart",0.2,"2014-06-23","CA-2014-126032",19143,"TEC-AC-10000158","Sony 64GB Class 10 Micro SDHC R40 Memory Card",1.0797,3,"East",2127,86.376,"Consumer","2014-06-28","Standard Class","Pennsylvania","Accessories" +"Furniture","Tucson","United States","LP-17080","Liz Pelletier",0.2,"2014-06-24","CA-2014-159814",85705,"FUR-FU-10001731","Acrylic Self-Standing Desk Frames",0.96119994,2,"West",9402,4.272,"Consumer","2014-06-28","Standard Class","Arizona","Furnishings" +"Furniture","Los Angeles","United States","NS-18640","Noel Staavos",0.2,"2014-06-25","CA-2014-159338",90049,"FUR-TA-10004147","Hon 4060 Series Tables",11.196,5,"West",1098,447.84,"Corporate","2014-06-28","First Class","California","Tables" +"Office Supplies","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"OFF-AR-10000475","Hunt BOSTON Vista Battery-Operated Pencil Sharpener. Black",0.81619996,1,"West",4849,9.328001,"Corporate","2014-06-27","Second Class","Oregon","Art" +"Office Supplies","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 
4/Pack",17.907999,11,"West",4848,71.631996,"Corporate","2014-06-27","Second Class","Oregon","Art" +"Technology","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"TEC-PH-10002115","Plantronics 81402",19.797,5,"West",4847,263.96,"Corporate","2014-06-27","Second Class","Oregon","Phones" +"Furniture","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"FUR-TA-10001039","KI Adjustable-Height Table",22.354797,1,"South",2278,85.98,"Consumer","2014-07-01","Standard Class","Mississippi","Tables" +"Office Supplies","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"OFF-ST-10004337","SAFCO Commercial Wire Shelving. 72h",0,5,"South",2277,306.2,"Consumer","2014-07-01","Standard Class","Mississippi","Storage" +"Technology","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"TEC-AC-10000109","Sony Micro Vault Click 16 GB USB 2.0 Flash Drive",53.7504,4,"South",2279,223.95999,"Consumer","2014-07-01","Standard Class","Mississippi","Accessories" +"Furniture","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.3,"2014-06-28","CA-2014-140858",19140,"FUR-CH-10001394","Global Leather Executive Chair",0,5,"East",820,1228.4651,"Consumer","2014-07-02","Standard Class","Pennsylvania","Chairs" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.7,"2014-06-28","CA-2014-140858",19140,"OFF-BI-10003094","Self-Adhesive Ring Binder Labels",-2.4287999,3,"East",819,3.1679997,"Consumer","2014-07-02","Standard Class","Pennsylvania","Binders" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.7,"2014-06-28","CA-2014-140858",19140,"OFF-BI-10004230","GBC Recycled Grain Textured Covers",-22.796402,3,"East",821,31.086,"Consumer","2014-07-02","Standard Class","Pennsylvania","Binders" +"Office Supplies","Detroit","United States","RW-19630","Rob 
Williams",0,"2014-06-28","CA-2014-156993",48234,"OFF-FA-10003495","Staples",3.04,1,"Central",2924,6.08,"Corporate","2014-07-04","Standard Class","Michigan","Fasteners" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.2,"2014-06-28","CA-2014-140858",19140,"OFF-PA-10000304","Xerox 1995",14.515201,8,"East",818,41.472,"Consumer","2014-07-02","Standard Class","Pennsylvania","Paper" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.2,"2014-06-28","CA-2014-140858",19140,"OFF-PA-10003395","Xerox 1941",117.432,4,"East",822,335.52,"Consumer","2014-07-02","Standard Class","Pennsylvania","Paper" +"Office Supplies","Cincinnati","United States","DL-12865","Dan Lawera",0.2,"2014-06-29","CA-2014-165974",45231,"OFF-AR-10003405","Dixon My First Ticonderoga Pencil. #2",3.6855,7,"East",777,32.760002,"Consumer","2014-07-06","Standard Class","Ohio","Art" +"Office Supplies","New York City","United States","JK-15625","Jim Karlsson",0.2,"2014-06-29","CA-2014-111773",10024,"OFF-BI-10000174","Wilson Jones Clip & Carry Folder Binder Tool for Ring Binders. Clear",4.872,3,"East",5736,13.92,"Consumer","2014-07-03","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","CS-11860","Cari Schnelling",0,"2014-06-30","US-2014-121566",10011,"OFF-AR-10001026","Sanford Uni-Blazer View Highlighters. Chisel Tip. Yellow",2.904,3,"East",5154,6.6,"Consumer","2014-07-06","Standard Class","New York","Art" +"Office Supplies","Los Angeles","United States","AR-10825","Anthony Rawles",0,"2014-06-30","CA-2014-109218",90004,"OFF-AR-10001374","BIC Brite Liner Highlighters. 
Chisel Tip",10.368001,5,"West",3148,32.4,"Corporate","2014-07-03","Second Class","California","Art" +"Office Supplies","Chicago","United States","RA-19915","Russell Applegate",0.2,"2014-06-30","CA-2014-123064",60653,"OFF-AR-10004582","BIC Brite Liner Grip Highlighters",1.64,4,"Central",2690,5.2479997,"Consumer","2014-07-02","First Class","Illinois","Art" +"Office Supplies","New York City","United States","HA-14905","Helen Abelman",0.2,"2014-06-30","CA-2014-146640",10024,"OFF-BI-10002867","GBC Recycled Regency Composition Covers",108.7996,7,"East",1414,334.768,"Consumer","2014-07-05","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","CS-11860","Cari Schnelling",0.2,"2014-06-30","US-2014-121566",10011,"OFF-BI-10004528","Cardinal Poly Pocket Divider Pockets for Ring Binders",0.84000003,1,"East",5153,2.6879997,"Consumer","2014-07-06","Standard Class","New York","Binders" +"Office Supplies","Houston","United States","MS-17980","Michael Stewart",0.2,"2014-06-30","CA-2014-116757",77095,"OFF-FA-10002815","Staples",7.1928,6,"Central",4503,21.312,"Corporate","2014-07-04","Standard Class","Texas","Fasteners" +"Office Supplies","Wilmington","United States","BD-11620","Brian DeCherney",0.2,"2014-06-30","CA-2014-152254",28403,"OFF-PA-10001144","Xerox 1913",108.7408,7,"South",8397,310.688,"Consumer","2014-06-30","Same Day","North Carolina","Paper" +"Office Supplies","Houston","United States","MS-17980","Michael Stewart",0.2,"2014-06-30","CA-2014-116757",77095,"OFF-PA-10002005","Xerox 225",9.072,5,"Central",4502,25.92,"Corporate","2014-07-04","Standard Class","Texas","Paper" +"Office Supplies","Deltona","United States","LW-16825","Laurel Workman",0.7,"2014-07-01","CA-2014-160773",32725,"OFF-BI-10000546","Avery Durable Binders",-3.6287997,6,"South",1320,5.184,"Corporate","2014-07-05","Standard Class","Florida","Binders" +"Office Supplies","Seattle","United States","Dl-13600","Dorris 
liebe",0.2,"2014-07-01","CA-2014-153150",98105,"OFF-BI-10003355","Cardinal Holdit Business Card Pockets",6.972,5,"West",716,19.92,"Corporate","2014-07-06","Second Class","Washington","Binders" +"Technology","Deltona","United States","LW-16825","Laurel Workman",0.2,"2014-07-01","CA-2014-160773",32725,"TEC-PH-10004586","Wilson SignalBoost 841262 DB PRO Amplifier Kit",71.99,2,"South",1319,575.92004,"Corporate","2014-07-05","Standard Class","Florida","Phones" +"Office Supplies","Plainfield","United States","EH-13945","Eric Hoffmann",0,"2014-07-02","CA-2014-107594",7060,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",2.1762,1,"East",2734,5.58,"Consumer","2014-07-06","Standard Class","New Jersey","Art" +"Technology","Plainfield","United States","EH-13945","Eric Hoffmann",0,"2014-07-02","CA-2014-107594",7060,"TEC-PH-10002923","Logitech B530 USB Headset - headset - Full size. Binaural",19.9746,2,"East",2733,73.98,"Consumer","2014-07-06","Standard Class","New Jersey","Phones" +"Office Supplies","Aurora","United States","KE-16420","Katrina Edelman",0.2,"2014-07-04","CA-2014-120096",80013,"OFF-AP-10000692","Fellowes Mighty 8 Compact Surge Protector",3.2432,2,"West",6734,32.432,"Corporate","2014-07-07","First Class","Colorado","Appliances" +"Office Supplies","Richmond","United States","MS-17770","Maxwell Schwartz",0,"2014-07-04","CA-2014-138709",23223,"OFF-BI-10000145","Zipper Ring Binder Pockets",7.644,5,"South",7163,15.6,"Consumer","2014-07-09","Standard Class","Virginia","Binders" +"Office Supplies","Aurora","United States","KE-16420","Katrina Edelman",0.2,"2014-07-04","CA-2014-120096",80013,"OFF-PA-10001977","Xerox 194",62.137596,4,"West",6733,177.536,"Corporate","2014-07-07","First Class","Colorado","Paper" +"Office Supplies","Richmond","United States","MS-17770","Maxwell Schwartz",0,"2014-07-04","CA-2014-138709",23223,"OFF-PA-10004734","Southworth Structures Collection",10.92,3,"South",7162,21.84,"Consumer","2014-07-09","Standard Class","Virginia","Paper" 
+"Furniture","Palm Coast","United States","DM-13525","Don Miller",0.2,"2014-07-05","CA-2014-103317",32137,"FUR-FU-10001591","Advantus Panel Wall Certificate Holder - 8.5x11",5.368,2,"South",5437,19.52,"Corporate","2014-07-08","First Class","Florida","Furnishings" +"Furniture","Palm Coast","United States","DM-13525","Don Miller",0.2,"2014-07-05","CA-2014-103317",32137,"FUR-FU-10003192","Luxo Adjustable Task Clamp Lamp",15.9912,3,"South",5439,213.21599,"Corporate","2014-07-08","First Class","Florida","Furnishings" +"Office Supplies","Knoxville","United States","DW-13195","David Wiener",0.2,"2014-07-05","CA-2014-152849",37918,"OFF-AR-10002833","Newell 322",0.38220003,3,"South",3885,4.368,"Corporate","2014-07-12","Standard Class","Tennessee","Art" +"Office Supplies","Los Angeles","United States","DP-13390","Dennis Pardue",0.2,"2014-07-05","CA-2014-107139",90004,"OFF-BI-10001670","Vinyl Sectional Post Binders",67.86,6,"West",3791,180.95999,"Home Office","2014-07-11","Standard Class","California","Binders" +"Office Supplies","Palm Coast","United States","DM-13525","Don Miller",0.7,"2014-07-05","CA-2014-103317",32137,"OFF-BI-10001787","Wilson Jones Four-Pocket Poly Binders",-6.867,5,"South",5438,9.81,"Corporate","2014-07-08","First Class","Florida","Binders" +"Office Supplies","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"OFF-LA-10001934","Avery 516",6.8714004,2,"South",687,14.620001,"Consumer","2014-07-08","First Class","Mississippi","Labels" +"Office Supplies","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"OFF-PA-10000304","Xerox 1995",9.331201,3,"South",688,19.44,"Consumer","2014-07-08","First Class","Mississippi","Paper" +"Office Supplies","Houston","United States","DC-12850","Dan Campbell",0.2,"2014-07-05","US-2014-160444",77036,"OFF-ST-10000563","Fellowes Bankers Box Stor/Drawer Steel Plus",-35.177998,11,"Central",1357,281.42398,"Consumer","2014-07-05","Same 
Day","Texas","Storage" +"Office Supplies","Houston","United States","DC-12850","Dan Campbell",0.2,"2014-07-05","US-2014-160444",77036,"OFF-ST-10001522","Gould Plastics 18-Pocket Panel Bin. 34w x 5-1/4d x 20-1/2h",-44.155197,3,"Central",1356,220.776,"Consumer","2014-07-05","Same Day","Texas","Storage" +"Technology","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"TEC-AC-10003911","NETGEAR AC1750 Dual Band Gigabit Smart WiFi Router",163.1898,3,"South",686,479.97,"Consumer","2014-07-08","First Class","Mississippi","Accessories" +"Furniture","El Cajon","United States","BC-11125","Becky Castell",0.2,"2014-07-06","CA-2014-147543",92020,"FUR-CH-10000155","Global Comet Stacking Armless Chair",47.848,2,"West",6846,478.47998,"Home Office","2014-07-12","Standard Class","California","Chairs" +"Technology","New York City","United States","EP-13915","Emily Phan",0.2,"2014-07-06","CA-2014-134278",10011,"TEC-CO-10001046","Canon Imageclass D680 Copier / Fax",174.9975,1,"East",1550,559.99207,"Consumer","2014-07-08","First Class","New York","Copiers" +"Furniture","Philadelphia","United States","JL-15835","John Lee",0.3,"2014-07-07","US-2014-138758",19120,"FUR-CH-10002880","Global High-Back Leather Tilter. Burgundy",-46.7362,2,"East",4398,172.186,"Consumer","2014-07-11","Standard Class","Pennsylvania","Chairs" +"Furniture","Philadelphia","United States","JL-15835","John Lee",0.2,"2014-07-07","US-2014-138758",19120,"FUR-FU-10003039","Howard Miller 11-1/2"" Diameter Grantwood Wall Clock",12.076399,2,"East",4399,69.008,"Consumer","2014-07-11","Standard Class","Pennsylvania","Furnishings" +"Furniture","Buffalo","United States","MH-18025","Michelle Huthwaite",0.1,"2014-07-08","CA-2014-150301",14215,"FUR-CH-10002647","Situations Contoured Folding Chairs. 
4/Set",10.647,1,"East",4780,63.882,"Consumer","2014-07-10","First Class","New York","Chairs" +"Furniture","San Francisco","United States","EB-13705","Ed Braxton",0.2,"2014-07-08","CA-2014-100090",94122,"FUR-TA-10003715","Hon 2111 Invitation Series Corner Table",-87.935394,3,"West",6288,502.48798,"Corporate","2014-07-12","Standard Class","California","Tables" +"Office Supplies","San Francisco","United States","EB-13705","Ed Braxton",0.2,"2014-07-08","CA-2014-100090",94122,"OFF-BI-10001597","Wilson Jones Ledger-Size. Piano-Hinge Binder. 2"". Blue",68.8464,6,"West",6289,196.704,"Corporate","2014-07-12","Standard Class","California","Binders" +"Furniture","San Francisco","United States","DS-13030","Darrin Sayre",0.15,"2014-07-09","CA-2014-113271",94122,"FUR-BO-10004218","Bush Heritage Pine Collection 5-Shelf Bookcase. Albany Pine Finish. *Special Order",7.049,1,"West",9244,119.833,"Home Office","2014-07-14","Standard Class","California","Bookcases" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0,"2014-07-09","CA-2014-113271",94122,"OFF-AR-10003251","Prang Drawing Pencil Set",2.224,2,"West",9245,5.56,"Home Office","2014-07-14","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0.2,"2014-07-09","CA-2014-113271",94122,"OFF-BI-10002609","Avery Hidden Tab Dividers for Binding Systems",4.6488,6,"West",9243,14.304,"Home Office","2014-07-14","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0.2,"2014-07-09","CA-2014-114125",90049,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",14.1426,6,"West",8174,41.904003,"Home Office","2014-07-13","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0,"2014-07-09","CA-2014-114125",90049,"OFF-LA-10004559","Avery 49",1.4112,1,"West",8173,2.8799999,"Home Office","2014-07-13","Standard 
Class","California","Labels" +"Office Supplies","Dallas","United States","BM-11650","Brian Moss",0.2,"2014-07-09","CA-2014-165379",75217,"OFF-PA-10002245","Xerox 1895",4.485,3,"Central",3368,14.352,"Corporate","2014-07-15","Standard Class","Texas","Paper" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0,"2014-07-09","CA-2014-113271",94122,"OFF-PA-10002365","Xerox 1967",15.552,5,"West",9246,32.4,"Home Office","2014-07-14","Standard Class","California","Paper" +"Office Supplies","Dallas","United States","BM-11650","Brian Moss",0.2,"2014-07-09","CA-2014-165379",75217,"OFF-PA-10003072","Eureka Recycled Copy Paper 8 1/2"" x 11"". Ream",3.6287997,2,"Central",3367,10.368001,"Corporate","2014-07-15","Standard Class","Texas","Paper" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0,"2014-07-09","CA-2014-114125",90049,"OFF-ST-10001505","Perma STOR-ALL Hanging File Box. 13 1/8""W x 12 1/4""D x 10 1/2""H",4.0664,4,"West",8175,23.92,"Home Office","2014-07-13","Standard Class","California","Storage" +"Furniture","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",213.735,3,"East",1031,854.94,"Corporate","2014-07-15","Standard Class","New Jersey","Chairs" +"Furniture","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"FUR-FU-10000629","9-3/4 Diameter Round Wall Clock",52.126198,9,"East",1032,124.11,"Corporate","2014-07-15","Standard Class","New Jersey","Furnishings" +"Office Supplies","New York City","United States","MH-17455","Mark Hamilton",0.2,"2014-07-11","CA-2014-133305",10011,"OFF-BI-10002954","Newell 3-Hole Punched Plastic Slotted Magazine Holders for Binders",6.3980002,5,"East",6336,18.28,"Consumer","2014-07-15","Standard Class","New York","Binders" +"Office Supplies","Hackensack","United States","LA-16780","Laura 
Armstrong",0,"2014-07-11","CA-2014-163552",7601,"OFF-LA-10001175","Avery 514",7.056,5,"East",1033,14.4,"Corporate","2014-07-15","Standard Class","New Jersey","Labels" +"Office Supplies","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"OFF-PA-10000474","Easy-staple paper",83.284,5,"East",1029,177.2,"Corporate","2014-07-15","Standard Class","New Jersey","Paper" +"Office Supplies","El Paso","United States","MN-17935","Michael Nguyen",0.2,"2014-07-11","CA-2014-123225",79907,"OFF-PA-10000552","Xerox 200",3.6287997,2,"Central",5802,10.368001,"Consumer","2014-07-14","First Class","Texas","Paper" +"Office Supplies","New York City","United States","MH-17455","Mark Hamilton",0,"2014-07-11","CA-2014-133305",10011,"OFF-PA-10001970","Xerox 1881",23.086401,4,"East",6335,49.12,"Consumer","2014-07-15","Standard Class","New York","Paper" +"Technology","El Paso","United States","MN-17935","Michael Nguyen",0.2,"2014-07-11","CA-2014-123225",79907,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",43.197598,4,"Central",5801,575.968,"Consumer","2014-07-14","First Class","Texas","Phones" +"Technology","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"TEC-PH-10003885","Cisco SPA508G",57.4113,3,"East",1030,197.97,"Corporate","2014-07-15","Standard Class","New Jersey","Phones" +"Technology","Niagara Falls","United States","JK-15205","Jamie Kunitz",0,"2014-07-11","CA-2014-166555",14304,"TEC-PH-10004912","Cisco SPA112 2 Port Phone Adapter",47.8065,3,"East",9934,164.85,"Consumer","2014-07-14","First Class","New York","Phones" +"Furniture","Seattle","United States","CL-12565","Clay Ludtke",0.2,"2014-07-12","CA-2014-131310",98115,"FUR-CH-10001797","Safco Chair Connectors. 
6/Carton",13.8528,4,"West",2982,123.135994,"Consumer","2014-07-18","Standard Class","Washington","Chairs" +"Furniture","League City","United States","PV-18985","Paul Van Hugh",0.3,"2014-07-12","CA-2014-161508",77573,"FUR-CH-10002126","Hon Deluxe Fabric Upholstered Stacking Chairs",-14.638801,3,"Central",8314,512.358,"Home Office","2014-07-16","Standard Class","Texas","Chairs" +"Furniture","San Francisco","United States","AC-10420","Alyssa Crouse",0.2,"2014-07-12","CA-2014-129924",94122,"FUR-TA-10004575","Hon 5100 Series Wood Tables",-17.458797,3,"West",339,698.352,"Corporate","2014-07-17","Standard Class","California","Tables" +"Office Supplies","League City","United States","PV-18985","Paul Van Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-AR-10003158","Fluorescent Highlighters by Dixon",3.9004002,7,"Central",8316,22.288,"Home Office","2014-07-16","Standard Class","Texas","Art" +"Office Supplies","Seattle","United States","CL-12565","Clay Ludtke",0.2,"2014-07-12","CA-2014-131310",98115,"OFF-BI-10003094","Self-Adhesive Ring Binder Labels",3.8016,4,"West",2983,11.264,"Consumer","2014-07-18","Standard Class","Washington","Binders" +"Office Supplies","San Francisco","United States","AC-10420","Alyssa Crouse",0.2,"2014-07-12","CA-2014-129924",94122,"OFF-BI-10003314","Tuff Stuff Recycled Round Ring Binders",2.7956002,2,"West",338,7.712,"Corporate","2014-07-17","Standard Class","California","Binders" +"Office Supplies","League City","United States","PV-18985","Paul Van Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-FA-10001561","Stockwell Push Pins",0.5668,2,"Central",8315,3.488,"Home Office","2014-07-16","Standard Class","Texas","Fasteners" +"Office Supplies","Chicago","United States","ME-17725","Max Engle",0.2,"2014-07-12","CA-2014-124807",60610,"OFF-PA-10001526","Xerox 1949",12.9978,9,"Central",7399,35.856,"Consumer","2014-07-15","Second Class","Illinois","Paper" +"Office Supplies","League City","United States","PV-18985","Paul Van 
Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-PA-10001804","Xerox 195",5.6112,3,"Central",8317,16.032001,"Home Office","2014-07-16","Standard Class","Texas","Paper" +"Office Supplies","Los Angeles","United States","BF-11170","Ben Ferrer",0,"2014-07-12","CA-2014-110184",90036,"OFF-ST-10000107","Fellowes Super Stor/Drawer",44.955,9,"West",1221,249.75,"Home Office","2014-07-16","Standard Class","California","Storage" +"Technology","Chicago","United States","ME-17725","Max Engle",0.2,"2014-07-12","CA-2014-124807",60610,"TEC-AC-10002857","Verbatim 25 GB 6x Blu-ray Single Layer Recordable Disc. 1/Pack",3.2779999,4,"Central",7400,23.84,"Consumer","2014-07-15","Second Class","Illinois","Accessories" +"Technology","Los Angeles","United States","BF-11170","Ben Ferrer",0.2,"2014-07-12","CA-2014-110184",90036,"TEC-PH-10000439","GE DSL Phone Line Filter",28.7928,8,"West",1222,255.93599,"Home Office","2014-07-16","Standard Class","California","Phones" +"Furniture","Los Angeles","United States","GK-14620","Grace Kelly",0.2,"2014-07-13","US-2014-165862",90049,"FUR-TA-10002855","Bevis Round Conference Table Top & Single Column Base",4.3902,3,"West",4552,351.21603,"Corporate","2014-07-17","Standard Class","California","Tables" +"Furniture","Philadelphia","United States","ST-20530","Shui Tom",0.2,"2014-07-14","CA-2014-164182",19140,"FUR-FU-10001057","Tensor Track Tree Floor Lamp",1.1994,2,"East",6934,31.984,"Consumer","2014-07-18","Standard Class","Pennsylvania","Furnishings" +"Office Supplies","Newark","United States","SS-20515","Shirley Schmidt",0,"2014-07-14","CA-2014-124464",19711,"OFF-AP-10000576","Belkin 7 Outlet SurgeMaster II",11.0543995,1,"East",7624,39.480003,"Home Office","2014-07-20","Standard Class","Delaware","Appliances" +"Office Supplies","Philadelphia","United States","ST-20530","Shui Tom",0.2,"2014-07-14","CA-2014-164182",19140,"OFF-AR-10001044","BOSTON Ranger #55 Pencil Sharpener. 
Black",4.6782,2,"East",6935,41.584003,"Consumer","2014-07-18","Standard Class","Pennsylvania","Art" +"Office Supplies","New York City","United States","BF-10975","Barbara Fisher",0,"2014-07-14","CA-2014-109904",10009,"OFF-AR-10004999","Newell 315",4.485,3,"East",8053,17.94,"Corporate","2014-07-17","Second Class","New York","Art" +"Office Supplies","Aurora","United States","AW-10930","Arthur Wiediger",0.8,"2014-07-14","US-2014-103905",60505,"OFF-BI-10001098","Acco D-Ring Binder w/DublLock",-46.3946,7,"Central",2207,29.932001,"Home Office","2014-07-20","Standard Class","Illinois","Binders" +"Office Supplies","Phoenix","United States","PB-19150","Philip Brown",0.2,"2014-07-14","US-2014-150532",85023,"OFF-ST-10000760","Eldon Fold 'N Roll Cart System",6.291,5,"West",2996,55.920002,"Consumer","2014-07-21","Standard Class","Arizona","Storage" +"Technology","Aurora","United States","AW-10930","Arthur Wiediger",0.2,"2014-07-14","US-2014-103905",60505,"TEC-PH-10001552","I Need's 3d Hello Kitty Hybrid Silicone Case Cover for HTC One X 4g with 3d Hello Kitty Stylus Pen Green/pink",3.8272,4,"Central",2208,38.272,"Home Office","2014-07-20","Standard Class","Illinois","Phones" +"Technology","Philadelphia","United States","ST-20530","Shui Tom",0.4,"2014-07-14","CA-2014-164182",19140,"TEC-PH-10002070","Griffin GC36547 PowerJolt SE Lightning Charger",-2.249,1,"East",6932,13.4939995,"Consumer","2014-07-18","Standard Class","Pennsylvania","Phones" +"Technology","Philadelphia","United States","ST-20530","Shui Tom",0.4,"2014-07-14","CA-2014-164182",19140,"TEC-PH-10002583","iOttie HLCRIO102 Car Mount",-13.993,2,"East",6933,23.987999,"Consumer","2014-07-18","Standard Class","Pennsylvania","Phones" +"Office Supplies","Sioux Falls","United States","VW-21775","Victoria Wilson",0,"2014-07-15","CA-2014-115980",57103,"OFF-FA-10000304","Advantus Push Pins",2.6813998,3,"Central",3271,6.54,"Corporate","2014-07-19","Standard Class","South Dakota","Fasteners" +"Technology","Sioux Falls","United 
States","VW-21775","Victoria Wilson",0,"2014-07-15","CA-2014-115980",57103,"TEC-AC-10003709","Maxell 4.7GB DVD-R 5/Pack",1.3068,3,"Central",3270,2.97,"Corporate","2014-07-19","Standard Class","South Dakota","Accessories" +"Furniture","Tucson","United States","AH-10120","Adrian Hane",0.2,"2014-07-18","CA-2014-123295",85705,"FUR-CH-10002372","Office Star - Ergonomically Designed Knee Chair",-25.9136,4,"West",2103,259.13602,"Home Office","2014-07-18","Same Day","Arizona","Chairs" +"Office Supplies","New Rochelle","United States","JL-15130","Jack Lebron",0.2,"2014-07-18","CA-2014-138198",10801,"OFF-BI-10002103","Cardinal Slant-D Ring Binder. Heavy Gauge Vinyl",4.5188003,2,"East",9030,13.903999,"Consumer","2014-07-23","Standard Class","New York","Binders" +"Furniture","Bristol","United States","CA-12310","Christine Abelman",0.3,"2014-07-19","US-2014-150434",6010,"FUR-TA-10004152","Barricks 18"" x 48"" Non-Folding Utility Table with Bottom Storage Shelf",-4.032,1,"East",3355,70.56,"Corporate","2014-07-24","Standard Class","Connecticut","Tables" +"Office Supplies","Great Falls","United States","EM-14140","Eugene Moren",0.2,"2014-07-19","CA-2014-168158",59405,"OFF-BI-10001759","Acco Pressboard Covers with Storage Hooks. 14 7/8"" x 11"". Dark Blue",2.2098,2,"West",5589,6.096,"Home Office","2014-07-24","Standard Class","Montana","Binders" +"Office Supplies","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"OFF-BI-10002160","Acco Hanging Data Binders",1.8288,1,"East",3357,3.81,"Corporate","2014-07-24","Standard Class","Connecticut","Binders" +"Office Supplies","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"OFF-BI-10003694","Avery 3 1/2"" Diskette Storage Pages. 
10/Pack",9.6048,2,"East",3356,20.88,"Corporate","2014-07-24","Standard Class","Connecticut","Binders" +"Technology","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",93.594795,2,"East",3354,359.97998,"Corporate","2014-07-24","Standard Class","Connecticut","Phones" +"Furniture","Dallas","United States","KM-16375","Katherine Murray",0.3,"2014-07-20","CA-2014-143903",75217,"FUR-CH-10002024","HON 5400 Series Task Chairs for Big and Tall",-140.196,2,"Central",3078,981.372,"Home Office","2014-07-24","Standard Class","Texas","Chairs" +"Furniture","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"FUR-FU-10003577","Nu-Dell Leatherette Frames",15.4872,3,"West",5972,43.02,"Consumer","2014-07-22","First Class","California","Furnishings" +"Furniture","Dallas","United States","KM-16375","Katherine Murray",0.6,"2014-07-20","CA-2014-143903",75217,"FUR-FU-10003724","Westinghouse Clip-On Gooseneck Lamps",-14.229,5,"Central",3077,16.740002,"Home Office","2014-07-24","Standard Class","Texas","Furnishings" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-AR-10003481","Newell 348",3.8047998,4,"West",8132,13.120001,"Corporate","2014-07-23","First Class","California","Art" +"Office Supplies","Springfield","United States","SW-20245","Scot Wooten",0.7,"2014-07-20","CA-2014-142965",45503,"OFF-BI-10000977","Ibico Plastic Spiral Binding Combs",-21.887999,3,"East",4396,27.359999,"Consumer","2014-07-20","Same Day","Ohio","Binders" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0.2,"2014-07-20","CA-2014-141726",92105,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",3.1552,2,"West",5970,8.7039995,"Consumer","2014-07-22","First Class","California","Binders" +"Office Supplies","San Francisco","United States","RD-19720","Roger 
Demir",0.2,"2014-07-20","CA-2014-157546",94122,"OFF-BI-10002498","Clear Mylar Reinforcing Strips",30.2778,6,"West",7229,89.712,"Consumer","2014-07-22","First Class","California","Binders" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"OFF-PA-10000418","Xerox 189",50.328,1,"West",5969,104.850006,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"OFF-PA-10002230","Xerox 1897",9.7608,4,"West",5971,19.92,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-PA-10003971","Xerox 1965",5.8604,2,"West",8131,11.96,"Corporate","2014-07-23","First Class","California","Paper" +"Office Supplies","San Francisco","United States","RD-19720","Roger Demir",0,"2014-07-20","CA-2014-157546",94122,"OFF-PA-10004569","Wirebound Message Books. Two 4 1/4"" x 5"" Forms per Page",10.7301,3,"West",7230,22.83,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-ST-10001490","Hot File 7-Pocket. 
Floor Stand",160.62302,3,"West",8133,535.41003,"Corporate","2014-07-23","First Class","California","Storage" +"Office Supplies","Springfield","United States","SW-20245","Scot Wooten",0.2,"2014-07-20","CA-2014-142965",45503,"OFF-ST-10002583","Fellowes Neat Ideas Storage Cubes",-5.1968,1,"East",4395,25.984,"Consumer","2014-07-20","Same Day","Ohio","Storage" +"Office Supplies","Dallas","United States","KM-16375","Katherine Murray",0.2,"2014-07-20","CA-2014-143903",75217,"OFF-ST-10003306","Letter Size Cart",38.572197,3,"Central",3076,342.86398,"Home Office","2014-07-24","Standard Class","Texas","Storage" +"Technology","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 50/Pack",15.0984,2,"West",8130,41.940002,"Corporate","2014-07-23","First Class","California","Accessories" +"Furniture","San Francisco","United States","ME-18010","Michelle Ellison",0.2,"2014-07-21","CA-2014-116932",94122,"FUR-CH-10001215","Global Troy Executive Leather Low-Back Tilter",50.098,2,"West",1872,801.568,"Corporate","2014-07-25","Standard Class","California","Chairs" +"Furniture","Dallas","United States","HM-14860","Harry Marie",0.3,"2014-07-21","CA-2014-129189",75217,"FUR-CH-10004997","Hon Every-Day Series Multi-Task Chairs",-93.99,5,"Central",8024,657.93005,"Corporate","2014-07-25","Standard Class","Texas","Chairs" +"Furniture","San Francisco","United States","ME-18010","Michelle Ellison",0.2,"2014-07-21","CA-2014-116932",94122,"FUR-TA-10004175","Hon 30"" x 60"" Table with Locking Drawer",27.2848,1,"West",1873,272.848,"Corporate","2014-07-25","Standard Class","California","Tables" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.8,"2014-07-21","CA-2014-129189",75217,"OFF-AP-10000124","Acco 6 Outlet Guardian Basic Surge Suppressor",-12.979199,3,"Central",8022,4.992,"Corporate","2014-07-25","Standard Class","Texas","Appliances" +"Office Supplies","San Francisco","United 
States","ME-18010","Michelle Ellison",0,"2014-07-21","CA-2014-116932",94122,"OFF-AR-10002067","Newell 334",25.792002,5,"West",1871,99.2,"Corporate","2014-07-25","Standard Class","California","Art" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.8,"2014-07-21","CA-2014-129189",75217,"OFF-BI-10000494","Acco Economy Flexible Poly Round Ring Binder",-1.8269999,1,"Central",8025,1.0439999,"Corporate","2014-07-25","Standard Class","Texas","Binders" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.2,"2014-07-21","CA-2014-129189",75217,"OFF-EN-10003567","Inter-Office Recycled Envelopes. Brown Kraft. Button-String.10"" x 13"" . 100/Box",29.673,5,"Central",8023,87.92,"Corporate","2014-07-25","Standard Class","Texas","Envelopes" +"Technology","Omaha","United States","PG-18820","Patrick Gardner",0,"2014-07-21","CA-2014-109890",68104,"TEC-PH-10004100","Griffin GC17055 Auxiliary Audio Cable",10.074399,2,"Central",8481,35.980003,"Consumer","2014-07-27","Standard Class","Nebraska","Phones" +"Furniture","San Francisco","United States","NP-18325","Naresj Patel",0.2,"2014-07-22","CA-2014-117464",94122,"FUR-CH-10000155","Global Comet Stacking Armless Chair",71.771996,3,"West",7168,717.72003,"Consumer","2014-07-24","Second Class","California","Chairs" +"Furniture","San Francisco","United States","NP-18325","Naresj Patel",0.2,"2014-07-22","CA-2014-117464",94122,"FUR-TA-10004767","Safco Drafting Table",19.1646,3,"West",7170,170.352,"Consumer","2014-07-24","Second Class","California","Tables" +"Office Supplies","New York City","United States","EM-14065","Erin Mull",0,"2014-07-22","CA-2014-127691",10024,"OFF-AR-10002053","Premium Writing Pencils. Soft. 
#2 by Central Association for the Blind",1.6688,2,"East",483,5.96,"Consumer","2014-07-27","Standard Class","New York","Art" +"Office Supplies","San Francisco","United States","NP-18325","Naresj Patel",0,"2014-07-22","CA-2014-117464",94122,"OFF-AR-10003190","Newell 32",3.2256002,4,"West",7167,11.52,"Consumer","2014-07-24","Second Class","California","Art" +"Office Supplies","Los Angeles","United States","AB-10255","Alejandro Ballentine",0,"2014-07-22","CA-2014-122679",90008,"OFF-AR-10004757","Crayola Colored Pencils",6.4944005,6,"West",9484,19.68,"Home Office","2014-07-28","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","NP-18325","Naresj Patel",0,"2014-07-22","CA-2014-117464",94122,"OFF-ST-10003058","Eldon Mobile Mega Data Cart Mega Stackable Add-On Trays",68.585,10,"West",7169,236.5,"Consumer","2014-07-24","Second Class","California","Storage" +"Office Supplies","Houston","United States","JL-15850","John Lucas",0.2,"2014-07-22","US-2014-164644",77095,"OFF-ST-10003123","Fellowes Bases and Tops For Staxonsteel/High-Stak Systems",1.3316,1,"Central",9675,26.632002,"Consumer","2014-07-24","Second Class","Texas","Storage" +"Technology","New York City","United States","EM-14065","Erin Mull",0,"2014-07-22","CA-2014-127691",10024,"TEC-AC-10002567","Logitech G602 Wireless Gaming Mouse",57.5928,2,"East",484,159.98,"Consumer","2014-07-27","Standard Class","New York","Accessories" +"Office Supplies","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"OFF-AR-10000658","Newell 324",0.92399997,1,"West",375,9.24,"Consumer","2014-07-27","Standard Class","Arizona","Art" +"Office Supplies","Tucson","United States","AG-10900","Arthur Gainer",0.7,"2014-07-23","US-2014-119137",85705,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",-5.712,5,"West",373,8.160001,"Consumer","2014-07-27","Standard Class","Arizona","Binders" +"Office Supplies","San Francisco","United States","NC-18535","Nick 
Crebassa",0,"2014-07-23","CA-2014-145254",94122,"OFF-SU-10004664","Acme Softgrip Scissors",11.803,5,"West",9423,40.7,"Corporate","2014-07-27","Standard Class","California","Supplies" +"Technology","New York City","United States","AG-10270","Alejandro Grove",0,"2014-07-23","CA-2014-103058",10011,"TEC-AC-10001314","Case Logic 2.4GHz Wireless Keyboard",7.9984,2,"East",7411,99.98,"Consumer","2014-07-24","First Class","New York","Accessories" +"Technology","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"TEC-AC-10002076","Microsoft Natural Keyboard Elite",-29.94,10,"West",376,479.04,"Consumer","2014-07-27","Standard Class","Arizona","Accessories" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0,"2014-07-23","CA-2014-145254",94122,"TEC-AC-10002167","Imation 8gb Micro Traveldrive Usb 2.0 Flash Drive",4.9500003,3,"West",9425,45,"Corporate","2014-07-27","Standard Class","California","Accessories" +"Technology","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"TEC-AC-10003911","NETGEAR AC1750 Dual Band Gigabit Smart WiFi Router",179.1888,8,"West",374,1023.93604,"Consumer","2014-07-27","Standard Class","Arizona","Accessories" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0.2,"2014-07-23","CA-2014-145254",94122,"TEC-PH-10000441","VTech DS6151",60.4752,6,"West",9422,604.752,"Corporate","2014-07-27","Standard Class","California","Phones" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0.2,"2014-07-23","CA-2014-145254",94122,"TEC-PH-10004531","AT&T CL2909",37.797,3,"West",9424,302.376,"Corporate","2014-07-27","Standard Class","California","Phones" +"Furniture","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"FUR-FU-10004351","Staple-based wall hangings",34.284805,8,"West",2507,77.92,"Consumer","2014-07-27","Second Class","California","Furnishings" +"Office Supplies","Los 
Angeles","United States","VF-21715","Vicky Freymann",0,"2014-07-25","CA-2014-146528",90045,"OFF-PA-10002195","Xerox 1966",3.1752,1,"West",3647,6.48,"Home Office","2014-07-27","Second Class","California","Paper" +"Office Supplies","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"OFF-ST-10001228","Fellowes Personal Hanging Folder Files. Navy",15.0416,4,"West",2505,53.72,"Consumer","2014-07-27","Second Class","California","Storage" +"Office Supplies","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"OFF-SU-10000151","High Speed Automatic Electric Letter Opener",327.506,5,"West",2506,8187.65,"Consumer","2014-07-27","Second Class","California","Supplies" +"Office Supplies","Los Angeles","United States","VF-21715","Vicky Freymann",0,"2014-07-25","CA-2014-146528",90045,"OFF-SU-10002522","Acme Kleen Earth Office Shears",4.5008,4,"West",3648,15.52,"Home Office","2014-07-27","Second Class","California","Supplies" +"Furniture","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"FUR-CH-10000553","Metal Folding Chairs. Beige. 4/Carton",18.3276,2,"South",7341,67.880005,"Consumer","2014-08-01","Standard Class","Georgia","Chairs" +"Furniture","North Las Vegas","United States","KM-16720","Kunst Miller",0.2,"2014-07-26","CA-2014-126760",89031,"FUR-CH-10003312","Hon 2090 “Pillow Soft” Series Mid Back Swivel/Tilt Chairs",-109.5822,3,"West",3839,674.352,"Consumer","2014-08-02","Standard Class","Nevada","Chairs" +"Furniture","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"FUR-FU-10000719","DAX Cubicle Frames. 
8-1/2 x 11",9.2556,3,"South",7343,25.710001,"Consumer","2014-08-01","Standard Class","Georgia","Furnishings" +"Furniture","North Las Vegas","United States","KM-16720","Kunst Miller",0,"2014-07-26","CA-2014-126760",89031,"FUR-FU-10004018","Tensor Computer Mounted Lamp",36.1827,9,"West",3840,134.01,"Consumer","2014-08-02","Standard Class","Nevada","Furnishings" +"Furniture","San Antonio","United States","LF-17185","Luke Foster",0.6,"2014-07-26","CA-2014-169019",78207,"FUR-FU-10004666","DAX Clear Channel Poster Frame",-10.060201,3,"Central",9776,17.496,"Consumer","2014-07-30","Standard Class","Texas","Furnishings" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-AP-10003281","Acco 6 Outlet Guardian Standard Surge Suppressor",-12.09,2,"Central",9780,4.8360004,"Consumer","2014-07-30","Standard Class","Texas","Appliances" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10001524","GBC Premium Transparent Covers with Diagonal Lined Pattern",-26.854399,4,"Central",9777,16.784,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10001679","GBC Instant Index System for Binding Systems",-13.320001,5,"Central",9779,8.88,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10004995","GBC DocuBind P400 Electric Binding System",-3701.8928,8,"Central",9775,2177.584,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"OFF-LA-10002762","Avery 485",76.558304,13,"South",7342,162.89,"Consumer","2014-08-01","Standard Class","Georgia","Labels" +"Office Supplies","Chicago","United 
States","CL-11890","Carl Ludwig",0.2,"2014-07-26","US-2014-155894",60623,"OFF-ST-10004804","Belkin 19"" Vented Equipment Shelf. Black",-29.343601,3,"Central",2344,123.552,"Consumer","2014-07-30","Second Class","Illinois","Storage" +"Technology","Draper","United States","JO-15145","Jack O'Briant",0,"2014-07-26","CA-2014-159121",84020,"TEC-AC-10002006","Memorex Micro Travel Drive 16 GB",34.6983,7,"West",6423,111.93,"Corporate","2014-08-01","Standard Class","Utah","Accessories" +"Technology","San Antonio","United States","LF-17185","Luke Foster",0.2,"2014-07-26","CA-2014-169019",78207,"TEC-AC-10002076","Microsoft Natural Keyboard Elite",-26.946001,9,"Central",9778,431.13602,"Consumer","2014-07-30","Standard Class","Texas","Accessories" +"Technology","North Las Vegas","United States","KM-16720","Kunst Miller",0,"2014-07-26","CA-2014-126760",89031,"TEC-AC-10004814","Logitech Illuminated Ultrathin Keyboard with Backlighting",70.0977,3,"West",3841,170.97,"Consumer","2014-08-02","Standard Class","Nevada","Accessories" +"Technology","North Las Vegas","United States","KM-16720","Kunst Miller",0.2,"2014-07-26","CA-2014-126760",89031,"TEC-PH-10001363","Apple iPhone 5S",113.998,2,"West",3838,911.9841,"Consumer","2014-08-02","Standard Class","Nevada","Phones" +"Office Supplies","New York City","United States","AS-10045","Aaron Smayling",0,"2014-07-27","US-2014-150126",10035,"OFF-PA-10002709","Xerox 1956",32.232197,11,"East",4580,65.78,"Corporate","2014-08-02","Standard Class","New York","Paper" +"Office Supplies","Los Angeles","United States","BB-10990","Barry Blumstein",0,"2014-07-27","CA-2014-169642",90036,"OFF-ST-10002574","SAFCO Commercial Wire Shelving. 
Black",0,2,"West",8888,276.28,"Corporate","2014-07-30","Second Class","California","Storage" +"Technology","San Francisco","United States","GW-14605","Giulietta Weimer",0,"2014-07-27","CA-2014-124709",94122,"TEC-AC-10002842","WD My Passport Ultra 2TB Portable External Hard Drive",38.08,2,"West",3866,238,"Consumer","2014-07-29","Second Class","California","Accessories" +"Furniture","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"FUR-FU-10003623","DataProducts Ampli Magnifier Task Lamp. Black.",12.9888,6,"South",5688,129.888,"Consumer","2014-07-28","Same Day","Florida","Furnishings" +"Office Supplies","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"OFF-EN-10000781","#10- 4 1/8"" x 9 1/2"" Recycled Envelopes",16.5186,7,"South",5689,48.944004,"Consumer","2014-07-28","Same Day","Florida","Envelopes" +"Office Supplies","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"OFF-FA-10000621","OIC Colored Binder Clips. 
Assorted Sizes",5.191,5,"South",5687,14.320001,"Consumer","2014-07-28","Same Day","Florida","Fasteners" +"Furniture","Seattle","United States","TS-21430","Tom Stivers",0,"2014-07-30","CA-2014-111192",98103,"FUR-BO-10002916","Rush Hierlooms Collection 1"" Thick Stackable Bookcases",259.8896,8,"West",5162,1367.8401,"Corporate","2014-08-05","Standard Class","Washington","Bookcases" +"Furniture","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"FUR-FU-10000629","9-3/4 Diameter Round Wall Clock",12.1352005,4,"South",1839,44.128,"Corporate","2014-08-05","Standard Class","North Carolina","Furnishings" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",10.2245,1,"South",1840,62.920002,"Corporate","2014-08-05","Standard Class","North Carolina","Appliances" +"Office Supplies","San Francisco","United States","IL-15100","Ivan Liston",0.2,"2014-08-01","CA-2014-152296",94122,"OFF-BI-10004506","Wilson Jones data.warehouse D-Ring Binders with DublLock",6.9132,3,"West",2173,19.752,"Consumer","2014-08-03","First Class","California","Binders" +"Office Supplies","Philadelphia","United States","AS-10630","Ann Steele",0.2,"2014-08-01","CA-2014-162684",19120,"OFF-FA-10000992","Acco Clips to Go Binder Clips. 
24 Clips in Two Sizes",1.9169999,2,"East",2411,5.68,"Home Office","2014-08-06","Standard Class","Pennsylvania","Fasteners" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-LA-10000240","Self-Adhesive Address Labels for Typewriters by Universal",5.9211,3,"South",1838,17.544,"Corporate","2014-08-05","Standard Class","North Carolina","Labels" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-PA-10002377","Xerox 1916",29.363998,2,"South",1841,78.304,"Corporate","2014-08-05","Standard Class","North Carolina","Paper" +"Office Supplies","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",12.549,5,"Central",6173,26.7,"Home Office","2014-08-08","Standard Class","Missouri","Binders" +"Office Supplies","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"OFF-ST-10002011","Smead Adjustable Mobile File Trolley with Lockable Top",226.3626,2,"Central",6175,838.38,"Home Office","2014-08-08","Standard Class","Missouri","Storage" +"Technology","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"TEC-AC-10003237","Memorex Micro Travel Drive 4 GB",9.116,2,"Central",6174,21.2,"Home Office","2014-08-08","Standard Class","Missouri","Accessories" +"Furniture","Denver","United States","BS-11755","Bruce Stewart",0.5,"2014-08-03","CA-2014-133690",80219,"FUR-TA-10004289","BoxOffice By Design Rectangular and Half-Moon Meeting Room Tables",-161.87502,2,"West",202,218.75,"Consumer","2014-08-05","First Class","Colorado","Tables" +"Office Supplies","Denver","United States","BS-11755","Bruce Stewart",0.2,"2014-08-03","CA-2014-133690",80219,"OFF-AP-10003622","Bravo II Megaboss 12-Amp Hard Body Upright. Replacement Belts. 
2 Belts per Pack",0.2925,1,"West",203,2.6,"Consumer","2014-08-05","First Class","Colorado","Appliances" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-PA-10001019","Xerox 1884",18.7812,2,"East",856,39.960003,"Consumer","2014-08-08","Standard Class","New York","Paper" +"Office Supplies","Glendale","United States","GH-14410","Gary Hansen",0.2,"2014-08-03","CA-2014-128986",85301,"OFF-PA-10001289","White Computer Printout Paper by Universal",33.7212,3,"West",3156,93.023994,"Home Office","2014-08-05","Second Class","Arizona","Paper" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-ST-10003221","Staple magnet",5.7671995,2,"East",858,21.359999,"Consumer","2014-08-08","Standard Class","New York","Storage" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-SU-10002537","Acme Box Cutter Scissors",26.598,10,"East",857,102.3,"Consumer","2014-08-08","Standard Class","New York","Supplies" +"Office Supplies","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"OFF-AP-10001271","Eureka The Boss Cordless Rechargeable Stick Vac",27.529202,2,"East",9157,101.96,"Corporate","2014-08-08","Standard Class","Maine","Appliances" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-AP-10003590","Hoover WindTunnel Plus Canister Vacuum",305.13,3,"West",1376,1089.75,"Consumer","2014-08-09","Second Class","Utah","Appliances" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-AR-10000896","Newell 329",4.264,5,"West",1378,16.4,"Consumer","2014-08-09","Second Class","Utah","Art" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie 
Dominguez",0.2,"2014-08-04","CA-2014-126361",84062,"OFF-BI-10002852","Ibico Standard Transparent Covers",4.7791996,1,"West",1381,13.184,"Consumer","2014-08-09","Second Class","Utah","Binders" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-PA-10000806","Xerox 1934",219.44159,8,"West",1377,447.84,"Consumer","2014-08-09","Second Class","Utah","Paper" +"Office Supplies","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"OFF-PA-10004353","Southworth 25% Cotton Premium Laser Paper and Envelopes",124.67519,13,"East",9158,259.74002,"Corporate","2014-08-08","Standard Class","Maine","Paper" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-ST-10002289","Safco Wire Cube Shelving System. For Use as 4 or 5 14"" Cubes. Black",7.945,5,"West",1380,158.9,"Consumer","2014-08-09","Second Class","Utah","Storage" +"Technology","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"TEC-AC-10002473","Maxell 4.7GB DVD-R",104.72219,9,"East",9159,255.42,"Corporate","2014-08-08","Standard Class","Maine","Accessories" +"Technology","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0.2,"2014-08-04","CA-2014-126361",84062,"TEC-PH-10002310","Panasonic KX T7731-B Digital phone",34.996502,5,"West",1379,399.96,"Consumer","2014-08-09","Second Class","Utah","Phones" +"Furniture","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"FUR-CH-10002335","Hon GuestStacker Chair",294.67102,5,"East",5867,1133.3501,"Corporate","2014-08-07","Second Class","Connecticut","Chairs" +"Furniture","Los Angeles","United States","CS-12130","Chad Sievert",0.2,"2014-08-05","CA-2014-118962",90004,"FUR-CH-10003817","Global Value Steno Chair. 
Gray",21.259,7,"West",174,340.144,"Consumer","2014-08-09","Standard Class","California","Chairs" +"Furniture","Texas City","United States","GZ-14470","Gary Zandusky",0.3,"2014-08-05","CA-2014-124723",77590,"FUR-TA-10001307","SAFCO PlanMaster Heigh-Adjustable Drafting Table Base. 43w x 30d x 30-37h. Black",41.934002,2,"Central",8894,489.23,"Consumer","2014-08-12","Standard Class","Texas","Tables" +"Office Supplies","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"OFF-AP-10002765","Fellowes Advanced Computer Series Surge Protectors",22.2516,3,"East",5865,79.47,"Corporate","2014-08-07","Second Class","Connecticut","Appliances" +"Office Supplies","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"OFF-AR-10004165","Binney & Smith inkTank Erasable Pocket Highlighter. Chisel Tip. Yellow",2.0064,2,"East",5866,4.56,"Corporate","2014-08-07","Second Class","Connecticut","Art" +"Office Supplies","New York City","United States","SP-20650","Stephanie Phelps",0,"2014-08-05","CA-2014-110065",10009,"OFF-AR-10004165","Binney & Smith inkTank Erasable Pocket Highlighter. Chisel Tip. Yellow",7.0224,7,"East",6458,15.96,"Corporate","2014-08-11","Standard Class","New York","Art" +"Office Supplies","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"OFF-AR-10004602","Boston KS Multi-Size Manual Pencil Sharpener",12.874399,2,"West",5360,45.980003,"Consumer","2014-08-11","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","CS-12130","Chad Sievert",0,"2014-08-05","CA-2014-118962",90004,"OFF-PA-10000659","Adams Phone Message Book. Professional. 400 Message Capacity. 
5 3/6” x 11”",9.841801,3,"West",172,20.94,"Consumer","2014-08-09","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","CS-12130","Chad Sievert",0,"2014-08-05","CA-2014-118962",90004,"OFF-PA-10001144","Xerox 1913",53.260803,2,"West",173,110.96,"Consumer","2014-08-09","Standard Class","California","Paper" +"Office Supplies","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"OFF-ST-10000321","Akro Stacking Bins",0.63119996,2,"West",5359,15.78,"Consumer","2014-08-11","Standard Class","California","Storage" +"Technology","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"TEC-AC-10004510","Logitech Desktop MK120 Mouse and keyboard Combo",1.636,1,"West",5358,16.359999,"Consumer","2014-08-11","Standard Class","California","Accessories" +"Technology","New York City","United States","SP-20650","Stephanie Phelps",0,"2014-08-05","CA-2014-110065",10009,"TEC-PH-10002468","Plantronics CS 50-USB - headset - Convertible. Monaural",36.7173,1,"East",6457,135.98999,"Corporate","2014-08-11","Standard Class","New York","Phones" +"Technology","Columbia","United States","RF-19735","Roland Fjeld",0,"2014-08-06","CA-2014-115357",29203,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 
50/Pack",22.647602,3,"South",3773,62.91,"Consumer","2014-08-11","Second Class","South Carolina","Accessories" +"Technology","New York City","United States","TB-21625","Trudy Brown",0,"2014-08-06","CA-2014-137575",10035,"TEC-AC-10004571","Logitech G700s Rechargeable Gaming Mouse",83.9916,2,"East",8544,199.98,"Consumer","2014-08-11","Standard Class","New York","Accessories" +"Furniture","Jacksonville","United States","VG-21790","Vivek Gonzalez",0.2,"2014-08-08","CA-2014-156790",32216,"FUR-BO-10000468","O'Sullivan 2-Shelf Heavy-Duty Bookcases",-7.7728,4,"South",8955,155.456,"Consumer","2014-08-10","Second Class","Florida","Bookcases" +"Furniture","Glendale","United States","MB-17305","Maria Bertelson",0.2,"2014-08-08","CA-2014-151708",85301,"FUR-FU-10001602","Eldon Delta Triangular Chair Mat. 52"" x 58"". Clear",-3.0344,4,"West",1466,121.376,"Consumer","2014-08-14","Standard Class","Arizona","Furnishings" +"Furniture","San Diego","United States","LR-16915","Lena Radford",0,"2014-08-08","CA-2014-131450",92024,"FUR-FU-10001979","Dana Halogen Swing-Arm Architect Lamp",91.7728,8,"West",598,327.76,"Consumer","2014-08-15","Standard Class","California","Furnishings" +"Furniture","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"FUR-FU-10002088","Nu-Dell Float Frame 11 x 14 1/2",22.629602,6,"Central",5072,53.88,"Home Office","2014-08-12","Standard Class","Michigan","Furnishings" +"Office Supplies","Trenton","United States","MA-17560","Matt Abelman",0.1,"2014-08-08","CA-2014-124478",48183,"OFF-AP-10002495","Acco Smartsocket Table Surge Protector. 
6 Color-Coded Adapter Outlets",37.23,3,"Central",5070,167.535,"Home Office","2014-08-12","Standard Class","Michigan","Appliances" +"Office Supplies","San Diego","United States","LR-16915","Lena Radford",0,"2014-08-08","CA-2014-131450",92024,"OFF-AP-10004708","Fellowes Superior 10 Outlet Split Surge Protector",22.074799,2,"West",595,76.119995,"Consumer","2014-08-15","Standard Class","California","Appliances" +"Office Supplies","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"OFF-EN-10002500","Globe Weis Peel & Seel First Class Envelopes",17.253,3,"Central",5071,38.34,"Home Office","2014-08-12","Standard Class","Michigan","Envelopes" +"Office Supplies","Vacaville","United States","TM-21010","Tamara Manning",0,"2014-08-08","CA-2014-154669",95687,"OFF-ST-10000532","Advantus Rolling Drawer Organizers",110.0528,11,"West",2751,423.28,"Consumer","2014-08-11","Second Class","California","Storage" +"Technology","Glendale","United States","MB-17305","Maria Bertelson",0.2,"2014-08-08","CA-2014-151708",85301,"TEC-AC-10001767","SanDisk Ultra 64 GB MicroSDHC Class 10 Memory Card",-10.7973,3,"West",1467,95.976,"Consumer","2014-08-14","Standard Class","Arizona","Accessories" +"Technology","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"TEC-CO-10001571","Sharp 1540cs Digital Laser Copier",274.995,1,"Central",5069,549.99005,"Home Office","2014-08-12","Standard Class","Michigan","Copiers" +"Technology","San Diego","United States","LR-16915","Lena Radford",0.2,"2014-08-08","CA-2014-131450",92024,"TEC-CO-10004115","Sharp AL-1530CS Digital Copier",434.9913,3,"West",596,1199.976,"Consumer","2014-08-15","Standard Class","California","Copiers" +"Technology","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"TEC-PH-10001128","Motorola Droid Maxx",83.99439,2,"Central",5073,299.97998,"Home Office","2014-08-12","Standard Class","Michigan","Phones" +"Technology","San 
Diego","United States","LR-16915","Lena Radford",0.2,"2014-08-08","CA-2014-131450",92024,"TEC-PH-10002398","AT&T 1070 Corded Phone",55.745003,5,"West",597,445.96,"Consumer","2014-08-15","Standard Class","California","Phones" +"Office Supplies","Phoenix","United States","RD-19720","Roger Demir",0.7,"2014-08-09","CA-2014-161249",85023,"OFF-BI-10001097","Avery Hole Reinforcements",-6.5415,5,"West",7434,9.345,"Consumer","2014-08-13","Standard Class","Arizona","Binders" +"Office Supplies","Seattle","United States","JH-15985","Joseph Holt",0.2,"2014-08-09","CA-2014-169726",98103,"OFF-BI-10004600","Ibico Ibimaster 300 Manual Binding System",643.98254,7,"West",3629,2060.744,"Consumer","2014-08-13","Standard Class","Washington","Binders" +"Office Supplies","Phoenix","United States","RD-19720","Roger Demir",0.2,"2014-08-09","CA-2014-161249",85023,"OFF-FA-10004838","Super Bands. 12/Pack",-0.9486,3,"West",7433,4.464,"Consumer","2014-08-13","Standard Class","Arizona","Fasteners" +"Office Supplies","San Diego","United States","ED-13885","Emily Ducich",0,"2014-08-09","CA-2014-110527",92037,"OFF-LA-10000262","Avery 494",9.6048,8,"West",1741,20.88,"Home Office","2014-08-16","Standard Class","California","Labels" +"Office Supplies","Fresno","United States","GM-14500","Gene McClure",0,"2014-08-09","CA-2014-141901",93727,"OFF-PA-10001667","Great White Multi-Use Recycled Paper (20Lb. 
and 84 Bright)",2.6909997,1,"West",7070,5.98,"Consumer","2014-08-14","Standard Class","California","Paper" +"Office Supplies","Saint Petersburg","United States","AG-10525","Andy Gerbode",0.2,"2014-08-09","CA-2014-167850",33710,"OFF-PA-10001937","Xerox 21",5.4431996,3,"South",316,15.552,"Corporate","2014-08-16","Standard Class","Florida","Paper" +"Technology","Seattle","United States","MG-17650","Matthew Grinstein",0.2,"2014-08-09","CA-2014-166471",98103,"TEC-PH-10000038","Jawbone MINI JAMBOX Wireless Bluetooth Speaker",-43.833595,2,"West",3393,219.168,"Home Office","2014-08-13","Standard Class","Washington","Phones" +"Technology","Seattle","United States","MG-17650","Matthew Grinstein",0.2,"2014-08-09","CA-2014-166471",98103,"TEC-PH-10001530","Cisco Unified IP Phone 7945G VoIP phone",68.198,4,"West",3392,1091.1681,"Home Office","2014-08-13","Standard Class","Washington","Phones" +"Technology","Saint Petersburg","United States","AG-10525","Andy Gerbode",0.2,"2014-08-09","CA-2014-167850",33710,"TEC-PH-10002398","AT&T 1070 Corded Phone",22.298,2,"South",315,178.384,"Corporate","2014-08-16","Standard Class","Florida","Phones" +"Furniture","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"FUR-FU-10003691","Eldon Image Series Desk Accessories. Ebony",5.4340005,1,"West",1239,12.35,"Consumer","2014-08-15","Standard Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"OFF-AR-10003903","Sanford 52201 APSCO Electric Pencil Sharpener",10.652201,1,"West",1240,40.97,"Consumer","2014-08-15","Standard Class","Washington","Art" +"Office Supplies","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"OFF-FA-10004854","Vinyl Coated Wire Paper Clips in Organizer Box. 
800/Box",10.791201,2,"West",1241,22.96,"Consumer","2014-08-15","Standard Class","Washington","Fasteners" +"Office Supplies","New York City","United States","KL-16645","Ken Lonsdale",0,"2014-08-11","CA-2014-154641",10035,"OFF-ST-10004459","Tennsco Single-Tier Lockers",18.767,1,"East",3230,375.34,"Consumer","2014-08-16","Standard Class","New York","Storage" +"Furniture","San Francisco","United States","BW-11200","Ben Wallace",0,"2014-08-12","CA-2014-109897",94122,"FUR-FU-10002878","Seth Thomas 14"" Day/Date Wall Clock",31.6128,3,"West",8245,85.44,"Consumer","2014-08-16","Standard Class","California","Furnishings" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-AP-10002403","Acco Smartsocket Color-Coded Six-Outlet AC Adapter Model Surge Protectors",15.8436,6,"South",8290,211.24799,"Home Office","2014-08-16","Standard Class","Florida","Appliances" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-AR-10004344","Bulldog Vacuum Base Pencil Sharpener",4.1965003,5,"South",8288,47.960003,"Home Office","2014-08-16","Standard Class","Florida","Art" +"Office Supplies","Woodstock","United States","LL-16840","Lauren Leatherbury",0,"2014-08-12","CA-2014-153927",30188,"OFF-BI-10000138","Acco Translucent Poly Ring Binders",6.7391996,3,"South",9882,14.04,"Consumer","2014-08-13","First Class","Georgia","Binders" +"Office Supplies","Lakewood","United States","CS-12175","Charles Sheldon",0,"2014-08-12","CA-2014-109302",8701,"OFF-BI-10002854","Performers Binder/Pad Holder. 
Black",98.104996,7,"East",6769,196.20999,"Corporate","2014-08-16","Standard Class","New Jersey","Binders" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-LA-10000443","Avery 501",0.9962999,1,"South",8292,2.9520001,"Home Office","2014-08-16","Standard Class","Florida","Labels" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-PA-10000295","Xerox 229",10.886399,6,"South",8287,31.104,"Home Office","2014-08-16","Standard Class","Florida","Paper" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-SU-10000432","Acco Side-Punched Conventional Columnar Pads",-1.041,2,"South",8291,5.552,"Home Office","2014-08-16","Standard Class","Florida","Supplies" +"Technology","Woodstock","United States","LL-16840","Lauren Leatherbury",0,"2014-08-12","CA-2014-153927",30188,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 
50/Pack",98.1396,13,"South",9883,272.61002,"Consumer","2014-08-13","First Class","Georgia","Accessories" +"Technology","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"TEC-AC-10002473","Maxell 4.7GB DVD-R",41.718597,7,"South",8289,158.928,"Home Office","2014-08-16","Standard Class","Florida","Accessories" +"Technology","San Francisco","United States","BW-11200","Ben Wallace",0.2,"2014-08-12","CA-2014-109897",94122,"TEC-PH-10003691","BlackBerry Q10",50.396,8,"West",8244,806.336,"Consumer","2014-08-16","Standard Class","California","Phones" +"Furniture","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"FUR-CH-10003833","Novimex Fabric Task Chair",-12.196,4,"West",9827,195.136,"Consumer","2014-08-19","Standard Class","California","Chairs" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-08-15","US-2014-164406",94122,"OFF-AP-10003287","Tripp Lite TLP810NET Broadband Surge for Modem/Fax",42.8148,3,"West",9823,152.90999,"Consumer","2014-08-19","Standard Class","California","Appliances" +"Office Supplies","Hollywood","United States","CS-12355","Christine Sundaresam",0.2,"2014-08-15","CA-2014-109043",33021,"OFF-AP-10004708","Fellowes Superior 10 Outlet Split Surge Protector",17.127,5,"South",8996,152.23999,"Consumer","2014-08-17","First Class","Florida","Appliances" +"Office Supplies","Fairfield","United States","CS-12355","Christine Sundaresam",0,"2014-08-15","CA-2014-141005",6824,"OFF-BI-10001989","Premium Transparent Presentation Covers by GBC",30.2112,3,"East",6793,62.940002,"Consumer","2014-08-18","First Class","Connecticut","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"OFF-BI-10002309","Avery Heavy-Duty EZD Binder With Locking Rings",6.2495995,4,"West",9825,17.855999,"Consumer","2014-08-19","Standard Class","California","Binders" +"Office 
Supplies","Dallas","United States","KH-16510","Keith Herrera",0.8,"2014-08-15","CA-2014-138023",75081,"OFF-BI-10003638","GBC Durable Plastic Covers",-52.631996,8,"Central",3295,30.96,"Consumer","2014-08-18","First Class","Texas","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"OFF-BI-10003638","GBC Durable Plastic Covers",15.093,3,"West",9826,46.440002,"Consumer","2014-08-19","Standard Class","California","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-08-15","US-2014-164406",94122,"OFF-PA-10000167","Xerox 1925",41.822998,3,"West",9824,92.94,"Consumer","2014-08-19","Standard Class","California","Paper" +"Office Supplies","Hollywood","United States","CS-12355","Christine Sundaresam",0.2,"2014-08-15","CA-2014-109043",33021,"OFF-PA-10000312","Xerox 1955",29.692001,5,"South",8995,91.36,"Consumer","2014-08-17","First Class","Florida","Paper" +"Furniture","Philadelphia","United States","TS-21610","Troy Staebel",0.4,"2014-08-16","CA-2014-142839",19143,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",-227.4912,6,"East",1690,853.09204,"Consumer","2014-08-20","Standard Class","Pennsylvania","Tables" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-BI-10000829","Avery Non-Stick Binders",8.2616005,4,"South",2532,17.96,"Corporate","2014-08-21","Standard Class","Arkansas","Binders" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-FA-10002676","Colored Push Pins",4.5611997,7,"South",2533,12.67,"Corporate","2014-08-21","Standard Class","Arkansas","Fasteners" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-PA-10000595","Xerox 1929",52.531998,5,"South",2531,114.2,"Corporate","2014-08-21","Standard Class","Arkansas","Paper" +"Office 
Supplies","Houston","United States","KB-16585","Ken Black",0.2,"2014-08-17","CA-2014-129168",77095,"OFF-PA-10001639","Xerox 203",5.4431996,3,"Central",2045,15.552,"Corporate","2014-08-23","Standard Class","Texas","Paper" +"Technology","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"TEC-AC-10000844","Logitech Gaming G510s - Keyboard",122.385605,4,"South",2534,339.96,"Corporate","2014-08-21","Standard Class","Arkansas","Accessories" +"Furniture","Jonesboro","United States","LH-16900","Lena Hernandez",0,"2014-08-19","US-2014-156559",72401,"FUR-BO-10000711","Hon Metal Bookcases. Gray",172.4814,9,"South",4089,638.82,"Consumer","2014-08-26","Standard Class","Arkansas","Bookcases" +"Furniture","Los Angeles","United States","DW-13195","David Wiener",0,"2014-08-19","CA-2014-133158",90045,"FUR-FU-10000723","Deflect-o EconoMat Studded. No Bevel Mat for Low Pile Carpeting",26.0316,7,"West",7003,289.24002,"Corporate","2014-08-21","Second Class","California","Furnishings" +"Office Supplies","Columbus","United States","JK-15625","Jim Karlsson",0.2,"2014-08-19","CA-2014-121727",43229,"OFF-AR-10004930","Turquoise Lead Holder with Pocket Clip",1.742,2,"East",2340,10.72,"Consumer","2014-08-24","Standard Class","Ohio","Art" +"Office Supplies","Los Angeles","United States","DW-13195","David Wiener",0.2,"2014-08-19","CA-2014-133158",90045,"OFF-BI-10000632","Satellite Sectional Post Binders",22.5732,2,"West",7004,69.456,"Corporate","2014-08-21","Second Class","California","Binders" +"Office Supplies","Columbus","United States","MS-17770","Maxwell Schwartz",0.7,"2014-08-19","US-2014-164616",43229,"OFF-BI-10001718","GBC DocuBind P50 Personal Binding Machine",-58.8616,4,"East",2377,76.77599,"Consumer","2014-08-21","Second Class","Ohio","Binders" +"Office Supplies","Columbus","United States","BS-11365","Bill Shonely",0,"2014-08-19","US-2014-143581",31907,"OFF-ST-10000991","Space Solutions HD Industrial Steel 
Shelving.",10.347301,3,"South",8246,344.91,"Corporate","2014-08-23","Standard Class","Georgia","Storage" +"Office Supplies","Columbus","United States","MS-17770","Maxwell Schwartz",0.2,"2014-08-19","US-2014-164616",43229,"OFF-SU-10004768","Acme Kleencut Forged Steel Scissors",1.148,2,"East",2378,9.184,"Consumer","2014-08-21","Second Class","Ohio","Supplies" +"Furniture","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"FUR-CH-10001797","Safco Chair Connectors. 6/Carton",145.0696,13,"South",9733,500.24002,"Corporate","2014-08-25","Standard Class","Virginia","Chairs" +"Furniture","Chicago","United States","CR-12730","Craig Reiter",0.3,"2014-08-20","CA-2014-166716",60610,"FUR-CH-10004495","Global Leather and Oak Executive Chair. Black",-6.0196,2,"Central",5800,421.372,"Consumer","2014-08-25","Second Class","Illinois","Chairs" +"Office Supplies","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"OFF-BI-10001359","GBC DocuBind TL300 Electric Binding System",421.5853,1,"South",9735,896.99005,"Corporate","2014-08-25","Standard Class","Virginia","Binders" +"Office Supplies","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"OFF-PA-10000246","Riverleaf Stik-Withit Designer Note Cubes",9.2552,2,"South",9734,20.12,"Corporate","2014-08-25","Standard Class","Virginia","Paper" +"Office Supplies","Lakewood","United States","DE-13255","Deanra Eno",0.2,"2014-08-22","CA-2014-123253",44107,"OFF-AR-10002804","Faber Castell Col-Erase Pencils",1.0269,1,"East",4303,3.9120002,"Home Office","2014-08-25","Second Class","Ohio","Art" +"Office Supplies","Saint Charles","United States","JF-15415","Jennifer Ferguson",0,"2014-08-22","US-2014-166828",63301,"OFF-PA-10001846","Xerox 1899",5.6644,2,"Central",9366,11.56,"Consumer","2014-08-25","First Class","Missouri","Paper" +"Office Supplies","North Miami","United States","JP-15520","Jeremy 
Pistek",0.2,"2014-08-22","CA-2014-130918",33161,"OFF-SU-10003936","Acme Serrated Blade Letter Opener",-1.8125999,3,"South",8941,7.632,"Consumer","2014-08-24","Second Class","Florida","Supplies" +"Furniture","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"FUR-FU-10003274","Regeneration Desk Collection",1.2672,3,"West",7022,4.224,"Consumer","2014-08-27","Standard Class","Colorado","Furnishings" +"Office Supplies","Los Angeles","United States","RB-19645","Robert Barroso",0.2,"2014-08-23","CA-2014-154837",90032,"OFF-BI-10001575","GBC Linen Binding Covers",17.348799,2,"West",6672,49.568,"Corporate","2014-08-27","Second Class","California","Binders" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10000019","Xerox 1931",10.886399,6,"West",7024,31.104,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10001125","Xerox 1988",69.704994,9,"West",7025,223.056,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10001837","Xerox 1976",5.4431996,3,"West",7020,15.552,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Grand Rapids","United States","CR-12625","Corey Roper",0,"2014-08-23","US-2014-112795",49505,"OFF-PA-10001934","Xerox 1993",9.5256,3,"Central",8695,19.44,"Home Office","2014-08-28","Second Class","Michigan","Paper" +"Office Supplies","New York City","United States","RH-19495","Rick Hansen",0,"2014-08-23","CA-2014-110639",10009,"OFF-PA-10003936","Xerox 1994",12.4416,4,"East",2338,25.92,"Consumer","2014-08-23","Same Day","New York","Paper" +"Office Supplies","New York City","United States","RH-19495","Rick 
Hansen",0,"2014-08-23","CA-2014-110639",10009,"OFF-PA-10004530","Personal Creations Ink Jet Cards and Labels",22.5008,4,"East",2339,45.920002,"Consumer","2014-08-23","Same Day","New York","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-SU-10004661","Acme Titanium Bonded Scissors",0.51,1,"West",7021,6.8,"Consumer","2014-08-27","Standard Class","Colorado","Supplies" +"Technology","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"TEC-PH-10000149","Cisco SPA525G2 IP Phone - Wireless",10.773,9,"West",7023,143.64,"Consumer","2014-08-27","Standard Class","Colorado","Phones" +"Furniture","Oceanside","United States","JP-15460","Jennifer Patt",0,"2014-08-24","CA-2014-107916",11572,"FUR-FU-10004586","G.E. Longer-Life Indoor Recessed Floodlight Bulbs",6.3744006,2,"East",3133,13.28,"Corporate","2014-08-26","First Class","New York","Furnishings" +"Office Supplies","Oceanside","United States","JP-15460","Jennifer Patt",0.2,"2014-08-24","CA-2014-107916",11572,"OFF-BI-10001116","Wilson Jones 1"" Hanging DublLock Ring Binders",4.4352,3,"East",3134,12.672,"Corporate","2014-08-26","First Class","New York","Binders" +"Office Supplies","Billings","United States","RB-19645","Robert Barroso",0.2,"2014-08-24","CA-2014-106719",59102,"OFF-BI-10002799","SlimView Poly Binder. 3/8""",2.6936,2,"West",3028,8.288,"Corporate","2014-08-24","Same Day","Montana","Binders" +"Office Supplies","Owensboro","United States","DW-13585","Dorothy Wardle",0,"2014-08-24","CA-2014-120432",42301,"OFF-SU-10004661","Acme Titanium Bonded Scissors",6.63,3,"South",5913,25.5,"Corporate","2014-08-26","Second Class","Kentucky","Supplies" +"Furniture","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"FUR-FU-10000771","Eldon 200 Class Desk Accessories. 
Smoke",2.6376,1,"West",5944,6.2799997,"Corporate","2014-08-29","Standard Class","California","Furnishings" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",-4.5695996,4,"East",8885,6.528,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10002103","Cardinal Slant-D Ring Binder. Heavy Gauge Vinyl",-16.6848,8,"East",8887,20.855999,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Houston","United States","JE-15715","Joe Elijah",0.8,"2014-08-25","CA-2014-126200",77070,"OFF-BI-10002133","Wilson Jones Elliptical Ring 3 1/2"" Capacity Binders. 800 sheets",-39.804,3,"Central",8459,25.68,"Consumer","2014-08-29","Standard Class","Texas","Binders" +"Office Supplies","Houston","United States","JE-15715","Joe Elijah",0.8,"2014-08-25","CA-2014-126200",77070,"OFF-BI-10002225","Square Ring Data Binders. Rigid 75 Pt. Covers. 11"" x 14-7/8""",-19.8144,3,"Central",8460,12.384,"Consumer","2014-08-29","Standard Class","Texas","Binders" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10004141","Insertable Tab Indexes For Data Binders",-2.2896001,3,"East",8886,2.862,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-EN-10002504","Tyvek Top-Opening Peel & Seel Envelopes. Plain White",44.031597,6,"East",117,130.46399,"Consumer","2014-08-27","Second Class","Ohio","Envelopes" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-EN-10002600","Redi-Strip #10 Envelopes. 
4 1/8 x 9 1/2",1.652,2,"East",115,4.72,"Consumer","2014-08-27","Second Class","Ohio","Envelopes" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-FA-10000621","OIC Colored Binder Clips. Assorted Sizes",14.5348,14,"East",114,40.096,"Consumer","2014-08-27","Second Class","Ohio","Fasteners" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-PA-10004965","Xerox 1921",7.4925003,3,"East",116,23.976,"Consumer","2014-08-27","Second Class","Ohio","Paper" +"Office Supplies","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"OFF-PA-10004983","Xerox 23",12.4416,4,"West",5946,25.92,"Corporate","2014-08-29","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"OFF-ST-10004258","Portable Personal File Box",13.1868,4,"West",5947,48.84,"Corporate","2014-08-29","Standard Class","California","Storage" +"Technology","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"TEC-AC-10002399","SanDisk Cruzer 32 GB USB Flash Drive",30.432001,5,"West",5945,95.1,"Corporate","2014-08-29","Standard Class","California","Accessories" +"Technology","Seattle","United States","MW-18220","Mitch Webber",0.2,"2014-08-25","CA-2014-103660",98103,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",75.595795,7,"West",6579,1007.94403,"Consumer","2014-08-30","Standard Class","Washington","Phones" +"Furniture","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"FUR-FU-10001731","Acrylic Self-Standing Desk Frames",4.0584,4,"East",5334,10.68,"Consumer","2014-09-01","Standard Class","Delaware","Furnishings" +"Office Supplies","Newark","United States","PP-18955","Paul Prost",0,"2014-08-26","US-2014-115413",19711,"OFF-AR-10003770","Newell 
340",2.5056002,3,"East",7823,8.64,"Home Office","2014-09-01","Standard Class","Delaware","Art" +"Office Supplies","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"OFF-PA-10002160","Xerox 1978",8.496599,3,"East",5335,17.34,"Consumer","2014-09-01","Standard Class","Delaware","Paper" +"Office Supplies","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"OFF-PA-10002659","Avoid Verbal Orders Carbonless Minifold Book",1.5547999,1,"East",5336,3.3799999,"Consumer","2014-09-01","Standard Class","Delaware","Paper" +"Technology","Los Angeles","United States","FM-14290","Frank Merwin",0,"2014-08-26","CA-2014-123260",90032,"TEC-AC-10002323","SanDisk Ultra 32 GB MicroSDHC Class 10 Memory Card",22.984,8,"West",187,176.8,"Home Office","2014-08-30","Standard Class","California","Accessories" +"Technology","Newark","United States","PP-18955","Paul Prost",0,"2014-08-26","US-2014-115413",19711,"TEC-AC-10002800","Plantronics Audio 478 Stereo USB Headset",52.489502,3,"East",7824,149.97,"Home Office","2014-09-01","Standard Class","Delaware","Accessories" +"Furniture","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"FUR-FU-10002456","Master Caster Door Stop. 
Large Neon Orange",12.521601,4,"South",3484,29.12,"Corporate","2014-08-29","First Class","Virginia","Furnishings" +"Furniture","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"FUR-TA-10003569","Bretford CR8500 Series Meeting Room Furniture",300.73502,3,"South",3485,1202.9401,"Corporate","2014-08-29","First Class","Virginia","Tables" +"Office Supplies","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0,"2014-08-27","CA-2014-143336",94109,"OFF-AR-10003056","Newell 341",2.4824002,2,"West",19,8.56,"Consumer","2014-09-01","Second Class","California","Art" +"Office Supplies","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0.2,"2014-08-27","CA-2014-143336",94109,"OFF-BI-10002215","Wilson Jones Hanging View Binder. White. 1""",7.3840003,4,"West",21,22.720001,"Consumer","2014-09-01","Second Class","California","Binders" +"Office Supplies","Springfield","United States","MM-17920","Michael Moore",0,"2014-08-27","CA-2014-101266",22153,"OFF-PA-10002986","Xerox 1898",6.4128003,2,"South",6170,13.360001,"Consumer","2014-08-30","Second Class","Virginia","Paper" +"Technology","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"TEC-PH-10000455","GE 30522EE2",168.18552,5,"South",3483,579.95,"Corporate","2014-08-29","First Class","Virginia","Phones" +"Technology","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0.2,"2014-08-27","CA-2014-143336",94109,"TEC-PH-10001949","Cisco SPA 501G IP Phone",16.011,3,"West",20,213.48,"Consumer","2014-09-01","Second Class","California","Phones" +"Furniture","Miami","United States","RA-19945","Ryan Akin",0.45000002,"2014-08-29","CA-2014-169775",33178,"FUR-TA-10001857","Balt Solid Wood Rectangular Table",-110.764496,3,"South",1900,174.0585,"Consumer","2014-09-02","Second Class","Florida","Tables" +"Office Supplies","Miami","United States","RA-19945","Ryan 
Akin",0.7,"2014-08-29","CA-2014-169775",33178,"OFF-BI-10004390","GBC DocuBind 200 Manual Binding Machine",-336.784,4,"South",1899,505.17603,"Consumer","2014-09-02","Second Class","Florida","Binders" +"Office Supplies","Miami","United States","RA-19945","Ryan Akin",0.2,"2014-08-29","CA-2014-169775",33178,"OFF-EN-10001749","Jiffy Padded Mailers with Self-Seal Closure",10.8054,2,"South",1898,29.807999,"Consumer","2014-09-02","Second Class","Florida","Envelopes" +"Office Supplies","San Francisco","United States","HH-15010","Hilary Holden",0,"2014-08-29","CA-2014-135699",94110,"OFF-PA-10003001","Xerox 1986",6.4128003,2,"West",1652,13.360001,"Corporate","2014-08-29","Same Day","California","Paper" +"Office Supplies","San Francisco","United States","HH-15010","Hilary Holden",0,"2014-08-29","CA-2014-135699",94110,"OFF-PA-10004475","Xerox 1940",53.8608,2,"West",1651,109.92,"Corporate","2014-08-29","Same Day","California","Paper" +"Office Supplies","Bristol","United States","AT-10735","Annie Thurman",0,"2014-08-30","CA-2014-146500",6010,"OFF-BI-10002432","Wilson Jones Standard D-Ring Binders",11.891001,5,"East",8461,25.3,"Consumer","2014-09-04","Standard Class","Connecticut","Binders" +"Office Supplies","Bristol","United States","AT-10735","Annie Thurman",0,"2014-08-30","CA-2014-146500",6010,"OFF-ST-10000563","Fellowes Bankers Box Stor/Drawer Steel Plus",9.594,3,"East",8462,95.94,"Consumer","2014-09-04","Standard Class","Connecticut","Storage" +"Technology","Santa Fe","United States","SJ-20500","Shirley Jackson",0,"2014-08-31","CA-2014-143385",87505,"TEC-AC-10001635","KeyTronic KT400U2 - Keyboard - Black",18.504,9,"West",3478,92.52,"Consumer","2014-09-05","Standard Class","New Mexico","Accessories" +"Office Supplies","Escondido","United States","LT-16765","Larry Tron",0,"2014-09-01","CA-2014-126522",92025,"OFF-AR-10004042","BOSTON Model 1800 Electric Pencil Sharpeners. 
Putty/Woodgrain",15.6426,3,"West",1162,53.940002,"Consumer","2014-09-05","Second Class","California","Art" +"Office Supplies","Houston","United States","JL-15130","Jack Lebron",0.8,"2014-09-01","CA-2014-165428",77036,"OFF-BI-10002949","Prestige Round Ring Binders",-6.0192,3,"Central",3194,3.6479998,"Consumer","2014-09-04","First Class","Texas","Binders" +"Office Supplies","New York City","United States","LH-16900","Lena Hernandez",0.2,"2014-09-01","CA-2014-109855",10009,"OFF-BI-10004716","Wilson Jones Hanging Recycled Pressboard Data Binders",8.3104,2,"East",8242,23.744001,"Consumer","2014-09-05","Standard Class","New York","Binders" +"Office Supplies","Houston","United States","JL-15130","Jack Lebron",0.2,"2014-09-01","CA-2014-165428",77036,"OFF-PA-10004100","Xerox 216",10.886399,6,"Central",3195,31.104,"Consumer","2014-09-04","First Class","Texas","Paper" +"Technology","New York City","United States","LH-16900","Lena Hernandez",0,"2014-09-01","CA-2014-109855",10009,"TEC-AC-10002842","WD My Passport Ultra 2TB Portable External Hard Drive",57.12,3,"East",8243,357,"Consumer","2014-09-05","Standard Class","New York","Accessories" +"Furniture","Watertown","United States","JM-16195","Justin MacKendrick",0,"2014-09-02","CA-2014-157721",13601,"FUR-FU-10002116","Tenex Carpeted. Granite-Look or Clear Contemporary Contour Shape Chair Mats",4.9497,1,"East",2926,70.71,"Consumer","2014-09-05","First Class","New York","Furnishings" +"Office Supplies","Watertown","United States","JM-16195","Justin MacKendrick",0,"2014-09-02","CA-2014-157721",13601,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",8.955,1,"East",2925,19.9,"Consumer","2014-09-05","First Class","New York","Appliances" +"Office Supplies","New York City","United States","KD-16345","Katherine Ducich",0,"2014-09-02","US-2014-138828",10009,"OFF-AR-10000658","Newell 324",16.17,5,"East",4930,57.75,"Consumer","2014-09-03","First Class","New York","Art" +"Office Supplies","New York City","United States","CL-12565","Clay Ludtke",0,"2014-09-02","CA-2014-127936",10009,"OFF-AR-10002445","SANFORD Major Accent Highlighters",8.0712,3,"East",4404,21.240002,"Consumer","2014-09-04","First Class","New York","Art" +"Office Supplies","Fayetteville","United States","SC-20095","Sanjit Chand",0,"2014-09-02","CA-2014-152268",72701,"OFF-BI-10001359","GBC DocuBind TL300 Electric Binding System",843.1706,2,"South",9034,1793.98,"Consumer","2014-09-07","Standard Class","Arkansas","Binders" +"Office Supplies","New York City","United States","KD-16345","Katherine Ducich",0,"2014-09-02","US-2014-138828",10009,"OFF-PA-10000349","Easy-staple paper",7.0218,3,"East",4931,14.94,"Consumer","2014-09-03","First Class","New York","Paper" +"Technology","Buffalo Grove","United States","BM-11785","Bryan Mills",0.2,"2014-09-02","CA-2014-106971",60089,"TEC-AC-10000844","Logitech Gaming G510s - Keyboard",95.188805,7,"Central",9140,475.944,"Consumer","2014-09-08","Standard Class","Illinois","Accessories" +"Technology","Chicago","United States","CV-12295","Christina VanderZanden",0.2,"2014-09-02","CA-2014-151792",60653,"TEC-AC-10001606","Logitech Wireless Performance Mouse MX for PC and Mac",53.994602,3,"Central",5029,239.976,"Consumer","2014-09-07","Second Class","Illinois","Accessories" +"Technology","Houston","United States","AG-10525","Andy Gerbode",0.4,"2014-09-02","CA-2014-158281",77095,"TEC-MA-10002210","Epson TM-T88V Direct Thermal Printer - Monochrome - Desktop",-121.2705,3,"Central",7289,559.71,"Corporate","2014-09-07","Standard Class","Texas","Machines" +"Office Supplies","Houston","United 
States","MR-17545","Mathew Reese",0.8,"2014-09-03","US-2014-120236",77095,"OFF-BI-10004099","GBC VeloBinder Strips",-11.52,5,"Central",8398,7.68,"Home Office","2014-09-04","First Class","Texas","Binders" +"Office Supplies","New York City","United States","AG-10390","Allen Goldenen",0,"2014-09-03","CA-2014-125171",10009,"OFF-LA-10001175","Avery 514",7.056,5,"East",4986,14.4,"Consumer","2014-09-03","Same Day","New York","Labels" +"Furniture","Saint Petersburg","United States","PR-18880","Patrick Ryan",0.2,"2014-09-05","CA-2014-136861",33710,"FUR-FU-10001967","Telescoping Adjustable Floor Lamp",1.999,2,"South",5430,31.984,"Consumer","2014-09-07","First Class","Florida","Furnishings" +"Office Supplies","Bowling Green","United States","NP-18700","Nora Preis",0.2,"2014-09-05","CA-2014-106572",43402,"OFF-ST-10000585","Economy Rollaway Files",19.824,2,"East",2516,264.31998,"Consumer","2014-09-10","Standard Class","Ohio","Storage" +"Furniture","San Francisco","United States","VP-21760","Victoria Pisteka",0,"2014-09-06","CA-2014-130449",94109,"FUR-FU-10001487","Eldon Expressions Wood and Plastic Desk Accessories. Cherry Wood",12.145201,6,"West",6623,41.88,"Corporate","2014-09-09","First Class","California","Furnishings" +"Office Supplies","San Francisco","United States","VP-21760","Victoria Pisteka",0,"2014-09-06","CA-2014-130449",94109,"OFF-LA-10001934","Avery 516",27.4856,8,"West",6624,58.480003,"Corporate","2014-09-09","First Class","California","Labels" +"Furniture","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"FUR-CH-10004698","Padded Folding Chairs. Black. 
4/Carton",45.3488,2,"Central",5981,161.95999,"Home Office","2014-09-13","Standard Class","Oklahoma","Chairs" +"Furniture","Philadelphia","United States","SB-20290","Sean Braxton",0.2,"2014-09-07","CA-2014-122882",19134,"FUR-FU-10000758","DAX Natural Wood-Tone Poster Frame",8.473599,2,"East",1681,42.368,"Corporate","2014-09-13","Standard Class","Pennsylvania","Furnishings" +"Furniture","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2014-09-07","CA-2014-131002",74133,"FUR-FU-10004270","Executive Impressions 13"" Clairmont Wall Clock",23.6529,3,"Central",2646,57.69,"Consumer","2014-09-12","Second Class","Oklahoma","Furnishings" +"Furniture","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2014-09-07","CA-2014-131002",74133,"FUR-FU-10004665","3M Polarizing Task Lamp with Clamp Arm. Light Gray",213.6888,6,"Central",2649,821.88,"Consumer","2014-09-12","Second Class","Oklahoma","Furnishings" +"Furniture","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"FUR-TA-10001039","KI Adjustable-Height Table",111.773994,5,"Central",5979,429.9,"Home Office","2014-09-13","Standard Class","Oklahoma","Tables" +"Furniture","Houston","United States","SZ-20035","Sam Zeldin",0.3,"2014-09-07","CA-2014-105165",77036,"FUR-TA-10004154","Riverside Furniture Oval Coffee Table. Oval End Table. End Table with Drawer",-22.948,1,"Central",5263,200.795,"Home Office","2014-09-10","First Class","Texas","Tables" +"Office Supplies","Houston","United States","SZ-20035","Sam Zeldin",0.2,"2014-09-07","CA-2014-105165",77036,"OFF-AR-10003179","Dixon Ticonderoga Core-Lock Colored Pencils",3.5529,3,"Central",5265,21.863998,"Home Office","2014-09-10","First Class","Texas","Art" +"Office Supplies","Houston","United States","SZ-20035","Sam Zeldin",0.8,"2014-09-07","CA-2014-105165",77036,"OFF-BI-10000050","Angle-D Binders with Locking Rings. 
Label Holders",-4.8180003,2,"Central",5262,2.92,"Home Office","2014-09-10","First Class","Texas","Binders" +"Office Supplies","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"OFF-BI-10000474","Avery Recycled Flexi-View Covers for Binding Systems",15.388801,2,"Central",5980,32.06,"Home Office","2014-09-13","Standard Class","Oklahoma","Binders" +"Office Supplies","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2017-12-30","CA-2014-131002",74133,"OFF-BI-10000948","GBC Laser Imprintable Binding System Covers. Desert Sand",20.1207,3,"Central",2647,42.809998,"Consumer","2017-12-30","Second Class","Oklahoma","Binders" \ No newline at end of file diff --git a/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/initdb.d/01_prepare_db.sql b/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/initdb.d/01_prepare_db.sql new file mode 100644 index 000000000..0142e9e26 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/docker-compose/db-postgres/initdb.d/01_prepare_db.sql @@ -0,0 +1,33 @@ +CREATE SCHEMA IF NOT EXISTS test_data; +DROP TABLE IF EXISTS test_data.sample; +CREATE TABLE test_data.sample ( + "Category" VARCHAR(255), + "City" VARCHAR(255), + "Country" VARCHAR(255), + "Customer ID" VARCHAR(255), + "Customer Name" VARCHAR(255), + "Discount" FLOAT, + "Order Date" DATE, + "Order ID" VARCHAR(255), + "Postal Code" INTEGER, + "Product ID" VARCHAR(255), + "Product Name" VARCHAR(255), + "Profit" FLOAT, + "Quantity" INTEGER, + "Region" VARCHAR(255), + "Row ID" INTEGER PRIMARY KEY, + "Sales" FLOAT, + "Segment" VARCHAR(255), + "Ship Date" DATE, + "Ship Mode" VARCHAR(255), + "State" VARCHAR(255), + "Sub-Category" VARCHAR(255) +); +create index order_date_idx on test_data.sample("Order Date"); + +COPY test_data.sample ( + "Category", "City", "Country", "Customer ID", "Customer Name", "Discount", "Order Date", "Order ID", + "Postal Code", "Product ID", "Product Name", "Profit", "Quantity", "Region", "Row ID", "Sales", "Segment", + 
"Ship Date", "Ship Mode", "State", "Sub-Category" +) +FROM '/common-data/sample.csv' WITH (FORMAT csv, DELIMITER ',', QUOTE '"'); diff --git a/lib/dl_connector_bitrix_gds/docker-compose/tests/entrypoint.sh b/lib/dl_connector_bitrix_gds/docker-compose/tests/entrypoint.sh new file mode 100644 index 000000000..5fc44481d --- /dev/null +++ b/lib/dl_connector_bitrix_gds/docker-compose/tests/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +exec "$@" diff --git a/lib/dl_connector_bitrix_gds/pyproject.toml b/lib/dl_connector_bitrix_gds/pyproject.toml new file mode 100644 index 000000000..de95cd884 --- /dev/null +++ b/lib/dl_connector_bitrix_gds/pyproject.toml @@ -0,0 +1,73 @@ +[tool.poetry] +name = "datalens-connector-bitrix-gds" +version = "0.0.1" +description = "" +authors = ["DataLens Team "] +packages = [{include = "dl_connector_bitrix_gds"}] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +attrs = ">=22.2.0" +marshmallow = ">=3.19.0" +redis = ">=4.5.1" +sqlalchemy = ">=1.4.46, <2.0" +python = ">=3.10, <3.12" +redis_cache_lock = {path = "../redis-cache-lock"} +datalens-api-commons = {path = "../dl_api_commons"} +datalens-utils = {path = "../dl_utils"} +datalens-constants = {path = "../dl_constants"} +datalens-i18n = {path = "../dl_i18n"} +datalens-configs = {path = "../dl_configs"} +datalens-api-connector = {path = "../dl_api_connector"} +datalens-core = {path = "../dl_core"} +datalens-app-tools = {path = "../dl_app_tools"} +datalens-query-processing = {path = "../dl_query_processing"} +datalens-sqlalchemy-bitrix = {path = "../dl_sqlalchemy_bitrix"} + +[tool.poetry.plugins] +[tool.poetry.plugins."dl_api_lib.connectors"] +bitrix_gds = "dl_connector_bitrix_gds.api.connector:BitrixGDSApiConnector" + +[tool.poetry.plugins."dl_core.connectors"] +bitrix_gds = "dl_connector_bitrix_gds.core.connector:BitrixGDSCoreConnector" + +[tool.poetry.plugins."dl_formula.connectors"] +bitrix_gds = 
"dl_connector_bitrix_gds.formula.connector:BitrixGDSFormulaConnector" + +[tool.poetry.group.tests.dependencies] +pytest = ">=7.2.2" +datalens-compeng-pg = {path = "../dl_compeng_pg"} + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens.pytest.ext] +root_dir = "dl_connector_bitrix_gds_tests/" +target_path = "ext" +labels = ["ext_public"] + +[datalens.pytest.unit] +root_dir = "dl_connector_bitrix_gds_tests/" +target_path = "unit" +skip_compose = "true" + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true + +[datalens.i18n.domains] +dl_connector_bitrix_gds = [ + {path = "dl_connector_bitrix_gds/api"}, + {path = "dl_connector_bitrix_gds/core"}, +] diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/connection.py index 9d6318c1e..b7e028fd9 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/connection.py @@ -13,11 +13,6 @@ from dl_api_connector.api_schema.component_errors import ComponentErrorListSchema from dl_api_connector.api_schema.connection_base import ConnectionSchema from dl_api_connector.api_schema.extras import FieldExtra -from dl_connector_bundle_chs3.chs3_base.api.api_schema.source import ( - BaseFileSourceSchema, - ReplaceFileSourceSchema, -) -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection from dl_constants.exc import ( DEFAULT_ERR_CODE_API_PREFIX, GLOBAL_ERR_PREFIX, @@ -29,6 +24,12 @@ make_user_auth_headers, ) +from dl_connector_bundle_chs3.chs3_base.api.api_schema.source import ( + BaseFileSourceSchema, + ReplaceFileSourceSchema, +) +from 
dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection + class BaseFileS3ConnectionSchema(ConnectionSchema): TARGET_CLS = BaseFileS3Connection diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py index 8408798b9..77ecffc82 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/api_schema/source.py @@ -18,18 +18,19 @@ SQLDataSourceSchema, SQLDataSourceTemplateSchema, ) -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_model_tools.schema.base import BaseSchema +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection + class RawSchemaColumnSchema(Schema): name = fields.String() title = fields.String() - user_type = fields.Enum(BIType) + user_type = fields.Enum(UserDataType) class BaseFileSourceSchema(Schema): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/connector.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/connector.py index 24bb58737..1ee0fe1cc 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/connector.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/connector.py @@ -5,6 +5,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_bundle_chs3.chs3_base.api.api_schema.connection import BaseFileS3ConnectionSchema from dl_connector_bundle_chs3.chs3_base.api.api_schema.source import ( BaseFileS3DataSourceSchema, diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/i18n/localizer.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/i18n/localizer.py index 
5c03c7ce3..7b34524ae 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/i18n/localizer.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_bundle_chs3 as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_bundle_chs3 as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/adapter.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/adapter.py index 5b5e35892..7c91e3792 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/adapter.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/adapter.py @@ -8,17 +8,18 @@ from aiohttp import ClientResponse import attr +from dl_core.connection_executors.models.db_adapter_data import DBAdapterQuery +from dl_core.connection_models import ( + DBIdent, + TableIdent, +) + from dl_connector_bundle_chs3.chs3_base.core.target_dto import BaseFileS3ConnTargetDTO from dl_connector_clickhouse.core.clickhouse_base.adapters import BaseAsyncClickHouseAdapter from dl_connector_clickhouse.core.clickhouse_base.ch_commons import ( ClickHouseBaseUtils, get_ch_settings, ) -from dl_core.connection_executors.models.db_adapter_data import DBAdapterQuery -from dl_core.connection_models import ( - DBIdent, - TableIdent, -) class FileS3Utils(ClickHouseBaseUtils): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connection_executors.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connection_executors.py index b22ac797d..dc90f0474 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connection_executors.py +++ 
b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connection_executors.py @@ -3,11 +3,11 @@ from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO from dl_connector_bundle_chs3.chs3_base.core.target_dto import BaseFileS3ConnTargetDTO from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions -from dl_connector_clickhouse.core.clickhouse_base.connection_executors import ClickHouseAsyncAdapterConnExecutor +from dl_connector_clickhouse.core.clickhouse_base.connection_executors import AsyncClickHouseConnExecutor @attr.s(cmp=False, hash=False) -class BaseFileS3AsyncAdapterConnExecutor(ClickHouseAsyncAdapterConnExecutor): +class BaseFileS3AsyncAdapterConnExecutor(AsyncClickHouseConnExecutor): _conn_dto: BaseFileS3ConnDTO = attr.ib() _conn_options: CHConnectOptions = attr.ib() diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connector.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connector.py index 1f5db19f2..01722273b 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connector.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/connector.py @@ -1,8 +1,5 @@ from clickhouse_sqlalchemy.orm.query import Query as CHQuery -from dl_connector_bundle_chs3.chs3_base.core.constants import BACKEND_TYPE_CHS3 -from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO -from dl_connector_bundle_chs3.chs3_base.core.type_transformer import FileTypeTransformer from dl_core.connections_security.base import ( ConnSecuritySettings, NonUserInputConnectionSafetyChecker, @@ -13,6 +10,10 @@ CoreSourceDefinition, ) +from dl_connector_bundle_chs3.chs3_base.core.constants import BACKEND_TYPE_CHS3 +from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO +from dl_connector_bundle_chs3.chs3_base.core.type_transformer import FileTypeTransformer + class 
BaseFileS3CoreConnectionDefinition(CoreConnectionDefinition): type_transformer_cls = FileTypeTransformer diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/data_source.py index e9e392b2d..1202b9cfa 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/data_source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/data_source.py @@ -11,19 +11,20 @@ from clickhouse_sqlalchemy.quoting import Quoter -from dl_connector_bundle_chs3.chs3_base.core.data_source_spec import BaseFileS3DataSourceSpec -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection -from dl_connector_bundle_chs3.file.core.adapter import AsyncFileS3Adapter -from dl_connector_clickhouse.core.clickhouse_base.ch_commons import create_column_sql -from dl_connector_clickhouse.core.clickhouse_base.data_source import ClickHouseDataSourceBase from dl_constants.enums import ( - CreateDSFrom, + DataSourceType, FileProcessingStatus, ) from dl_core import exc from dl_core.db import SchemaInfo from dl_core.utils import sa_plain_text +from dl_connector_bundle_chs3.chs3_base.core.data_source_spec import BaseFileS3DataSourceSpec +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.file.core.adapter import AsyncFileS3Adapter +from dl_connector_clickhouse.core.clickhouse_base.ch_commons import create_column_sql +from dl_connector_clickhouse.core.clickhouse_base.data_source import ClickHouseDataSourceBase + if TYPE_CHECKING: from dl_core.connection_executors.sync_base import SyncConnExecutorBase @@ -46,7 +47,7 @@ class BaseFileS3DataSource(ClickHouseDataSourceBase): _quoter: Optional[Quoter] = None @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: raise 
NotImplementedError @property diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/lifecycle.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/lifecycle.py index 08ecba617..8abaf796e 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/lifecycle.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/lifecycle.py @@ -7,7 +7,6 @@ import attr from dl_api_commons.base_models import RequestContextInfo -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection from dl_core.connectors.base.lifecycle import ConnectionLifecycleManager from dl_file_uploader_task_interface.tasks import ( DeleteFileTask, @@ -16,6 +15,8 @@ from dl_task_processor.processor import TaskProcessor from dl_utils.aio import await_sync +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/storage_schemas/data_source_spec.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/storage_schemas/data_source_spec.py index 2cd0eed44..f4efbc676 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/storage_schemas/data_source_spec.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/storage_schemas/data_source_spec.py @@ -1,9 +1,10 @@ from marshmallow import fields -from dl_connector_bundle_chs3.chs3_base.core.data_source_spec import BaseFileS3DataSourceSpec from dl_constants.enums import FileProcessingStatus from dl_core.us_manager.storage_schemas.data_source_spec_base import SQLDataSourceSpecStorageSchema +from dl_connector_bundle_chs3.chs3_base.core.data_source_spec import BaseFileS3DataSourceSpec + class BaseFileS3DataSourceSpecStorageSchema(SQLDataSourceSpecStorageSchema): TARGET_CLS = BaseFileS3DataSourceSpec diff --git 
a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/testing/utils.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/testing/utils.py index 4a9010930..91eb97a2e 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/testing/utils.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/testing/utils.py @@ -1,8 +1,9 @@ from dl_configs.settings_submodels import S3Settings -from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO from dl_core_testing.database import DbTable from dl_testing.s3_utils import s3_tbl_func_maker +from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO + def create_s3_native_from_ch_table( filename: str, @@ -10,9 +11,7 @@ def create_s3_native_from_ch_table( s3_settings: S3Settings, clickhouse_table: DbTable, tbl_schema: str, - double_data: bool = False, ) -> None: - # FIXME: Move to chs3 connectors tbl = clickhouse_table db = clickhouse_table.db s3_tbl_func = s3_tbl_func_maker(s3_settings) @@ -34,9 +33,7 @@ def create_s3_native_from_ch_table( ), filename=filename, file_fmt="Native", - schema_line=tbl_schema, # TODO: update DbTable to serve some sort of schema + schema_line=tbl_schema, ) insert_stmt = f"INSERT INTO FUNCTION {s3_tbl_func_for_db} SELECT * FROM {tbl.db.quote(tbl.name)}" - if double_data: - insert_stmt += f" UNION ALL SELECT * FROM {tbl.db.quote(tbl.name)}" db.execute(insert_stmt) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/type_transformer.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/type_transformer.py index aa0fb9edc..22bd24456 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/type_transformer.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/type_transformer.py @@ -9,9 +9,7 @@ from clickhouse_sqlalchemy import types as ch_types -from 
dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ClickHouseTypeTransformer -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( BooleanTypeCaster, DatetimeTypeCaster, @@ -23,6 +21,9 @@ make_native_type, ) +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE +from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ClickHouseTypeTransformer + def make_int_cleanup_spaces(value: Any) -> Optional[int]: if isinstance(value, str): @@ -102,24 +103,24 @@ class BooleanFileTypeCaster(BooleanTypeCaster): class FileTypeTransformer(ClickHouseTypeTransformer): casters = { **ClickHouseTypeTransformer.casters, - BIType.integer: IntegerFileTypeCaster(), - BIType.float: FloatFileTypeCaster(), - BIType.date: DateFileTypeCaster(), - BIType.datetime: DatetimeFileTypeCaster(), - BIType.datetimetz: DatetimeTZFileTypeCaster(), - BIType.genericdatetime: GenericDatetimeFileTypeCaster(), - BIType.boolean: BooleanFileTypeCaster(), + UserDataType.integer: IntegerFileTypeCaster(), + UserDataType.float: FloatFileTypeCaster(), + UserDataType.date: DateFileTypeCaster(), + UserDataType.datetime: DatetimeFileTypeCaster(), + UserDataType.datetimetz: DatetimeTZFileTypeCaster(), + UserDataType.genericdatetime: GenericDatetimeFileTypeCaster(), + UserDataType.boolean: BooleanFileTypeCaster(), } user_to_native_map = { **ClickHouseTypeTransformer.user_to_native_map, - BIType.datetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64), - BIType.genericdatetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64), - BIType.date: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date32), + UserDataType.datetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64), + UserDataType.genericdatetime: 
make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64), + UserDataType.date: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date32), } @classmethod - def cast_for_input(cls, value: Any, user_t: BIType) -> Any: + def cast_for_input(cls, value: Any, user_t: UserDataType) -> Any: """Prepare value for insertion into the database""" if value == "" or value is None: return None diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py index 922607def..5fc579e9d 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_base/core/us_connection.py @@ -13,10 +13,6 @@ import attr import xxhash -from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO -from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings -from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions -from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase from dl_constants.enums import ( DataSourceRole, FileProcessingStatus, @@ -41,6 +37,11 @@ parse_comma_separated_hosts, ) +from dl_connector_bundle_chs3.chs3_base.core.dto import BaseFileS3ConnDTO +from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings +from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions +from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase + if TYPE_CHECKING: from dl_core.services_registry.top_level import ServicesRegistry diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py index 488d7858b..6f3d4a713 100644 --- 
a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/api_schema/connection.py @@ -6,6 +6,7 @@ ) from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_bundle_chs3.chs3_base.api.api_schema.connection import BaseFileS3ConnectionSchema from dl_connector_bundle_chs3.chs3_gsheets.api.api_schema.source import GSheetsFileSourceSchema from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/connection_info.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/connection_info.py index 8f6cae58a..8d8878497 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/connection_info.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/api/connection_info.py @@ -1,6 +1,7 @@ from __future__ import annotations from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_bundle_chs3.chs3_base.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/constants.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/constants.py index b3fec0368..4717b3641 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/constants.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/constants.py @@ -1,12 +1,12 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, NotificationType, ) CONNECTION_TYPE_GSHEETS_V2 = ConnectionType.declare("gsheets_v2") -SOURCE_TYPE_GSHEETS_V2 = CreateDSFrom.declare("GSHEETS_V2") +SOURCE_TYPE_GSHEETS_V2 = DataSourceType.declare("GSHEETS_V2") NOTIF_TYPE_GSHEETS_V2_STALE_DATA = NotificationType.declare("stale_data") 
NOTIF_TYPE_GSHEETS_V2_DATA_UPDATE_FAILURE = NotificationType.declare("data_update_failure") diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/data_source.py index 2bfaac514..f79e3560a 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/data_source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/data_source.py @@ -1,24 +1,25 @@ from __future__ import annotations +from dl_constants.enums import ( + ComponentErrorLevel, + DataSourceType, +) +from dl_core import exc +from dl_core.reporting.notifications import get_notification_record + from dl_connector_bundle_chs3.chs3_base.core.data_source import BaseFileS3DataSource from dl_connector_bundle_chs3.chs3_gsheets.core.constants import ( CONNECTION_TYPE_GSHEETS_V2, NOTIF_TYPE_GSHEETS_V2_DATA_UPDATE_FAILURE, SOURCE_TYPE_GSHEETS_V2, ) -from dl_constants.enums import ( - ComponentErrorLevel, - CreateDSFrom, -) -from dl_core import exc -from dl_core.reporting.notifications import get_notification_record class GSheetsFileS3DataSource(BaseFileS3DataSource): conn_type = CONNECTION_TYPE_GSHEETS_V2 @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in { SOURCE_TYPE_GSHEETS_V2, } diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/lifecycle.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/lifecycle.py index 2f6d27acb..7e2a40026 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/lifecycle.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/lifecycle.py @@ -1,8 +1,6 @@ import datetime +from typing import ClassVar -from dl_connector_bundle_chs3.chs3_base.core.lifecycle import 
BaseFileS3ConnectionLifecycleManager -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import NOTIF_TYPE_GSHEETS_V2_STALE_DATA -from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection from dl_constants.enums import FileProcessingStatus from dl_core.connectors.base.lifecycle import ConnectionLifecycleManager from dl_core.reporting.notifications import get_notification_record @@ -11,6 +9,10 @@ make_user_auth_headers, ) +from dl_connector_bundle_chs3.chs3_base.core.lifecycle import BaseFileS3ConnectionLifecycleManager +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import NOTIF_TYPE_GSHEETS_V2_STALE_DATA +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection + class GSheetsFileS3ConnectionLifecycleManager( BaseFileS3ConnectionLifecycleManager, @@ -18,6 +20,8 @@ class GSheetsFileS3ConnectionLifecycleManager( ): ENTRY_CLS = GSheetsFileS3Connection + STALE_THRESHOLD_SECONDS: ClassVar[int] = 30 * 60 + async def post_exec_async_hook(self) -> None: await super().post_exec_async_hook() @@ -30,17 +34,16 @@ async def post_exec_async_hook(self) -> None: dt_now = datetime.datetime.now(datetime.timezone.utc) - stale_threshold_seconds = 30 * 60 data_updated_at_all = data.oldest_data_update_time() if ( data_updated_at_all is not None - and (dt_now - data_updated_at_all).total_seconds() >= stale_threshold_seconds + and (dt_now - data_updated_at_all).total_seconds() >= self.STALE_THRESHOLD_SECONDS ): reporting_registry = self._service_registry.get_reporting_registry() reporting_registry.save_reporting_record(get_notification_record(NOTIF_TYPE_GSHEETS_V2_STALE_DATA)) data_updated_at = data.oldest_data_update_time(exclude_statuses={FileProcessingStatus.in_progress}) - if data_updated_at is None or (dt_now - data_updated_at).total_seconds() < stale_threshold_seconds: + if data_updated_at is None or (dt_now - data_updated_at).total_seconds() < self.STALE_THRESHOLD_SECONDS: return 
fu_client_factory = self._service_registry.get_file_uploader_client_factory() diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/notifications.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/notifications.py index f28d4e7d1..262609595 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/notifications.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/notifications.py @@ -1,11 +1,12 @@ from typing import Optional +from dl_constants.enums import NotificationLevel +from dl_core.reporting.notifications import BaseNotification + from dl_connector_bundle_chs3.chs3_gsheets.core.constants import ( NOTIF_TYPE_GSHEETS_V2_DATA_UPDATE_FAILURE, NOTIF_TYPE_GSHEETS_V2_STALE_DATA, ) -from dl_constants.enums import NotificationLevel -from dl_core.reporting.notifications import BaseNotification class StaleDataNotification(BaseNotification): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/settings.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/settings.py index d9299cc47..0a0b08955 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/settings.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/settings.py @@ -3,15 +3,16 @@ ConnectorSettingsBase, ) from dl_configs.settings_loaders.meta_definition import required -from dl_connector_bundle_chs3.chs3_base.core.settings import ( - ConnectorsDataFileBase, - FileS3ConnectorSettings, -) from dl_core.connectors.settings.primitives import ( ConnectorSettingsDefinition, get_connectors_settings_config, ) +from dl_connector_bundle_chs3.chs3_base.core.settings import ( + ConnectorsDataFileBase, + FileS3ConnectorSettings, +) + def gsheets_file_s3_settings_fallback(full_cfg: ConnectorsConfigType) -> dict[str, ConnectorSettingsBase]: cfg = get_connectors_settings_config( diff --git 
a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/storage_schemas/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/storage_schemas/connection.py index c8cdf8a95..f524ab86e 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/storage_schemas/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/storage_schemas/connection.py @@ -1,11 +1,12 @@ from marshmallow import fields +from dl_core.us_manager.storage_schemas.connection import BaseConnectionDataStorageSchema + from dl_connector_bundle_chs3.chs3_base.core.storage_schemas.connection import ( BaseFileConnectionDataStorageSchema, BaseFileConnectionSourceStorageSchema, ) from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection -from dl_core.us_manager.storage_schemas.connection import BaseConnectionDataStorageSchema class GSheetsFileConnectionSourceStorageSchema(BaseFileConnectionSourceStorageSchema): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/testing/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/testing/connection.py index 99376cd85..d62c1ad78 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/testing/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/testing/connection.py @@ -1,79 +1,30 @@ from __future__ import annotations -import asyncio +from typing import Any import uuid -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig -from dl_constants.enums import FileProcessingStatus from dl_core.us_manager.us_manager_sync import SyncUSManager -from dl_core_testing.database import DbTable +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import 
CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection -def make_saved_gsheets_v2_connection( # type: ignore # TODO: fix - sync_usm: SyncUSManager, clickhouse_table: DbTable, filename: str, **kwargs -): - from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection - from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions - from dl_connector_clickhouse.core.clickhouse_base.connection_executors import ClickHouseSyncAdapterConnExecutor - from dl_connector_clickhouse.core.clickhouse_base.dto import ClickHouseConnDTO - from dl_core.connection_executors import ( - ExecutionMode, - SyncWrapperForAsyncConnExecutor, - ) - from dl_core.connection_models import TableIdent - from dl_core.connections_security.base import InsecureConnectionSecurityManager - conn_name = "gsheets_v2 conn %s" % uuid.uuid4() - engine_config = clickhouse_table.db.engine_config - assert isinstance(engine_config, ClickhouseDbEngineConfig) - cluster = engine_config.cluster - assert cluster is not None - with SyncWrapperForAsyncConnExecutor( - async_conn_executor=ClickHouseSyncAdapterConnExecutor( # type: ignore # TODO: fix - conn_dto=ClickHouseConnDTO( - conn_id=None, - protocol="http", - endpoint=None, - cluster_name=cluster, - multihosts=clickhouse_table.db.get_conn_hosts(), # type: ignore # TODO: fix - **clickhouse_table.db.get_conn_credentials(full=True), - ), - conn_options=CHConnectOptions(max_execution_time=None, connect_timeout=None, total_timeout=None), - req_ctx_info=None, - exec_mode=ExecutionMode.DIRECT, - sec_mgr=InsecureConnectionSecurityManager(), - remote_qe_data=None, - tpe=None, - conn_hosts_pool=clickhouse_table.db.get_conn_hosts(), - host_fail_callback=lambda h: None, - ), - loop=asyncio.get_event_loop(), - ) as ce: - raw_schema = ce.get_table_schema_info( - TableIdent( - db_name=clickhouse_table.db.name, - schema_name=None, - 
table_name=clickhouse_table.name, - ) - ).schema - data_dict = GSheetsFileS3Connection.DataModel( - sources=[ - GSheetsFileS3Connection.FileDataSource( - id=str(uuid.uuid4()), - file_id=str(uuid.uuid4()), - title=f"Title -- {filename.upper()}", - s3_filename=filename, - raw_schema=raw_schema, - status=FileProcessingStatus.ready, - sheet_id=0, - first_line_is_header=True, - spreadsheet_id="some_spreadsheet_id", - ), - ], - ) +def make_saved_gsheets_v2_connection( + sync_usm: SyncUSManager, + sources: list[GSheetsFileS3Connection.FileDataSource], + **kwargs: Any, +) -> GSheetsFileS3Connection: + conn_type = CONNECTION_TYPE_GSHEETS_V2 + + conn_name = "{} test conn {}".format(conn_type.name, uuid.uuid4()) conn = GSheetsFileS3Connection.create_from_dict( - data_dict, ds_key=conn_name, type_=CONNECTION_TYPE_GSHEETS_V2.name, us_manager=sync_usm, **kwargs + data_dict=GSheetsFileS3Connection.DataModel( + sources=sources, + ), + ds_key=conn_name, + type_=CONNECTION_TYPE_GSHEETS_V2.name, + us_manager=sync_usm, + **kwargs, ) sync_usm.save(conn) return conn diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/us_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/us_connection.py index 7f5119248..a93d13f8b 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/us_connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/chs3_gsheets/core/us_connection.py @@ -9,8 +9,6 @@ import attr -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import SOURCE_TYPE_GSHEETS_V2 from dl_constants.enums import ( DataSourceRole, FileProcessingStatus, @@ -18,6 +16,9 @@ from dl_core.services_registry.file_uploader_client_factory import GSheetsFileSourceDesc from dl_utils.utils import DataKey +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from 
dl_connector_bundle_chs3.chs3_gsheets.core.constants import SOURCE_TYPE_GSHEETS_V2 + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/connection.py index 57524bfe0..8741ebe9b 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/connection.py @@ -6,6 +6,7 @@ ) from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_bundle_chs3.chs3_base.api.api_schema.connection import BaseFileS3ConnectionSchema from dl_connector_bundle_chs3.file.api.api_schema.source import FileSourceSchema from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/source.py index 08b0640e4..8026a54d4 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/api_schema/source.py @@ -6,9 +6,10 @@ fields, ) +from dl_constants.enums import UserDataType + from dl_connector_bundle_chs3.chs3_base.api.api_schema.connection import BaseFileSourceSchema from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection -from dl_constants.enums import BIType class FileSourceColumnTypeSchema(Schema): @@ -16,7 +17,7 @@ class Meta: unknown = RAISE name = fields.String() - user_type = fields.Enum(BIType) + user_type = fields.Enum(UserDataType) class FileSourceSchema(BaseFileSourceSchema): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/connection_info.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/connection_info.py index ed7b66454..a3ec95791 100644 
--- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/connection_info.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/api/connection_info.py @@ -1,6 +1,7 @@ from __future__ import annotations from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_bundle_chs3.chs3_base.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/constants.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/constants.py index 1b7fa857e..860073e39 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/constants.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/constants.py @@ -1,8 +1,8 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, ) CONNECTION_TYPE_FILE = ConnectionType.declare("file") -SOURCE_TYPE_FILE_S3_TABLE = CreateDSFrom.declare("FILE_S3_TABLE") +SOURCE_TYPE_FILE_S3_TABLE = DataSourceType.declare("FILE_S3_TABLE") diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/data_source.py index 0610ea229..a584e87a7 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/data_source.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/data_source.py @@ -1,18 +1,19 @@ from __future__ import annotations +from dl_constants.enums import DataSourceType + from dl_connector_bundle_chs3.chs3_base.core.data_source import BaseFileS3DataSource from dl_connector_bundle_chs3.file.core.constants import ( CONNECTION_TYPE_FILE, SOURCE_TYPE_FILE_S3_TABLE, ) -from dl_constants.enums import CreateDSFrom class FileS3DataSource(BaseFileS3DataSource): conn_type = CONNECTION_TYPE_FILE @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return 
source_type in { SOURCE_TYPE_FILE_S3_TABLE, } diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/settings.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/settings.py index 3e85da97a..cf36f1415 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/settings.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/settings.py @@ -3,15 +3,16 @@ ConnectorSettingsBase, ) from dl_configs.settings_loaders.meta_definition import required -from dl_connector_bundle_chs3.chs3_base.core.settings import ( - ConnectorsDataFileBase, - FileS3ConnectorSettings, -) from dl_core.connectors.settings.primitives import ( ConnectorSettingsDefinition, get_connectors_settings_config, ) +from dl_connector_bundle_chs3.chs3_base.core.settings import ( + ConnectorsDataFileBase, + FileS3ConnectorSettings, +) + def file_s3_settings_fallback(full_cfg: ConnectorsConfigType) -> dict[str, ConnectorSettingsBase]: cfg = get_connectors_settings_config( diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/storage_schemas/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/storage_schemas/connection.py index 8f6cbb1c8..9cf1fe20b 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/storage_schemas/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/storage_schemas/connection.py @@ -1,11 +1,12 @@ from marshmallow import fields +from dl_core.us_manager.storage_schemas.connection import BaseConnectionDataStorageSchema + from dl_connector_bundle_chs3.chs3_base.core.storage_schemas.connection import ( BaseFileConnectionDataStorageSchema, BaseFileConnectionSourceStorageSchema, ) from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection -from dl_core.us_manager.storage_schemas.connection import BaseConnectionDataStorageSchema class 
FileConnectionSourceStorageSchema(BaseFileConnectionSourceStorageSchema): diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/testing/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/testing/connection.py index 5793b4023..bdb934cd0 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/testing/connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/testing/connection.py @@ -1,79 +1,30 @@ from __future__ import annotations -import asyncio +from typing import Any import uuid +from dl_core.us_manager.us_manager_sync import SyncUSManager + from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection -from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig -from dl_constants.enums import FileProcessingStatus -from dl_core.us_manager.us_manager_sync import SyncUSManager -from dl_core_testing.database import DbTable def make_saved_file_connection( sync_usm: SyncUSManager, - clickhouse_table: DbTable, - filename: str, - **kwargs, + sources: list[FileS3Connection.FileDataSource], + **kwargs: Any, ) -> FileS3Connection: - from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions - from dl_connector_clickhouse.core.clickhouse_base.connection_executors import ClickHouseSyncAdapterConnExecutor - from dl_connector_clickhouse.core.clickhouse_base.dto import ClickHouseConnDTO - from dl_core.connection_executors import ( - ExecutionMode, - SyncWrapperForAsyncConnExecutor, - ) - from dl_core.connection_models import TableIdent - from dl_core.connections_security.base import InsecureConnectionSecurityManager + conn_type = CONNECTION_TYPE_FILE - conn_name = "file conn %s" % uuid.uuid4() - engine_config = clickhouse_table.db.engine_config - assert isinstance(engine_config, ClickhouseDbEngineConfig) - cluster = 
engine_config.cluster - assert cluster is not None - with SyncWrapperForAsyncConnExecutor( - async_conn_executor=ClickHouseSyncAdapterConnExecutor( - conn_dto=ClickHouseConnDTO( - conn_id=None, - protocol="http", - endpoint=None, - cluster_name=cluster, - multihosts=clickhouse_table.db.get_conn_hosts(), - **clickhouse_table.db.get_conn_credentials(full=True), - ), - conn_options=CHConnectOptions(max_execution_time=None, connect_timeout=None, total_timeout=None), - req_ctx_info=None, - exec_mode=ExecutionMode.DIRECT, - sec_mgr=InsecureConnectionSecurityManager(), - remote_qe_data=None, - tpe=None, - conn_hosts_pool=clickhouse_table.db.get_conn_hosts(), - host_fail_callback=lambda h: None, - ), - loop=asyncio.get_event_loop(), - ) as ce: - raw_schema = ce.get_table_schema_info( - TableIdent( - db_name=clickhouse_table.db.name, - schema_name=None, - table_name=clickhouse_table.name, - ) - ).schema - data_dict = FileS3Connection.DataModel( - sources=[ - FileS3Connection.FileDataSource( - id=str(uuid.uuid4()), - file_id=str(uuid.uuid4()), - title=f"Title -- {filename.upper()}", - s3_filename=filename, - raw_schema=raw_schema, - status=FileProcessingStatus.ready, - ), - ], - ) + conn_name = "{} test conn {}".format(conn_type.name, uuid.uuid4()) conn = FileS3Connection.create_from_dict( - data_dict, ds_key=conn_name, type_=CONNECTION_TYPE_FILE.name, us_manager=sync_usm, **kwargs + data_dict=FileS3Connection.DataModel( + sources=sources, + ), + ds_key=conn_name, + type_=CONNECTION_TYPE_FILE.name, + us_manager=sync_usm, + **kwargs, ) sync_usm.save(conn) return conn diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/us_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/us_connection.py index 854aab6d8..7e1b51f76 100644 --- a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/us_connection.py +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3/file/core/us_connection.py @@ -4,11 +4,12 @@ import attr 
-from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection -from dl_connector_bundle_chs3.file.core.constants import SOURCE_TYPE_FILE_S3_TABLE from dl_constants.enums import DataSourceRole from dl_core.services_registry.file_uploader_client_factory import FileSourceDesc +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.file.core.constants import SOURCE_TYPE_FILE_S3_TABLE + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/conftest.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/conftest.py new file mode 100644 index 000000000..c6fe683cc --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ("aiohttp.pytest_plugin",) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/base.py new file mode 100644 index 000000000..f776cb204 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/base.py @@ -0,0 +1,116 @@ +import abc +import datetime +import math +from typing import Generator + +import attr +import pytest + +from 
dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase +from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_api_lib_testing.connection_base import ConnectionTestBase +from dl_configs.connectors_settings import ConnectorSettingsBase +from dl_constants.enums import ( + ConnectionType, + UserDataType, +) +from dl_core.services_registry.file_uploader_client_factory import ( + FileSourceDesc, + FileUploaderClient, + FileUploaderClientFactory, + SourceInternalParams, + SourcePreview, +) +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.database import ( + C, + make_sample_data, +) + +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.base import ( + FILE_CONN_TV, + BaseCHS3TestClass, +) +from dl_connector_bundle_chs3_tests.db.config import API_TEST_CONFIG + + +@attr.s +class FileUploaderClientMockup(FileUploaderClient): + # In reality, we already receive json-serialized values for the preview from the source, + # so tune value generators here as well + _VALUE_GENERATORS = { + UserDataType.date: lambda rn, ts, **kwargs: (ts.date() + datetime.timedelta(days=rn)).isoformat(), + UserDataType.datetime: lambda rn, ts, **kwargs: (ts + datetime.timedelta(days=rn / math.pi)).isoformat(), + UserDataType.genericdatetime: lambda rn, ts, **kwargs: (ts + datetime.timedelta(days=rn / math.pi)).isoformat(), + } + + async def get_preview(self, src: FileSourceDesc) -> SourcePreview: + if src.raw_schema is None: + return SourcePreview(source_id=src.source_id, preview=[]) + cols = [ + C(sch_col.name, sch_col.user_type, sch_col.nullable, vg=self._VALUE_GENERATORS.get(sch_col.user_type)) + for sch_col in src.raw_schema + ] + preview_dicts = make_sample_data(cols, rows=20) + preview = [[row[c.name] for c in cols] for row in preview_dicts] + return SourcePreview(source_id=src.source_id, preview=preview) + + async def 
get_internal_params(self, src: FileSourceDesc) -> SourceInternalParams: + """Should normally return actual connection params from US, but this will do for tests""" + + return SourceInternalParams( + preview_id=src.preview_id, + raw_schema=src.raw_schema, + ) + + +class CHS3ConnectionApiTestBase(BaseCHS3TestClass[FILE_CONN_TV], ConnectionTestBase, metaclass=abc.ABCMeta): + bi_compeng_pg_on = False + + @pytest.fixture(scope="class") + def bi_test_config(self) -> ApiTestEnvironmentConfiguration: + return API_TEST_CONFIG + + @pytest.fixture(scope="class") + def monkeyclass(self) -> Generator[pytest.MonkeyPatch, None, None]: + with pytest.MonkeyPatch.context() as mp: + yield mp + + @pytest.fixture(scope="class", autouse=True) + def patch_file_uploader_client(self, monkeyclass: pytest.MonkeyPatch) -> None: + monkeyclass.setattr(FileUploaderClientFactory, "_file_uploader_client_cls", FileUploaderClientMockup) + + @pytest.fixture(scope="class") + def connectors_settings(self) -> dict[ConnectionType, ConnectorSettingsBase]: + return {self.conn_type: self.connection_settings} + + @pytest.fixture(scope="function") + def saved_connection_id( + self, + control_api_sync_client: SyncHttpClientBase, + connection_params: dict, + sync_us_manager: SyncUSManager, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + ) -> Generator[str, None, None]: + """ + Normally connections are updated by file uploader worker, + but that would require the whole file-uploader pipeline to be setup in tests + """ + + with super().create_connection( + control_api_sync_client=control_api_sync_client, + connection_params=connection_params, + ) as conn_id: + conn = sync_us_manager.get_by_id(conn_id, BaseFileS3Connection) + for src in conn.data.sources: + src.status = sample_file_data_source.status + src.raw_schema = sample_file_data_source.raw_schema + src.s3_filename = sample_file_data_source.s3_filename + sync_us_manager.save(conn) + yield conn_id + + @abc.abstractmethod + 
@pytest.fixture(scope="function") + def connection_params(self, sample_file_data_source: BaseFileS3Connection.FileDataSource) -> dict: + raise NotImplementedError() diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/connection.py new file mode 100644 index 000000000..e31f5adc4 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/connection.py @@ -0,0 +1,235 @@ +import abc +from http import HTTPStatus +import uuid + +from flask.testing import FlaskClient +import pytest + +from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite +from dl_core.exc import DataSourceTitleConflict +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.base import CHS3ConnectionApiTestBase +from dl_connector_bundle_chs3_tests.db.base.core.base import FILE_CONN_TV + + +class CHS3ConnectionTestSuite( + CHS3ConnectionApiTestBase[FILE_CONN_TV], + DefaultConnectorConnectionTestSuite, + metaclass=abc.ABCMeta, +): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorConnectionTestSuite.test_test_connection: "Not implemented", + DefaultConnectorConnectionTestSuite.test_cache_ttl_sec_override: "Unavailable for CHS3 connectors", + }, + ) + + @abc.abstractmethod + @pytest.fixture(scope="function") + def single_new_conn_source_params(self) -> dict: + raise NotImplementedError() + + def test_add_and_drop_connection_source( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + single_new_conn_source_params: dict, + ) -> None: + conn_id = saved_connection_id + orig_conn = sync_us_manager.get_by_id(conn_id, BaseFileS3Connection) + + new_source 
= single_new_conn_source_params + add_resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + "sources": [ + {"id": orig_conn.data.sources[0].id, "title": orig_conn.data.sources[0].title}, + new_source, + ], + }, + ) + assert add_resp.status_code == HTTPStatus.OK, add_resp.json + + conn = sync_us_manager.get_by_id(conn_id) + assert len(conn.data.sources) == len(orig_conn.data.sources) + 1 + + drop_resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + "sources": [ + {"id": orig_conn.data.sources[0].id, "title": orig_conn.data.sources[0].title}, + ], + }, + ) + assert drop_resp.status_code == HTTPStatus.OK, drop_resp.json + + conn = sync_us_manager.get_by_id(conn_id) + assert len(conn.data.sources) == len(orig_conn.data.sources) + + def test_rename_connection_source( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + ) -> None: + conn_id = saved_connection_id + orig_conn = sync_us_manager.get_by_id(conn_id, BaseFileS3Connection) + + resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + "sources": [ + {"id": orig_conn.data.sources[0].id, "title": "renamed source"}, + ], + }, + ) + assert resp.status_code == HTTPStatus.OK, resp.json + + conn = sync_us_manager.get_by_id(conn_id) + assert conn.data.sources[0].title == "renamed source" + + def test_replace_connection_source( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + single_new_conn_source_params: dict, + ) -> None: + conn_id = saved_connection_id + orig_conn = sync_us_manager.get_by_id(conn_id, BaseFileS3Connection) + + source_to_replace = orig_conn.data.sources[0] + new_source = single_new_conn_source_params + resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + "sources": [new_source], + "replace_sources": [ + { + "old_source_id": source_to_replace.id, + "new_source_id": new_source["id"], + }, + ], + }, + ) + assert resp.status_code == 
HTTPStatus.OK, resp.json + conn = sync_us_manager.get_by_id(conn_id, BaseFileS3Connection) + + old_source_ids = set(src.id for src in orig_conn.data.sources) + new_source_ids = set(src.id for src in conn.data.sources) + assert old_source_ids == new_source_ids + new_replaced_source = conn.get_file_source_by_id(source_to_replace.id) + assert new_replaced_source.file_id != source_to_replace.file_id + + def test_consistency_checks_pass_not_configured_source( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + single_new_conn_source_params: dict, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, BaseFileS3Connection) + + new_source = single_new_conn_source_params + new_source.pop("file_id") # as if this source already exists + resp = client.put( + "/api/v1/connections/{}".format(saved_connection_id), + json={ + "sources": [ + {"id": conn.data.sources[0].id, "title": conn.data.sources[0].title}, + new_source, + ], + }, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST, resp.json + details: dict[str, list[str]] = resp.json["details"] + assert details == { + "not_configured_not_saved": [new_source["id"]], + } + + def test_consistency_checks_replace_non_existent_source( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + single_new_conn_source_params: dict, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, BaseFileS3Connection) + + replaced_source_id = str(uuid.uuid4()) # replacing a non-existent source + new_source = single_new_conn_source_params + resp = client.put( + "/api/v1/connections/{}".format(saved_connection_id), + json={ + "sources": [ + {"id": conn.data.sources[0].id, "title": conn.data.sources[0].title}, + new_source, + ], + "replace_sources": [ + { + "old_source_id": replaced_source_id, + "new_source_id": new_source["id"], + }, + ], + }, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST, resp.json + details: dict[str, 
list[str]] = resp.json["details"] + assert details == { + "replaced_not_saved": [replaced_source_id], + } + + def test_consistency_checks_non_unique_titles( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + single_new_conn_source_params: dict, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, BaseFileS3Connection) + + new_source = single_new_conn_source_params + new_source["title"] = conn.data.sources[0].title + resp = client.put( + "/api/v1/connections/{}".format(saved_connection_id), + json={ + "sources": [ + {"id": conn.data.sources[0].id, "title": conn.data.sources[0].title}, + new_source, + ], + }, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST, resp.json + assert resp.json["message"] == DataSourceTitleConflict().message + + def test_table_name_spoofing( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + ) -> None: + usm = sync_us_manager + orig_conn = usm.get_by_id(saved_connection_id, BaseFileS3Connection) + orig_filename = orig_conn.data.sources[0].s3_filename + + fake_filename = "hack_me.native" + resp = client.put( + "/api/v1/connections/{}".format(saved_connection_id), + json={ + "sources": [ + { + "id": orig_conn.data.sources[0].id, + "title": orig_conn.data.sources[0].title, + "s3_filename": fake_filename, + } + ], + }, + ) + assert resp.status_code == HTTPStatus.BAD_REQUEST, resp.json + assert resp.json["sources"]["0"]["s3_filename"] == ["Unknown field."] + conn = usm.get_by_id(saved_connection_id, BaseFileS3Connection) + assert conn.data.sources[0].s3_filename == orig_filename diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/data.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/data.py new file mode 100644 index 000000000..a7fd3ac24 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/data.py @@ -0,0 +1,137 @@ +import abc +from http 
import HTTPStatus + +from flask.testing import FlaskClient +import pytest + +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_testing.connector.data_api_suites import DefaultConnectorDataResultTestSuite +from dl_api_lib_testing.data_api_base import ( + DataApiTestParams, + StandardizedDataApiTestBase, +) +from dl_constants.enums import ( + DataSourceRole, + FileProcessingStatus, +) +from dl_core.us_manager.us_manager_sync import SyncUSManager + +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.dataset import CHS3DatasetTestBase +from dl_connector_bundle_chs3_tests.db.base.core.base import FILE_CONN_TV + + +class CHS3DataApiTestBase(CHS3DatasetTestBase[FILE_CONN_TV], StandardizedDataApiTestBase, metaclass=abc.ABCMeta): + mutation_caches_on = False + + +class CHS3DataResultTestSuite( + CHS3DataApiTestBase[FILE_CONN_TV], + DefaultConnectorDataResultTestSuite, + metaclass=abc.ABCMeta, +): + @pytest.mark.xfail(reason="TODO") + def test_date32(self) -> None: # TODO implement + assert 0 + + @pytest.mark.parametrize( + "case, expected_status", + ( + ("w_status_in_progress", HTTPStatus.BAD_REQUEST), + ("w_raw_schema_removed", HTTPStatus.OK), # ok, because raw schema is stored in the dataset + ), + ) + def test_file_not_ready_result( + self, + case: str, + expected_status: HTTPStatus, + sync_us_manager: SyncUSManager, + data_api: SyncHttpDataApiV2, + saved_dataset: Dataset, + saved_connection_id: str, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + data_api_test_params: DataApiTestParams, + ) -> None: + ds = saved_dataset + conn = sync_us_manager.get_by_id(saved_connection_id, BaseFileS3Connection) + conn.update_data_source( + id=sample_file_data_source.id, + role=DataSourceRole.origin, + 
status=FileProcessingStatus.in_progress if case == "w_status_in_progress" else FileProcessingStatus.ready, + remove_raw_schema=True if case == "w_raw_schema_removed" else False, + ) + sync_us_manager.save(conn) + + result_resp = data_api.get_result( + dataset=ds, fields=[ds.find_field(title=data_api_test_params.date_field)], fail_ok=True + ) + assert result_resp.status_code == expected_status, result_resp.json + + def test_dataset_with_removed_file( + self, + client: FlaskClient, + control_api: SyncHttpDatasetApiV1, + data_api: SyncHttpDataApiV2, + saved_dataset: Dataset, + saved_connection_id: str, + data_api_test_params: DataApiTestParams, + ) -> None: + ds = saved_dataset + ds.result_schema["Measure"] = ds.field(formula=f"SUM([{data_api_test_params.summable_field}])") + + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) + assert ds_resp.status_code == HTTPStatus.OK, ds_resp.response_errors + ds = ds_resp.dataset + ds = control_api.save_dataset(ds).dataset + + preview_resp = data_api.get_preview(dataset=ds) + assert preview_resp.status_code == HTTPStatus.OK, preview_resp.json + + # remove source from the connection + update_resp = client.put( + "/api/v1/connections/{}".format(saved_connection_id), + json={"sources": [{"id": "dummy", "title": "dummy", "file_id": "dummy"}]}, + ) + assert update_resp.status_code == HTTPStatus.OK, update_resp.json + + get_ds_resp = client.get(f"/api/v1/datasets/{ds.id}/versions/draft") + assert get_ds_resp.status_code == HTTPStatus.OK, get_ds_resp.json + + refresh_resp = control_api.refresh_dataset_sources(ds, [ds.sources[0].id], fail_ok=True) + assert refresh_resp.status_code == HTTPStatus.BAD_REQUEST + assert refresh_resp.json["code"] == "ERR.DS_API.VALIDATION.ERROR" + + preview_resp = data_api.get_preview(dataset=ds, fail_ok=True) + assert preview_resp.status_code == HTTPStatus.BAD_REQUEST, preview_resp.json + assert preview_resp.json["code"] == "ERR.DS_API.DB.SOURCE_DOES_NOT_EXIST" + + result_resp = 
data_api.get_result(dataset=ds, fields=[ds.result_schema["Measure"]], fail_ok=True) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, preview_resp.json + assert result_resp.json["code"] == "ERR.DS_API.DB.SOURCE_DOES_NOT_EXIST" + + def test_table_name_spoofing( + self, + data_api: SyncHttpDataApiV2, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + ) -> None: + fake_filename = "hack_me.native" + fake_parameters = { + "db_name": "fake db_name", + "db_version": "fake db_version", + "table_name": fake_filename, + "origin_source_id": "fake_source_id", + } + + ds = saved_dataset + ds.sources[0].parameters = fake_parameters + result_resp = data_api.get_result( + dataset=ds, fields=[ds.find_field(title=data_api_test_params.date_field)], fail_ok=True + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + + preview_resp = data_api.get_preview(dataset=ds, fail_ok=True) + assert preview_resp.status_code == HTTPStatus.BAD_REQUEST, preview_resp.json + # ^ because of fake origin_source_id in parameters diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/dataset.py new file mode 100644 index 000000000..6eba538ec --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/api/dataset.py @@ -0,0 +1,121 @@ +import abc +from http import HTTPStatus + +import pytest + +from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite +from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_constants.enums import ( + DataSourceRole, + FileProcessingStatus, +) +from dl_core.us_manager.us_manager_sync import SyncUSManager + +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from 
dl_connector_bundle_chs3_tests.db.base.api.base import CHS3ConnectionApiTestBase +from dl_connector_bundle_chs3_tests.db.base.core.base import FILE_CONN_TV + + +class CHS3DatasetTestBase(CHS3ConnectionApiTestBase[FILE_CONN_TV], DatasetTestBase, metaclass=abc.ABCMeta): + @pytest.fixture(scope="function") + def dataset_params(self, sample_file_data_source) -> dict: + return dict( + source_type=self.source_type.name, + parameters=dict( + origin_source_id=sample_file_data_source.id, + ), + ) + + +class CHS3DatasetTestSuite(CHS3DatasetTestBase, DefaultConnectorDatasetTestSuite, metaclass=abc.ABCMeta): + @pytest.mark.parametrize( + "source_status, expected_status", + ( + (FileProcessingStatus.ready, HTTPStatus.OK), + (FileProcessingStatus.in_progress, HTTPStatus.BAD_REQUEST), + (FileProcessingStatus.failed, HTTPStatus.BAD_REQUEST), + ), + ) + def test_add_dataset_source( + self, + sync_us_manager: SyncUSManager, + saved_connection: FILE_CONN_TV, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + control_api: SyncHttpDatasetApiV1, + source_status: FileProcessingStatus, + expected_status: int, + ) -> None: + saved_connection.update_data_source( + id=sample_file_data_source.id, + role=DataSourceRole.origin, + status=source_status, + ) + sync_us_manager.save(saved_connection) + + ds = Dataset() + ds.sources["source_1"] = ds.source( + source_type=self.source_type, + connection_id=saved_connection.uuid, + parameters=dict( + origin_source_id=sample_file_data_source.id, + ), + ) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) + assert ds_resp.status_code == expected_status, ds_resp.json + + ds = ds_resp.dataset + ds_resp = control_api.save_dataset(dataset=ds) + assert ds_resp.status_code == HTTPStatus.OK, ds_resp.response_errors + + def test_table_name_spoofing( + self, + control_api: SyncHttpDatasetApiV1, + saved_connection_id: str, + ) -> None: + fake_parameters = { + "db_name": 
"fake db_name", + "db_version": "fake db_version", + "table_name": "hack_me.native", + "origin_source_id": "fake_source_id", + } + + ds = Dataset() + ds.sources["source_1"] = ds.source( + source_type=self.source_type, + connection_id=saved_connection_id, + parameters=fake_parameters, + ) + ds.source_avatars["avatar_1"] = ds.sources["source_1"].avatar() + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) + assert ds_resp.status_code == HTTPStatus.BAD_REQUEST, ds_resp.json + + def test_update_connection_source( + self, + sync_us_manager: SyncUSManager, + control_api: SyncHttpDatasetApiV1, + saved_dataset: Dataset, + saved_connection_id: str, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + ) -> None: + orig_ds = saved_dataset + + # --- change connection source: add one column --- + conn = sync_us_manager.get_by_id(saved_connection_id, BaseFileS3Connection) + new_field = sample_file_data_source.raw_schema[-1].clone(name="new_field") + conn.update_data_source( + id=sample_file_data_source.id, + role=DataSourceRole.origin, + raw_schema=sample_file_data_source.raw_schema + [new_field], + ) + sync_us_manager.save(conn) + # --- ---------------------------------------- --- + + refresh_resp = control_api.refresh_dataset_sources(orig_ds, [orig_ds.sources[0].id]) + assert refresh_resp.status_code == HTTPStatus.OK, refresh_resp.json + ds = refresh_resp.dataset + + old_raw_schema, new_raw_schema = orig_ds.sources[0].raw_schema, ds.sources[0].raw_schema + assert len(new_raw_schema) == len(old_raw_schema) + 1 diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/base.py new file mode 100644 index 
000000000..af30596e9 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/base.py @@ -0,0 +1,190 @@ +from __future__ import annotations + +import abc +import asyncio +from typing import ( + TYPE_CHECKING, + AsyncGenerator, + ClassVar, + Generator, + TypeVar, +) +import uuid + +import attr +import pytest + +from dl_api_commons.base_models import ( + RequestContextInfo, + TenantCommon, +) +from dl_api_lib_testing.app import RedisSettingMaker +from dl_configs.settings_submodels import S3Settings +from dl_constants.enums import DataSourceType +from dl_core.db import ( + SchemaColumn, + get_type_transformer, +) +from dl_core.services_registry import ServicesRegistry +from dl_core_testing.database import DbTable +from dl_core_testing.fixtures.primitives import FixtureTableSpec +from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE +from dl_core_testing.testcases.connection import BaseConnectionTestClass +from dl_file_uploader_worker_lib.utils.parsing_utils import get_field_id_generator +from dl_task_processor.processor import ( + DummyTaskProcessorFactory, + TaskProcessorFactory, +) +from dl_testing.s3_utils import ( + create_s3_bucket, + create_s3_client, +) + +from dl_connector_bundle_chs3.chs3_base.core.testing.utils import create_s3_native_from_ch_table +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3_tests.db import config as test_config +from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig + + +if TYPE_CHECKING: + from types_aiobotocore_s3 import S3Client as AsyncS3Client + + +FILE_CONN_TV = TypeVar("FILE_CONN_TV", bound=BaseFileS3Connection) + + +class BaseCHS3TestClass(BaseConnectionTestClass[FILE_CONN_TV], metaclass=abc.ABCMeta): + core_test_config = test_config.CORE_TEST_CONFIG + connection_settings = test_config.SR_CONNECTION_SETTINGS + + source_type: ClassVar[DataSourceType] + + 
@pytest.fixture(scope="session") + def event_loop(self): + """Avoid spontaneous event loop closes between tests""" + loop = asyncio.get_event_loop() + yield loop + loop.close() + + @pytest.fixture(scope="session", autouse=True) + # FIXME: This fixture is a temporary solution for failing core tests when they are run together with api tests + def loop(self, event_loop: asyncio.AbstractEventLoop) -> Generator[asyncio.AbstractEventLoop, None, None]: + asyncio.set_event_loop(event_loop) + yield event_loop + # Attempt to cover an old version of pytest-asyncio: + # https://github.com/pytest-dev/pytest-asyncio/commit/51d986cec83fdbc14fa08015424c79397afc7ad9 + asyncio.set_event_loop_policy(None) + + @pytest.fixture(scope="class") + def db_url(self) -> str: + return test_config.DB_CH_URL + + @pytest.fixture(scope="class") + def engine_config(self, db_url: str, engine_params: dict) -> ClickhouseDbEngineConfig: + return ClickhouseDbEngineConfig(url=db_url, engine_params=engine_params) + + @pytest.fixture(scope="session") + def conn_bi_context(self) -> RequestContextInfo: + return RequestContextInfo(tenant=TenantCommon()) + + @pytest.fixture(scope="session") + def redis_setting_maker(self) -> RedisSettingMaker: + bi_test_config = test_config.API_TEST_CONFIG + return RedisSettingMaker(bi_test_config=bi_test_config) + + @pytest.fixture(scope="session") + def s3_settings(self) -> S3Settings: + return S3Settings( + ENDPOINT_URL=test_config.S3_ENDPOINT_URL, + ACCESS_KEY_ID=self.connection_settings.ACCESS_KEY_ID, + SECRET_ACCESS_KEY=self.connection_settings.SECRET_ACCESS_KEY, + ) + + @pytest.fixture(scope="session") + def task_processor_factory(self) -> TaskProcessorFactory: + return DummyTaskProcessorFactory() + + @pytest.fixture(scope="session") + def conn_sync_service_registry( + self, + conn_bi_context: RequestContextInfo, + task_processor_factory: TaskProcessorFactory, + ) -> ServicesRegistry: + return self.service_registry_factory( + conn_exec_factory_async_env=False, + 
conn_bi_context=conn_bi_context, + task_processor_factory=task_processor_factory, + ) + + @pytest.fixture(scope="session") + def conn_async_service_registry( + self, + conn_bi_context: RequestContextInfo, + task_processor_factory: TaskProcessorFactory, + ) -> ServicesRegistry: + return self.service_registry_factory( + conn_exec_factory_async_env=True, + conn_bi_context=conn_bi_context, + task_processor_factory=task_processor_factory, + ) + + @pytest.fixture(scope="function") + async def s3_client(self, s3_settings: S3Settings) -> AsyncS3Client: + async with create_s3_client(s3_settings) as client: + yield client + + @pytest.fixture(scope="function") + async def s3_bucket(self, s3_client: AsyncS3Client) -> str: + bucket_name = self.connection_settings.BUCKET + await create_s3_bucket(s3_client, bucket_name) + return bucket_name + + @pytest.fixture(scope="class") + def sample_table_spec(self) -> FixtureTableSpec: + return attr.evolve(TABLE_SPEC_SAMPLE_SUPERSTORE, nullable=True) + + @pytest.fixture(scope="function") + async def sample_s3_file( + self, + s3_client: AsyncS3Client, + s3_bucket: str, + s3_settings: S3Settings, + sample_table: DbTable, + ) -> AsyncGenerator[str, None]: + filename = f"my_file_{uuid.uuid4()}.native" + + field_id_gen = get_field_id_generator(self.conn_type) + tbl_schema = ", ".join( + "{} {}".format(field_id_gen.make_field_id(dict(title=col.name, index=idx)), col.type.compile()) + for idx, col in enumerate(sample_table.table.columns) + ) + tbl_schema = tbl_schema.replace("()", "") # String() -> String: type arguments are not needed here + + create_s3_native_from_ch_table(filename, s3_bucket, s3_settings, sample_table, tbl_schema) + + yield filename + + await s3_client.delete_object(Bucket=s3_bucket, Key=filename) + + def _get_raw_schema_for_ch_table(self, table_spec: FixtureTableSpec) -> list[SchemaColumn]: + field_id_gen = get_field_id_generator(self.conn_type) + type_transformer = get_type_transformer(self.conn_type) + raw_schema = [ + 
SchemaColumn( + name=field_id_gen.make_field_id(dict(title=col[0], index=idx)), + title=col[0], + user_type=(user_type := table_spec.get_user_type_for_col(col[0])), + native_type=type_transformer.type_user_to_native(user_type), + ) + for idx, col in enumerate(table_spec.table_schema) + ] + return raw_schema + + @abc.abstractmethod + @pytest.fixture(scope="function") + def sample_file_data_source(self, sample_s3_file: str) -> BaseFileS3Connection.FileDataSource: + raise NotImplementedError() + + @pytest.fixture(scope="function") + def connection_creation_params(self, sample_file_data_source: BaseFileS3Connection.FileDataSource) -> dict: + return dict(sources=[sample_file_data_source]) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection.py new file mode 100644 index 000000000..283f90e2e --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +import abc + +from dl_core.us_connection_base import DataSourceTemplate +from dl_core_testing.testcases.connection import DefaultConnectionTestClass + +from dl_connector_bundle_chs3_tests.db.base.core.base import ( + FILE_CONN_TV, + BaseCHS3TestClass, +) + + +class CHS3ConnectionTestBase( + BaseCHS3TestClass, + DefaultConnectionTestClass[FILE_CONN_TV], + metaclass=abc.ABCMeta, +): + do_check_data_export_flag = False + + def check_saved_connection(self, conn: FILE_CONN_TV, params: dict) -> None: + assert set(src.title for src in conn.data.sources) == set(src.title for src in params["sources"]) + + def check_data_source_templates( + self, + conn: FILE_CONN_TV, + dsrc_templates: list[DataSourceTemplate], + ) -> None: + assert dsrc_templates + for dsrc_tmpl in dsrc_templates: + conn_src = conn.get_file_source_by_id(dsrc_tmpl.parameters["origin_source_id"]) + assert conn_src.title == 
dsrc_tmpl.title diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection_executor.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection_executor.py new file mode 100644 index 000000000..55da78e3f --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/connection_executor.py @@ -0,0 +1,55 @@ +import abc + +import pytest + +from dl_core.connection_models import DBIdent +from dl_core_testing.testcases.connection_executor import ( + DefaultAsyncConnectionExecutorTestSuite, + DefaultSyncAsyncConnectionExecutorCheckBase, + DefaultSyncConnectionExecutorTestSuite, +) +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bundle_chs3_tests.db.base.core.base import ( + FILE_CONN_TV, + BaseCHS3TestClass, +) + + +class CHS3SyncAsyncConnectionExecutorTestBase( + BaseCHS3TestClass, + DefaultSyncAsyncConnectionExecutorCheckBase[FILE_CONN_TV], + metaclass=abc.ABCMeta, +): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultAsyncConnectionExecutorTestSuite.test_get_db_version: "Not implemented", + DefaultAsyncConnectionExecutorTestSuite.test_table_not_exists: "Assumes it always exists", + DefaultAsyncConnectionExecutorTestSuite.test_get_table_schema_info: "Not implemented", + DefaultAsyncConnectionExecutorTestSuite.test_get_table_schema_info_for_nonexistent_table: "Not implemented", + }, + ) + + @pytest.fixture(scope="function") + def db_ident(self) -> DBIdent: + pass + + +class CHS3SyncConnectionExecutorTestBase( + CHS3SyncAsyncConnectionExecutorTestBase[FILE_CONN_TV], + DefaultSyncConnectionExecutorTestSuite[FILE_CONN_TV], + metaclass=abc.ABCMeta, +): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultSyncConnectionExecutorTestSuite.test_type_recognition: "Not implemented", + }, + ) + + +class CHS3AsyncConnectionExecutorTestBase( + CHS3SyncAsyncConnectionExecutorTestBase[FILE_CONN_TV], + 
DefaultAsyncConnectionExecutorTestSuite[FILE_CONN_TV], + metaclass=abc.ABCMeta, +): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/data_source.py new file mode 100644 index 000000000..9489dac21 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/data_source.py @@ -0,0 +1,72 @@ +import abc +from typing import TypeVar + +import pytest + +from dl_constants.enums import UserDataType +from dl_core.db import SchemaColumn +from dl_core.db.native_type import GenericNativeType +from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE +from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass + +from dl_connector_bundle_chs3.chs3_base.core.data_source import BaseFileS3DataSource +from dl_connector_bundle_chs3.chs3_base.core.data_source_spec import BaseFileS3DataSourceSpec +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.file.core.constants import SOURCE_TYPE_FILE_S3_TABLE +from dl_connector_bundle_chs3_tests.db.base.core.base import ( + FILE_CONN_TV, + BaseCHS3TestClass, +) + + +FILE_DSRC_SPEC_TV = TypeVar("FILE_DSRC_SPEC_TV", bound=BaseFileS3DataSourceSpec) +FILE_DSRC_TV = TypeVar("FILE_DSRC_TV", bound=BaseFileS3DataSource) + + +class CHS3TableDataSourceTestBase( + BaseCHS3TestClass, + DefaultDataSourceTestClass[FILE_CONN_TV, FILE_DSRC_SPEC_TV, FILE_DSRC_TV], + metaclass=abc.ABCMeta, +): + @pytest.fixture(scope="function") + def initial_data_source_spec( + self, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + ) -> FILE_DSRC_SPEC_TV: + dsrc_spec = BaseFileS3DataSourceSpec( + source_type=SOURCE_TYPE_FILE_S3_TABLE, + raw_schema=sample_file_data_source.raw_schema, + s3_endpoint=self.connection_settings.S3_ENDPOINT, + bucket=self.connection_settings.BUCKET, + 
origin_source_id=sample_file_data_source.id, + ) + return dsrc_spec + + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: + return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) + + def test_build_from_clause( + self, + data_source: FILE_DSRC_TV, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + saved_connection: FILE_CONN_TV, + ) -> None: + data_source._spec.raw_schema = [ # leaving one column to simplify the test + SchemaColumn( + name="c1", + native_type=GenericNativeType(conn_type=self.conn_type, name="Int64"), + user_type=UserDataType.integer, + ), + ] + query_from = data_source.get_sql_source().compile(compile_kwargs={"literal_binds": True}).string + + replace_secret = saved_connection.get_conn_dto().replace_secret + + expected = ( + f"s3(" + f"'{self.connection_settings.S3_ENDPOINT}/{self.connection_settings.BUCKET}/" + f"{sample_file_data_source.s3_filename}', " + f"'key_id_{replace_secret}', 'secret_key_{replace_secret}', 'Native', " + f"'c1 Nullable(Int64)')" + ) + assert query_from == expected, query_from diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/dataset.py new file mode 100644 index 000000000..fa224f23f --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/base/core/dataset.py @@ -0,0 +1,52 @@ +import abc +from typing import Generic + +import pytest + +from dl_core.services_registry import ServicesRegistry +from dl_core.us_dataset import Dataset +from dl_core_testing.database import DbTable +from dl_core_testing.dataset_wrappers import DatasetTestWrapper +from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite + +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.base import ( + 
FILE_CONN_TV, + BaseCHS3TestClass, +) + + +class CHS3DatasetTestBase(BaseCHS3TestClass, DefaultDatasetTestSuite, Generic[FILE_CONN_TV], abc.ABC): + @pytest.fixture(scope="function") + def dsrc_params(self, dataset_table: DbTable, sample_file_data_source: BaseFileS3Connection.FileDataSource) -> dict: + return dict( + origin_source_id=sample_file_data_source.id, + ) + + def test_get_param_hash( + self, + sample_table: DbTable, + saved_connection: FileS3Connection, + saved_dataset: Dataset, + conn_default_service_registry: ServicesRegistry, + dataset_wrapper: DatasetTestWrapper, + sample_file_data_source: BaseFileS3Connection.FileDataSource, + ) -> None: + dataset = saved_dataset + service_registry = conn_default_service_registry + source_id = dataset.get_single_data_source_id() + dsrc_coll = dataset_wrapper.get_data_source_coll_strict(source_id=source_id) + hash_from_dataset = dsrc_coll.get_param_hash() + + templates = saved_connection.get_data_source_templates( + conn_executor_factory=service_registry.get_conn_executor_factory().get_sync_conn_executor, + ) + found_template = False + for template in templates: + if template.parameters["origin_source_id"] == sample_file_data_source.id: + found_template = True + hash_from_template = template.get_param_hash() + assert hash_from_dataset == hash_from_template + + assert found_template diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/config.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/config.py new file mode 100644 index 000000000..672ecbb7b --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/config.py @@ -0,0 +1,43 @@ +from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_core_testing.configuration import DefaultCoreTestConfiguration +from dl_testing.containers import get_test_container_hostport + +from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings + + +CORE_TEST_CONFIG = 
DefaultCoreTestConfiguration( + host_us_http=get_test_container_hostport("us", fallback_port=52611).host, + port_us_http=get_test_container_hostport("us", fallback_port=52611).port, + host_us_pg=get_test_container_hostport("pg-us", fallback_port=52610).host, + port_us_pg_5432=get_test_container_hostport("pg-us", fallback_port=52610).port, + us_master_token="AC1ofiek8coB", + core_connector_ep_names=["clickhouse", "file", "gsheets_v2"], +) + +SR_CONNECTION_SETTINGS = FileS3ConnectorSettings( + SECURE=False, + HOST=get_test_container_hostport("db-clickhouse", original_port=8123).host, + PORT=get_test_container_hostport("db-clickhouse", original_port=8123).port, + USERNAME="datalens", + PASSWORD="qwerty", + ACCESS_KEY_ID="accessKey1", + SECRET_ACCESS_KEY="verySecretKey1", + BUCKET="dl-file-uploader", + S3_ENDPOINT="http://s3-storage:8000", # compose svc name, because this is a container interaction (ch <-> s3) +) + +DB_CH_URL = ( + f"clickhouse://datalens:qwerty@" + f"{get_test_container_hostport('db-clickhouse', fallback_port=52604).as_pair()}/test_data" +) + +S3_ENDPOINT_URL = f"http://{get_test_container_hostport('s3-storage', fallback_port=52620).as_pair()}" + +API_TEST_CONFIG = ApiTestEnvironmentConfiguration( + api_connector_ep_names=["clickhouse", "file", "gsheets_v2"], + core_test_config=CORE_TEST_CONFIG, + ext_query_executer_secret_key="_some_test_secret_key_", + redis_host=get_test_container_hostport("redis", fallback_port=52604).host, + redis_port=get_test_container_hostport("redis", fallback_port=52604).port, + redis_password="AwockEuvavDyinmeakmiRiopanbesBepsensUrdIz5", +) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/conftest.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/conftest.py new file mode 100644 index 000000000..456b2a3ac --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/conftest.py @@ -0,0 +1,7 @@ +from dl_api_lib_testing.initialization import 
initialize_api_lib_test + +from dl_connector_bundle_chs3_tests.db.config import API_TEST_CONFIG + + +def pytest_configure(config): # noqa + initialize_api_lib_test(pytest_config=config, api_test_config=API_TEST_CONFIG) diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/base.py new file mode 100644 index 000000000..0d8340b23 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/base.py @@ -0,0 +1,36 @@ +import pytest + +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.base import CHS3ConnectionApiTestBase +from dl_connector_bundle_chs3_tests.db.base.api.data import CHS3DataApiTestBase +from dl_connector_bundle_chs3_tests.db.base.api.dataset import CHS3DatasetTestBase +from dl_connector_bundle_chs3_tests.db.file.core.base import BaseFileS3TestClass + + +class FileS3ApiConnectionTestBase( + BaseFileS3TestClass, + CHS3ConnectionApiTestBase[FileS3Connection], +): + @pytest.fixture(scope="function") + def connection_params( + self, + sample_file_data_source: FileS3Connection.FileDataSource, + ) -> dict: + return dict( + sources=[ + dict( + file_id=sample_file_data_source.file_id, + id=sample_file_data_source.id, + title=sample_file_data_source.title, + column_types=sample_file_data_source.column_types, + ), + ], + ) + + +class FileS3DatasetTestBase(FileS3ApiConnectionTestBase, 
CHS3DatasetTestBase[FileS3Connection]): + pass + + +class FileS3DataApiTestBase(FileS3DatasetTestBase, CHS3DataApiTestBase[FileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_connection.py new file mode 100644 index 000000000..e493a9008 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_connection.py @@ -0,0 +1,20 @@ +import uuid + +import pytest + +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.connection import CHS3ConnectionTestSuite +from dl_connector_bundle_chs3_tests.db.file.api.base import FileS3ApiConnectionTestBase + + +class TestFileS3Connection(FileS3ApiConnectionTestBase, CHS3ConnectionTestSuite[FileS3Connection]): + @pytest.fixture(scope="function") + def single_new_conn_source_params(self) -> dict: + return { + "id": str(uuid.uuid4()), + "file_id": str(uuid.uuid4()), + "title": f"New File {str(uuid.uuid4())}", + "column_types": [ + {"name": "new_field", "user_type": "string"}, + ], + } diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_data.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_data.py new file mode 100644 index 000000000..5d8eeab46 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_data.py @@ -0,0 +1,38 @@ +from dl_api_lib_testing.connector.data_api_suites import ( + DefaultConnectorDataDistinctTestSuite, + DefaultConnectorDataGroupByFormulaTestSuite, + DefaultConnectorDataPreviewTestSuite, + DefaultConnectorDataRangeTestSuite, +) +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bundle_chs3_tests.db.base.api.data import CHS3DataResultTestSuite +from dl_connector_bundle_chs3_tests.db.file.api.base import 
FileS3DataApiTestBase + + +class TestFileS3DataResult(FileS3DataApiTestBase, CHS3DataResultTestSuite): + test_params = RegulatedTestParams( + mark_features_skipped={ + CHS3DataResultTestSuite.array_support: "File connector doesn't support arrays", + } + ) + + +class TestFileS3DataGroupBy(FileS3DataApiTestBase, DefaultConnectorDataGroupByFormulaTestSuite): + pass + + +class TestFileS3DataRange(FileS3DataApiTestBase, DefaultConnectorDataRangeTestSuite): + pass + + +class TestFileDataDistinct(FileS3DataApiTestBase, DefaultConnectorDataDistinctTestSuite): + test_params = RegulatedTestParams( + mark_tests_failed={ + DefaultConnectorDataDistinctTestSuite.test_date_filter_distinct: "FIXME", + } + ) + + +class TestFileS3DataPreview(FileS3DataApiTestBase, DefaultConnectorDataPreviewTestSuite): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_dataset.py new file mode 100644 index 000000000..c3512f2b0 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/api/test_dataset.py @@ -0,0 +1,6 @@ +from dl_connector_bundle_chs3_tests.db.base.api.dataset import CHS3DatasetTestSuite +from dl_connector_bundle_chs3_tests.db.file.api.base import FileS3DatasetTestBase + + +class TestFileS3Dataset(FileS3DatasetTestBase, CHS3DatasetTestSuite): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/base.py new file mode 100644 index 000000000..787e298ef --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/base.py @@ -0,0 +1,49 @@ +import 
uuid + +import pytest + +from dl_constants.enums import FileProcessingStatus +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.fixtures.primitives import FixtureTableSpec + +from dl_connector_bundle_chs3.file.core.constants import ( + CONNECTION_TYPE_FILE, + SOURCE_TYPE_FILE_S3_TABLE, +) +from dl_connector_bundle_chs3.file.core.testing.connection import make_saved_file_connection +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.base import BaseCHS3TestClass + + +class BaseFileS3TestClass(BaseCHS3TestClass[FileS3Connection]): + conn_type = CONNECTION_TYPE_FILE + source_type = SOURCE_TYPE_FILE_S3_TABLE + + @pytest.fixture(scope="function") + def sample_file_data_source( + self, + sample_table_spec: FixtureTableSpec, + sample_s3_file: str, + ) -> FileS3Connection.FileDataSource: + raw_schema = self._get_raw_schema_for_ch_table(sample_table_spec) + return FileS3Connection.FileDataSource( + id=str(uuid.uuid4()), + file_id=str(uuid.uuid4()), + title=sample_s3_file, + s3_filename=sample_s3_file, + raw_schema=raw_schema, + status=FileProcessingStatus.ready, + column_types=[{"name": col[0], "user_type": col[1].name} for col in sample_table_spec.table_schema], + ) + + @pytest.fixture(scope="function") + def saved_connection( + self, + sync_us_manager: SyncUSManager, + connection_creation_params: dict, + ) -> FileS3Connection: + conn = make_saved_file_connection( + sync_usm=sync_us_manager, + **connection_creation_params, + ) + return conn diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection.py new file mode 100644 index 000000000..0a5f578ac --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from 
dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.connection import CHS3ConnectionTestBase +from dl_connector_bundle_chs3_tests.db.file.core.base import BaseFileS3TestClass + + +class TestFileS3Connection(BaseFileS3TestClass, CHS3ConnectionTestBase[FileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection_executor.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection_executor.py new file mode 100644 index 000000000..a5bad094a --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_connection_executor.py @@ -0,0 +1,14 @@ +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.connection_executor import ( + CHS3AsyncConnectionExecutorTestBase, + CHS3SyncConnectionExecutorTestBase, +) +from dl_connector_bundle_chs3_tests.db.file.core.base import BaseFileS3TestClass + + +class TestFileS3SyncConnectionExecutor(BaseFileS3TestClass, CHS3SyncConnectionExecutorTestBase[FileS3Connection]): + pass + + +class TestFileS3AsyncConnectionExecutor(BaseFileS3TestClass, CHS3AsyncConnectionExecutorTestBase[FileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_data_source.py new file mode 100644 index 000000000..3b7fb20e1 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_data_source.py @@ -0,0 +1,12 @@ +from dl_connector_bundle_chs3.file.core.data_source import FileS3DataSource +from dl_connector_bundle_chs3.file.core.data_source_spec import FileS3DataSourceSpec +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from 
dl_connector_bundle_chs3_tests.db.base.core.data_source import CHS3TableDataSourceTestBase +from dl_connector_bundle_chs3_tests.db.file.core.base import BaseFileS3TestClass + + +class TestFileS3TableDataSource( + BaseFileS3TestClass, + CHS3TableDataSourceTestBase[FileS3Connection, FileS3DataSourceSpec, FileS3DataSource], +): + DSRC_CLS = FileS3DataSource diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_dataset.py new file mode 100644 index 000000000..fbb7543a4 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/file/core/test_dataset.py @@ -0,0 +1,7 @@ +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.dataset import CHS3DatasetTestBase +from dl_connector_bundle_chs3_tests.db.file.core.base import BaseFileS3TestClass + + +class TestFileS3Dataset(BaseFileS3TestClass, CHS3DatasetTestBase[FileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/base.py new file mode 100644 index 000000000..ee632854d --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/base.py @@ -0,0 +1,76 @@ +import datetime +import logging + +from aiohttp import web +import pytest + +from 
dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_core.us_manager.us_manager_async import AsyncUSManager + +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.base import CHS3ConnectionApiTestBase +from dl_connector_bundle_chs3_tests.db.base.api.data import CHS3DataApiTestBase +from dl_connector_bundle_chs3_tests.db.base.api.dataset import CHS3DatasetTestBase +from dl_connector_bundle_chs3_tests.db.gsheets_v2.core.base import BaseGSheetsFileS3TestClass + + +LOGGER = logging.getLogger(__name__) + + +class GSheetsFileS3ApiConnectionTestBase( + BaseGSheetsFileS3TestClass, + CHS3ConnectionApiTestBase[GSheetsFileS3Connection], +): + @pytest.fixture(scope="function") + async def mock_file_uploader_api( + self, + aiohttp_server, + bi_test_config: ApiTestEnvironmentConfiguration, + async_us_manager: AsyncUSManager, + ) -> None: + async def mocked_update_connection_data_internal(request: web.Request) -> web.Response: + req_data = await request.json() + conn_id: str = req_data["connection_id"] + sources_to_update = [src["id"] for src in req_data["sources"]] + + conn = await async_us_manager.get_by_id(conn_id, GSheetsFileS3Connection) + for src_id in sources_to_update: + src: GSheetsFileS3Connection.FileDataSource = conn.get_file_source_by_id(src_id) + src.data_updated_at = datetime.datetime.now(datetime.timezone.utc) + LOGGER.info(f"Successfully updated source id {src_id}") + await async_us_manager.save(conn) + + return web.HTTPOk() + + app = web.Application() + app.router.add_route("POST", "/api/v2/update_connection_data_internal", mocked_update_connection_data_internal) + + server = await aiohttp_server(app, port=bi_test_config.file_uploader_api_port) + + yield + + await server.close() + + @pytest.fixture(scope="function") + def connection_params( + self, + sample_file_data_source: GSheetsFileS3Connection.FileDataSource, + ) -> dict: + return dict( + 
refresh_enabled=True, + sources=[ + dict( + file_id=sample_file_data_source.file_id, + id=sample_file_data_source.id, + title=sample_file_data_source.title, + ), + ], + ) + + +class GSheetsFileS3DatasetTestBase(GSheetsFileS3ApiConnectionTestBase, CHS3DatasetTestBase[GSheetsFileS3Connection]): + pass + + +class GSheetsFileS3DataApiTestBase(GSheetsFileS3DatasetTestBase, CHS3DataApiTestBase[GSheetsFileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_connection.py new file mode 100644 index 000000000..cb5b7ad58 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_connection.py @@ -0,0 +1,111 @@ +from http import HTTPStatus +import logging +import uuid + +from flask.testing import FlaskClient +import pytest + +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_testing.utils import get_log_record + +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.connection import CHS3ConnectionTestSuite +from dl_connector_bundle_chs3_tests.db.gsheets_v2.api.base import GSheetsFileS3ApiConnectionTestBase + + +class TestGSheetsFileS3Connection(GSheetsFileS3ApiConnectionTestBase, CHS3ConnectionTestSuite[GSheetsFileS3Connection]): + @pytest.fixture(scope="function") + def single_new_conn_source_params(self) -> dict: + return { + "id": str(uuid.uuid4()), + "file_id": str(uuid.uuid4()), + "title": f"New File {str(uuid.uuid4())}", + } + + def test_authorization_field( + self, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + ) -> None: + conn_id = saved_connection_id + conn = sync_us_manager.get_by_id(conn_id, GSheetsFileS3Connection) + base_update_data = { + "refresh_enabled": True, + "sources": [{"id": src.id, "title": src.title} 
for src in conn.data.sources], + } + + # no token => not authorized + assert conn.authorized is False + conn_resp = client.get(f"/api/v1/connections/{conn_id}") + assert conn_resp.status_code == HTTPStatus.OK, conn_resp.json + assert conn_resp.json["authorized"] is False + assert "refresh_token" not in conn_resp.json + + # add token into connection + resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + **base_update_data, + "refresh_token": "some_token", + }, + ) + assert resp.status_code == HTTPStatus.OK, resp.json + conn = sync_us_manager.get_by_id(conn_id, GSheetsFileS3Connection) + assert conn.authorized is True + conn_resp = client.get(f"/api/v1/connections/{conn_id}") + assert conn_resp.status_code == HTTPStatus.OK, conn_resp.json + assert conn_resp.json["authorized"] is True + assert "refresh_token" not in conn_resp.json + + # remove token from the connection + resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + **base_update_data, + "refresh_token": None, + }, + ) + assert resp.status_code == HTTPStatus.OK, resp.json + conn: GSheetsFileS3Connection = sync_us_manager.get_by_id(conn_id, GSheetsFileS3Connection) + assert conn.authorized is False + conn_resp = client.get(f"/api/v1/connections/{conn_id}") + assert conn_resp.status_code == HTTPStatus.OK + assert conn_resp.json["authorized"] is False + assert "refresh_token" not in conn_resp.json + + def test_force_update_with_file_id( + self, + caplog, + client: FlaskClient, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + ) -> None: + """Passed file_id to an existing source means that it has been updated => this should trigger data update""" + + caplog.set_level(logging.INFO) + + conn_id = saved_connection_id + usm = sync_us_manager + conn = usm.get_by_id(conn_id, GSheetsFileS3Connection) + + resp = client.put( + "/api/v1/connections/{}".format(conn_id), + json={ + "sources": [ + { + "file_id": str(uuid.uuid4()), # force source update by passing file_id + 
"id": conn.data.sources[0].id, + "title": conn.data.sources[0].title, + }, + ], + }, + ) + assert resp.status_code == HTTPStatus.OK, resp.json + + schedule_save_src_log_record = get_log_record( + caplog, + predicate=lambda r: r.message.startswith("Scheduled task SaveSourceTask for source_id"), + single=True, + ) + assert conn.data.sources[0].id in schedule_save_src_log_record.message diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_data.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_data.py new file mode 100644 index 000000000..23661e0a7 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_data.py @@ -0,0 +1,193 @@ +import datetime +from http import HTTPStatus + +from flask.testing import FlaskClient +import pytest + +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_testing.connector.data_api_suites import ( + DefaultConnectorDataDistinctTestSuite, + DefaultConnectorDataGroupByFormulaTestSuite, + DefaultConnectorDataPreviewTestSuite, + DefaultConnectorDataRangeTestSuite, + DefaultConnectorDataResultTestSuite, +) +from dl_api_lib_testing.data_api_base import DataApiTestParams +from dl_constants.enums import ( + ComponentErrorLevel, + ComponentType, + DataSourceRole, + FileProcessingStatus, +) +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import NOTIF_TYPE_GSHEETS_V2_STALE_DATA +from dl_connector_bundle_chs3.chs3_gsheets.core.lifecycle import GSheetsFileS3ConnectionLifecycleManager +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.api.data import CHS3DataResultTestSuite +from 
dl_connector_bundle_chs3_tests.db.gsheets_v2.api.base import GSheetsFileS3DataApiTestBase + + +class TestGSheetsFileS3DataResult(GSheetsFileS3DataApiTestBase, CHS3DataResultTestSuite): + test_params = RegulatedTestParams( + mark_features_skipped={ + DefaultConnectorDataResultTestSuite.array_support: "GSheets V2 connector doesn't support arrays", + }, + ) + + @pytest.mark.asyncio + def test_update_data( + self, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + data_api: SyncHttpDataApiV2, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + mock_file_uploader_api, + ) -> None: + ds = saved_dataset + + # prepare connection sources: set updated time to the current moment + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + dt_now = datetime.datetime.now(datetime.timezone.utc) + data_updated_at_orig = dt_now + for src in conn.data.sources: + src.data_updated_at = dt_now + sync_us_manager.save(conn) + + def get_notifications_from_result_resp() -> list[dict]: + result_resp = data_api.get_result( + dataset=ds, fields=[ds.find_field(title=data_api_test_params.date_field)], fail_ok=True + ) + return result_resp.json.get("notifications", []) + + # it is not time to update data yet, so we expect no data updates or corresponding notifications + notifications = get_notifications_from_result_resp() + assert all( + notification["locator"] != NOTIF_TYPE_GSHEETS_V2_STALE_DATA.value for notification in notifications + ), notifications + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + assert conn.data.sources[0].data_updated_at == data_updated_at_orig + + # trigger data update by setting the data update time in the connection to N minutes ago + data_updated_at = conn.data.oldest_data_update_time() - datetime.timedelta( + seconds=GSheetsFileS3ConnectionLifecycleManager.STALE_THRESHOLD_SECONDS + 60, # just in case + ) + for src in conn.data.sources: + src.data_updated_at = data_updated_at + 
sync_us_manager.save(conn) + + # now notifications should be there, as well as connection sources should be updated + notifications = get_notifications_from_result_resp() + assert any( + notification["locator"] == NOTIF_TYPE_GSHEETS_V2_STALE_DATA.value for notification in notifications + ), notifications + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + assert conn.data.sources[0].data_updated_at != data_updated_at + + def test_component_error( + self, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + saved_dataset: Dataset, + client: FlaskClient, + data_api: SyncHttpDataApiV2, + data_api_test_params: DataApiTestParams, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + err_details = {"error": "details", "request-id": "637"} + conn.data.component_errors.add_error( + id=conn.data.sources[0].id, + type=ComponentType.data_source, + message="Custom error message", + code=["FILE", "CUSTOM_FILE_ERROR"], + details=err_details, + ) + conn.update_data_source( + conn.data.sources[0].id, + role=DataSourceRole.origin, + s3_filename=None, + status=FileProcessingStatus.failed, + preview_id=None, + data_updated_at=datetime.datetime.now(datetime.timezone.utc), + ) + sync_us_manager.save(conn) + + ds = saved_dataset + result_resp = data_api.get_result( + dataset=ds, fields=[ds.find_field(title=data_api_test_params.distinct_field)], fail_ok=True + ) + assert result_resp.status_code == HTTPStatus.BAD_REQUEST, result_resp.json + assert result_resp.json["details"] == err_details + assert result_resp.json["message"] == "Custom error message" + assert result_resp.json["code"] == "ERR.DS_API.SOURCE.FILE.CUSTOM_FILE_ERROR" + + conn_resp = client.get(f"/api/v1/connections/{saved_connection_id}") + assert conn_resp.status_code == HTTPStatus.OK, conn_resp.json + assert conn_resp.json["component_errors"], conn_resp.json + actual_errors = conn_resp.json["component_errors"]["items"][0]["errors"] + assert 
len(actual_errors) == 1, actual_errors + assert actual_errors[0]["code"] == "ERR.DS_API.SOURCE.FILE.CUSTOM_FILE_ERROR" + + @pytest.mark.asyncio + def test_component_error_warning( + self, + sync_us_manager: SyncUSManager, + saved_connection_id: str, + saved_dataset: Dataset, + data_api: SyncHttpDataApiV2, + data_api_test_params: DataApiTestParams, + mock_file_uploader_api, + ) -> None: + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + + long_long_ago = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta( + seconds=GSheetsFileS3ConnectionLifecycleManager.STALE_THRESHOLD_SECONDS + 60, # just in case + ) + err_details = {"error": "details", "request-id": "637"} + conn.data.component_errors.add_error( + id=conn.data.sources[0].id, + type=ComponentType.data_source, + message="Custom error message", + code=["FILE", "CUSTOM_FILE_ERROR"], + details=err_details, + level=ComponentErrorLevel.warning, + ) + conn.update_data_source( + conn.data.sources[0].id, + role=DataSourceRole.origin, + data_updated_at=long_long_ago, + ) + sync_us_manager.save(conn) + + ds = saved_dataset + result_resp = data_api.get_result( + dataset=ds, fields=[ds.find_field(title=data_api_test_params.distinct_field)], fail_ok=True + ) + assert result_resp.status_code == HTTPStatus.OK, result_resp.json + assert len(result_resp.json["notifications"]) == 2 + assert "Reason: FILE.CUSTOM_FILE_ERROR, Request-ID: 637" in result_resp.json["notifications"][0]["message"] + conn = sync_us_manager.get_by_id(saved_connection_id, GSheetsFileS3Connection) + assert conn.data.sources[0].data_updated_at > long_long_ago # data update was triggered + + +class TestGSheetsFileS3DataGroupBy(GSheetsFileS3DataApiTestBase, DefaultConnectorDataGroupByFormulaTestSuite): + pass + + +class TestGSheetsFileS3DataRange(GSheetsFileS3DataApiTestBase, DefaultConnectorDataRangeTestSuite): + pass + + +class TestGSheetsFileDataDistinct(GSheetsFileS3DataApiTestBase, 
DefaultConnectorDataDistinctTestSuite): + test_params = RegulatedTestParams( + mark_tests_failed={ + DefaultConnectorDataDistinctTestSuite.test_date_filter_distinct: "FIXME", + } + ) + + +class TestGSheetsFileS3DataPreview(GSheetsFileS3DataApiTestBase, DefaultConnectorDataPreviewTestSuite): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_dataset.py new file mode 100644 index 000000000..afe91315b --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/api/test_dataset.py @@ -0,0 +1,6 @@ +from dl_connector_bundle_chs3_tests.db.base.api.dataset import CHS3DatasetTestSuite +from dl_connector_bundle_chs3_tests.db.gsheets_v2.api.base import GSheetsFileS3DatasetTestBase + + +class TestGSheetsFileS3Dataset(GSheetsFileS3DatasetTestBase, CHS3DatasetTestSuite): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/__init__.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/base.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/base.py new file mode 100644 index 000000000..afe6991e8 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/base.py @@ -0,0 +1,48 @@ +import uuid + +import pytest + +from dl_constants.enums import FileProcessingStatus +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.fixtures.primitives import FixtureTableSpec + +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import ( + CONNECTION_TYPE_GSHEETS_V2, + SOURCE_TYPE_GSHEETS_V2, +) +from dl_connector_bundle_chs3.chs3_gsheets.core.testing.connection import 
make_saved_gsheets_v2_connection +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.base import BaseCHS3TestClass + + +class BaseGSheetsFileS3TestClass(BaseCHS3TestClass[GSheetsFileS3Connection]): + conn_type = CONNECTION_TYPE_GSHEETS_V2 + source_type = SOURCE_TYPE_GSHEETS_V2 + + @pytest.fixture(scope="function") + def sample_file_data_source( + self, + sample_table_spec: FixtureTableSpec, + sample_s3_file: str, + ) -> GSheetsFileS3Connection.FileDataSource: + raw_schema = self._get_raw_schema_for_ch_table(sample_table_spec) + return GSheetsFileS3Connection.FileDataSource( + id=str(uuid.uuid4()), + file_id=str(uuid.uuid4()), + title=sample_s3_file, + s3_filename=sample_s3_file, + raw_schema=raw_schema, + status=FileProcessingStatus.ready, + ) + + @pytest.fixture(scope="function") + def saved_connection( + self, + sync_us_manager: SyncUSManager, + connection_creation_params: dict, + ) -> GSheetsFileS3Connection: + conn = make_saved_gsheets_v2_connection( + sync_usm=sync_us_manager, + **connection_creation_params, + ) + return conn diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection.py new file mode 100644 index 000000000..f3d630ca0 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.connection import CHS3ConnectionTestBase +from dl_connector_bundle_chs3_tests.db.gsheets_v2.core.base import BaseGSheetsFileS3TestClass + + +class TestGSheetsFileS3Connection(BaseGSheetsFileS3TestClass, CHS3ConnectionTestBase[FileS3Connection]): + pass diff --git 
a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection_executor.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection_executor.py new file mode 100644 index 000000000..8a896c736 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_connection_executor.py @@ -0,0 +1,20 @@ +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.connection_executor import ( + CHS3AsyncConnectionExecutorTestBase, + CHS3SyncConnectionExecutorTestBase, +) +from dl_connector_bundle_chs3_tests.db.gsheets_v2.core.base import BaseGSheetsFileS3TestClass + + +class TestGSheetsFileS3SyncConnectionExecutor( + BaseGSheetsFileS3TestClass, + CHS3SyncConnectionExecutorTestBase[FileS3Connection], +): + pass + + +class TestGSheetsFileS3AsyncConnectionExecutor( + BaseGSheetsFileS3TestClass, + CHS3AsyncConnectionExecutorTestBase[FileS3Connection], +): + pass diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_data_source.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_data_source.py new file mode 100644 index 000000000..e3ec4149d --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_data_source.py @@ -0,0 +1,12 @@ +from dl_connector_bundle_chs3.chs3_gsheets.core.data_source import GSheetsFileS3DataSource +from dl_connector_bundle_chs3.chs3_gsheets.core.data_source_spec import GSheetsFileS3DataSourceSpec +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.data_source import CHS3TableDataSourceTestBase +from dl_connector_bundle_chs3_tests.db.gsheets_v2.core.base import BaseGSheetsFileS3TestClass + + +class TestGSheetsFileS3TableDataSource( + BaseGSheetsFileS3TestClass, + 
CHS3TableDataSourceTestBase[GSheetsFileS3Connection, GSheetsFileS3DataSourceSpec, GSheetsFileS3DataSource], +): + DSRC_CLS = GSheetsFileS3DataSource diff --git a/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_dataset.py b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_dataset.py new file mode 100644 index 000000000..7c1b7d5b0 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/dl_connector_bundle_chs3_tests/db/gsheets_v2/core/test_dataset.py @@ -0,0 +1,7 @@ +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection +from dl_connector_bundle_chs3_tests.db.base.core.dataset import CHS3DatasetTestBase +from dl_connector_bundle_chs3_tests.db.gsheets_v2.core.base import BaseGSheetsFileS3TestClass + + +class TestGSheetsFileS3Dataset(BaseGSheetsFileS3TestClass, CHS3DatasetTestBase[GSheetsFileS3Connection]): + pass diff --git a/lib/dl_connector_bundle_chs3/docker-compose.yml b/lib/dl_connector_bundle_chs3/docker-compose.yml new file mode 100644 index 000000000..983e9380f --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose.yml @@ -0,0 +1,57 @@ +version: '3.7' + +x-constants: + US_MASTER_TOKEN: &c-us-master-token "AC1ofiek8coB" + REDIS_PASSWORD: &c-redis-password "AwockEuvavDyinmeakmiRiopanbesBepsensUrdIz5" + + +services: + redis: + image: "bitnami/redis:5.0.8@sha256:3127620da977815556439a9dc347fff89432a79b6bb6e93a16f20ac4a34ce337" + environment: + REDIS_PASSWORD: *c-redis-password + ports: + - 52604:6379 + + db-clickhouse: + build: + context: docker-compose + dockerfile: Dockerfile.db-clickhouse-22-10 + ports: + - 52610:8123 + - 52611:9000 + + s3-storage: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.s3-storage + command: bash /data/entrypoint.sh + environment: + S3BACKEND: "mem" + REMOTE_MANAGEMENT_DISABLE: 1 + ports: + - 52620:8000 + + pg-us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.pg-us + environment: 
+ POSTGRES_DB: us-db-ci_purgeable + POSTGRES_USER: us + POSTGRES_PASSWORD: us + ports: + - 52609:5432 + + us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.us + ports: + - 52600:80 + depends_on: + - pg-us + environment: + POSTGRES_DSN_LIST: "postgres://us:us@pg-us:5432/us-db-ci_purgeable" + AUTH_POLICY: "required" + MASTER_TOKEN: *c-us-master-token diff --git a/lib/dl_connector_bundle_chs3/docker-compose/Dockerfile.db-clickhouse-22-10 b/lib/dl_connector_bundle_chs3/docker-compose/Dockerfile.db-clickhouse-22-10 new file mode 100644 index 000000000..b9032f02f --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose/Dockerfile.db-clickhouse-22-10 @@ -0,0 +1,8 @@ +# clickhouse/clickhouse-server:22.11-alpine +FROM docker.io/clickhouse/clickhouse-server@sha256:144ae73d876ee7d04691cb57f040f1ac98da05be689d171938f9a0e01a996578 + +COPY db-clickhouse/users.xml /etc/clickhouse-server/users.xml + +COPY db-clickhouse/data /common-data + +COPY db-clickhouse/docker-entrypoint-initdb.d/ /docker-entrypoint-initdb.d/ diff --git a/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/data/sample.csv b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/data/sample.csv new file mode 100644 index 000000000..dc4bc81bb --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/data/sample.csv @@ -0,0 +1,1000 @@ +"Office Supplies","Houston","United States","DP-13000","Darren Powers",0.2,"2014-01-03","CA-2014-103800",77095,"OFF-PA-10000174","Message Book. Wirebound. Four 5 1/2"" X 4"" Forms/Pg.. 200 Dupl. 
Sets/Book",5.5512,2,"Central",7981,16.448,"Consumer","2014-01-07","Standard Class","Texas","Paper" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.8,"2014-01-04","CA-2014-112326",60540,"OFF-BI-10004094","GBC Standard Plastic Binding Systems Combs",-5.487,2,"Central",742,3.54,"Home Office","2014-01-08","Standard Class","Illinois","Binders" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.2,"2014-01-04","CA-2014-112326",60540,"OFF-LA-10003223","Avery 508",4.2717,3,"Central",740,11.783999,"Home Office","2014-01-08","Standard Class","Illinois","Labels" +"Office Supplies","Naperville","United States","PO-19195","Phillina Ober",0.2,"2014-01-04","CA-2014-112326",60540,"OFF-ST-10002743","SAFCO Boltless Steel Shelving",-64.774796,3,"Central",741,272.73602,"Home Office","2014-01-08","Standard Class","Illinois","Storage" +"Office Supplies","Philadelphia","United States","MB-18085","Mick Brown",0.2,"2014-01-05","CA-2014-141817",19143,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 4/Pack",4.8840003,3,"East",1760,19.536001,"Consumer","2014-01-12","Standard Class","Pennsylvania","Art" +"Furniture","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",746.40784,9,"South",7475,2573.82,"Home Office","2014-01-10","Standard Class","Kentucky","Chairs" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-AR-10001662","Rogers Handheld Barrel Pencil Sharpener",1.4796,2,"South",7477,5.48,"Home Office","2014-01-10","Standard Class","Kentucky","Art" +"Office Supplies","Athens","United States","JO-15145","Jack O'Briant",0,"2014-01-06","CA-2014-106054",30605,"OFF-AR-10002399","Dixon Prang Watercolor Pencils. 
10-Color Set with Brush",5.2398,3,"South",7181,12.78,"Corporate","2014-01-07","First Class","Georgia","Art" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-BI-10004632","Ibico Hi-Tech Manual Binding System",274.491,2,"South",7476,609.98004,"Home Office","2014-01-10","Standard Class","Kentucky","Binders" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-FA-10001883","Alliance Super-Size Bands. Assorted Sizes",0.3112,4,"South",7480,31.12,"Home Office","2014-01-10","Standard Class","Kentucky","Fasteners" +"Office Supplies","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"OFF-PA-10000955","Southworth 25% Cotton Granite Paper & Envelopes",3.0084,1,"South",7481,6.54,"Home Office","2014-01-10","Standard Class","Kentucky","Paper" +"Office Supplies","Los Angeles","United States","LS-17230","Lycoris Saunders",0,"2014-01-06","CA-2014-130813",90049,"OFF-PA-10002005","Xerox 225",9.331201,3,"West",5328,19.44,"Consumer","2014-01-08","Second Class","California","Paper" +"Technology","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"TEC-PH-10004539","Wireless Extenders zBoost YX545 SOHO Signal Booster",204.1092,4,"South",7479,755.96,"Home Office","2014-01-10","Standard Class","Kentucky","Phones" +"Technology","Henderson","United States","ME-17320","Maria Etezadi",0,"2014-01-06","CA-2014-167199",42420,"TEC-PH-10004977","GE 30524EE4",113.674194,2,"South",7478,391.97998,"Home Office","2014-01-10","Standard Class","Kentucky","Phones" +"Furniture","Huntsville","United States","VS-21820","Vivek Sundaresam",0.6,"2014-01-07","CA-2014-105417",77340,"FUR-FU-10004864","Howard Miller 14-1/2"" Diameter Chrome Round Wall Clock",-53.7096,3,"Central",7661,76.728,"Consumer","2014-01-12","Standard Class","Texas","Furnishings" +"Office Supplies","Huntsville","United 
States","VS-21820","Vivek Sundaresam",0.8,"2014-01-07","CA-2014-105417",77340,"OFF-BI-10003708","Acco Four Pocket Poly Ring Binder with Label Holder. Smoke. 1""",-18.2525,7,"Central",7662,10.429999,"Consumer","2014-01-12","Standard Class","Texas","Binders" +"Office Supplies","Laredo","United States","MS-17830","Melanie Seite",0.2,"2014-01-09","CA-2014-135405",78041,"OFF-AR-10004078","Newell 312",1.168,2,"Central",593,9.344,"Consumer","2014-01-13","Standard Class","Texas","Art" +"Technology","Laredo","United States","MS-17830","Melanie Seite",0.2,"2014-01-09","CA-2014-135405",78041,"TEC-AC-10001266","Memorex Micro Travel Drive 8 GB",9.75,3,"Central",594,31.2,"Consumer","2014-01-13","Standard Class","Texas","Accessories" +"Furniture","Springfield","United States","AJ-10780","Anthony Jacobs",0,"2014-01-10","CA-2014-149020",22153,"FUR-FU-10000965","Howard Miller 11-1/2"" Diameter Ridgewood Wall Clock",21.2954,1,"South",867,51.940002,"Corporate","2014-01-15","Standard Class","Virginia","Furnishings" +"Office Supplies","Springfield","United States","AJ-10780","Anthony Jacobs",0,"2014-01-10","CA-2014-149020",22153,"OFF-LA-10004272","Avery 482",1.3583,1,"South",866,2.8899999,"Corporate","2014-01-15","Standard Class","Virginia","Labels" +"Furniture","Dover","United States","SV-20365","Seth Vernon",0,"2014-01-11","CA-2014-130092",19901,"FUR-FU-10000010","DAX Value U-Channel Document Frames. Easel Back",3.0814,2,"East",717,9.94,"Consumer","2014-01-14","First Class","Delaware","Furnishings" +"Furniture","San Francisco","United States","BD-11605","Brian Dahlen",0.15,"2014-01-13","CA-2014-157147",94109,"FUR-BO-10003034","O'Sullivan Elevations Bookcase. 
Cherry Finish",3.9294002,3,"West",4939,333.999,"Consumer","2014-01-18","Standard Class","California","Bookcases" +"Furniture","Mount Pleasant","United States","ND-18370","Natalie DeCherney",0,"2014-01-13","CA-2014-109232",29464,"FUR-CH-10000422","Global Highback Leather Tilter in Burgundy",87.3504,6,"South",2979,545.94,"Consumer","2014-01-16","Second Class","South Carolina","Chairs" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-01-13","CA-2014-157147",94109,"OFF-AR-10003514","4009 Highlighters by Sanford",6.567,5,"West",4940,19.9,"Consumer","2014-01-18","Standard Class","California","Art" +"Office Supplies","Newark","United States","MM-17920","Michael Moore",0.7,"2014-01-13","CA-2014-118192",43055,"OFF-BI-10003476","Avery Metallic Poly Binders",-2.5212,2,"East",9630,3.438,"Consumer","2014-01-18","Standard Class","Ohio","Binders" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-BI-10004187","3-ring staple pack",2.7072,3,"South",767,5.64,"Corporate","2014-01-15","Second Class","Louisiana","Binders" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-EN-10001532","Brown Kraft Recycled Envelopes",25.47,3,"South",765,50.940002,"Corporate","2014-01-15","Second Class","Louisiana","Envelopes" +"Office Supplies","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-EN-10001990","Staple envelope",5.3392,2,"South",764,11.360001,"Corporate","2014-01-15","Second Class","Louisiana","Envelopes" +"Office Supplies","Newark","United States","MM-17920","Michael Moore",0.2,"2014-01-13","CA-2014-118192",43055,"OFF-PA-10002947","Xerox 1923",13.0928,7,"East",9629,37.408,"Consumer","2014-01-18","Standard Class","Ohio","Paper" +"Office Supplies","Bossier City","United States","CS-12250","Chris 
Selesnick",0,"2014-01-13","CA-2014-162775",71111,"OFF-ST-10000025","Fellowes Stor/Drawer Steel Plus Storage Drawers",34.3548,6,"South",768,572.58,"Corporate","2014-01-15","Second Class","Louisiana","Storage" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-01-13","CA-2014-157147",94109,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",238.65302,5,"West",4938,1325.8501,"Consumer","2014-01-18","Standard Class","California","Storage" +"Technology","Bossier City","United States","CS-12250","Chris Selesnick",0,"2014-01-13","CA-2014-162775",71111,"TEC-AC-10003174","Plantronics S12 Corded Telephone Headset System",258.696,6,"South",766,646.74,"Corporate","2014-01-15","Second Class","Louisiana","Accessories" +"Furniture","Philadelphia","United States","BS-11590","Brendan Sweed",0.5,"2014-01-14","CA-2014-149524",19140,"FUR-BO-10003433","Sauder Cornerstone Collection Library",-53.285603,4,"East",6475,61.960003,"Corporate","2014-01-15","First Class","Pennsylvania","Bookcases" +"Technology","Roswell","United States","EH-13990","Erica Hackney",0,"2014-01-15","CA-2014-103366",30076,"TEC-AC-10003628","Logitech 910-002974 M325 Wireless Mouse for Web Scrolling",65.978004,5,"South",1914,149.95,"Consumer","2014-01-17","First Class","Georgia","Accessories" +"Furniture","Philadelphia","United States","DL-13315","Delfina Latchford",0.2,"2014-01-16","CA-2014-115791",19134,"FUR-FU-10001095","DAX Black Cherry Wood-Tone Poster Frame",28.5984,6,"East",971,127.104,"Consumer","2014-01-18","Second Class","Pennsylvania","Furnishings" +"Office Supplies","Philadelphia","United States","DL-13315","Delfina Latchford",0.7,"2014-01-16","CA-2014-115791",19134,"OFF-BI-10001575","GBC Linen Binding Covers",-13.631201,2,"East",973,18.588,"Consumer","2014-01-18","Second Class","Pennsylvania","Binders" +"Office Supplies","Philadelphia","United States","DL-13315","Delfina Latchford",0.2,"2014-01-16","CA-2014-115791",19134,"OFF-LA-10001074","Round Specialty Laser 
Printer Labels",10.149301,3,"East",974,30.072,"Consumer","2014-01-18","Second Class","Pennsylvania","Labels" +"Technology","Philadelphia","United States","DL-13315","Delfina Latchford",0.4,"2014-01-16","CA-2014-115791",19134,"TEC-PH-10004614","AT&T 841000 Phone",-31.05,3,"East",972,124.2,"Consumer","2014-01-18","Second Class","Pennsylvania","Phones" +"Office Supplies","Springfield","United States","DW-13195","David Wiener",0.2,"2014-01-18","CA-2014-123477",97477,"OFF-AP-10000692","Fellowes Mighty 8 Compact Surge Protector",6.4864,4,"West",5365,64.864,"Corporate","2014-01-21","Second Class","Oregon","Appliances" +"Furniture","Scottsdale","United States","TS-21340","Toby Swindell",0.7,"2014-01-19","CA-2014-146591",85254,"FUR-BO-10001972","O'Sullivan 4-Shelf Bookcase in Odessa Pine",-320.597,5,"West",5466,181.47,"Consumer","2014-01-20","First Class","Arizona","Bookcases" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.7,"2014-01-19","CA-2014-146591",85254,"OFF-BI-10003676","GBC Standard Recycled Report Covers. Clear Plastic Sheets",-23.716002,10,"West",5463,32.34,"Consumer","2014-01-20","First Class","Arizona","Binders" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.2,"2014-01-19","CA-2014-146591",85254,"OFF-EN-10002504","Tyvek Top-Opening Peel & Seel Envelopes. Plain White",36.692997,5,"West",5465,108.71999,"Consumer","2014-01-20","First Class","Arizona","Envelopes" +"Office Supplies","Scottsdale","United States","TS-21340","Toby Swindell",0.2,"2014-01-19","CA-2014-146591",85254,"OFF-PA-10000659","TOPS Carbonless Receipt Book. 
Four 2-3/4 x 7-1/4 Money Receipts per Page",19.622402,4,"West",5464,56.064003,"Consumer","2014-01-20","First Class","Arizona","Paper" +"Furniture","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"FUR-CH-10002331","Hon 4700 Series Mobuis Mid-Back Task Chairs with Adjustable Arms",224.2674,3,"South",1127,1067.9401,"Consumer","2014-01-26","Standard Class","Arkansas","Chairs" +"Furniture","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"FUR-FU-10002268","Ultra Door Push Plate",4.8609,3,"Central",6333,14.73,"Consumer","2014-01-26","Standard Class","Michigan","Furnishings" +"Furniture","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"FUR-FU-10002918","Eldon ClusterMat Chair Mat with Cordless Antistatic Protection",30.023401,3,"Central",6328,272.94,"Consumer","2014-01-26","Standard Class","Michigan","Furnishings" +"Furniture","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"FUR-FU-10003194","Eldon Expressions Desk Accessory. Wood Pencil Holder. Oak",11.58,4,"South",1124,38.6,"Consumer","2014-01-26","Standard Class","Arkansas","Furnishings" +"Furniture","Los Angeles","United States","MV-17485","Mark Van Huff",0,"2014-01-20","CA-2014-148614",90049,"FUR-FU-10003194","Eldon Expressions Desk Accessory. Wood Pencil Holder. Oak",5.79,2,"West",5738,19.3,"Consumer","2014-01-25","Standard Class","California","Furnishings" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0.1,"2014-01-20","CA-2014-167927",48185,"OFF-AP-10002311","Holmes Replacement Filter for HEPA Air Cleaner. Very Large Room. 
HEPA Filter",93.581604,4,"Central",6330,247.71599,"Consumer","2014-01-26","Standard Class","Michigan","Appliances" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-AR-10002375","Newell 351",6.6584,7,"South",1123,22.96,"Consumer","2014-01-26","Standard Class","Arkansas","Art" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-AR-10003811","Newell 327",1.7901001,3,"South",1125,6.63,"Consumer","2014-01-26","Standard Class","Arkansas","Art" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-AR-10004456","Panasonic KP-4ABK Battery-Operated Pencil Sharpener",12.736799,3,"Central",6332,43.920002,"Consumer","2014-01-26","Standard Class","Michigan","Art" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-BI-10000605","Acco Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". 
Executive Red",8.9535,5,"Central",6329,19.05,"Consumer","2014-01-26","Standard Class","Michigan","Binders" +"Office Supplies","Smyrna","United States","MM-18280","Muhammed MacIntyre",0.7,"2014-01-20","US-2014-147774",37167,"OFF-BI-10003091","GBC DocuBind TL200 Manual Binding Machine",-51.5154,1,"South",7121,67.19399,"Corporate","2014-01-26","Standard Class","Tennessee","Binders" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-BI-10004364","Storex Dura Pro Binders",13.365001,5,"Central",6334,29.7,"Consumer","2014-01-26","Standard Class","Michigan","Binders" +"Office Supplies","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"OFF-EN-10001539","Staple envelope",10.9698,3,"South",1126,23.34,"Consumer","2014-01-26","Standard Class","Arkansas","Envelopes" +"Office Supplies","Los Angeles","United States","MV-17485","Mark Van Huff",0,"2014-01-20","CA-2014-148614",90049,"OFF-PA-10002893","Wirebound Service Call Books. 
5 1/2"" x 4""",9.2928,2,"West",5737,19.359999,"Consumer","2014-01-25","Standard Class","California","Paper" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-ST-10000760","Eldon Fold 'N Roll Cart System",4.0542,1,"Central",6327,13.98,"Consumer","2014-01-26","Standard Class","Michigan","Storage" +"Office Supplies","Westland","United States","XP-21865","Xylona Preis",0,"2014-01-20","CA-2014-167927",48185,"OFF-ST-10003123","Fellowes Bases and Tops For Staxonsteel/High-Stak Systems",15.979199,2,"Central",6331,66.58,"Consumer","2014-01-26","Standard Class","Michigan","Storage" +"Technology","Jonesboro","United States","HL-15040","Hunter Lopez",0,"2014-01-20","US-2014-147627",72401,"TEC-PH-10001061","Apple iPhone 5C",181.9818,7,"South",1122,699.93005,"Consumer","2014-01-26","Standard Class","Arkansas","Phones" +"Furniture","Miami","United States","TB-21400","Tom Boeckenhauer",0.2,"2014-01-21","CA-2014-110422",33180,"FUR-FU-10001889","Ultra Door Pull Handle",4.1028,3,"South",9990,25.248001,"Consumer","2014-01-23","Second Class","Florida","Furnishings" +"Office Supplies","Lafayette","United States","SG-20605","Speros Goranitis",0,"2014-01-23","CA-2014-146997",47905,"OFF-FA-10003467","Alliance Big Bands Rubber Bands. 12/Pack",0,3,"Central",9887,5.94,"Consumer","2014-01-27","Standard Class","Indiana","Fasteners" +"Office Supplies","Las Vegas","United States","IM-15055","Ionia McGrath",0,"2014-01-23","CA-2014-102645",89115,"OFF-PA-10001804","Xerox 195",19.238401,6,"West",9156,40.08,"Consumer","2014-01-28","Standard Class","Nevada","Paper" +"Furniture","Rapid City","United States","CA-11965","Carol Adams",0,"2014-01-26","CA-2014-167997",57701,"FUR-BO-10004409","Safco Value Mate Series Steel Bookcases. Baked Enamel Finish on Steel. 
Gray",39.748802,2,"Central",8151,141.95999,"Corporate","2014-01-29","First Class","South Dakota","Bookcases" +"Furniture","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"FUR-FU-10001847","Eldon Image Series Black Desk Accessories",4.4712,3,"South",2584,12.42,"Home Office","2014-01-31","Standard Class","Virginia","Furnishings" +"Furniture","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"FUR-FU-10004587","GE General Use Halogen Bulbs. 100 Watts. 1 Bulb per Pack",30.7818,3,"South",2579,62.82,"Home Office","2014-01-31","Standard Class","Virginia","Furnishings" +"Office Supplies","Rapid City","United States","CA-11965","Carol Adams",0,"2014-01-26","CA-2014-167997",57701,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",5.0196,2,"Central",8150,10.68,"Corporate","2014-01-29","First Class","South Dakota","Binders" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-PA-10000380","REDIFORM Incoming/Outgoing Call Register. 11"" X 8 1/2"". 100 Messages",8.34,2,"South",2582,16.68,"Home Office","2014-01-31","Standard Class","Virginia","Paper" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-PA-10003072","Eureka Recycled Copy Paper 8 1/2"" x 11"". Ream",9.331201,3,"South",2581,19.44,"Home Office","2014-01-31","Standard Class","Virginia","Paper" +"Office Supplies","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"OFF-ST-10004337","SAFCO Commercial Wire Shelving. 
72h",0,8,"South",2580,489.91998,"Home Office","2014-01-31","Standard Class","Virginia","Storage" +"Technology","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"TEC-PH-10002103","Jabra SPEAK 410",52.6344,2,"South",2585,187.98,"Home Office","2014-01-31","Standard Class","Virginia","Phones" +"Technology","Alexandria","United States","SD-20485","Shirley Daniels",0,"2014-01-26","US-2014-155502",22304,"TEC-PH-10004833","Macally Suction Cup Mount",0,13,"South",2583,155.35,"Home Office","2014-01-31","Standard Class","Virginia","Phones" +"Furniture","San Diego","United States","EJ-13720","Ed Jacobs",0.2,"2014-01-27","US-2014-117163",92037,"FUR-TA-10003469","Balt Split Level Computer Training Table",-16.65,3,"West",3796,333,"Consumer","2014-02-02","Standard Class","California","Tables" +"Office Supplies","San Diego","United States","EJ-13720","Ed Jacobs",0,"2014-01-27","US-2014-117163",92037,"OFF-AR-10003179","Dixon Ticonderoga Core-Lock Colored Pencils",12.025201,4,"West",3797,36.440002,"Consumer","2014-02-02","Standard Class","California","Art" +"Office Supplies","San Diego","United States","EJ-13720","Ed Jacobs",0,"2014-01-27","US-2014-117163",92037,"OFF-ST-10003692","Recycled Steel Personal File for Hanging File Folders",14.3075,1,"West",3795,57.23,"Consumer","2014-02-02","Standard Class","California","Storage" +"Office Supplies","New York City","United States","JC-15340","Jasper Cacioppo",0.2,"2014-01-28","CA-2014-100328",10024,"OFF-BI-10000343","Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". 
Light Blue",1.3256999,1,"East",3084,3.928,"Consumer","2014-02-03","Standard Class","New York","Binders" +"Office Supplies","Detroit","United States","MV-18190","Mike Vittorini",0,"2014-01-30","CA-2014-134103",48234,"OFF-PA-10001204","Xerox 1972",4.752,2,"Central",6388,10.56,"Consumer","2014-02-04","Standard Class","Michigan","Paper" +"Office Supplies","Detroit","United States","MV-18190","Mike Vittorini",0,"2014-01-30","CA-2014-134103",48234,"OFF-ST-10000991","Space Solutions HD Industrial Steel Shelving.",6.8982,2,"Central",6389,229.93999,"Consumer","2014-02-04","Standard Class","Michigan","Storage" +"Furniture","Mission Viejo","United States","LC-17050","Liz Carlisle",0.15,"2014-01-31","CA-2014-115161",92691,"FUR-BO-10003966","Sauder Facets Collection Library. Sky Alder Finish",3.4196002,2,"West",3366,290.66602,"Consumer","2014-02-02","First Class","California","Bookcases" +"Technology","Green Bay","United States","BD-11500","Bradley Drucker",0,"2014-02-01","CA-2014-140795",54302,"TEC-AC-10001432","Enermax Aurora Lite Keyboard",206.316,6,"Central",541,468.9,"Consumer","2014-02-03","First Class","Wisconsin","Accessories" +"Office Supplies","Saint Petersburg","United States","EB-13930","Eric Barreto",0.7,"2014-02-02","CA-2014-123400",33710,"OFF-BI-10000666","Surelock Post Binders",-12.224,2,"South",9138,18.336,"Consumer","2014-02-09","Standard Class","Florida","Binders" +"Office Supplies","San Diego","United States","CD-12790","Cynthia Delaney",0,"2014-02-02","CA-2014-139857",92037,"OFF-FA-10001843","Staples",5.8045006,5,"West",1704,12.35,"Home Office","2014-02-06","Standard Class","California","Fasteners" +"Technology","Saint Petersburg","United States","EB-13930","Eric Barreto",0.2,"2014-02-02","CA-2014-123400",33710,"TEC-PH-10002890","AT&T 17929 Lendline Telephone",13.572,5,"South",9139,180.95999,"Consumer","2014-02-09","Standard Class","Florida","Phones" +"Office Supplies","Seattle","United States","TB-21400","Tom 
Boeckenhauer",0.2,"2014-02-03","CA-2014-111059",98105,"OFF-BI-10002827","Avery Durable Poly Binders",4.3134003,3,"West",1519,13.271999,"Consumer","2014-02-06","Second Class","Washington","Binders" +"Office Supplies","Seattle","United States","TB-21400","Tom Boeckenhauer",0.2,"2014-02-03","CA-2014-111059",98105,"OFF-BI-10004593","Ibico Laser Imprintable Binding System Covers",27.248001,2,"West",1518,83.840004,"Consumer","2014-02-06","Second Class","Washington","Binders" +"Office Supplies","Escondido","United States","MH-17440","Mark Haberlin",0.2,"2014-02-04","CA-2014-104808",92025,"OFF-BI-10003676","GBC Standard Recycled Report Covers. Clear Plastic Sheets",6.0368004,2,"West",8585,17.248001,"Corporate","2014-02-08","Second Class","California","Binders" +"Office Supplies","San Diego","United States","DB-13270","Deborah Brumfield",0.2,"2014-02-04","CA-2014-107181",92024,"OFF-BI-10004230","GBC Recycled Grain Textured Covers",29.013601,3,"West",1544,82.895996,"Home Office","2014-02-08","Standard Class","California","Binders" +"Office Supplies","San Diego","United States","DB-13270","Deborah Brumfield",0,"2014-02-04","CA-2014-107181",92024,"OFF-PA-10000350","Message Book. Standard Line ""While You Were Out"". 5 1/2"" X 4"". 200 Sets/Book",16.0928,4,"West",1545,34.24,"Home Office","2014-02-08","Standard Class","California","Paper" +"Office Supplies","Romeoville","United States","DL-13315","Delfina Latchford",0.8,"2014-02-06","CA-2014-108182",60441,"OFF-BI-10001196","Avery Flip-Chart Easel Binder. 
Black",-14.7708,2,"Central",8981,8.952,"Consumer","2014-02-10","Second Class","Illinois","Binders" +"Office Supplies","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"OFF-LA-10002787","Avery 480",7.2,4,"South",1998,15,"Corporate","2014-02-09","First Class","Virginia","Labels" +"Technology","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"TEC-PH-10001615","AT&T CL82213",42.0355,5,"South",2000,144.95,"Corporate","2014-02-09","First Class","Virginia","Phones" +"Technology","Chesapeake","United States","ND-18460","Neil Ducich",0,"2014-02-06","CA-2014-131905",23320,"TEC-PH-10003645","Aastra 57i VoIP phone",42.018597,1,"South",1999,161.61,"Corporate","2014-02-09","First Class","Virginia","Phones" +"Office Supplies","New York City","United States","CS-12250","Chris Selesnick",0,"2014-02-07","US-2014-169390",10024,"OFF-ST-10001558","Acco Perma 4000 Stacking Storage Drawers",9.7439995,4,"East",8736,64.96,"Corporate","2014-02-10","Second Class","New York","Storage" +"Technology","Linden","United States","CK-12760","Cyma Kinney",0,"2014-02-07","CA-2014-107755",7036,"TEC-AC-10000710","Maxell DVD-RAM Discs",49.6048,7,"East",850,115.36,"Corporate","2014-02-12","Standard Class","New Jersey","Accessories" +"Furniture","North Las Vegas","United States","NM-18445","Nathan Mautz",0,"2014-02-08","CA-2014-125759",89031,"FUR-FU-10002111","Master Caster Door Stop. Large Brown",5.5328,2,"West",9267,14.56,"Home Office","2014-02-09","First Class","Nevada","Furnishings" +"Furniture","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"FUR-CH-10001146","Global Value Mid-Back Manager's Chair. Gray",15.2225,1,"Central",9255,60.89,"Consumer","2014-02-15","Second Class","Missouri","Chairs" +"Furniture","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"FUR-FU-10002298","Rubbermaid ClusterMat Chairmats. Mat Size- 66"" x 60"". 
Lip 20"" x 11"" -90 Degree Angle",53.2704,3,"Central",9258,332.94,"Consumer","2014-02-15","Second Class","Missouri","Furnishings" +"Furniture","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"FUR-TA-10003715","Hon 2111 Invitation Series Corner Table",75.3732,6,"South",5632,1256.22,"Consumer","2014-02-15","Standard Class","Virginia","Tables" +"Office Supplies","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",8.2062,2,"South",5633,17.46,"Consumer","2014-02-15","Standard Class","Virginia","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-BI-10004654","VariCap6 Expandable Binder",24.393,3,"Central",9259,51.9,"Consumer","2014-02-15","Second Class","Missouri","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-BI-10004728","Wilson Jones Turn Tabs Binder Tool for Ring Binders",4.4344006,2,"Central",9257,9.64,"Consumer","2014-02-15","Second Class","Missouri","Binders" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-LA-10004853","Avery 483",6.8724003,3,"Central",9256,14.94,"Consumer","2014-02-15","Second Class","Missouri","Labels" +"Office Supplies","Columbia","United States","GA-14725","Guy Armstrong",0,"2014-02-11","CA-2014-168368",65203,"OFF-ST-10002583","Fellowes Neat Ideas Storage Cubes",2.5983999,2,"Central",9260,64.96,"Consumer","2014-02-15","Second Class","Missouri","Storage" +"Technology","Chesapeake","United States","NF-18385","Natalie Fritzler",0,"2014-02-11","CA-2014-127614",23320,"TEC-AC-10001432","Enermax Aurora Lite Keyboard",103.158005,3,"South",5631,234.45,"Consumer","2014-02-15","Standard Class","Virginia","Accessories" +"Furniture","Concord","United 
States","SC-20095","Sanjit Chand",0.2,"2014-02-12","US-2014-110674",94521,"FUR-CH-10000225","Global Geo Office Task Chair. Gray",-24.294,2,"West",457,129.568,"Consumer","2014-02-18","Standard Class","California","Chairs" +"Office Supplies","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"OFF-AP-10001293","Belkin 8 Outlet Surge Protector",22.9488,2,"West",9763,81.96,"Corporate","2014-02-18","Standard Class","Washington","Appliances" +"Office Supplies","Houston","United States","ST-20530","Shui Tom",0.2,"2014-02-14","CA-2014-107706",77095,"OFF-PA-10000466","Memo Book. 100 Message Capacity. 5 3/8” x 11”",6.066,3,"Central",5467,16.176,"Consumer","2014-02-19","Second Class","Texas","Paper" +"Office Supplies","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"OFF-SU-10000157","Compact Automatic Electric Letter Opener",4.7724,2,"West",9764,238.62001,"Corporate","2014-02-18","Standard Class","Washington","Supplies" +"Technology","Seattle","United States","ML-17395","Marina Lichtenstein",0,"2014-02-14","CA-2014-121762",98103,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",86.389206,3,"West",9762,239.97,"Corporate","2014-02-18","Standard Class","Washington","Accessories" +"Office Supplies","Seattle","United States","BF-11020","Barry Französisch",0.2,"2014-02-15","CA-2014-165568",98105,"OFF-BI-10001031","Pressboard Data Binders by Wilson Jones",7.2089996,5,"West",7438,21.359999,"Corporate","2014-02-19","Standard Class","Washington","Binders" +"Office Supplies","Dallas","United States","MN-17935","Michael Nguyen",0.8,"2014-02-16","CA-2014-122567",75220,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",-13.929999,2,"Central",1334,7.96,"Consumer","2014-02-21","Standard Class","Texas","Appliances" +"Office Supplies","Dallas","United States","MN-17935","Michael Nguyen",0.8,"2014-02-16","CA-2014-122567",75220,"OFF-BI-10002012","Wilson Jones Easy Flow II Sheet Lifters",-1.728,3,"Central",1333,1.08,"Consumer","2014-02-21","Standard Class","Texas","Binders" +"Office Supplies","Chicago","United States","DL-13315","Delfina Latchford",0.2,"2014-02-17","CA-2014-154165",60653,"OFF-AR-10003631","Staples in misc. colors",8.808801,14,"Central",9516,54.208,"Consumer","2014-02-24","Standard Class","Illinois","Art" +"Furniture","Lubbock","United States","TB-21595","Troy Blackwell",0.6,"2014-02-18","US-2014-165589",79424,"FUR-FU-10002396","DAX Copper Panel Document Frame. 5 x 7 Size",-11.3220005,5,"Central",6082,25.16,"Consumer","2014-02-18","Same Day","Texas","Furnishings" +"Office Supplies","Arlington","United States","AB-10015","Aaron Bergman",0.2,"2014-02-18","CA-2014-152905",76017,"OFF-ST-10000321","Akro Stacking Bins",-2.5247998,2,"Central",8223,12.624001,"Consumer","2014-02-24","Standard Class","Texas","Storage" +"Furniture","Richmond","United States","LC-16930","Linda Cazamias",0,"2014-02-20","CA-2014-109491",47374,"FUR-FU-10000221","Master Caster Door Stop. 
Brown",6.9088,4,"Central",4522,20.32,"Corporate","2014-02-26","Standard Class","Indiana","Furnishings" +"Office Supplies","Los Angeles","United States","SR-20740","Steven Roelle",0,"2014-02-20","CA-2014-164903",90049,"OFF-PA-10003363","Xerox 204",6.2208,2,"West",8504,12.96,"Home Office","2014-02-24","Standard Class","California","Paper" +"Technology","Richmond","United States","LC-16930","Linda Cazamias",0,"2014-02-20","CA-2014-109491",47374,"TEC-AC-10001284","Enermax Briskie RF Wireless Keyboard and Mouse Combo",22.4316,3,"Central",4521,62.309998,"Corporate","2014-02-26","Standard Class","Indiana","Accessories" +"Office Supplies","Woodstock","United States","TM-21010","Tamara Manning",0.8,"2014-02-21","CA-2014-165540",60098,"OFF-BI-10004094","GBC Standard Plastic Binding Systems Combs",-13.7175,5,"Central",2883,8.85,"Consumer","2014-02-25","Standard Class","Illinois","Binders" +"Office Supplies","Moreno Valley","United States","SA-20830","Sue Ann Reed",0,"2014-02-22","CA-2014-133354",92553,"OFF-PA-10001800","Xerox 220",9.331201,3,"West",7611,19.44,"Consumer","2014-02-24","First Class","California","Paper" +"Office Supplies","El Paso","United States","MG-17875","Michael Grace",0.8,"2014-02-23","CA-2014-103744",79907,"OFF-BI-10000320","GBC Plastic Binding Combs",-6.8634,3,"Central",6531,4.428,"Home Office","2014-02-27","Standard Class","Texas","Binders" +"Office Supplies","El Paso","United States","MG-17875","Michael Grace",0.2,"2014-02-23","CA-2014-103744",79907,"OFF-LA-10004425","Staple-on labels",2.3409,3,"Central",6530,6.9360003,"Home Office","2014-02-27","Standard Class","Texas","Labels" +"Office Supplies","Medford","United States","JH-15430","Jennifer Halladay",0.2,"2014-02-24","US-2014-137680",97504,"OFF-PA-10000069","TOPS 4 x 6 Fluorescent Color Memo Sheets. 
500 Sheets per Pack",7.6868997,3,"West",5513,22.776001,"Consumer","2014-03-02","Standard Class","Oregon","Paper" +"Office Supplies","Medford","United States","JH-15430","Jennifer Halladay",0.2,"2014-02-24","US-2014-137680",97504,"OFF-PA-10000174","Message Book. Wirebound. Four 5 1/2"" X 4"" Forms/Pg.. 200 Dupl. Sets/Book",11.1024,4,"West",5512,32.896,"Consumer","2014-03-02","Standard Class","Oregon","Paper" +"Office Supplies","Columbus","United States","JS-16030","Joy Smith",0.2,"2014-02-27","CA-2014-156545",43229,"OFF-AR-10003560","Zebra Zazzle Fluorescent Highlighters",3.4048,4,"East",9464,19.456,"Consumer","2014-03-03","First Class","Ohio","Art" +"Furniture","Elmhurst","United States","VF-21715","Vicky Freymann",0.3,"2014-03-01","CA-2014-113880",60126,"FUR-CH-10000863","Novimex Swivel Fabric Task Chair",-172.1172,6,"Central",6548,634.11597,"Home Office","2014-03-05","Standard Class","Illinois","Chairs" +"Furniture","El Paso","United States","SC-20380","Shahid Collister",0.3,"2014-03-01","CA-2014-131009",79907,"FUR-CH-10001270","Harbour Creations Steel Folding Chair",0,6,"Central",7949,362.25,"Consumer","2014-03-05","Standard Class","Texas","Chairs" +"Furniture","Seattle","United States","DB-13060","Dave Brooks",0.2,"2014-03-01","CA-2014-104269",98115,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",51.476402,2,"West",158,457.568,"Consumer","2014-03-06","Second Class","Washington","Chairs" +"Furniture","El Paso","United States","SC-20380","Shahid Collister",0.6,"2014-03-01","CA-2014-131009",79907,"FUR-FU-10001095","DAX Black Cherry Wood-Tone Poster Frame",-34.953598,6,"Central",7950,63.552,"Consumer","2014-03-05","Standard Class","Texas","Furnishings" +"Furniture","Houston","United States","GW-14605","Giulietta Weimer",0.3,"2014-03-01","CA-2014-168312",77036,"FUR-TA-10001866","Bevis Round Conference Room Tables and Bases",-43.0296,3,"Central",8311,376.509,"Consumer","2014-03-07","Standard Class","Texas","Tables" +"Office Supplies","El Paso","United 
States","SC-20380","Shahid Collister",0.2,"2014-03-01","CA-2014-131009",79907,"OFF-FA-10004395","Plymouth Boxed Rubber Bands by Plymouth",-3.5325,5,"Central",7948,18.84,"Consumer","2014-03-05","Standard Class","Texas","Fasteners" +"Office Supplies","Elmhurst","United States","VF-21715","Vicky Freymann",0.2,"2014-03-01","CA-2014-113880",60126,"OFF-PA-10003036","Black Print Carbonless 8 1/2"" x 8 1/4"" Rapid Memo Book",5.6784,3,"Central",6549,17.472,"Home Office","2014-03-05","Standard Class","Illinois","Paper" +"Office Supplies","El Paso","United States","SC-20380","Shahid Collister",0.2,"2014-03-01","CA-2014-131009",79907,"OFF-ST-10001469","Fellowes Bankers Box Recycled Super Stor/Drawer",-22.6716,3,"Central",7951,129.552,"Consumer","2014-03-05","Standard Class","Texas","Storage" +"Office Supplies","Houston","United States","GW-14605","Giulietta Weimer",0.2,"2014-03-01","CA-2014-168312",77036,"OFF-ST-10003692","Recycled Steel Personal File for Hanging File Folders",8.584499,3,"Central",8310,137.352,"Consumer","2014-03-07","Standard Class","Texas","Storage" +"Technology","New York City","United States","HR-14770","Hallie Redmond",0,"2014-03-01","US-2014-143707",10035,"TEC-PH-10003655","Sannysis Cute Owl Design Soft Skin Case Cover for Samsung Galaxy S4",1.6038,3,"East",5714,5.94,"Home Office","2014-03-05","Standard Class","New York","Phones" +"Office Supplies","New York City","United States","KN-16705","Kristina Nunn",0,"2014-03-02","CA-2014-107524",10009,"OFF-EN-10001990","Staple envelope",5.3392,2,"East",6058,11.360001,"Home Office","2014-03-08","Standard Class","New York","Envelopes" +"Office Supplies","Philadelphia","United States","NH-18610","Nicole Hansen",0.2,"2014-03-02","CA-2014-111157",19120,"OFF-PA-10000327","Xerox 1971",1.07,1,"East",9946,3.424,"Corporate","2014-03-06","Standard Class","Pennsylvania","Paper" +"Office Supplies","New York City","United States","KN-16705","Kristina Nunn",0,"2014-03-02","CA-2014-107524",10009,"OFF-PA-10000587","Array 
Parchment Paper. Assorted Colors",17.472,5,"East",6059,36.4,"Home Office","2014-03-08","Standard Class","New York","Paper" +"Technology","Philadelphia","United States","NH-18610","Nicole Hansen",0.2,"2014-03-02","CA-2014-111157",19120,"TEC-AC-10004353","Hypercom P1300 Pinpad",32.129997,3,"East",9947,151.2,"Corporate","2014-03-06","Standard Class","Pennsylvania","Accessories" +"Furniture","Columbus","United States","JS-15595","Jill Stevenson",0.5,"2014-03-03","US-2014-127978",43229,"FUR-BO-10001972","O'Sullivan 4-Shelf Bookcase in Odessa Pine",-199.617,5,"East",7248,302.44998,"Corporate","2014-03-08","Standard Class","Ohio","Bookcases" +"Furniture","San Diego","United States","EJ-14155","Eva Jacobs",0.2,"2014-03-03","CA-2014-105648",92037,"FUR-TA-10002958","Bevis Oval Conference Table. Walnut",-23.4882,3,"West",8712,626.352,"Consumer","2014-03-07","Standard Class","California","Tables" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.8,"2014-03-03","CA-2014-130421",77095,"OFF-AP-10002534","3.6 Cubic Foot Counter Height Office Refrigerator",-459.6072,3,"Central",7349,176.772,"Consumer","2014-03-07","Standard Class","Texas","Appliances" +"Office Supplies","Wilmington","United States","AJ-10945","Ashley Jarboe",0.2,"2014-03-03","CA-2014-155852",28403,"OFF-AR-10003560","Zebra Zazzle Fluorescent Highlighters",3.4048,4,"South",556,19.456,"Consumer","2014-03-07","Second Class","North Carolina","Art" +"Office Supplies","New York City","United States","AP-10720","Anne Pryor",0.2,"2014-03-03","CA-2014-127964",10035,"OFF-BI-10003429","Cardinal HOLDit! 
Binder Insert Strips.Extra Strips",9.1785,5,"East",1165,25.32,"Home Office","2014-03-08","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","AP-10720","Anne Pryor",0.2,"2014-03-03","CA-2014-127964",10035,"OFF-BI-10004593","Ibico Laser Imprintable Binding System Covers",40.871998,3,"East",1164,125.759995,"Home Office","2014-03-08","Standard Class","New York","Binders" +"Office Supplies","Columbus","United States","JS-15595","Jill Stevenson",0.2,"2014-03-03","US-2014-127978",43229,"OFF-LA-10000305","Avery 495",4.9140005,3,"East",7247,15.120001,"Corporate","2014-03-08","Standard Class","Ohio","Labels" +"Office Supplies","Columbus","United States","JS-15595","Jill Stevenson",0.2,"2014-03-03","US-2014-127978",43229,"OFF-ST-10002486","Eldon Shelf Savers Cubes and Bins",-10.051201,8,"East",7249,44.671997,"Corporate","2014-03-08","Standard Class","Ohio","Storage" +"Technology","New York City","United States","AP-10720","Anne Pryor",0,"2014-03-03","CA-2014-127964",10035,"TEC-PH-10004700","PowerGen Dual USB Car Charger",4.5954003,1,"East",1163,9.99,"Home Office","2014-03-08","Standard Class","New York","Phones" +"Office Supplies","Margate","United States","SC-20095","Sanjit Chand",0.2,"2014-03-04","CA-2014-117016",33063,"OFF-AR-10001374","BIC Brite Liner Highlighters. 
Chisel Tip",2.3328,3,"South",4422,15.552,"Consumer","2014-03-09","Standard Class","Florida","Art" +"Office Supplies","Columbia","United States","CL-12565","Clay Ludtke",0,"2014-03-04","CA-2014-116239",29203,"OFF-ST-10001370","Sensible Storage WireTech Storage Systems",17.745,5,"South",1565,354.9,"Consumer","2014-03-04","Same Day","South Carolina","Storage" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-AR-10000588","Newell 345",15.4752,3,"East",8497,59.52,"Consumer","2014-03-08","Second Class","New York","Art" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0.2,"2014-03-05","CA-2014-169061",10701,"OFF-BI-10001617","GBC Wire Binding Combs",16.7508,6,"East",8500,49.631996,"Consumer","2014-03-08","Second Class","New York","Binders" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-PA-10001878","Xerox 1891",45.975403,2,"East",8501,97.82,"Consumer","2014-03-08","Second Class","New York","Paper" +"Office Supplies","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"OFF-SU-10000381","Acme Forged Steel Scissors with Black Enamel Handles",5.3998003,2,"East",8499,18.62,"Consumer","2014-03-08","Second Class","New York","Supplies" +"Technology","Yonkers","United States","AB-10150","Aimee Bixby",0,"2014-03-05","CA-2014-169061",10701,"TEC-AC-10002001","Logitech Wireless Gaming Headset G930",177.5889,3,"East",8498,479.97,"Consumer","2014-03-08","Second Class","New York","Accessories" +"Furniture","Seattle","United States","CM-12715","Craig Molinari",0.2,"2014-03-07","CA-2014-104563",98103,"FUR-CH-10002780","Office Star - Task Chair with Contemporary Loop Arms",21.8352,6,"West",9643,436.704,"Corporate","2014-03-12","Standard Class","Washington","Chairs" +"Furniture","Seattle","United States","AB-10015","Aaron Bergman",0.2,"2014-03-07","CA-2014-156587",98103,"FUR-CH-10004477","Global Push Button 
Manager's Chair. Indigo",5.4801,1,"West",4962,48.711998,"Consumer","2014-03-08","First Class","Washington","Chairs" +"Furniture","Seattle","United States","CM-12715","Craig Molinari",0.2,"2014-03-07","CA-2014-104563",98103,"FUR-CH-10004495","Global Leather and Oak Executive Chair. Black",54.1764,2,"West",9644,481.568,"Corporate","2014-03-12","Standard Class","Washington","Chairs" +"Office Supplies","Seattle","United States","CM-12715","Craig Molinari",0,"2014-03-07","CA-2014-104563",98103,"OFF-AR-10000390","Newell Chalk Holder",9.499,5,"West",9641,20.65,"Corporate","2014-03-12","Standard Class","Washington","Art" +"Office Supplies","Seattle","United States","AB-10015","Aaron Bergman",0,"2014-03-07","CA-2014-156587",98103,"OFF-AR-10001427","Newell 330",4.6644,3,"West",4963,17.94,"Consumer","2014-03-08","First Class","Washington","Art" +"Office Supplies","Des Moines","United States","NP-18685","Nora Pelletier",0.2,"2014-03-07","US-2014-131982",98198,"OFF-BI-10004224","Catalog Binders with Expanding Posts",33.64,2,"West",5254,107.648,"Home Office","2014-03-11","Second Class","Washington","Binders" +"Office Supplies","Seattle","United States","CM-12715","Craig Molinari",0,"2014-03-07","CA-2014-104563",98103,"OFF-ST-10000934","Contico 72""H Heavy-Duty Storage System",0,5,"West",9642,204.9,"Corporate","2014-03-12","Standard Class","Washington","Storage" +"Office Supplies","Seattle","United States","AB-10015","Aaron Bergman",0,"2014-03-07","CA-2014-156587",98103,"OFF-ST-10002344","Carina 42""Hx23 3/4""W Media Storage Unit",4.8588004,3,"West",4964,242.93999,"Consumer","2014-03-08","First Class","Washington","Storage" +"Office Supplies","Denver","United States","KT-16480","Kean Thornton",0.2,"2014-03-10","US-2014-140116",80219,"OFF-AR-10001044","BOSTON Ranger #55 Pencil Sharpener. 
Black",9.3564,4,"West",2102,83.168,"Consumer","2014-03-17","Standard Class","Colorado","Art" +"Office Supplies","Royal Oak","United States","SW-20275","Scott Williamson",0,"2014-03-10","US-2014-100279",48073,"OFF-PA-10002259","Geographics Note Cards. Blank. White. 8 1/2"" x 11""",10.742399,2,"Central",1172,22.38,"Consumer","2014-03-14","Standard Class","Michigan","Paper" +"Office Supplies","Denver","United States","KT-16480","Kean Thornton",0.2,"2014-03-10","US-2014-140116",80219,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",-15.9102,3,"West",2101,636.408,"Consumer","2014-03-17","Standard Class","Colorado","Storage" +"Furniture","Columbus","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-03-11","CA-2014-166884",43229,"FUR-FU-10003981","Eldon Wave Desk Accessories",2.2879999,5,"East",1542,8.320001,"Consumer","2014-03-16","Second Class","Ohio","Furnishings" +"Office Supplies","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"OFF-AP-10002578","Fellowes Premier Superior Surge Suppressor. 10-Outlet. 
With Phone and Remote",38.157597,3,"South",7015,146.76,"Corporate","2014-03-13","Second Class","Kentucky","Appliances" +"Office Supplies","Roseville","United States","RB-19435","Richard Bierner",0,"2014-03-11","US-2014-103338",95661,"OFF-AR-10001770","Economy #2 Pencils",2.0747998,3,"West",8300,7.98,"Consumer","2014-03-15","Standard Class","California","Art" +"Office Supplies","Columbus","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-03-11","CA-2014-166884",43229,"OFF-FA-10001561","Stockwell Push Pins",1.7004,6,"East",1543,10.464,"Consumer","2014-03-16","Second Class","Ohio","Fasteners" +"Office Supplies","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"OFF-PA-10000213","Xerox 198",7.0218,3,"South",7018,14.94,"Corporate","2014-03-13","Second Class","Kentucky","Paper" +"Office Supplies","New York City","United States","KA-16525","Kelly Andreada",0,"2014-03-11","CA-2014-158337",10024,"OFF-PA-10002137","Southworth 100% Résumé Paper. 24lb.",49.014,14,"East",3442,108.92,"Consumer","2014-03-14","Second Class","New York","Paper" +"Technology","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"TEC-AC-10000710","Maxell DVD-RAM Discs",14.1728,2,"South",7016,32.960003,"Corporate","2014-03-13","Second Class","Kentucky","Accessories" +"Technology","Richmond","United States","FM-14215","Filia McAdams",0,"2014-03-11","CA-2014-114790",40475,"TEC-PH-10000984","Panasonic KX-TG9471B",164.6316,3,"South",7017,587.97003,"Corporate","2014-03-13","Second Class","Kentucky","Phones" +"Furniture","Chesapeake","United States","CK-12325","Christine Kargatis",0,"2014-03-14","US-2014-125521",23320,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",284.97998,4,"South",2841,1139.92,"Home Office","2014-03-19","Standard Class","Virginia","Chairs" +"Office Supplies","Huntington Beach","United States","DK-13225","Dean 
Katz",0,"2014-03-14","CA-2014-157623",92646,"OFF-AR-10003723","Avery Hi-Liter Fluorescent Desk Style Markers",1.2506,1,"West",1892,3.3799999,"Corporate","2014-03-18","Standard Class","California","Art" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0.2,"2014-03-14","CA-2014-114510",84321,"OFF-BI-10001617","GBC Wire Binding Combs",11.167201,4,"West",4677,33.088,"Consumer","2014-03-19","Standard Class","Utah","Binders" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0.2,"2014-03-14","CA-2014-114510",84321,"OFF-BI-10003007","Premium Transparent Presentation Covers. No Pattern/Clear. 8 1/2"" x 11""",20.1656,2,"West",4681,62.048,"Consumer","2014-03-19","Standard Class","Utah","Binders" +"Office Supplies","Jacksonville","United States","NF-18475","Neil Französisch",0.2,"2014-03-14","CA-2014-100293",32216,"OFF-PA-10000176","Xerox 1887",31.8696,6,"South",9515,91.056,"Home Office","2014-03-18","Standard Class","Florida","Paper" +"Office Supplies","Huntington Beach","United States","DK-13225","Dean Katz",0,"2014-03-14","CA-2014-157623",92646,"OFF-PA-10001204","Xerox 1972",4.752,2,"West",1891,10.56,"Corporate","2014-03-18","Standard Class","California","Paper" +"Office Supplies","Chicago","United States","RB-19465","Rick Bensley",0.2,"2014-03-14","CA-2014-152618",60653,"OFF-PA-10001215","Xerox 1963",2.6399999,2,"Central",9407,8.448,"Home Office","2014-03-17","First Class","Illinois","Paper" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"OFF-ST-10000736","Carina Double Wide Media Storage Towers in Natural & Black",3.2392,1,"West",4678,80.98,"Consumer","2014-03-19","Standard Class","Utah","Storage" +"Office Supplies","Logan","United States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"OFF-ST-10003221","Staple magnet",5.7671995,2,"West",4680,21.359999,"Consumer","2014-03-19","Standard Class","Utah","Storage" +"Technology","Logan","United 
States","JF-15295","Jason Fortune-",0,"2014-03-14","CA-2014-114510",84321,"TEC-AC-10004877","Imation 30456 USB Flash Drive 8GB",6.624,12,"West",4679,82.8,"Consumer","2014-03-19","Standard Class","Utah","Accessories" +"Technology","Chicago","United States","RB-19465","Rick Bensley",0.3,"2014-03-14","CA-2014-152618",60653,"TEC-MA-10003626","Hewlett-Packard Deskjet 6540 Color Inkjet Printer",156.047,2,"Central",9406,574.91003,"Home Office","2014-03-17","First Class","Illinois","Machines" +"Furniture","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"FUR-FU-10001756","Eldon Expressions Desk Accessory. Wood Photo Frame. Mahogany",5.1408,3,"South",604,45.696,"Consumer","2014-03-19","Standard Class","Florida","Furnishings" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.7,"2014-03-15","CA-2014-149958",33614,"OFF-BI-10000773","Insertable Tab Post Binder Dividers",-5.5338,3,"South",605,7.218,"Consumer","2014-03-19","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.7,"2014-03-15","CA-2014-149958",33614,"OFF-BI-10001543","GBC VeloBinder Manual Binding System",-31.6712,4,"South",606,43.188,"Consumer","2014-03-19","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"OFF-PA-10002120","Xerox 1889",47.815197,3,"South",607,131.90399,"Consumer","2014-03-19","Standard Class","Florida","Paper" +"Office Supplies","Tampa","United States","AS-10240","Alan Shonely",0.2,"2014-03-15","CA-2014-149958",33614,"OFF-ST-10001490","Hot File 7-Pocket. 
Floor Stand",17.847,1,"South",603,142.776,"Consumer","2014-03-19","Standard Class","Florida","Storage" +"Technology","Raleigh","United States","KN-16705","Kristina Nunn",0.2,"2014-03-16","CA-2014-157609",27604,"TEC-PH-10002415","Polycom VoiceStation 500 Conference phone",29.494999,2,"South",3453,471.91998,"Home Office","2014-03-21","Second Class","North Carolina","Phones" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10001950","Balt Solid Wood Round Tables",-553.6476,4,"East",2359,1071.5759,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10003473","Bretford Rectangular Conference Table Tops",-447.5947,7,"East",2358,1579.746,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Furniture","New York City","United States","CP-12340","Christine Phan",0.4,"2014-03-17","US-2014-148838",10024,"FUR-TA-10004175","Hon 30"" x 60"" Table with Locking Drawer",-122.7816,3,"East",2360,613.908,"Corporate","2014-03-21","Standard Class","New York","Tables" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-AP-10001154","Bionaire Personal Warm Mist Humidifier/Vaporizer",36.5742,2,"Central",1346,93.78,"Consumer","2014-03-24","Standard Class","Minnesota","Appliances" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-AP-10003971","Belkin 6 Outlet Metallic Surge Strip",8.4942,3,"South",7432,32.67,"Corporate","2014-03-21","Standard Class","Mississippi","Appliances" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-AR-10003732","Newell 333",0.3336,2,"Central",5101,4.448,"Consumer","2014-03-17","Same Day","Texas","Art" +"Office Supplies","Lakeville","United 
States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-AR-10003829","Newell 35",5.7071996,6,"Central",1348,19.68,"Consumer","2014-03-24","Standard Class","Minnesota","Art" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-BI-10000136","Avery Non-Stick Heavy Duty View Round Locking Ring Binders",17.222403,6,"Central",1350,35.88,"Consumer","2014-03-24","Standard Class","Minnesota","Binders" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-BI-10000309","GBC Twin Loop Wire Binding Elements. 9/16"" Spine. Black",14.9156,2,"South",7429,30.44,"Corporate","2014-03-21","Standard Class","Mississippi","Binders" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-BI-10000605","Acco Pressboard Covers with Storage Hooks. 9 1/2"" x 11"". Executive Red",5.3721004,3,"South",7428,11.429999,"Corporate","2014-03-21","Standard Class","Mississippi","Binders" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",25.098,10,"Central",1349,53.4,"Consumer","2014-03-24","Standard Class","Minnesota","Binders" +"Office Supplies","Lakeville","United States","BN-11515","Bradley Nguyen",0,"2014-03-17","CA-2014-118339",55044,"OFF-PA-10000466","Memo Book. 100 Message Capacity. 
5 3/8” x 11”",23.59,7,"Central",1347,47.18,"Consumer","2014-03-24","Standard Class","Minnesota","Paper" +"Office Supplies","New York City","United States","CP-12340","Christine Phan",0,"2014-03-17","US-2014-148838",10024,"OFF-PA-10000919","Xerox 1918",75.969604,4,"East",2362,155.04,"Corporate","2014-03-21","Standard Class","New York","Paper" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-PA-10002195","Xerox 1966",1.8792,1,"Central",5102,5.184,"Consumer","2014-03-17","Same Day","Texas","Paper" +"Office Supplies","Dallas","United States","AZ-10750","Annie Zypern",0.2,"2014-03-17","CA-2014-158442",75217,"OFF-PA-10002365","Xerox 1967",5.4431996,3,"Central",5103,15.552,"Consumer","2014-03-17","Same Day","Texas","Paper" +"Office Supplies","New York City","United States","CP-12340","Christine Phan",0,"2014-03-17","US-2014-148838",10024,"OFF-PA-10002960","Xerox 1926",16.3842,7,"East",2361,34.86,"Corporate","2014-03-21","Standard Class","New York","Paper" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-PA-10003063","EcoTones Memo Sheets",7.68,4,"South",7431,16,"Corporate","2014-03-21","Standard Class","Mississippi","Paper" +"Office Supplies","Jackson","United States","MH-17440","Mark Haberlin",0,"2014-03-17","US-2014-164763",39212,"OFF-PA-10003349","Xerox 1957",6.3504004,2,"South",7430,12.96,"Corporate","2014-03-21","Standard Class","Mississippi","Paper" +"Office Supplies","Philadelphia","United States","QJ-19255","Quincy Jones",0.2,"2014-03-17","CA-2014-127859",19134,"OFF-PA-10003641","Xerox 1909",41.152798,6,"East",8011,126.62399,"Corporate","2014-03-20","Second Class","Pennsylvania","Paper" +"Furniture","San Francisco","United States","EK-13795","Eileen Kiefer",0.15,"2014-03-18","CA-2014-111871",94110,"FUR-BO-10004218","Bush Heritage Pine Collection 5-Shelf Bookcase. Albany Pine Finish. 
*Special Order",70.49,10,"West",6152,1198.3301,"Home Office","2014-03-21","Second Class","California","Bookcases" +"Furniture","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"FUR-FU-10001986","Dana Fluorescent Magnifying Lamp. White. 36""",15.294,3,"South",2703,122.352005,"Home Office","2014-03-23","Standard Class","Florida","Furnishings" +"Furniture","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"FUR-FU-10004270","Executive Impressions 13"" Clairmont Wall Clock",8.076599,2,"South",2701,30.768,"Home Office","2014-03-23","Standard Class","Florida","Furnishings" +"Furniture","Burbank","United States","SC-20050","Sample Company A",0,"2014-03-18","US-2014-131275",91505,"FUR-FU-10004597","Eldon Cleatmat Chair Mats for Medium Pile Carpets",14.429999,2,"West",7328,111,"Home Office","2014-03-24","Standard Class","California","Furnishings" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-AR-10003183","Avery Fluorescent Highlighter Four-Color Set",1.002,3,"South",2700,8.016,"Home Office","2014-03-23","Standard Class","Florida","Art" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-FA-10003112","Staples",5.9175005,3,"South",2702,18.936,"Home Office","2014-03-23","Standard Class","Florida","Fasteners" +"Office Supplies","Jacksonville","United States","SM-20320","Sean Miller",0.2,"2014-03-18","CA-2014-145317",32216,"OFF-PA-10001804","Xerox 195",7.4816,4,"South",2699,21.376,"Home Office","2014-03-23","Standard Class","Florida","Paper" +"Office Supplies","Burbank","United States","SC-20050","Sample Company A",0,"2014-03-18","US-2014-131275",91505,"OFF-ST-10000078","Tennsco 6- and 18-Compartment Lockers",334.11423,7,"West",7330,1856.19,"Home Office","2014-03-24","Standard Class","California","Storage" +"Technology","Burbank","United 
States","SC-20050","Sample Company A",0.2,"2014-03-18","US-2014-131275",91505,"TEC-MA-10001148","Swingline SM12-08 MicroCut Jam Free Shredder",415.9896,4,"West",7329,1279.9681,"Home Office","2014-03-24","Standard Class","California","Machines" +"Technology","Jacksonville","United States","SM-20320","Sean Miller",0.5,"2014-03-18","CA-2014-145317",32216,"TEC-MA-10002412","Cisco TelePresence System EX90 Videoconferencing Unit",-1811.0784,6,"South",2698,22638.48,"Home Office","2014-03-23","Standard Class","Florida","Machines" +"Technology","Jacksonville","United States","SM-20320","Sean Miller",0.5,"2014-03-18","CA-2014-145317",32216,"TEC-MA-10003626","Hewlett-Packard Deskjet 6540 Color Inkjet Printer",-16.426,4,"South",2697,821.3,"Home Office","2014-03-23","Standard Class","Florida","Machines" +"Furniture","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"FUR-FU-10002508","Document Clip Frames",5.5044003,3,"South",6166,20.016,"Corporate","2014-03-21","First Class","Florida","Furnishings" +"Furniture","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"FUR-FU-10003981","Eldon Wave Desk Accessories",1.3728,3,"South",6165,4.992,"Corporate","2014-03-21","First Class","Florida","Furnishings" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.7,"2014-03-19","CA-2014-103989",33801,"OFF-BI-10001196","Avery Flip-Chart Easel Binder. 
Black",-25.737001,5,"South",6164,33.57,"Corporate","2014-03-21","First Class","Florida","Binders" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"OFF-LA-10000443","Avery 501",3.9852002,4,"South",6162,11.808001,"Corporate","2014-03-21","First Class","Florida","Labels" +"Office Supplies","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"OFF-ST-10000918","Crate-A-Files",1.9619999,3,"South",6163,26.16,"Corporate","2014-03-21","First Class","Florida","Storage" +"Technology","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"TEC-AC-10002647","Logitech Wireless Boombox Speaker - portable - wireless. wired",53.2,2,"South",6167,170.23999,"Corporate","2014-03-21","First Class","Florida","Accessories" +"Technology","Lakeland","United States","MC-17605","Matt Connell",0.2,"2014-03-19","CA-2014-103989",33801,"TEC-PH-10004667","Cisco 8x8 Inc. 6753i IP Business Phone System",28.3479,3,"South",6161,323.976,"Corporate","2014-03-21","First Class","Florida","Phones" +"Furniture","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"FUR-CH-10000422","Global Highback Leather Tilter in Burgundy",-10.9188,3,"South",8346,218.376,"Consumer","2014-03-25","Standard Class","Tennessee","Chairs" +"Furniture","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"FUR-FU-10001473","DAX Wood Document Frame",6.5904,3,"South",8345,32.952,"Consumer","2014-03-25","Standard Class","Tennessee","Furnishings" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-AR-10001573","American Pencil",2.7028,4,"South",7587,9.320001,"Corporate","2014-03-25","Standard Class","Virginia","Art" +"Office Supplies","Hamilton","United States","CB-12025","Cassandra 
Brandow",0.2,"2014-03-21","CA-2014-140004",45011,"OFF-AR-10004027","Binney & Smith inkTank Erasable Desk Highlighter. Chisel Tip. Yellow. 12/Box",1.5876,3,"East",197,6.048,"Consumer","2014-03-25","Standard Class","Ohio","Art" +"Office Supplies","Hamilton","United States","CB-12025","Cassandra Brandow",0.2,"2014-03-21","CA-2014-140004",45011,"OFF-AR-10004685","Binney & Smith Crayola Metallic Colored Pencils. 8-Color Set",1.2038001,2,"East",196,7.408,"Consumer","2014-03-25","Standard Class","Ohio","Art" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-BI-10001597","Wilson Jones Ledger-Size. Piano-Hinge Binder. 2"". Blue",59.011196,3,"South",7588,122.94,"Corporate","2014-03-25","Standard Class","Virginia","Binders" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-LA-10004409","Avery 492",6.624,5,"South",7585,14.4,"Corporate","2014-03-25","Standard Class","Virginia","Labels" +"Office Supplies","Knoxville","United States","GM-14440","Gary McGarr",0.2,"2014-03-21","US-2014-155544",37918,"OFF-LA-10004544","Avery 505",22.2,5,"South",8344,59.2,"Consumer","2014-03-25","Standard Class","Tennessee","Labels" +"Office Supplies","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"OFF-PA-10000675","Xerox 1919",60.255302,3,"South",7586,122.97,"Corporate","2014-03-25","Standard Class","Virginia","Paper" +"Office Supplies","Asheville","United States","TS-21205","Thomas Seio",0.2,"2014-03-21","US-2014-107405",28806,"OFF-ST-10002301","Tennsco Commercial Shelving",-3.8646,1,"South",5461,16.272001,"Corporate","2014-03-25","Standard Class","North Carolina","Storage" +"Technology","Springfield","United States","KH-16690","Kristen Hastings",0,"2014-03-21","CA-2014-163223",22153,"TEC-PH-10000730","Samsung Galaxy S4 Active",909.9818,7,"South",7584,3499.93,"Corporate","2014-03-25","Standard 
Class","Virginia","Phones" +"Furniture","Tucson","United States","BF-11275","Beth Fritzler",0.2,"2014-03-22","CA-2014-148040",85705,"FUR-CH-10001482","Office Star - Mesh Screen back chair with Vinyl seat",-35.3646,3,"West",2872,314.352,"Corporate","2014-03-26","Standard Class","Arizona","Chairs" +"Office Supplies","Portage","United States","VM-21835","Vivian Mathis",0,"2014-03-22","US-2014-129609",46368,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 4/Pack",6.512,2,"Central",4341,16.28,"Consumer","2014-03-22","Same Day","Indiana","Art" +"Office Supplies","Greensboro","United States","CC-12685","Craig Carroll",0.7,"2014-03-22","US-2014-158057",27405,"OFF-BI-10002735","GBC Prestige Therm-A-Bind Covers",-39.4565,5,"South",1769,51.465004,"Consumer","2014-03-26","Standard Class","North Carolina","Binders" +"Office Supplies","Greensboro","United States","CC-12685","Craig Carroll",0.7,"2014-03-22","US-2014-158057",27405,"OFF-BI-10004410","C-Line Peel & Stick Add-On Filing Pockets. 8-3/4 x 5-1/8. 
10/Pack",-5.8604,4,"South",1768,7.644,"Consumer","2014-03-26","Standard Class","North Carolina","Binders" +"Office Supplies","Tucson","United States","BF-11275","Beth Fritzler",0.2,"2014-03-22","CA-2014-148040",85705,"OFF-PA-10002581","Xerox 1951",23.235,3,"West",2871,74.352005,"Corporate","2014-03-26","Standard Class","Arizona","Paper" +"Office Supplies","Delray Beach","United States","BG-11740","Bruce Geld",0.2,"2014-03-23","CA-2014-164749",33445,"OFF-LA-10004484","Avery 476",3.2214,3,"South",6591,9.912,"Consumer","2014-03-26","First Class","Florida","Labels" +"Office Supplies","Los Angeles","United States","PO-18865","Patrick O'Donnell",0,"2014-03-23","CA-2014-120838",90036,"OFF-ST-10000585","Economy Rollaway Files",85.904,2,"West",3555,330.4,"Consumer","2014-03-26","Second Class","California","Storage" +"Technology","Los Angeles","United States","PO-18865","Patrick O'Donnell",0.2,"2014-03-23","CA-2014-120838",90036,"TEC-PH-10003484","Ooma Telo VoIP Home Phone System",37.797,6,"West",3556,604.752,"Consumer","2014-03-26","Second Class","California","Phones" +"Furniture","Fresno","United States","MS-17710","Maurice Satty",0,"2014-03-24","CA-2014-143637",93727,"FUR-FU-10002813","DAX Contemporary Wood Frame with Silver Metal Mat. Desktop. 11 x 14 Size",14.5728,2,"West",7802,40.480003,"Consumer","2014-03-29","Second Class","California","Furnishings" +"Office Supplies","New York City","United States","CD-11920","Carlos Daly",0,"2014-03-24","CA-2014-147235",10024,"OFF-PA-10004948","Xerox 190",11.703,5,"East",4480,24.9,"Consumer","2014-03-28","Standard Class","New York","Paper" +"Furniture","New York City","United States","AZ-10750","Annie Zypern",0.1,"2014-03-25","CA-2014-148586",10009,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 
19w x 6d x 43h",65.20639,7,"East",2433,366.786,"Consumer","2014-04-01","Standard Class","New York","Chairs" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10000034","BIC Brite Liner Grip Highlighters. Assorted. 5/Pack",9.9216,6,"West",9310,25.44,"Consumer","2014-03-30","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10003338","Eberhard Faber 3 1/2"" Golf Pencils",3.72,2,"West",9308,14.88,"Consumer","2014-03-30","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"OFF-AR-10003829","Newell 35",1.9023999,2,"West",9307,6.56,"Consumer","2014-03-30","Standard Class","California","Art" +"Technology","San Francisco","United States","CA-12265","Christina Anderson",0,"2014-03-25","CA-2014-128237",94110,"TEC-AC-10002558","Imation Swivel Flash Drive USB flash drive - 8 GB",15.918,4,"West",9309,45.480003,"Consumer","2014-03-30","Standard Class","California","Accessories" +"Office Supplies","San Francisco","United States","DK-12835","Damala Kotsonis",0,"2014-03-26","CA-2014-141838",94122,"OFF-AR-10004272","Newell 308",0.84000003,2,"West",9052,3.36,"Corporate","2014-03-31","Second Class","California","Art" +"Office Supplies","San Francisco","United States","DK-12835","Damala Kotsonis",0.2,"2014-03-26","CA-2014-141838",94122,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",9.4284,4,"West",9053,27.936,"Corporate","2014-03-31","Second Class","California","Binders" +"Office Supplies","Pomona","United States","CS-12505","Cindy Stewart",0,"2014-03-26","CA-2014-100860",91767,"OFF-LA-10001982","Smead Alpha-Z Color-Coded Name Labels First Letter Starter Set",9,5,"West",9660,18.75,"Consumer","2014-03-30","Second Class","California","Labels" +"Technology","Los 
Angeles","United States","JD-15895","Jonathan Doherty",0,"2014-03-26","CA-2014-138436",90036,"TEC-AC-10002323","SanDisk Ultra 32 GB MicroSDHC Class 10 Memory Card",8.619,3,"West",4245,66.3,"Corporate","2014-03-30","Standard Class","California","Accessories" +"Technology","San Francisco","United States","DK-12835","Damala Kotsonis",0.2,"2014-03-26","CA-2014-141838",94122,"TEC-PH-10004100","Griffin GC17055 Auxiliary Audio Cable",2.8783998,2,"West",9054,28.784,"Corporate","2014-03-31","Second Class","California","Phones" +"Furniture","Springfield","United States","MP-18175","Mike Pelletier",0.4,"2014-03-28","US-2014-117380",45503,"FUR-TA-10000198","Chromcraft Bull-Nose Wood Oval Conference Tables & Bases",-143.25479,1,"East",9900,330.58798,"Home Office","2014-04-03","Standard Class","Ohio","Tables" +"Furniture","Des Moines","United States","CV-12295","Christina VanderZanden",0,"2014-03-28","US-2014-137869",50315,"FUR-TA-10003954","Hon 94000 Series Round Tables",106.624794,4,"Central",7841,1184.72,"Consumer","2014-04-02","Standard Class","Iowa","Tables" +"Office Supplies","Des Moines","United States","CV-12295","Christina VanderZanden",0,"2014-03-28","US-2014-137869",50315,"OFF-EN-10001509","Poly String Tie Envelopes",2.8764,3,"Central",7840,6.12,"Consumer","2014-04-02","Standard Class","Iowa","Envelopes" +"Technology","Albuquerque","United States","DR-12940","Daniel Raglin",0.2,"2014-03-28","CA-2014-100881",87105,"TEC-PH-10003273","AT&T TR1909W",22.6782,3,"West",8464,302.376,"Home Office","2014-04-01","Standard Class","New Mexico","Phones" +"Furniture","Plano","United States","MG-18145","Mike Gockenbach",0.3,"2014-03-29","CA-2014-137274",75023,"FUR-TA-10001889","Bush Advantage Collection Racetrack Conference Table",-152.71559,3,"Central",7306,890.84094,"Consumer","2014-04-02","Standard Class","Texas","Tables" +"Furniture","San Francisco","United States","GA-14725","Guy Armstrong",0.15,"2014-03-30","CA-2014-131247",94110,"FUR-BO-10001337","O'Sullivan Living Dimensions 
2-Shelf Bookcases",-12.098001,2,"West",6139,205.666,"Consumer","2014-04-04","Standard Class","California","Bookcases" +"Furniture","Brownsville","United States","MP-17470","Mark Packer",0.3,"2014-03-30","CA-2014-162089",78521,"FUR-CH-10002304","Global Stack Chair without Arms. Black",-9.093,7,"Central",7632,127.302,"Home Office","2014-04-01","First Class","Texas","Chairs" +"Office Supplies","New York City","United States","SG-20890","Susan Gilcrest",0,"2014-03-30","US-2014-105151",10009,"OFF-AR-10001231","Sanford EarthWrite Recycled Pencils. Medium Soft. #2",2.94,5,"East",5954,10.5,"Corporate","2014-03-31","First Class","New York","Art" +"Office Supplies","Long Beach","United States","KM-16720","Kunst Miller",0,"2014-03-30","CA-2014-169033",11561,"OFF-AR-10001915","Peel-Off China Markers",20.852999,5,"East",2897,49.649998,"Consumer","2014-04-03","Standard Class","New York","Art" +"Office Supplies","Brownsville","United States","MP-17470","Mark Packer",0.2,"2014-03-30","CA-2014-162089",78521,"OFF-EN-10002230","Airmail Envelopes",113.305504,5,"Central",7630,335.72,"Home Office","2014-04-01","First Class","Texas","Envelopes" +"Office Supplies","Seattle","United States","Dl-13600","Dorris liebe",0,"2014-03-30","CA-2014-133424",98105,"OFF-LA-10002312","Avery 490",22.2,3,"West",5285,44.4,"Corporate","2014-04-04","Standard Class","Washington","Labels" +"Office Supplies","Apple Valley","United States","NC-18340","Nat Carroll",0,"2014-03-30","US-2014-113124",55124,"OFF-ST-10001511","Space Solutions Commercial Steel Shelving",6.465,2,"Central",8431,129.3,"Consumer","2014-04-05","Standard Class","Minnesota","Storage" +"Office Supplies","Seattle","United States","Dl-13600","Dorris liebe",0,"2014-03-30","CA-2014-133424",98105,"OFF-ST-10002957","Sterilite Show Offs Storage Containers",0,3,"West",5284,15.84,"Corporate","2014-04-04","Standard Class","Washington","Storage" +"Technology","Brownsville","United States","MP-17470","Mark 
Packer",0.2,"2014-03-30","CA-2014-162089",78521,"TEC-PH-10001819","Innergie mMini Combo Duo USB Travel Charging Kit",88.1804,7,"Central",7631,251.94398,"Home Office","2014-04-01","First Class","Texas","Phones" +"Furniture","Tampa","United States","TG-21640","Trudy Glocke",0.2,"2014-03-31","CA-2014-130428",33614,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",98.4802,7,"South",9566,1125.488,"Consumer","2014-03-31","Same Day","Florida","Chairs" +"Office Supplies","San Francisco","United States","AA-10315","Alex Avila",0,"2014-03-31","CA-2014-128055",94122,"OFF-AP-10002765","Fellowes Advanced Computer Series Surge Protectors",14.834399,2,"West",2231,52.980003,"Consumer","2014-04-05","Standard Class","California","Appliances" +"Office Supplies","Tampa","United States","TG-21640","Trudy Glocke",0.2,"2014-03-31","CA-2014-130428",33614,"OFF-AR-10004027","Binney & Smith inkTank Erasable Desk Highlighter. Chisel Tip. Yellow. 12/Box",1.0583999,2,"South",9568,4.032,"Consumer","2014-03-31","Same Day","Florida","Art" +"Office Supplies","Miami","United States","KB-16240","Karen Bern",0.7,"2014-03-31","CA-2014-101770",33180,"OFF-BI-10001097","Avery Hole Reinforcements",-1.3083,1,"South",5480,1.8689998,"Corporate","2014-04-04","Standard Class","Florida","Binders" +"Office Supplies","Tampa","United States","TG-21640","Trudy Glocke",0.7,"2014-03-31","CA-2014-130428",33614,"OFF-BI-10001636","Ibico Plastic and Wire Spiral Binding Combs",-10.116,5,"South",9567,12.645,"Consumer","2014-03-31","Same Day","Florida","Binders" +"Office Supplies","Chicago","United States","DD-13570","Dorothy Dickinson",0.8,"2014-03-31","US-2014-104759",60610,"OFF-BI-10002071","Fellowes Black Plastic Comb Bindings",-13.827801,7,"Central",5049,8.134,"Consumer","2014-04-04","Standard Class","Illinois","Binders" +"Office Supplies","Philadelphia","United States","JO-15280","Jas O'Carroll",0.7,"2014-03-31","CA-2014-112403",19120,"OFF-BI-10003529","Avery Round Ring 
Poly Binders",-0.59639996,1,"East",4712,0.852,"Consumer","2014-03-31","Same Day","Pennsylvania","Binders" +"Office Supplies","San Francisco","United States","AA-10315","Alex Avila",0.2,"2014-03-31","CA-2014-128055",94122,"OFF-BI-10004390","GBC DocuBind 200 Manual Binding Machine",252.588,2,"West",2230,673.568,"Consumer","2014-04-05","Standard Class","California","Binders" +"Technology","Chicago","United States","DD-13570","Dorothy Dickinson",0.2,"2014-03-31","US-2014-104759",60610,"TEC-AC-10004901","Kensington SlimBlade Notebook Wireless Mouse with Nano Receiver ",13.9972,2,"Central",5050,79.984,"Consumer","2014-04-04","Standard Class","Illinois","Accessories" +"Office Supplies","Vallejo","United States","KM-16720","Kunst Miller",0.2,"2014-04-01","US-2014-157021",94591,"OFF-BI-10000042","Pressboard Data Binder. Crimson. 12"" X 8 1/2""",5.5536,4,"West",1374,17.088,"Consumer","2014-04-06","Second Class","California","Binders" +"Office Supplies","Revere","United States","KH-16330","Katharine Harms",0,"2014-04-01","CA-2014-138359",2151,"OFF-BI-10000145","Zipper Ring Binder Pockets",3.0576,2,"East",7955,6.24,"Corporate","2014-04-06","Standard Class","Massachusetts","Binders" +"Office Supplies","Vallejo","United States","KM-16720","Kunst Miller",0,"2014-04-01","US-2014-157021",94591,"OFF-LA-10002312","Avery 490",14.8,2,"West",1373,29.6,"Consumer","2014-04-06","Second Class","California","Labels" +"Office Supplies","Revere","United States","KH-16330","Katharine Harms",0,"2014-04-01","CA-2014-138359",2151,"OFF-ST-10000636","Rogers Profile Extra Capacity Storage Tub",2.6783998,4,"East",7954,66.96,"Corporate","2014-04-06","Standard Class","Massachusetts","Storage" +"Furniture","Virginia Beach","United States","AH-10690","Anna Häberlin",0,"2014-04-02","CA-2014-160276",23464,"FUR-FU-10003192","Luxo Adjustable Task Clamp Lamp",46.1968,2,"South",8920,177.68001,"Corporate","2014-04-08","Standard Class","Virginia","Furnishings" +"Office Supplies","Athens","United 
States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"OFF-AP-10003842","Euro-Pro Shark Turbo Vacuum",40.274002,5,"South",5685,154.9,"Consumer","2014-04-08","Standard Class","Georgia","Appliances" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.2,"2014-04-02","US-2014-157847",77095,"OFF-PA-10001593","Xerox 1947",10.465,7,"Central",8828,33.488003,"Consumer","2014-04-06","Second Class","Texas","Paper" +"Office Supplies","Houston","United States","SC-20020","Sam Craven",0.2,"2014-04-02","US-2014-157847",77095,"OFF-PA-10002986","Xerox 1898",9.352,5,"Central",8827,26.720001,"Consumer","2014-04-06","Second Class","Texas","Paper" +"Office Supplies","Athens","United States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"OFF-PA-10004248","Xerox 1990",7.128,3,"South",5683,15.84,"Consumer","2014-04-08","Standard Class","Georgia","Paper" +"Technology","Athens","United States","RD-19585","Rob Dowd",0,"2014-04-02","CA-2014-164315",30605,"TEC-PH-10001128","Motorola Droid Maxx",293.98038,7,"South",5684,1049.93,"Consumer","2014-04-08","Standard Class","Georgia","Phones" +"Office Supplies","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"OFF-EN-10001415","Staple envelope",5.58,2,"West",9884,11.160001,"Corporate","2014-04-08","Standard Class","California","Envelopes" +"Technology","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",57.5928,2,"West",9886,159.98,"Corporate","2014-04-08","Standard Class","California","Accessories" +"Technology","Los Angeles","United States","KE-16420","Katrina Edelman",0,"2014-04-03","CA-2014-112291",90008,"TEC-AC-10001284","Enermax Briskie RF Wireless Keyboard and Mouse Combo",22.4316,3,"West",9885,62.309998,"Corporate","2014-04-08","Standard Class","California","Accessories" +"Furniture","Detroit","United States","MS-17710","Maurice 
Satty",0,"2014-04-04","CA-2014-133228",48205,"FUR-FU-10004020","Advantus Panel Wall Acrylic Frame",2.3521,1,"Central",3955,5.4700003,"Consumer","2014-04-09","Standard Class","Michigan","Furnishings" +"Office Supplies","Detroit","United States","MS-17710","Maurice Satty",0,"2014-04-04","CA-2014-133228",48205,"OFF-AR-10001955","Newell 319",23.807999,4,"Central",3956,79.36,"Consumer","2014-04-09","Standard Class","Michigan","Art" +"Office Supplies","Los Angeles","United States","MZ-17515","Mary Zewe",0.2,"2014-04-04","US-2014-128685",90008,"OFF-BI-10004140","Avery Non-Stick Binders",2.2450001,2,"West",4845,7.184,"Corporate","2014-04-05","First Class","California","Binders" +"Office Supplies","San Francisco","United States","PK-18910","Paul Knutson",0,"2014-04-04","CA-2014-105172",94109,"OFF-LA-10001641","Avery 518",9.072,6,"West",1561,18.9,"Home Office","2014-04-09","Standard Class","California","Labels" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-PA-10002195","Xerox 1966",6.3504004,2,"South",5124,12.96,"Corporate","2014-04-08","Standard Class","Louisiana","Paper" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-PA-10003797","Xerox 209",9.331201,3,"South",5123,19.44,"Corporate","2014-04-08","Standard Class","Louisiana","Paper" +"Office Supplies","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"OFF-ST-10004180","Safco Commercial Shelving",9.302,5,"South",5121,232.55,"Corporate","2014-04-08","Standard Class","Louisiana","Storage" +"Technology","Lafayette","United States","KB-16585","Ken Black",0,"2014-04-04","CA-2014-149538",70506,"TEC-AC-10002926","Logitech Wireless Marathon Mouse M705",42.9914,2,"South",5122,99.98,"Corporate","2014-04-08","Standard Class","Louisiana","Accessories" +"Office Supplies","Alexandria","United States","GM-14695","Greg 
Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-AR-10000127","Newell 321",6.6584,7,"South",3280,22.96,"Corporate","2014-04-09","Second Class","Virginia","Art" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-AR-10002952","Stanley Contemporary Battery Pencil Sharpeners",7.4760003,2,"Central",9507,26.7,"Consumer","2014-04-07","Second Class","Michigan","Art" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-AR-10004685","Binney & Smith Crayola Metallic Colored Pencils. 8-Color Set",4.5836997,3,"Central",9509,13.89,"Consumer","2014-04-07","Second Class","Michigan","Art" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-AR-10004757","Crayola Colored Pencils",7.5768003,7,"South",3277,22.96,"Corporate","2014-04-09","Second Class","Virginia","Art" +"Office Supplies","Richmond","United States","RP-19855","Roy Phan",0,"2014-04-05","US-2014-157231",40475,"OFF-BI-10002852","Ibico Standard Transparent Covers",56.5264,7,"South",9912,115.36,"Corporate","2014-04-09","Standard Class","Kentucky","Binders" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-BI-10004209","Fellowes Twister Kit. Gray/Clear. 3/pkg",18.09,5,"Central",9508,40.2,"Consumer","2014-04-07","Second Class","Michigan","Binders" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-PA-10003349","Xerox 1957",6.3504004,2,"South",3279,12.96,"Corporate","2014-04-09","Second Class","Virginia","Paper" +"Office Supplies","New York City","United States","TH-21550","Tracy Hopkins",0,"2014-04-05","CA-2014-113887",10035,"OFF-PA-10004071","Eaton Premium Continuous-Feed Paper. 25% Cotton. Letter Size. White. 
1000 Shts/Box",26.630402,1,"East",711,55.480003,"Home Office","2014-04-07","First Class","New York","Paper" +"Office Supplies","Dearborn Heights","United States","RD-19900","Ruben Dartt",0,"2014-04-05","CA-2014-149104",48127,"OFF-ST-10000991","Space Solutions HD Industrial Steel Shelving.",20.694601,6,"Central",9510,689.82,"Consumer","2014-04-07","Second Class","Michigan","Storage" +"Office Supplies","Decatur","United States","JG-15805","John Grady",0.2,"2014-04-05","CA-2014-151001",62521,"OFF-ST-10001031","Adjustable Personal File Tote",3.9072,4,"Central",3880,52.096,"Corporate","2014-04-07","First Class","Illinois","Storage" +"Office Supplies","Decatur","United States","JG-15805","John Grady",0.2,"2014-04-05","CA-2014-151001",62521,"OFF-ST-10003455","Tenex File Box. Personal Filing Tote with Lid. Black",3.7224002,4,"Central",3879,49.631996,"Corporate","2014-04-07","First Class","Illinois","Storage" +"Office Supplies","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"OFF-SU-10002881","Martin Yale Chadless Opener Electric Letter Opener",83.281,5,"South",3281,4164.05,"Corporate","2014-04-09","Second Class","Virginia","Supplies" +"Technology","Alexandria","United States","GM-14695","Greg Maxwell",0,"2014-04-05","CA-2014-102988",22304,"TEC-PH-10001615","AT&T CL82213",8.4071,1,"South",3278,28.99,"Corporate","2014-04-09","Second Class","Virginia","Phones" +"Furniture","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"FUR-FU-10000747","Tenex B1-RE Series Chair Mats for Low Pile Carpets",15.633201,2,"West",6016,91.96,"Corporate","2014-04-12","Standard Class","California","Furnishings" +"Furniture","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"FUR-FU-10001918","C-Line Cubicle Keepers Polyproplyene Holder With Velcro Backings",12.9129,7,"West",6017,33.109997,"Corporate","2014-04-12","Standard Class","California","Furnishings" 
+"Furniture","Seattle","United States","SC-20260","Scott Cohen",0,"2014-04-06","CA-2014-169803",98115,"FUR-TA-10000688","Chromcraft Bull-Nose Wood Round Conference Table Top. Wood Base",111.1035,3,"West",5036,653.55,"Corporate","2014-04-12","Standard Class","Washington","Tables" +"Furniture","Philadelphia","United States","SD-20485","Shirley Daniels",0.4,"2014-04-06","US-2014-118486",19143,"FUR-TA-10001039","KI Adjustable-Height Table",-36.111595,3,"East",1406,154.76399,"Home Office","2014-04-08","First Class","Pennsylvania","Tables" +"Office Supplies","Los Angeles","United States","FH-14275","Frank Hawley",0,"2014-04-06","CA-2014-153808",90004,"OFF-AR-10001725","Boston Home & Office Model 2000 Electric Pencil Sharpeners",18.447,3,"West",8839,70.950005,"Corporate","2014-04-10","Second Class","California","Art" +"Office Supplies","Los Angeles","United States","FH-14275","Frank Hawley",0.2,"2014-04-06","CA-2014-153808",90004,"OFF-BI-10002794","Avery Trapezoid Ring Binder. 3"" Capacity. Black. 
1040 sheets",23.7684,2,"West",8840,65.568,"Corporate","2014-04-10","Second Class","California","Binders" +"Office Supplies","Lancaster","United States","GP-14740","Guy Phonely",0.7,"2014-04-06","CA-2014-136742",17602,"OFF-BI-10003719","Large Capacity Hanging Post Binders",-35.928,6,"East",869,44.91,"Corporate","2014-04-10","Standard Class","Pennsylvania","Binders" +"Office Supplies","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"OFF-PA-10001977","Xerox 194",26.630402,1,"West",6019,55.480003,"Corporate","2014-04-12","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","AY-10555","Andy Yotov",0,"2014-04-06","CA-2014-102652",90049,"OFF-PA-10003309","Xerox 211",9.331201,3,"West",6018,19.44,"Corporate","2014-04-12","Standard Class","California","Paper" +"Office Supplies","Philadelphia","United States","SD-20485","Shirley Daniels",0.2,"2014-04-06","US-2014-118486",19143,"OFF-SU-10004498","Martin-Yale Premier Letter Opener",-2.1896,1,"East",1405,10.304,"Home Office","2014-04-08","First Class","Pennsylvania","Supplies" +"Technology","Seattle","United States","SC-20260","Scott Cohen",0,"2014-04-06","CA-2014-169803",98115,"TEC-AC-10003441","Kingston Digital DataTraveler 32GB USB 2.0",2.034,2,"West",5037,33.9,"Corporate","2014-04-12","Standard Class","Washington","Accessories" +"Technology","Los Angeles","United States","FH-14275","Frank Hawley",0,"2014-04-06","CA-2014-153808",90004,"TEC-AC-10004171","Razer Kraken 7.1 Surround Sound Over Ear USB Gaming Headset",131.98679,3,"West",8841,299.97,"Corporate","2014-04-10","Second Class","California","Accessories" +"Technology","Philadelphia","United States","SD-20485","Shirley Daniels",0.2,"2014-04-06","US-2014-118486",19143,"TEC-AC-10004659","Imation Secure+ Hardware Encrypted USB 2.0 Flash Drive; 16GB",21.897,2,"East",1407,116.784,"Home Office","2014-04-08","First Class","Pennsylvania","Accessories" +"Furniture","Mobile","United 
States","PJ-19015","Pauline Johnson",0,"2014-04-07","CA-2014-124023",36608,"FUR-FU-10004415","Stacking Tray. Side-Loading. Legal. Smoke",2.7776,2,"South",3482,8.96,"Consumer","2014-04-10","First Class","Alabama","Furnishings" +"Office Supplies","Columbia","United States","RS-19765","Roland Schwarz",0,"2014-04-07","CA-2014-128846",29203,"OFF-PA-10000100","Xerox 1945",60.255302,3,"South",4178,122.97,"Corporate","2014-04-12","Standard Class","South Carolina","Paper" +"Office Supplies","Marietta","United States","VM-21685","Valerie Mitchum",0,"2014-04-07","CA-2014-165806",30062,"OFF-PA-10003441","Xerox 226",27.9936,9,"South",6253,58.32,"Home Office","2014-04-07","Same Day","Georgia","Paper" +"Technology","Columbia","United States","RS-19765","Roland Schwarz",0,"2014-04-07","CA-2014-128846",29203,"TEC-PH-10003273","AT&T TR1909W",163.787,5,"South",4177,629.95,"Corporate","2014-04-12","Standard Class","South Carolina","Phones" +"Technology","Marietta","United States","VM-21685","Valerie Mitchum",0,"2014-04-07","CA-2014-165806",30062,"TEC-PH-10004922","RCA Visys Integrated PBX 8-Line Router",50.2425,3,"South",6254,200.97,"Home Office","2014-04-07","Same Day","Georgia","Phones" +"Furniture","Decatur","United States","RA-19885","Ruben Ausman",0,"2014-04-08","US-2014-118997",35601,"FUR-TA-10001086","SAFCO PlanMaster Boards. 60w x 37-1/2d. 
White Melamine",316.13922,8,"South",5927,1215.92,"Corporate","2014-04-12","Standard Class","Alabama","Tables" +"Furniture","Concord","United States","NM-18445","Nathan Mautz",0.2,"2014-04-08","CA-2014-150581",94521,"FUR-TA-10003748","Bevis 36 x 72 Conference Tables",2.4898,1,"West",7047,99.591995,"Home Office","2014-04-12","Standard Class","California","Tables" +"Furniture","Toledo","United States","JS-15880","John Stevenson",0.4,"2014-04-08","CA-2014-121769",43615,"FUR-TA-10004442","Riverside Furniture Stanwyck Manor Table Series",-94.6605,1,"East",7410,172.11,"Consumer","2014-04-12","Standard Class","Ohio","Tables" +"Office Supplies","Glendale","United States","JM-15655","Jim Mitchum",0.2,"2014-04-08","CA-2014-100363",85301,"OFF-FA-10000611","Binder Clips by OIC",0.8288,2,"West",3836,2.3679998,"Corporate","2014-04-15","Standard Class","Arizona","Fasteners" +"Office Supplies","Chandler","United States","PC-19000","Pauline Chand",0.2,"2014-04-08","US-2014-163797",85224,"OFF-FA-10001883","Alliance Super-Size Bands. 
Assorted Sizes",-11.825601,8,"West",6381,49.792,"Home Office","2014-04-13","Standard Class","Arizona","Fasteners" +"Office Supplies","Glendale","United States","JM-15655","Jim Mitchum",0.2,"2014-04-08","CA-2014-100363",85301,"OFF-PA-10004733","Things To Do Today Spiral Book",6.8904004,3,"West",3837,19.008,"Corporate","2014-04-15","Standard Class","Arizona","Paper" +"Technology","Concord","United States","NM-18445","Nathan Mautz",0,"2014-04-08","CA-2014-150581",94521,"TEC-AC-10001908","Logitech Wireless Headset h800",139.986,4,"West",7048,399.96,"Home Office","2014-04-12","Standard Class","California","Accessories" +"Office Supplies","Lewiston","United States","SE-20110","Sanjit Engle",0.2,"2014-04-11","US-2014-121734",83501,"OFF-BI-10004817","GBC Personal VeloBind Strips",3.3544,1,"West",4578,9.584,"Consumer","2014-04-16","Standard Class","Idaho","Binders" +"Office Supplies","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"OFF-FA-10001332","Acco Banker's Clasps. 
5 3/4""-Long",2.3328,3,"South",3774,6.912,"Consumer","2014-04-13","Second Class","Florida","Fasteners" +"Office Supplies","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"OFF-PA-10000029","Xerox 224",3.6287997,2,"South",3776,10.368001,"Consumer","2014-04-13","Second Class","Florida","Paper" +"Office Supplies","Great Falls","United States","MM-18055","Michelle Moray",0,"2014-04-11","CA-2014-110555",59405,"OFF-ST-10000876","Eldon Simplefile Box Office",24.3824,7,"West",8358,87.08,"Consumer","2014-04-18","Standard Class","Montana","Storage" +"Technology","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"TEC-AC-10001874","Logitech Wireless Anywhere Mouse MX for PC and Mac",41.993,7,"South",3777,335.944,"Consumer","2014-04-13","Second Class","Florida","Accessories" +"Technology","Miami","United States","JK-15370","Jay Kimmel",0.2,"2014-04-11","US-2014-102715",33180,"TEC-AC-10002001","Logitech Wireless Gaming Headset G930",81.594894,3,"South",3775,383.976,"Consumer","2014-04-13","Second Class","Florida","Accessories" +"Technology","Great Falls","United States","MM-18055","Michelle Moray",0,"2014-04-11","CA-2014-110555",59405,"TEC-AC-10003399","Memorex Mini Travel Drive 64 GB USB 2.0 Flash Drive",91.3248,6,"West",8360,217.43999,"Consumer","2014-04-18","Standard Class","Montana","Accessories" +"Technology","Great Falls","United States","MM-18055","Michelle Moray",0.2,"2014-04-11","CA-2014-110555",59405,"TEC-PH-10000586","AT&T SB67148 SynJ",9.2386,2,"West",8359,105.584,"Consumer","2014-04-18","Standard Class","Montana","Phones" +"Technology","Austin","United States","GM-14455","Gary Mitchum",0.2,"2014-04-11","CA-2014-138940",78745,"TEC-PH-10001835","Jawbone JAMBOX Wireless Bluetooth Speaker",265.4232,6,"Central",4862,758.352,"Home Office","2014-04-16","Second Class","Texas","Phones" +"Furniture","Redondo Beach","United States","KN-16450","Kean 
Nguyen",0.15,"2014-04-12","CA-2014-154599",90278,"FUR-BO-10001337","O'Sullivan Living Dimensions 2-Shelf Bookcases",-18.147001,3,"West",2797,308.499,"Corporate","2014-04-17","Standard Class","California","Bookcases" +"Office Supplies","Lodi","United States","EH-13765","Edward Hooks",0,"2014-04-12","CA-2014-142979",95240,"OFF-AR-10002987","Prismacolor Color Pencil Set",16.268799,2,"West",9850,39.68,"Corporate","2014-04-18","Standard Class","California","Art" +"Office Supplies","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"OFF-BI-10002764","Recycled Pressboard Report Cover with Reinforced Top Hinge",6.5569,7,"West",2796,18.088,"Corporate","2014-04-17","Standard Class","California","Binders" +"Office Supplies","Bloomington","United States","SV-20785","Stewart Visinsky",0,"2014-04-12","US-2014-158365",47401,"OFF-PA-10000289","Xerox 213",15.552,5,"Central",9098,32.4,"Consumer","2014-04-17","Standard Class","Indiana","Paper" +"Technology","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"TEC-PH-10000576","AT&T 1080 Corded phone",38.357197,4,"West",2795,438.36798,"Corporate","2014-04-17","Standard Class","California","Phones" +"Technology","Redondo Beach","United States","KN-16450","Kean Nguyen",0.2,"2014-04-12","CA-2014-154599",90278,"TEC-PH-10001557","Pyle PMP37LED",94.0702,14,"West",2794,1075.088,"Corporate","2014-04-17","Standard Class","California","Phones" +"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.2,"2014-04-13","CA-2014-122336",19140,"OFF-AR-10000122","Newell 314",1.116,4,"East",341,17.855999,"Corporate","2014-04-17","Second Class","Pennsylvania","Art" +"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.7,"2014-04-13","CA-2014-122336",19140,"OFF-BI-10003656","Fellowes PB200 Plastic Comb Binding Machine",-407.976,10,"East",342,509.97,"Corporate","2014-04-17","Second Class","Pennsylvania","Binders" 
+"Office Supplies","Philadelphia","United States","JD-15895","Jonathan Doherty",0.2,"2014-04-13","CA-2014-122336",19140,"OFF-FA-10002780","Staples",10.072399,13,"East",343,30.992,"Corporate","2014-04-17","Second Class","Pennsylvania","Fasteners" +"Office Supplies","Baltimore","United States","BF-11170","Ben Ferrer",0,"2014-04-13","CA-2014-167724",21215,"OFF-LA-10002368","Avery 479",3.6017997,3,"East",6708,7.8300004,"Home Office","2014-04-18","Standard Class","Maryland","Labels" +"Technology","Philadelphia","United States","JD-15895","Jonathan Doherty",0.4,"2014-04-13","CA-2014-122336",19140,"TEC-PH-10000702","Square Credit Card Reader. 4 1/2"" x 4 1/2"" x 1"". White",8.391601,12,"East",344,71.928,"Corporate","2014-04-17","Second Class","Pennsylvania","Phones" +"Furniture","Los Angeles","United States","PS-18970","Paul Stevenson",0,"2014-04-15","US-2014-120740",90049,"FUR-FU-10004091","Howard Miller 13"" Diameter Goldtone Round Wall Clock",76.981606,4,"West",7658,187.76,"Home Office","2014-04-15","Same Day","California","Furnishings" +"Office Supplies","Los Angeles","United States","PS-18970","Paul Stevenson",0,"2014-04-15","US-2014-120740",90049,"OFF-AP-10000240","Belkin F9G930V10-GRY 9 Outlet Surge",31.0184,2,"West",7657,106.96,"Home Office","2014-04-15","Same Day","California","Appliances" +"Office Supplies","Wilmington","United States","SP-20650","Stephanie Phelps",0.2,"2014-04-16","CA-2014-155208",28403,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 
4/Pack",9.768001,6,"South",404,39.072,"Corporate","2014-04-20","Standard Class","North Carolina","Art" +"Furniture","Houston","United States","KM-16720","Kunst Miller",0.3,"2014-04-18","CA-2014-100678",77095,"FUR-CH-10002602","DMI Arturo Collection Mission-style Design Wood Chair",-18.117601,3,"Central",6570,317.05798,"Consumer","2014-04-22","Standard Class","Texas","Chairs" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-AR-10000657","Binney & Smith inkTank Desk Highlighter. Chisel Tip. Yellow. 12/Box",3.5475,5,"West",3535,10.75,"Consumer","2014-04-23","Standard Class","California","Art" +"Office Supplies","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"OFF-AR-10001868","Prang Dustless Chalk Sticks",1.008,2,"Central",6569,2.6879997,"Consumer","2014-04-22","Standard Class","Texas","Art" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-AR-10002375","Newell 351",3.8047998,4,"West",3534,13.120001,"Consumer","2014-04-23","Standard Class","California","Art" +"Office Supplies","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"OFF-EN-10000056","Cameo Buff Policy Envelopes",50.406303,3,"Central",6571,149.352,"Consumer","2014-04-22","Standard Class","Texas","Envelopes" +"Office Supplies","Los Angeles","United States","JL-15835","John Lee",0,"2014-04-18","CA-2014-110849",90049,"OFF-FA-10000134","Advantus Push Pins. 
Aluminum Head",3.6022,2,"West",3536,11.620001,"Consumer","2014-04-23","Standard Class","California","Fasteners" +"Technology","Houston","United States","KM-16720","Kunst Miller",0.2,"2014-04-18","CA-2014-100678",77095,"TEC-AC-10000474","Kensington Expert Mouse Optical USB Trackball for PC or Mac",28.497,3,"Central",6572,227.976,"Consumer","2014-04-22","Standard Class","Texas","Accessories" +"Technology","Los Angeles","United States","JL-15835","John Lee",0.2,"2014-04-18","CA-2014-110849",90049,"TEC-MA-10002859","Ativa MDM8000 8-Sheet Micro-Cut Shredder",97.1892,4,"West",3533,287.968,"Consumer","2014-04-23","Standard Class","California","Machines" +"Furniture","San Jose","United States","NF-18595","Nicole Fjeld",0,"2014-04-19","CA-2014-169460",95123,"FUR-FU-10004017","Executive Impressions 13"" Chairman Wall Clock",26.649002,3,"West",4570,76.14,"Home Office","2014-04-21","Second Class","California","Furnishings" +"Office Supplies","Arlington","United States","AG-10390","Allen Goldenen",0,"2014-04-19","CA-2014-158470",22204,"OFF-BI-10003638","GBC Durable Plastic Covers",26.703001,3,"South",7843,58.05,"Consumer","2014-04-23","Second Class","Virginia","Binders" +"Office Supplies","Arlington","United States","AG-10390","Allen Goldenen",0,"2014-04-19","CA-2014-158470",22204,"OFF-PA-10001569","Xerox 232",34.2144,11,"South",7844,71.28,"Consumer","2014-04-23","Second Class","Virginia","Paper" +"Furniture","Los Angeles","United States","BP-11230","Benjamin Patterson",0,"2014-04-20","CA-2014-101462",90045,"FUR-FU-10000409","GE 4 Foot Flourescent Tube. 
40 Watt",27.5632,4,"West",6850,59.920002,"Consumer","2014-04-25","Standard Class","California","Furnishings" +"Furniture","Houston","United States","SV-20365","Seth Vernon",0.3,"2014-04-20","CA-2014-134572",77070,"FUR-TA-10001705","Bush Advantage Collection Round Conference Table",-95.67,5,"Central",5076,744.1,"Consumer","2014-04-22","Second Class","Texas","Tables" +"Furniture","Houston","United States","SV-20365","Seth Vernon",0.3,"2014-04-20","CA-2014-134572",77070,"FUR-TA-10004442","Riverside Furniture Stanwyck Manor Table Series",-131.951,2,"Central",5078,401.59,"Consumer","2014-04-22","Second Class","Texas","Tables" +"Office Supplies","Houston","United States","SV-20365","Seth Vernon",0.2,"2014-04-20","CA-2014-134572",77070,"OFF-ST-10004634","Personal Folder Holder. Ebony",5.605,5,"Central",5077,44.84,"Consumer","2014-04-22","Second Class","Texas","Storage" +"Office Supplies","Los Angeles","United States","AA-10375","Allen Armold",0.2,"2014-04-21","CA-2014-158064",90008,"OFF-BI-10002976","ACCOHIDE Binder by Acco",5.5755005,5,"West",1173,16.52,"Consumer","2014-04-25","Standard Class","California","Binders" +"Office Supplies","Henderson","United States","DB-13555","Dorothy Badders",0,"2014-04-21","CA-2014-136336",42420,"OFF-ST-10002574","SAFCO Commercial Wire Shelving. Black",0,6,"South",9075,828.83997,"Corporate","2014-04-26","Standard Class","Kentucky","Storage" +"Office Supplies","Troy","United States","AH-10030","Aaron Hawkins",0.2,"2014-04-22","CA-2014-122070",12180,"OFF-BI-10004970","ACCOHIDE 3-Ring Binder. Blue. 
1""",3.3453,3,"East",7165,9.912,"Corporate","2014-04-24","Second Class","New York","Binders" +"Office Supplies","Troy","United States","AH-10030","Aaron Hawkins",0,"2014-04-22","CA-2014-122070",12180,"OFF-EN-10004773","Staple envelope",121.441605,8,"East",7164,247.84,"Corporate","2014-04-24","Second Class","New York","Envelopes" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.3,"2014-04-23","US-2014-150119",43229,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",-12.058801,2,"East",3406,281.372,"Home Office","2014-04-27","Standard Class","Ohio","Chairs" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.3,"2014-04-23","US-2014-150119",43229,"FUR-CH-10002965","Global Leather Highback Executive Chair with Pneumatic Height Adjustment. Black",-12.058801,2,"East",3407,281.372,"Home Office","2014-04-27","Standard Class","Ohio","Chairs" +"Furniture","Columbus","United States","LB-16795","Laurel Beltran",0.2,"2014-04-23","US-2014-150119",43229,"FUR-FU-10002191","G.E. 
Halogen Desk Lamp Bulbs",7.8176003,4,"East",3409,22.336,"Home Office","2014-04-27","Standard Class","Ohio","Furnishings" +"Office Supplies","Columbus","United States","LB-16795","Laurel Beltran",0.7,"2014-04-23","US-2014-150119",43229,"OFF-BI-10000145","Zipper Ring Binder Pockets",-5.2415996,8,"East",3408,7.488,"Home Office","2014-04-27","Standard Class","Ohio","Binders" +"Office Supplies","Philadelphia","United States","TS-21160","Theresa Swint",0.7,"2014-04-23","US-2014-112564",19134,"OFF-BI-10004876","Wilson Jones Suede Grain Vinyl Binders",-1.7514,3,"East",1449,2.502,"Corporate","2014-04-24","First Class","Pennsylvania","Binders" +"Office Supplies","San Francisco","United States","NM-18445","Nathan Mautz",0,"2014-04-23","CA-2014-111857",94109,"OFF-PA-10001878","Xerox 1891",22.9877,1,"West",9290,48.91,"Home Office","2014-04-26","Second Class","California","Paper" +"Furniture","San Gabriel","United States","BT-11305","Beth Thompson",0.2,"2014-04-25","CA-2014-166954",91776,"FUR-CH-10003973","GuestStacker Chair with Chrome Finish Legs",148.704,5,"West",4378,1487.04,"Home Office","2014-04-30","Standard Class","California","Chairs" +"Furniture","San Gabriel","United States","BT-11305","Beth Thompson",0,"2014-04-25","CA-2014-166954",91776,"FUR-FU-10003708","Tenex Traditional Chairmats for Medium Pile Carpet. Standard Lip. 
36"" x 48""",63.6825,5,"West",4376,303.25,"Home Office","2014-04-30","Standard Class","California","Furnishings" +"Office Supplies","San Gabriel","United States","BT-11305","Beth Thompson",0,"2014-04-25","CA-2014-166954",91776,"OFF-AP-10001391","Kensington 6 Outlet MasterPiece HOMEOFFICE Power Control Center",78.508804,3,"West",4377,270.72,"Home Office","2014-04-30","Standard Class","California","Appliances" +"Office Supplies","Houston","United States","DA-13450","Dianna Arnett",0.2,"2014-04-25","US-2014-157406",77095,"OFF-AR-10002221","12 Colored Short Pencils",0.546,3,"Central",1750,6.24,"Home Office","2014-04-29","Standard Class","Texas","Art" +"Office Supplies","Houston","United States","DA-13450","Dianna Arnett",0.2,"2014-04-25","US-2014-157406",77095,"OFF-PA-10003543","Xerox 1985",3.6287997,2,"Central",1749,10.368001,"Home Office","2014-04-29","Standard Class","Texas","Paper" +"Technology","Wilmington","United States","LC-16885","Lena Creighton",0.2,"2014-04-25","CA-2014-110100",28403,"TEC-PH-10004531","AT&T CL2909",37.797,3,"South",2714,302.376,"Consumer","2014-04-29","Standard Class","North Carolina","Phones" +"Furniture","Los Angeles","United States","AG-10300","Aleksandra Gannaway",0.2,"2014-04-26","CA-2014-147298",90049,"FUR-CH-10004886","Bevis Steel Folding Chairs",23.028,3,"West",2615,230.28,"Corporate","2014-05-03","Standard Class","California","Chairs" +"Office Supplies","Los Angeles","United States","MH-17290","Marc Harrigan",0,"2014-04-26","CA-2014-116785",90036,"OFF-AR-10003504","Newell 347",6.206,5,"West",7197,21.4,"Home Office","2014-04-30","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","AG-10300","Aleksandra Gannaway",0.2,"2014-04-26","CA-2014-147298",90049,"OFF-BI-10001525","Acco Pressboard Covers with Storage Hooks. 14 7/8"" x 11"". 
Executive Red",5.715,6,"West",2616,18.288,"Corporate","2014-05-03","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","MH-17290","Marc Harrigan",0,"2014-04-26","CA-2014-116785",90036,"OFF-LA-10000305","Avery 495",5.796,2,"West",7198,12.6,"Home Office","2014-04-30","Standard Class","California","Labels" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-AR-10004752","Blackstonian Pencils",1.4952,2,"East",1776,5.34,"Corporate","2014-05-03","Standard Class","Maryland","Art" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-BI-10001628","Acco Data Flex Cable Posts For Top & Bottom Load Binders. 6"" Capacity",9.387,2,"East",1774,20.859999,"Corporate","2014-05-03","Standard Class","Maryland","Binders" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-LA-10000452","Avery 488",1.512,1,"East",1777,3.1499999,"Corporate","2014-05-03","Standard Class","Maryland","Labels" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-LA-10001613","Avery File Folder Labels",2.5056002,3,"East",3609,6.912,"Corporate","2014-05-03","Standard Class","Ohio","Labels" +"Office Supplies","Baltimore","United States","MY-18295","Muhammed Yedwab",0,"2014-04-28","CA-2014-118976",21215,"OFF-ST-10000877","Recycled Steel Personal File for Standard File Folders",129.3786,9,"East",1775,497.61002,"Corporate","2014-05-03","Standard Class","Maryland","Storage" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-ST-10001128","Carina Mini System Audio Rack. 
Model AR050B",-37.733196,2,"East",3611,177.568,"Corporate","2014-05-03","Standard Class","Ohio","Storage" +"Office Supplies","Newark","United States","GB-14530","George Bell",0.2,"2014-04-28","CA-2014-140228",43055,"OFF-ST-10002214","X-Rack File for Hanging Folders",2.0322,3,"East",3610,27.096,"Corporate","2014-05-03","Standard Class","Ohio","Storage" +"Technology","Jamestown","United States","DS-13180","David Smith",0,"2014-04-28","CA-2014-132983",14701,"TEC-AC-10000736","Logitech G600 MMO Gaming Mouse",57.5928,2,"East",8921,159.98,"Corporate","2014-05-03","Standard Class","New York","Accessories" +"Technology","San Francisco","United States","AI-10855","Arianne Irving",0.2,"2014-04-28","CA-2014-131387",94122,"TEC-PH-10001459","Samsung Galaxy Mega 6.3",125.997,5,"West",8452,1679.9601,"Consumer","2014-04-30","First Class","California","Phones" +"Furniture","Memphis","United States","LA-16780","Laura Armstrong",0.2,"2014-04-29","CA-2014-107811",38109,"FUR-CH-10001394","Global Leather Executive Chair",70.198,2,"South",6375,561.58405,"Corporate","2014-05-03","Standard Class","Tennessee","Chairs" +"Furniture","Lake Charles","United States","HG-14845","Harry Greene",0,"2014-04-29","CA-2014-142727",70601,"FUR-CH-10002304","Global Stack Chair without Arms. 
Black",12.99,2,"South",2891,51.960003,"Consumer","2014-05-01","Second Class","Louisiana","Chairs" +"Furniture","Rochester","United States","LL-16840","Lauren Leatherbury",0,"2014-04-29","CA-2014-134061",14609,"FUR-FU-10001424","Dax Clear Box Frame",5.9364004,2,"East",885,17.46,"Consumer","2014-05-04","Standard Class","New York","Furnishings" +"Office Supplies","Lake Charles","United States","HG-14845","Harry Greene",0,"2014-04-29","CA-2014-142727",70601,"OFF-BI-10000136","Avery Non-Stick Heavy Duty View Round Locking Ring Binders",8.611201,3,"South",2892,17.94,"Consumer","2014-05-01","Second Class","Louisiana","Binders" +"Office Supplies","Memphis","United States","LA-16780","Laura Armstrong",0.2,"2014-04-29","CA-2014-107811",38109,"OFF-ST-10000798","2300 Heavy-Duty Transfer File Systems by Perma",-1.249,5,"South",6376,99.92,"Corporate","2014-05-03","Standard Class","Tennessee","Storage" +"Technology","Dover","United States","KH-16360","Katherine Hughes",0,"2014-04-29","CA-2014-118304",19901,"TEC-PH-10000376","Square Credit Card Reader",5.1948004,2,"East",8267,19.98,"Consumer","2014-05-04","Standard Class","Delaware","Phones" +"Office Supplies","Louisville","United States","JM-16195","Justin MacKendrick",0,"2014-04-30","CA-2014-160094",40214,"OFF-AR-10004010","Hunt Boston Vacuum Mount KS Pencil Sharpener",45.487003,5,"South",8636,174.95,"Consumer","2014-05-02","First Class","Kentucky","Art" +"Office Supplies","Louisville","United States","JM-16195","Justin MacKendrick",0,"2014-04-30","CA-2014-160094",40214,"OFF-ST-10000585","Economy Rollaway Files",214.76,5,"South",8637,826,"Consumer","2014-05-02","First Class","Kentucky","Storage" +"Technology","Jackson","United States","TM-21010","Tamara Manning",0,"2014-04-30","CA-2014-156006",39212,"TEC-AC-10002550","Maxell 4.7GB DVD-RW 3/Pack",16.248602,3,"South",5005,47.79,"Consumer","2014-05-02","Second Class","Mississippi","Accessories" +"Office Supplies","Columbus","United States","AR-10510","Andrew 
Roberts",0.2,"2014-05-02","CA-2014-115056",43229,"OFF-AP-10003971","Belkin 6 Outlet Metallic Surge Strip",1.9602,3,"East",5825,26.136002,"Consumer","2014-05-02","Same Day","Ohio","Appliances" +"Technology","Lakeland","United States","CS-11860","Cari Schnelling",0.2,"2014-05-02","CA-2014-124618",33801,"TEC-CO-10004202","Brother DCP1000 Digital 3 in 1 Multifunction Machine",89.997,2,"South",7037,479.98398,"Consumer","2014-05-04","Second Class","Florida","Copiers" +"Office Supplies","Yonkers","United States","NC-18415","Nathan Cano",0.2,"2014-05-03","CA-2014-135755",10701,"OFF-BI-10003981","Avery Durable Plastic 1"" Binders",3.9498,3,"East",3863,10.896,"Consumer","2014-05-10","Standard Class","New York","Binders" +"Office Supplies","Yonkers","United States","NC-18415","Nathan Cano",0.2,"2014-05-03","CA-2014-135755",10701,"OFF-BI-10004828","GBC Poly Designer Binding Covers",14.563801,3,"East",3862,40.176,"Consumer","2014-05-10","Standard Class","New York","Binders" +"Office Supplies","Appleton","United States","JS-15940","Joni Sundaresam",0,"2014-05-03","CA-2014-130274",54915,"OFF-LA-10002195","Avery 481",10.348801,7,"Central",1553,21.56,"Home Office","2014-05-05","First Class","Wisconsin","Labels" +"Furniture","Middletown","United States","AA-10480","Andrew Allen",0,"2014-05-04","CA-2014-155271",6457,"FUR-FU-10001473","DAX Wood Document Frame",9.8856,2,"East",1460,27.46,"Consumer","2014-05-04","Same Day","Connecticut","Furnishings" +"Furniture","Seattle","United States","BS-11365","Bill Shonely",0,"2014-05-04","US-2014-148194",98105,"FUR-FU-10001852","Eldon Regeneration Recycled Desk Accessories. 
Smoke",3.8976,7,"West",7999,12.18,"Corporate","2014-05-07","First Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","BS-11365","Bill Shonely",0,"2014-05-04","US-2014-148194",98105,"OFF-AP-10000696","Holmes Odor Grabber",19.0344,4,"West",8000,57.68,"Corporate","2014-05-07","First Class","Washington","Appliances" +"Office Supplies","Houston","United States","NC-18340","Nat Carroll",0.2,"2014-05-04","CA-2014-111899",77036,"OFF-AR-10001725","Boston Home & Office Model 2000 Electric Pencil Sharpeners",2.8379998,2,"Central",7296,37.84,"Consumer","2014-05-05","First Class","Texas","Art" +"Office Supplies","Jackson","United States","PM-18940","Paul MacIntyre",0,"2014-05-04","CA-2014-117709",49201,"OFF-BI-10001294","Fellowes Binding Cases",21.06,4,"Central",1166,46.8,"Consumer","2014-05-08","Standard Class","Michigan","Binders" +"Office Supplies","Houston","United States","NC-18340","Nat Carroll",0.2,"2014-05-04","CA-2014-111899",77036,"OFF-FA-10000840","OIC Thumb-Tacks",1.8468,6,"Central",7297,5.472,"Consumer","2014-05-05","First Class","Texas","Fasteners" +"Furniture","San Antonio","United States","EB-13870","Emily Burns",0.3,"2014-05-05","CA-2014-110219",78207,"FUR-CH-10001146","Global Value Mid-Back Manager's Chair. 
Gray",-9.1335,3,"Central",4560,127.869,"Consumer","2014-05-08","First Class","Texas","Chairs" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-AR-10003651","Newell 350",1.9023999,2,"South",4213,6.56,"Consumer","2014-05-09","Second Class","Kentucky","Art" +"Office Supplies","Arlington","United States","GD-14590","Giulietta Dortch",0,"2014-05-05","CA-2014-111934",22204,"OFF-BI-10004364","Storex Dura Pro Binders",5.346,2,"South",8596,11.88,"Corporate","2014-05-07","First Class","Virginia","Binders" +"Office Supplies","Arlington","United States","GD-14590","Giulietta Dortch",0,"2014-05-05","CA-2014-111934",22204,"OFF-PA-10000474","Easy-staple paper",16.6568,1,"South",8597,35.440002,"Corporate","2014-05-07","First Class","Virginia","Paper" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-PA-10001363","Xerox 1933",11.5432005,2,"South",4214,24.56,"Consumer","2014-05-09","Second Class","Kentucky","Paper" +"Office Supplies","Richmond","United States","PF-19120","Peter Fuller",0,"2014-05-05","CA-2014-166457",40475,"OFF-PA-10003016","Adams ""While You Were Out"" Message Pads",4.239,3,"South",4212,9.42,"Consumer","2014-05-09","Second Class","Kentucky","Paper" +"Office Supplies","Freeport","United States","KH-16330","Katharine Harms",0.2,"2014-05-05","CA-2014-160738",61032,"OFF-ST-10003442","Eldon Portable Mobile Manager",3.9592001,2,"Central",4656,45.248,"Corporate","2014-05-10","Standard Class","Illinois","Storage" +"Office Supplies","Los Angeles","United States","SN-20710","Steve Nguyen",0,"2014-05-06","US-2014-154879",90004,"OFF-AR-10001897","Model L Table or Wall-Mount Pencil Sharpener",30.223202,6,"West",4411,107.94,"Home Office","2014-05-11","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","HP-14815","Harold Pawlan",0.2,"2014-05-06","CA-2014-121664",90049,"OFF-BI-10003684","Wilson Jones Legal Size Ring 
Binders",52.776,8,"West",1335,140.736,"Home Office","2014-05-10","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","SN-20710","Steve Nguyen",0,"2014-05-06","US-2014-154879",90004,"OFF-LA-10004425","Staple-on labels",2.7166002,2,"West",4410,5.7799997,"Home Office","2014-05-11","Standard Class","California","Labels" +"Furniture","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"FUR-CH-10004675","Lifetime Advantage Folding Chairs. 4/Carton",244.24959,4,"East",6566,872.32,"Consumer","2014-05-12","Standard Class","Massachusetts","Chairs" +"Furniture","Lawrence","United States","CT-11995","Carol Triggs",0.3,"2014-05-07","CA-2014-166086",1841,"FUR-TA-10003469","Balt Split Level Computer Training Table",-38.85,2,"East",6564,194.25,"Consumer","2014-05-12","Standard Class","Massachusetts","Tables" +"Office Supplies","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"OFF-AR-10002578","Newell 335",2.5056002,3,"East",6565,8.64,"Consumer","2014-05-12","Standard Class","Massachusetts","Art" +"Office Supplies","Lawrence","United States","CT-11995","Carol Triggs",0,"2014-05-07","CA-2014-166086",1841,"OFF-BI-10001078","Acco PRESSTEX Data Binder with Storage Hooks. Dark Blue. 
14 7/8"" X 11""",7.9086,3,"East",6563,16.140001,"Consumer","2014-05-12","Standard Class","Massachusetts","Binders" +"Technology","Philadelphia","United States","KT-16480","Kean Thornton",0.4,"2014-05-08","CA-2014-116666",19134,"TEC-CO-10001449","Hewlett Packard LaserJet 3310 Copier",239.99599,5,"East",8800,1799.97,"Consumer","2014-05-10","First Class","Pennsylvania","Copiers" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-LA-10001641","Avery 518",4.5360003,3,"South",3539,9.45,"Corporate","2014-05-09","Same Day","Kentucky","Labels" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-LA-10003121","Avery 506",9.499,5,"South",3540,20.65,"Corporate","2014-05-09","Same Day","Kentucky","Labels" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-PA-10002689","Weyerhaeuser First Choice Laser/Copy Paper (20Lb. 
and 88 Bright)",21.7728,7,"South",3541,45.36,"Corporate","2014-05-09","Same Day","Kentucky","Paper" +"Office Supplies","Henderson","United States","SB-20290","Sean Braxton",0,"2014-05-09","CA-2014-158771",42420,"OFF-ST-10000107","Fellowes Super Stor/Drawer",14.985,3,"South",3538,83.25,"Corporate","2014-05-09","Same Day","Kentucky","Storage" +"Technology","San Francisco","United States","PG-18820","Patrick Gardner",0,"2014-05-09","US-2014-102071",94110,"TEC-AC-10000109","Sony Micro Vault Click 16 GB USB 2.0 Flash Drive",40.312798,3,"West",826,167.97,"Consumer","2014-05-15","Standard Class","California","Accessories" +"Technology","San Francisco","United States","PG-18820","Patrick Gardner",0,"2014-05-09","US-2014-102071",94110,"TEC-AC-10003441","Kingston Digital DataTraveler 32GB USB 2.0",4.068,4,"West",825,67.8,"Consumer","2014-05-15","Standard Class","California","Accessories" +"Furniture","Philadelphia","United States","AF-10885","Art Foster",0.5,"2014-05-10","CA-2014-114181",19134,"FUR-BO-10004467","Bestar Classic Bookcase",-216.9783,7,"East",7555,349.965,"Consumer","2014-05-14","Second Class","Pennsylvania","Bookcases" +"Furniture","San Jose","United States","GM-14680","Greg Matthias",0.2,"2014-05-10","CA-2014-103310",95123,"FUR-CH-10002320","Hon Pagoda Stacking Chairs",32.098,1,"West",9200,256.784,"Consumer","2014-05-15","Standard Class","California","Chairs" +"Office Supplies","Philadelphia","United States","AF-10885","Art Foster",0.2,"2014-05-10","CA-2014-114181",19134,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",5.301,5,"East",7556,22.32,"Consumer","2014-05-14","Second Class","Pennsylvania","Art" +"Office Supplies","Kent","United States","AI-10855","Arianne Irving",0,"2014-05-10","CA-2014-159709",98031,"OFF-EN-10001434","Strathmore #10 Envelopes. 
Ultimate White",77.4837,3,"West",5197,158.13,"Consumer","2014-05-15","Standard Class","Washington","Envelopes" +"Office Supplies","San Jose","United States","GM-14680","Greg Matthias",0,"2014-05-10","CA-2014-103310",95123,"OFF-PA-10004353","Southworth 25% Cotton Premium Laser Paper and Envelopes",19.1808,2,"West",9197,39.960003,"Consumer","2014-05-15","Standard Class","California","Paper" +"Office Supplies","San Jose","United States","GM-14680","Greg Matthias",0,"2014-05-10","CA-2014-103310",95123,"OFF-SU-10004737","Acme Design Stainless Steel Bent Scissors",11.0808,6,"West",9199,41.04,"Consumer","2014-05-15","Standard Class","California","Supplies" +"Technology","San Jose","United States","GM-14680","Greg Matthias",0.2,"2014-05-10","CA-2014-103310",95123,"TEC-PH-10001817","Wilson Electronics DB Pro Signal Booster",125.3,5,"West",9198,1432,"Consumer","2014-05-15","Standard Class","California","Phones" +"Technology","Kent","United States","AI-10855","Arianne Irving",0.2,"2014-05-10","CA-2014-159709",98031,"TEC-PH-10003988","LF Elite 3D Dazzle Designer Hard Case Cover. Lf Stylus Pen and Wiper For Apple Iphone 5c Mini Lite",4.36,5,"West",5198,43.6,"Consumer","2014-05-15","Standard Class","Washington","Phones" +"Furniture","Huntsville","United States","VW-21775","Victoria Wilson",0.3,"2014-05-11","CA-2014-152100",77340,"FUR-CH-10000015","Hon Multipurpose Stacking Arm Chairs",-69.312004,8,"Central",9429,1212.9601,"Corporate","2014-05-16","Standard Class","Texas","Chairs" +"Furniture","Fort Worth","United States","PG-18895","Paul Gonzalez",0.6,"2014-05-11","CA-2014-103849",76106,"FUR-FU-10000723","Deflect-o EconoMat Studded. 
No Bevel Mat for Low Pile Carpeting",-84.29279,4,"Central",546,66.112,"Consumer","2014-05-16","Standard Class","Texas","Furnishings" +"Office Supplies","Watertown","United States","BG-11695","Brooke Gillingham",0,"2014-05-11","CA-2014-127523",13601,"OFF-AP-10004249","Staple holder",9.695701,3,"East",4125,35.91,"Corporate","2014-05-18","Standard Class","New York","Appliances" +"Office Supplies","Chicago","United States","HD-14785","Harold Dahlen",0.8,"2014-05-11","CA-2014-119172",60610,"OFF-BI-10002026","Avery Arch Ring Binders",-172.557,9,"Central",9591,104.58,"Home Office","2014-05-15","Standard Class","Illinois","Binders" +"Office Supplies","Chicago","United States","HD-14785","Harold Dahlen",0.2,"2014-05-11","CA-2014-119172",60610,"OFF-PA-10003036","Black Print Carbonless 8 1/2"" x 8 1/4"" Rapid Memo Book",5.6784,3,"Central",9590,17.472,"Home Office","2014-05-15","Standard Class","Illinois","Paper" +"Technology","Houston","United States","RM-19375","Raymond Messe",0.2,"2014-05-11","CA-2014-139017",77095,"TEC-AC-10001013","Logitech ClearChat Comfort/USB Headset H390",7.6154003,2,"Central",1759,46.864002,"Consumer","2014-05-17","Standard Class","Texas","Accessories" +"Technology","Fort Worth","United States","PG-18895","Paul Gonzalez",0.2,"2014-05-11","CA-2014-103849",76106,"TEC-AC-10001465","SanDisk Cruzer 64 GB USB Flash Drive",7.264,2,"Central",544,58.111996,"Consumer","2014-05-16","Standard Class","Texas","Accessories" +"Technology","Fort Worth","United States","PG-18895","Paul Gonzalez",0.2,"2014-05-11","CA-2014-103849",76106,"TEC-PH-10002597","Xblue XB-1670-86 X16 Small Office Telephone - Titanium",6.2995,1,"Central",545,100.79199,"Consumer","2014-05-16","Standard Class","Texas","Phones" +"Furniture","Milwaukee","United States","HL-15040","Hunter Lopez",0,"2014-05-12","CA-2014-127159",53209,"FUR-FU-10000010","DAX Value U-Channel Document Frames. 
Easel Back",10.7849,7,"Central",1658,34.79,"Consumer","2014-05-15","First Class","Wisconsin","Furnishings" +"Furniture","Franklin","United States","KT-16480","Kean Thornton",0.3,"2014-05-12","CA-2014-155887",2038,"FUR-TA-10002228","Bevis Traditional Conference Table Top. Plinth Base",-130.01039,3,"East",2747,700.05597,"Consumer","2014-05-17","Standard Class","Massachusetts","Tables" +"Furniture","Los Angeles","United States","AH-10030","Aaron Hawkins",0.2,"2014-05-13","CA-2014-113768",90004,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 19w x 6d x 43h",20.9592,6,"West",5355,279.456,"Corporate","2014-05-19","Standard Class","California","Chairs" +"Office Supplies","Hialeah","United States","EH-14005","Erica Hernandez",0.7,"2014-05-13","CA-2014-140165",33012,"OFF-BI-10004519","GBC DocuBind P100 Manual Binding Machine",-331.96,8,"South",1888,398.352,"Home Office","2014-05-15","First Class","Florida","Binders" +"Office Supplies","Hialeah","United States","EH-14005","Erica Hernandez",0.2,"2014-05-13","CA-2014-140165",33012,"OFF-FA-10002815","Staples",2.3976,2,"South",1887,7.104,"Home Office","2014-05-15","First Class","Florida","Fasteners" +"Office Supplies","Los Angeles","United States","AH-10030","Aaron Hawkins",0,"2014-05-13","CA-2014-113768",90004,"OFF-PA-10003063","EcoTones Memo Sheets",3.84,2,"West",5356,8,"Corporate","2014-05-19","Standard Class","California","Paper" +"Office Supplies","West Jordan","United States","AG-10270","Alejandro Grove",0,"2014-05-13","CA-2014-167164",84084,"OFF-ST-10000107","Fellowes Super Stor/Drawer",9.99,2,"West",18,55.5,"Consumer","2014-05-15","Second Class","Utah","Storage" +"Technology","Oakland","United States","NP-18325","Naresj Patel",0,"2014-05-13","CA-2014-114433",94601,"TEC-AC-10002800","Plantronics Audio 478 Stereo USB Headset",52.489502,3,"West",3217,149.97,"Consumer","2014-05-17","Standard Class","California","Accessories" +"Furniture","Saint Petersburg","United States","AJ-10795","Anthony 
Johnson",0.2,"2014-05-14","CA-2014-106810",33710,"FUR-FU-10004306","Electrix Halogen Magnifier Lamp",23.316,2,"South",769,310.87997,"Corporate","2014-05-20","Standard Class","Florida","Furnishings" +"Furniture","Los Angeles","United States","PW-19030","Pauline Webber",0.2,"2014-05-16","CA-2014-125150",90036,"FUR-CH-10002439","Iceberg Nesting Folding Chair. 19w x 6d x 43h",17.466,5,"West",7825,232.88,"Corporate","2014-05-23","Standard Class","California","Chairs" +"Technology","San Francisco","United States","CC-12475","Cindy Chapman",0,"2014-05-16","US-2014-163146",94122,"TEC-AC-10002217","Imation Clip USB flash drive - 8 GB",3.3839998,3,"West",8536,56.4,"Consumer","2014-05-20","Standard Class","California","Accessories" +"Office Supplies","Eau Claire","United States","PA-19060","Pete Armstrong",0,"2014-05-17","CA-2014-148761",54703,"OFF-BI-10000666","Surelock Post Binders",45.84,3,"Central",3006,91.68,"Home Office","2014-05-21","Standard Class","Wisconsin","Binders" +"Furniture","Akron","United States","TT-21070","Ted Trevino",0.2,"2014-05-18","CA-2014-164224",44312,"FUR-FU-10000308","Deflect-o Glass Clear Studded Chair Mats",3.7308,3,"East",3201,149.232,"Consumer","2014-05-20","Second Class","Ohio","Furnishings" +"Office Supplies","Akron","United States","TT-21070","Ted Trevino",0.2,"2014-05-18","CA-2014-164224",44312,"OFF-PA-10001526","Xerox 1949",5.7768,4,"East",3202,15.935999,"Consumer","2014-05-20","Second Class","Ohio","Paper" +"Office Supplies","Dallas","United States","GA-14515","George Ashbrook",0.2,"2014-05-18","CA-2014-133963",75220,"OFF-PA-10001526","Xerox 1949",1.4441999,1,"Central",9833,3.984,"Consumer","2014-05-22","Second Class","Texas","Paper" +"Technology","Cleveland","United States","BS-11755","Bruce Stewart",0.4,"2014-05-18","CA-2014-103373",44105,"TEC-PH-10002885","Apple iPhone 5",-168.9558,2,"East",1830,779.796,"Consumer","2014-05-24","Standard Class","Ohio","Phones" +"Office Supplies","Midland","United States","JH-15820","John 
Huston",0,"2014-05-19","US-2014-107699",48640,"OFF-BI-10001249","Avery Heavy-Duty EZD View Binder with Locking Rings",26.4132,9,"Central",4143,57.420002,"Consumer","2014-05-23","Standard Class","Michigan","Binders" +"Office Supplies","Virginia Beach","United States","TD-20995","Tamara Dahlen",0,"2014-05-19","CA-2014-130155",23464,"OFF-SU-10004737","Acme Design Stainless Steel Bent Scissors",9.233999,5,"South",9603,34.2,"Consumer","2014-05-22","First Class","Virginia","Supplies" +"Furniture","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"FUR-FU-10000076","24-Hour Round Wall Clock",60.139797,7,"East",2132,139.86,"Corporate","2014-05-22","Second Class","New Hampshire","Furnishings" +"Furniture","San Marcos","United States","MC-17590","Matt Collister",0.6,"2014-05-20","CA-2014-130673",78666,"FUR-FU-10003489","Contemporary Borderless Frame",-5.9409,3,"Central",8581,10.332,"Corporate","2014-05-22","Second Class","Texas","Furnishings" +"Office Supplies","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"OFF-AP-10000124","Acco 6 Outlet Guardian Basic Surge Suppressor",9.3184,4,"East",2130,33.280003,"Corporate","2014-05-22","Second Class","New Hampshire","Appliances" +"Office Supplies","Dover","United States","RF-19345","Randy Ferguson",0,"2014-05-20","US-2014-114188",3820,"OFF-AR-10004511","Sanford Colorific Scented Colored Pencils. 
12/Pack",11.9412,9,"East",2131,38.52,"Corporate","2014-05-22","Second Class","New Hampshire","Art" +"Office Supplies","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"OFF-PA-10000289","Xerox 213",3.6287997,2,"Central",8582,10.368001,"Corporate","2014-05-22","Second Class","Texas","Paper" +"Office Supplies","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"OFF-ST-10000636","Rogers Profile Extra Capacity Storage Tub",-13.392,5,"Central",8584,66.96,"Corporate","2014-05-22","Second Class","Texas","Storage" +"Technology","San Marcos","United States","MC-17590","Matt Collister",0.2,"2014-05-20","CA-2014-130673",78666,"TEC-AC-10004227","SanDisk Ultra 16 GB MicroSDHC Class 10 Memory Card",-3.6371999,2,"Central",8583,20.784,"Corporate","2014-05-22","Second Class","Texas","Accessories" +"Furniture","Houston","United States","KH-16360","Katherine Hughes",0.3,"2014-05-21","CA-2014-127166",77070,"FUR-CH-10003396","Global Deluxe Steno Chair",-29.252401,2,"Central",9793,107.771996,"Consumer","2014-05-23","Second Class","Texas","Chairs" +"Office Supplies","Lawrence","United States","FG-14260","Frank Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-AP-10001293","Belkin 8 Outlet Surge Protector",68.8464,6,"East",5879,245.88,"Home Office","2014-05-26","Standard Class","Massachusetts","Appliances" +"Office Supplies","San Diego","United States","BT-11530","Bradley Talbott",0,"2014-05-21","CA-2014-104780",92037,"OFF-AR-10003514","4009 Highlighters by Sanford",10.5072,8,"West",3079,31.84,"Home Office","2014-05-25","Standard Class","California","Art" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.8,"2014-05-21","CA-2014-127166",77070,"OFF-BI-10000977","Ibico Plastic Spiral Binding Combs",-31.008,3,"Central",9795,18.240002,"Consumer","2014-05-23","Second Class","Texas","Binders" +"Office Supplies","Lawrence","United States","FG-14260","Frank 
Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-BI-10001071","GBC ProClick Punch Binding System",219.4514,7,"East",5877,447.86,"Home Office","2014-05-26","Standard Class","Massachusetts","Binders" +"Office Supplies","Virginia Beach","United States","MW-18235","Mitch Willingham",0,"2014-05-21","CA-2014-117639",23464,"OFF-BI-10003925","Fellowes PB300 Plastic Comb Binding Machine",1276.487,7,"South",995,2715.93,"Corporate","2014-05-25","Standard Class","Virginia","Binders" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.2,"2014-05-21","CA-2014-127166",77070,"OFF-EN-10003134","Staple envelope",21.024,6,"Central",9792,56.064003,"Consumer","2014-05-23","Second Class","Texas","Envelopes" +"Office Supplies","Houston","United States","KH-16360","Katherine Hughes",0.2,"2014-05-21","CA-2014-127166",77070,"OFF-PA-10001560","Adams Telephone Message Books. 5 1/4” x 11”",1.6308,1,"Central",9794,4.8320003,"Consumer","2014-05-23","Second Class","Texas","Paper" +"Office Supplies","Lawrence","United States","FG-14260","Frank Gastineau",0,"2014-05-21","CA-2014-152443",1841,"OFF-PA-10003022","Xerox 1992",8.7906,3,"East",5878,17.94,"Home Office","2014-05-26","Standard Class","Massachusetts","Paper" +"Technology","Virginia Beach","United States","MW-18235","Mitch Willingham",0,"2014-05-21","CA-2014-117639",23464,"TEC-PH-10001530","Plantronics Voyager Pro Legend",173.0316,3,"South",996,617.97003,"Corporate","2014-05-25","Standard Class","Virginia","Phones" +"Technology","Decatur","United States","EH-14185","Evan Henry",0,"2014-05-22","CA-2014-143840",35601,"TEC-PH-10002660","Nortel Networks T7316 E Nt8 B27",33.995003,2,"South",3268,135.98,"Consumer","2014-05-29","Standard Class","Alabama","Phones" +"Technology","Decatur","United States","EH-14185","Evan Henry",0,"2014-05-22","CA-2014-143840",35601,"TEC-PH-10003171","Plantronics Encore H101 Dual Earpieces Headset",12.5859995,1,"South",3269,44.95,"Consumer","2014-05-29","Standard Class","Alabama","Phones" 
+"Office Supplies","New York City","United States","GT-14710","Greg Tran",0,"2014-05-23","US-2014-135881",10035,"OFF-AP-10002118","1.7 Cubic Foot Compact ""Cube"" Office Refrigerators",56.203197,1,"East",7013,208.16,"Consumer","2014-05-27","Standard Class","New York","Appliances" +"Office Supplies","Philadelphia","United States","AR-10510","Andrew Roberts",0.2,"2014-05-23","US-2014-105767",19134,"OFF-AR-10001246","Newell 317",2.3813999,9,"East",609,21.168,"Consumer","2014-05-27","Standard Class","Pennsylvania","Art" +"Office Supplies","New York City","United States","GT-14710","Greg Tran",0.2,"2014-05-23","US-2014-135881",10035,"OFF-BI-10000829","Avery Non-Stick Binders",5.8370004,5,"East",7011,17.96,"Consumer","2014-05-27","Standard Class","New York","Binders" +"Office Supplies","Philadelphia","United States","AR-10510","Andrew Roberts",0.7,"2014-05-23","US-2014-105767",19134,"OFF-BI-10000848","Angle-D Ring Binders",-2.6256,2,"East",608,3.282,"Consumer","2014-05-27","Standard Class","Pennsylvania","Binders" +"Office Supplies","Bellevue","United States","EH-13990","Erica Hackney",0,"2014-05-23","CA-2014-138513",98006,"OFF-PA-10003177","Xerox 1999",6.2208,2,"West",6628,12.96,"Consumer","2014-05-27","Standard Class","Washington","Paper" +"Office Supplies","New York City","United States","GT-14710","Greg Tran",0,"2014-05-23","US-2014-135881",10035,"OFF-SU-10003002","Letter Slitter",0.15120001,2,"East",7012,5.04,"Consumer","2014-05-27","Standard Class","New York","Supplies" +"Technology","Philadelphia","United States","AR-10510","Andrew Roberts",0.4,"2014-05-23","US-2014-105767",19134,"TEC-PH-10003092","Motorola L804",-10.117801,2,"East",610,55.188,"Consumer","2014-05-27","Standard Class","Pennsylvania","Phones" +"Office Supplies","Baltimore","United States","RP-19390","Resi Pölking",0,"2014-05-24","CA-2014-143413",21215,"OFF-PA-10002319","Xerox 1944",56.9772,3,"East",2704,116.28,"Consumer","2014-05-30","Standard Class","Maryland","Paper" +"Furniture","Chicago","United 
States","JL-15235","Janet Lee",0.6,"2014-05-25","US-2014-130379",60623,"FUR-FU-10002553","Electrix Incandescent Magnifying Lamp. Black",-24.189001,2,"Central",1820,29.32,"Consumer","2014-05-29","Standard Class","Illinois","Furnishings" +"Office Supplies","Chicago","United States","JL-15235","Janet Lee",0.8,"2014-05-25","US-2014-130379",60623,"OFF-AP-10001394","Harmony Air Purifier",-166.32,2,"Central",1819,75.6,"Consumer","2014-05-29","Standard Class","Illinois","Appliances" +"Office Supplies","New York City","United States","BW-11065","Barry Weirich",0,"2014-05-25","CA-2014-100391",10035,"OFF-PA-10001471","Strathmore Photo Frame Cards",6.7251997,2,"East",9441,14.620001,"Consumer","2014-05-29","Standard Class","New York","Paper" +"Furniture","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.15,"2014-05-26","CA-2014-156349",90008,"FUR-BO-10000362","Sauder Inglewood Library Bookcases",27.356798,2,"West",1611,290.66602,"Corporate","2014-05-30","Standard Class","California","Bookcases" +"Furniture","Los Angeles","United States","HF-14995","Herbert Flentye",0.2,"2014-05-26","CA-2014-158029",90008,"FUR-CH-10000988","Hon Olson Stacker Stools",22.529602,2,"West",2752,225.29599,"Consumer","2014-05-30","Standard Class","California","Chairs" +"Furniture","Chicago","United States","MM-18055","Michelle Moray",0.3,"2014-05-26","CA-2014-144029",60623,"FUR-CH-10003981","Global Commerce Series Low-Back Swivel/Tilt Chairs",-5.1396,2,"Central",6741,359.77203,"Consumer","2014-05-31","Standard Class","Illinois","Chairs" +"Office Supplies","Chicago","United States","MM-18055","Michelle Moray",0.2,"2014-05-26","CA-2014-144029",60623,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",3.1806,3,"Central",6742,13.392,"Consumer","2014-05-31","Standard Class","Illinois","Art" +"Office Supplies","Murray","United States","Dp-13240","Dean percer",0,"2014-05-26","CA-2014-129574",84107,"OFF-PA-10002893","Wirebound Service Call Books. 
5 1/2"" x 4""",23.232002,5,"West",2761,48.4,"Home Office","2014-05-29","First Class","Utah","Paper" +"Office Supplies","Chicago","United States","MM-18055","Michelle Moray",0.2,"2014-05-26","CA-2014-144029",60623,"OFF-ST-10001837","SAFCO Mobile Desk Side File. Wire Frame",7.6968,3,"Central",6740,102.62399,"Consumer","2014-05-31","Standard Class","Illinois","Storage" +"Technology","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.2,"2014-05-26","CA-2014-156349",90008,"TEC-PH-10000441","VTech DS6151",20.1584,2,"West",1612,201.584,"Corporate","2014-05-30","Standard Class","California","Phones" +"Technology","Los Angeles","United States","ML-17395","Marina Lichtenstein",0.2,"2014-05-26","CA-2014-156349",90008,"TEC-PH-10002726","netTALK DUO VoIP Telephone Service",31.494,2,"West",1613,83.984,"Corporate","2014-05-30","Standard Class","California","Phones" +"Furniture","San Diego","United States","MH-17785","Maya Herman",0.2,"2014-05-27","CA-2014-124429",92105,"FUR-TA-10002607","KI Conference Tables",-28.355999,10,"West",747,567.12,"Corporate","2014-05-27","Same Day","California","Tables" +"Office Supplies","Chicago","United States","LE-16810","Laurel Elliston",0.8,"2014-05-27","US-2014-117058",60653,"OFF-BI-10004139","Fellowes Presentation Covers for Comb Binding Machines",-30.554998,6,"Central",1326,17.46,"Consumer","2014-05-30","First Class","Illinois","Binders" +"Office Supplies","San Diego","United States","MH-17785","Maya Herman",0,"2014-05-27","CA-2014-124429",92105,"OFF-ST-10001809","Fellowes Officeware Wire Shelving",7.1864,4,"West",748,359.31998,"Corporate","2014-05-27","Same Day","California","Storage" +"Technology","San Francisco","United States","LC-16885","Lena Creighton",0,"2014-05-27","CA-2014-139192",94109,"TEC-AC-10001606","Logitech Wireless Performance Mouse MX for PC and Mac",37.9962,1,"West",1050,99.99,"Consumer","2014-06-01","Second Class","California","Accessories" +"Technology","San Francisco","United States","LC-16885","Lena 
Creighton",0.2,"2014-05-27","CA-2014-139192",94109,"TEC-PH-10000486","Plantronics HL10 Handset Lifter",125.269196,12,"West",1049,1113.504,"Consumer","2014-06-01","Second Class","California","Phones" +"Office Supplies","Seattle","United States","MM-17260","Magdelene Morse",0.2,"2014-05-28","CA-2014-108861",98105,"OFF-BI-10003876","Green Canvas Binder for 8-1/2"" x 14"" Sheets",51.36,4,"West",8439,136.95999,"Consumer","2014-06-01","Standard Class","Washington","Binders" +"Technology","Seattle","United States","PJ-18835","Patrick Jones",0,"2014-05-28","CA-2014-135993",98115,"TEC-AC-10004877","Imation 30456 USB Flash Drive 8GB",2.208,4,"West",5677,27.6,"Corporate","2014-06-02","Standard Class","Washington","Accessories" +"Technology","Seattle","United States","PJ-18835","Patrick Jones",0.2,"2014-05-28","CA-2014-135993",98115,"TEC-PH-10001552","I Need's 3d Hello Kitty Hybrid Silicone Case Cover for HTC One X 4g with 3d Hello Kitty Stylus Pen Green/pink",5.7408,6,"West",5676,57.408,"Corporate","2014-06-02","Standard Class","Washington","Phones" +"Furniture","Buffalo Grove","United States","SS-20410","Shahid Shariari",0.5,"2014-05-30","CA-2014-145800",60089,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",-184.8366,3,"Central",5751,355.455,"Consumer","2014-06-05","Standard Class","Illinois","Tables" +"Office Supplies","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",77.8635,3,"East",7859,235.95,"Corporate","2014-06-01","First Class","New York","Appliances" +"Office Supplies","New York City","United States","JG-15160","James Galang",0.2,"2014-05-30","CA-2014-105872",10024,"OFF-BI-10003684","Wilson Jones Legal Size Ring Binders",26.387999,4,"East",7737,70.368004,"Consumer","2014-06-06","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","LW-16825","Laurel 
Workman",0.2,"2014-05-30","CA-2014-103429",10024,"OFF-BI-10004233","GBC Pre-Punched Binding Paper. Plastic. White. 8-1/2"" x 11""",8.954399,2,"East",7857,25.584,"Corporate","2014-06-01","First Class","New York","Binders" +"Office Supplies","Richmond","United States","EB-13840","Ellis Ballard",0,"2014-05-30","CA-2014-146885",23223,"OFF-PA-10001622","Ampad Poly Cover Wirebound Steno Book. 6"" x 9"" Assorted Colors. Gregg Ruled",6.1289997,3,"South",4647,13.620001,"Corporate","2014-06-05","Standard Class","Virginia","Paper" +"Office Supplies","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"OFF-PA-10001712","Xerox 1948",17.982,4,"East",7860,39.960003,"Corporate","2014-06-01","First Class","New York","Paper" +"Technology","Chicago","United States","MC-17425","Mark Cousins",0.2,"2014-05-30","CA-2014-140473",60623,"TEC-CO-10004202","Brother DCP1000 Digital 3 in 1 Multifunction Machine",134.9955,3,"Central",8553,719.976,"Corporate","2014-06-03","Standard Class","Illinois","Copiers" +"Technology","New York City","United States","LW-16825","Laurel Workman",0,"2014-05-30","CA-2014-103429",10024,"TEC-PH-10003505","Geemarc AmpliPOWER60",134.56,5,"East",7858,464,"Corporate","2014-06-01","First Class","New York","Phones" +"Technology","Jackson","United States","JK-15625","Jim Karlsson",0,"2014-05-31","CA-2014-166051",39212,"TEC-PH-10002538","Grandstream GXP1160 VoIP phone",32.981705,3,"South",7979,113.729996,"Consumer","2014-06-05","Standard Class","Mississippi","Phones" +"Technology","Jackson","United States","JK-15625","Jim Karlsson",0,"2014-05-31","CA-2014-166051",39212,"TEC-PH-10002680","Samsung Galaxy Note 3",197.991,3,"South",7978,659.97003,"Consumer","2014-06-05","Standard Class","Mississippi","Phones" +"Furniture","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"FUR-CH-10004063","Global Deluxe High-Back Manager's Chair",580.5394,7,"Central",245,2001.8601,"Home 
Office","2014-06-06","Second Class","Minnesota","Chairs" +"Furniture","Little Rock","United States","LT-17110","Liz Thompson",0,"2014-06-01","US-2014-165659",72209,"FUR-FU-10001935","3M Hangers With Command Adhesive",9.102,6,"South",2201,22.2,"Consumer","2014-06-06","Standard Class","Arkansas","Furnishings" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-AP-10002945","Honeywell Enviracaire Portable HEPA Air Cleaner for 17' x 22' Room",496.0725,5,"Central",248,1503.25,"Home Office","2014-06-06","Second Class","Minnesota","Appliances" +"Office Supplies","Detroit","United States","QJ-19255","Quincy Jones",0.1,"2014-06-01","US-2014-157070",48234,"OFF-AP-10004859","Acco 6 Outlet Guardian Premium Surge Suppressor",12.375999,5,"Central",9151,65.52,"Corporate","2014-06-06","Standard Class","Michigan","Appliances" +"Office Supplies","Lakewood","United States","CR-12625","Corey Roper",0,"2014-06-01","CA-2014-111003",8701,"OFF-AR-10002135","Boston Heavy-Duty Trimline Electric Pencil Sharpeners",83.868004,6,"East",308,289.2,"Home Office","2014-06-06","Standard Class","New Jersey","Art" +"Office Supplies","Lakewood","United States","CR-12625","Corey Roper",0,"2014-06-01","CA-2014-111003",8701,"OFF-BI-10001072","GBC Clear Cover. 8-1/2 x 11. unpunched. 
25 covers per pack",20.9208,3,"East",307,45.480003,"Home Office","2014-06-06","Standard Class","New Jersey","Binders" +"Office Supplies","Detroit","United States","QJ-19255","Quincy Jones",0,"2014-06-01","US-2014-157070",48234,"OFF-BI-10001765","Wilson Jones Heavy-Duty Casebound Ring Binders with Metal Hinges",66.508804,4,"Central",9150,138.56,"Corporate","2014-06-06","Standard Class","Michigan","Binders" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-PA-10000061","Xerox 205",12.4416,4,"Central",249,25.92,"Home Office","2014-06-06","Second Class","Minnesota","Paper" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-PA-10004082","Adams Telephone Message Book w/Frequently-Called Numbers Space. 400 Messages per Book",23.94,6,"Central",247,47.88,"Home Office","2014-06-06","Second Class","Minnesota","Paper" +"Office Supplies","Lakeville","United States","DW-13480","Dianna Wilson",0,"2014-06-01","CA-2014-131926",55044,"OFF-ST-10002276","Safco Steel Mobile File Cart",41.68,2,"Central",246,166.72,"Home Office","2014-06-06","Second Class","Minnesota","Storage" +"Technology","Little Rock","United States","LT-17110","Liz Thompson",0,"2014-06-01","US-2014-165659",72209,"TEC-PH-10002563","Adtran 1202752G1",229.3018,7,"South",2202,881.93005,"Consumer","2014-06-06","Standard Class","Arkansas","Phones" +"Furniture","Orem","United States","CK-12325","Christine Kargatis",0,"2014-06-02","CA-2014-104472",84057,"FUR-FU-10000246","Aluminum Document Frame",21.996,6,"West",720,73.32,"Home Office","2014-06-07","Standard Class","Utah","Furnishings" +"Office Supplies","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"OFF-AR-10004511","Sanford Colorific Scented Colored Pencils. 
12/Pack",2.6536,2,"South",3888,8.56,"Consumer","2014-06-06","Standard Class","Georgia","Art" +"Office Supplies","Orem","United States","CK-12325","Christine Kargatis",0.2,"2014-06-02","CA-2014-104472",84057,"OFF-BI-10001658","GBC Standard Therm-A-Bind Covers",19.437601,3,"West",719,59.808,"Home Office","2014-06-07","Standard Class","Utah","Binders" +"Office Supplies","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"OFF-ST-10001490","Hot File 7-Pocket. Floor Stand",107.082,2,"South",3890,356.94,"Consumer","2014-06-06","Standard Class","Georgia","Storage" +"Technology","Roswell","United States","SV-20785","Stewart Visinsky",0,"2014-06-02","CA-2014-100895",30076,"TEC-PH-10001425","Mophie Juice Pack Helium for iPhone",67.1916,3,"South",3889,239.97,"Consumer","2014-06-06","Standard Class","Georgia","Phones" +"Furniture","Decatur","United States","RE-19450","Richard Eichhorn",0.6,"2014-06-03","CA-2014-163867",62521,"FUR-FU-10001475","Contract Clock. 14"". 
Brown",-40.003597,7,"Central",9845,61.544003,"Consumer","2014-06-06","First Class","Illinois","Furnishings" +"Furniture","Seattle","United States","SC-20725","Steven Cartwright",0,"2014-06-03","CA-2014-135657",98115,"FUR-TA-10004086","KI Adjustable-Height Table",113.4936,6,"West",1700,515.88,"Consumer","2014-06-07","Second Class","Washington","Tables" +"Office Supplies","Midland","United States","RB-19795","Ross Baird",0,"2014-06-03","CA-2014-141299",48640,"OFF-EN-10004459","Security-Tint Envelopes",7.4872,2,"Central",6323,15.28,"Home Office","2014-06-07","Second Class","Michigan","Envelopes" +"Office Supplies","Decatur","United States","RE-19450","Richard Eichhorn",0.2,"2014-06-03","CA-2014-163867",62521,"OFF-LA-10001771","Avery 513",5.1791997,4,"Central",9844,15.935999,"Consumer","2014-06-06","First Class","Illinois","Labels" +"Office Supplies","Decatur","United States","RE-19450","Richard Eichhorn",0.2,"2014-06-03","CA-2014-163867",62521,"OFF-ST-10000877","Recycled Steel Personal File for Standard File Folders",9.9522,3,"Central",9846,132.696,"Consumer","2014-06-06","First Class","Illinois","Storage" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0.2,"2014-06-04","CA-2014-151946",10035,"FUR-BO-10003272","O'Sullivan Living Dimensions 5-Shelf Bookcases",-44.196,2,"East",8610,353.568,"Consumer","2014-06-09","Standard Class","New York","Bookcases" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"FUR-FU-10002191","G.E. 
Halogen Desk Lamp Bulbs",6.7008,2,"East",8611,13.96,"Consumer","2014-06-09","Standard Class","New York","Furnishings" +"Furniture","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"FUR-FU-10002878","Seth Thomas 14"" Day/Date Wall Clock",21.0752,2,"East",8608,56.960003,"Consumer","2014-06-09","Standard Class","New York","Furnishings" +"Office Supplies","New York City","United States","BT-11440","Bobby Trafton",0,"2014-06-04","CA-2014-151946",10035,"OFF-AP-10001626","Commercial WindTunnel Clean Air Upright Vacuum. Replacement Belts. Filtration Bags",4.0456,4,"East",8609,15.56,"Consumer","2014-06-09","Standard Class","New York","Appliances" +"Office Supplies","Columbus","United States","MP-17470","Mark Packer",0.2,"2014-06-04","CA-2014-147914",43229,"OFF-PA-10001685","Easy-staple paper",5.8812,2,"East",1875,16.224,"Home Office","2014-06-09","Standard Class","Ohio","Paper" +"Furniture","Long Beach","United States","GT-14635","Grant Thornton",0.4,"2014-06-06","CA-2014-159520",11561,"FUR-TA-10003238","Chromcraft Bull-Nose Wood 48"" x 96"" Rectangular Conference Tables",-347.1174,3,"East",2142,991.76404,"Corporate","2014-06-11","Standard Class","New York","Tables" +"Office Supplies","Rochester","United States","BM-11785","Bryan Mills",0,"2014-06-06","CA-2014-133270",14609,"OFF-AR-10002656","Sanford Liquid Accent Highlighters",4.9431996,2,"East",1746,13.360001,"Consumer","2014-06-09","First Class","New York","Art" +"Office Supplies","Jacksonville","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-06","CA-2014-169257",32216,"OFF-BI-10002557","Presstex Flexible Ring Binders",-0.90999997,1,"South",6598,1.3649999,"Consumer","2014-06-12","Standard Class","Florida","Binders" +"Office Supplies","Chicago","United States","SB-20170","Sarah Bern",0.8,"2014-06-06","US-2014-161305",60623,"OFF-BI-10002794","Avery Trapezoid Ring Binder. 3"" Capacity. Black. 
1040 sheets",-38.111397,3,"Central",6301,24.588,"Consumer","2014-06-12","Standard Class","Illinois","Binders" +"Office Supplies","Long Beach","United States","GT-14635","Grant Thornton",0.2,"2014-06-06","CA-2014-159520",11561,"OFF-BI-10003982","Wilson Jones Century Plastic Molded Ring Binders",50.4711,9,"East",2140,149.54399,"Corporate","2014-06-11","Standard Class","New York","Binders" +"Office Supplies","Chicago","United States","SB-20170","Sarah Bern",0.2,"2014-06-06","US-2014-161305",60623,"OFF-EN-10000461","#10- 4 1/8"" x 9 1/2"" Recycled Envelopes",4.7195997,2,"Central",6302,13.983999,"Consumer","2014-06-12","Standard Class","Illinois","Envelopes" +"Office Supplies","Houston","United States","VT-21700","Valerie Takahito",0.2,"2014-06-06","CA-2014-151897",77070,"OFF-LA-10001074","Round Specialty Laser Printer Labels",33.830997,10,"Central",6473,100.24,"Home Office","2014-06-10","Standard Class","Texas","Labels" +"Office Supplies","Jacksonville","United States","SV-20785","Stewart Visinsky",0.2,"2014-06-06","CA-2014-169257",32216,"OFF-PA-10002319","Xerox 1944",22.480799,2,"South",6599,62.016,"Consumer","2014-06-12","Standard Class","Florida","Paper" +"Office Supplies","Long Beach","United States","GT-14635","Grant Thornton",0,"2014-06-06","CA-2014-159520",11561,"OFF-SU-10001664","Acme Office Executive Series Stainless Steel Trimmers",4.4564004,2,"East",2141,17.140001,"Corporate","2014-06-11","Standard Class","New York","Supplies" +"Furniture","Aurora","United States","NR-18550","Nick Radford",0.5,"2014-06-07","CA-2014-106229",60505,"FUR-TA-10002041","Bevis Round Conference Table Top. 
X-Base",-209.7693,3,"Central",4722,268.935,"Consumer","2014-06-11","Second Class","Illinois","Tables" +"Office Supplies","Peoria","United States","BP-11095","Bart Pistole",0.8,"2014-06-07","US-2014-134971",61604,"OFF-BI-10003982","Wilson Jones Century Plastic Molded Ring Binders",-20.5623,3,"Central",394,12.462,"Corporate","2014-06-10","Second Class","Illinois","Binders" +"Furniture","Bristol","United States","KE-16420","Katrina Edelman",0.2,"2014-06-08","US-2014-109162",37620,"FUR-CH-10002647","Situations Contoured Folding Chairs. 4/Set",10.647,3,"South",3842,170.352,"Corporate","2014-06-12","Standard Class","Tennessee","Chairs" +"Furniture","Seattle","United States","CS-11950","Carlos Soltero",0.2,"2014-06-08","US-2014-141257",98115,"FUR-CH-10002758","Hon Deluxe Fabric Upholstered Stacking Chairs. Squared Back",73.19399,3,"West",2750,585.552,"Consumer","2014-06-14","Standard Class","Washington","Chairs" +"Office Supplies","Long Beach","United States","RD-19480","Rick Duston",0.2,"2014-06-08","CA-2014-108147",11561,"OFF-BI-10003876","Green Canvas Binder for 8-1/2"" x 14"" Sheets",25.68,2,"East",9367,68.48,"Consumer","2014-06-13","Standard Class","New York","Binders" +"Office Supplies","Long Beach","United States","RD-19480","Rick Duston",0,"2014-06-08","CA-2014-108147",11561,"OFF-ST-10003470","Tennsco Snap-Together Open Shelving Units. Starter Sets and Add-On Units",83.844,6,"East",9368,1676.88,"Consumer","2014-06-13","Standard Class","New York","Storage" +"Furniture","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"FUR-FU-10001487","Eldon Expressions Wood and Plastic Desk Accessories. 
Cherry Wood",14.1694,7,"West",6,48.86,"Consumer","2014-06-14","Standard Class","California","Furnishings" +"Furniture","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"FUR-FU-10002685","Executive Impressions 13-1/2"" Indoor/Outdoor Wall Clock",14.212,2,"West",4957,37.4,"Corporate","2014-06-13","Second Class","Nevada","Furnishings" +"Furniture","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",85.309204,9,"West",11,1706.184,"Consumer","2014-06-14","Standard Class","California","Tables" +"Furniture","Harrisonburg","United States","FO-14305","Frank Olsen",0,"2014-06-09","CA-2014-132612",22801,"FUR-TA-10004534","Bevis 44 x 96 Conference Tables",245.02101,7,"South",1812,1441.3,"Consumer","2014-06-11","Second Class","Virginia","Tables" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"OFF-AP-10002892","Belkin F5C206VTEL 6 Outlet Surge",34.47,5,"West",10,114.9,"Consumer","2014-06-14","Standard Class","California","Appliances" +"Office Supplies","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"OFF-AR-10001953","Boston 1645 Deluxe Heavier-Duty Electric Pencil Sharpener",6.1572,2,"Central",491,70.368004,"Home Office","2014-06-13","Second Class","Texas","Art" +"Office Supplies","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"OFF-AR-10002335","DIXON Oriole Pencils",4.6956,7,"West",4955,18.06,"Corporate","2014-06-13","Second Class","Nevada","Art" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0,"2014-06-09","CA-2014-115812",90032,"OFF-AR-10002833","Newell 322",1.9655999,4,"West",7,7.2799997,"Consumer","2014-06-14","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","CM-12445","Chuck 
Magee",0,"2014-06-09","CA-2014-133851",94122,"OFF-AR-10003752","Deluxe Chalkboard Eraser Cleaner",10.626,2,"West",862,23.1,"Consumer","2014-06-16","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"OFF-BI-10003910","DXL Angle-View Binders with Locking Rings by Samsill",5.7825,3,"West",9,18.504,"Consumer","2014-06-14","Standard Class","California","Binders" +"Office Supplies","North Las Vegas","United States","TS-21205","Thomas Seio",0,"2014-06-09","CA-2014-160262",89031,"OFF-PA-10003641","Xerox 1909",36.404404,3,"West",4956,79.14,"Corporate","2014-06-13","Second Class","Nevada","Paper" +"Office Supplies","San Francisco","United States","CM-12445","Chuck Magee",0,"2014-06-09","CA-2014-133851",94122,"OFF-SU-10001225","Staple remover",0.1472,2,"West",861,7.36,"Consumer","2014-06-16","Standard Class","California","Supplies" +"Technology","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"TEC-AC-10000303","Logitech M510 Wireless Mouse",10.3974,2,"Central",490,63.984005,"Home Office","2014-06-13","Second Class","Texas","Accessories" +"Technology","Huntsville","United States","CW-11905","Carl Weiss",0.2,"2014-06-09","CA-2014-133753",77340,"TEC-PH-10000376","Square Credit Card Reader",0.5994,1,"Central",489,7.992,"Home Office","2014-06-13","Second Class","Texas","Phones" +"Technology","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"TEC-PH-10002033","Konftel 250 Conference phone - Charcoal black",68.356804,4,"West",12,911.4241,"Consumer","2014-06-14","Standard Class","California","Phones" +"Technology","Los Angeles","United States","BH-11710","Brosina Hoffman",0.2,"2014-06-09","CA-2014-115812",90032,"TEC-PH-10002275","Mitel 5320 IP Phone VoIP phone",90.715195,6,"West",8,907.152,"Consumer","2014-06-14","Standard Class","California","Phones" +"Office Supplies","Detroit","United 
States","HK-14890","Heather Kirkland",0,"2014-06-10","CA-2014-144281",48234,"OFF-LA-10003930","Dot Matrix Printer Tape Reel Labels. White. 5000/Box",240.85951,5,"Central",9697,491.55,"Corporate","2014-06-15","Second Class","Michigan","Labels" +"Office Supplies","Los Angeles","United States","FM-14215","Filia McAdams",0,"2014-06-13","CA-2014-114643",90032,"OFF-AR-10003631","Staples in misc. colors",4.7915998,3,"West",1856,14.52,"Corporate","2014-06-17","Standard Class","California","Art" +"Furniture","Detroit","United States","SR-20425","Sharelle Roach",0,"2014-06-14","CA-2014-140487",48234,"FUR-BO-10000711","Hon Metal Bookcases. Gray",57.4938,3,"Central",5221,212.93999,"Home Office","2014-06-20","Standard Class","Michigan","Bookcases" +"Furniture","San Antonio","United States","KL-16555","Kelly Lampkin",0.3,"2014-06-15","US-2014-141215",78207,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",-56.996002,4,"Central",917,797.94403,"Corporate","2014-06-21","Standard Class","Texas","Chairs" +"Furniture","San Antonio","United States","KL-16555","Kelly Lampkin",0.3,"2014-06-15","US-2014-141215",78207,"FUR-TA-10001520","Lesro Sheffield Collection Coffee Table. End Table. Center Table. Corner Table",-18.5562,2,"Central",916,99.91801,"Corporate","2014-06-21","Standard Class","Texas","Tables" +"Office Supplies","San Antonio","United States","KL-16555","Kelly Lampkin",0.8,"2014-06-15","US-2014-141215",78207,"OFF-BI-10002706","Avery Premier Heavy-Duty Binder with Round Locking Rings",-14.5656,3,"Central",918,8.568001,"Corporate","2014-06-21","Standard Class","Texas","Binders" +"Office Supplies","El Paso","United States","PS-18760","Pamela Stobb",0.2,"2014-06-15","CA-2014-126963",79907,"OFF-PA-10001952","Xerox 1902",11.8768,2,"Central",6938,36.544003,"Consumer","2014-06-15","Same Day","Texas","Paper" +"Furniture","Mishawaka","United States","SC-20575","Sonia Cooley",0,"2014-06-16","CA-2014-136644",46544,"FUR-CH-10000225","Global Geo Office Task Chair. 
Gray",32.392,8,"Central",2496,647.83997,"Consumer","2014-06-22","Standard Class","Indiana","Chairs" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0,"2014-06-16","CA-2014-113929",11550,"OFF-AR-10003772","Boston 16750 Black Compact Battery Pencil Sharpener",10.5,4,"East",2913,35,"Consumer","2014-06-21","Standard Class","New York","Art" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-06-16","CA-2014-113929",11550,"OFF-BI-10002852","Ibico Standard Transparent Covers",14.3376,3,"East",2914,39.552,"Consumer","2014-06-21","Standard Class","New York","Binders" +"Office Supplies","Hempstead","United States","CK-12205","Chloris Kastensmidt",0,"2014-06-16","CA-2014-113929",11550,"OFF-EN-10003286","Staple envelope",19.457998,5,"East",2912,41.4,"Consumer","2014-06-21","Standard Class","New York","Envelopes" +"Furniture","Seattle","United States","GH-14425","Gary Hwang",0,"2014-06-17","CA-2014-144414",98105,"FUR-FU-10003981","Eldon Wave Desk Accessories",2.6207998,3,"West",6816,6.24,"Consumer","2014-06-21","Standard Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","GH-14425","Gary Hwang",0.2,"2014-06-17","CA-2014-144414",98105,"OFF-BI-10004995","GBC DocuBind P400 Electric Binding System",1061.5721,3,"West",6818,3266.3762,"Consumer","2014-06-21","Standard Class","Washington","Binders" +"Office Supplies","Seattle","United States","GH-14425","Gary Hwang",0,"2014-06-17","CA-2014-144414",98105,"OFF-FA-10000624","OIC Binder Clips",8.95,5,"West",6817,17.9,"Consumer","2014-06-21","Standard Class","Washington","Fasteners" +"Office Supplies","Chicago","United States","JF-15415","Jennifer Ferguson",0.2,"2014-06-17","CA-2014-156342",60653,"OFF-PA-10001725","Xerox 1892",22.480799,2,"Central",9897,62.016,"Consumer","2014-06-20","Second Class","Illinois","Paper" +"Technology","Newark","United States","DB-13660","Duane Benoit",0,"2014-06-17","CA-2014-104402",19711,"TEC-MA-10000423","Texas 
Instruments TI-34 Scientific Calculator",31.0059,3,"East",6324,65.97,"Consumer","2014-06-23","Standard Class","Delaware","Machines" +"Technology","Los Angeles","United States","MC-18100","Mick Crebagga",0.2,"2014-06-18","CA-2014-123855",90036,"TEC-PH-10000215","Plantronics Cordless Phone Headset with In-line Volume - M214C",12.232499,5,"West",9765,139.8,"Consumer","2014-06-23","Standard Class","California","Phones" +"Furniture","Plano","United States","SC-20020","Sam Craven",0.32000002,"2014-06-20","CA-2014-166863",75023,"FUR-BO-10001608","Hon Metal Bookcases. Black",-19.8744,4,"Central",3508,193.0656,"Consumer","2014-06-24","Standard Class","Texas","Bookcases" +"Office Supplies","Lawton","United States","Co-12640","Corey-Lock",0,"2014-06-20","US-2014-112949",73505,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",155.727,6,"Central",8146,471.9,"Consumer","2014-06-27","Standard Class","Oklahoma","Appliances" +"Office Supplies","Lawton","United States","Co-12640","Corey-Lock",0,"2014-06-20","US-2014-112949",73505,"OFF-AR-10003469","Nontoxic Chalk",1.6896,2,"Central",8147,3.52,"Consumer","2014-06-27","Standard Class","Oklahoma","Art" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.8,"2014-06-20","CA-2014-166863",75023,"OFF-BI-10000756","Storex DuraTech Recycled Plastic Frosted Binders",-5.088,4,"Central",3507,3.392,"Consumer","2014-06-24","Standard Class","Texas","Binders" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-PA-10000587","Array Parchment Paper. 
Assorted Colors",4.0768003,2,"Central",3510,11.648001,"Consumer","2014-06-24","Standard Class","Texas","Paper" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-PA-10001166","Xerox 2",5.4431996,3,"Central",3509,15.552,"Consumer","2014-06-24","Standard Class","Texas","Paper" +"Office Supplies","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"OFF-ST-10004123","Safco Industrial Wire Shelving System",-127.372,7,"Central",3512,509.48798,"Consumer","2014-06-24","Standard Class","Texas","Storage" +"Technology","Plano","United States","SC-20020","Sam Craven",0.4,"2014-06-20","CA-2014-166863",75023,"TEC-MA-10001972","Okidata C331dn Printer",-97.71999,2,"Central",3511,418.8,"Consumer","2014-06-24","Standard Class","Texas","Machines" +"Technology","Plano","United States","SC-20020","Sam Craven",0.2,"2014-06-20","CA-2014-166863",75023,"TEC-PH-10000369","HTC One Mini",20.1584,2,"Central",3506,201.584,"Consumer","2014-06-24","Standard Class","Texas","Phones" +"Furniture","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"FUR-FU-10000550","Stacking Trays by OIC",0.6474001,1,"East",2998,3.984,"Consumer","2014-06-25","Standard Class","Pennsylvania","Furnishings" +"Furniture","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"FUR-FU-10004071","Luxo Professional Magnifying Clamp-On Fluorescent Lamps",14.5614,1,"South",690,104.01,"Consumer","2014-06-23","Second Class","Virginia","Furnishings" +"Office Supplies","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",26.864998,3,"East",4414,59.7,"Consumer","2014-06-24","First Class","New York","Appliances" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"OFF-AP-10003860","Fellowes Advanced 8 Outlet Surge Suppressor with Phone/Fax Protection",3.8864,2,"East",3001,44.416,"Consumer","2014-06-25","Standard Class","Pennsylvania","Appliances" +"Office Supplies","Lakewood","United States","DB-13060","Dave Brooks",0,"2014-06-21","CA-2014-128146",8701,"OFF-AR-10001919","OIC #2 Pencils. Medium Soft",1.0904,2,"East",1182,3.76,"Consumer","2014-06-25","Standard Class","New Jersey","Art" +"Office Supplies","Meriden","United States","RM-19375","Raymond Messe",0,"2014-06-21","CA-2014-141278",6450,"OFF-AR-10003056","Newell 341",6.206,5,"East",1836,21.4,"Consumer","2014-06-24","First Class","Connecticut","Art" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.7,"2014-06-21","CA-2014-138317",19120,"OFF-BI-10000069","GBC Prepunched Paper. 19-Hole. for Binding Systems. 
24-lb",-7.2048,2,"East",3002,9.006,"Consumer","2014-06-25","Standard Class","Pennsylvania","Binders" +"Office Supplies","Pueblo","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-21","US-2014-160780",81001,"OFF-BI-10001116","Wilson Jones 1"" Hanging DublLock Ring Binders",-8.131201,7,"West",2678,11.088,"Consumer","2014-06-21","Same Day","Colorado","Binders" +"Office Supplies","Pueblo","United States","SV-20785","Stewart Visinsky",0.7,"2014-06-21","US-2014-160780",81001,"OFF-BI-10002931","Avery Trapezoid Extra Heavy Duty 4"" Binders",-16.776001,2,"West",2679,25.164,"Consumer","2014-06-21","Same Day","Colorado","Binders" +"Office Supplies","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"OFF-EN-10001539","Staple envelope",8.402399,4,"East",2997,24.896,"Consumer","2014-06-25","Standard Class","Pennsylvania","Envelopes" +"Office Supplies","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"OFF-PA-10003883","Message Book. Phone. Wirebound Standard Line Memo. 
2 3/4"" X 5""",9.039,3,"East",4412,19.65,"Consumer","2014-06-24","First Class","New York","Paper" +"Office Supplies","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"OFF-ST-10002444","Recycled Eldon Regeneration Jumbo File",10.315201,3,"South",692,36.84,"Consumer","2014-06-23","Second Class","Virginia","Storage" +"Technology","Philadelphia","United States","NW-18400","Natalie Webber",0.2,"2014-06-21","CA-2014-138317",19120,"TEC-AC-10003628","Logitech 910-002974 M325 Wireless Mouse for Web Scrolling",28.790401,4,"East",2999,95.968,"Consumer","2014-06-25","Standard Class","Pennsylvania","Accessories" +"Technology","Philadelphia","United States","NW-18400","Natalie Webber",0.7,"2014-06-21","CA-2014-138317",19120,"TEC-MA-10004521","Epson Perfection V600 Photo Scanner",-172.4925,3,"East",3000,206.991,"Consumer","2014-06-25","Standard Class","Pennsylvania","Machines" +"Technology","Long Beach","United States","JG-15160","James Galang",0,"2014-06-21","CA-2014-141796",11561,"TEC-PH-10001578","Polycom SoundStation2 EX Conference phone",352.3065,3,"East",5150,1214.8501,"Consumer","2014-06-21","Same Day","New York","Phones" +"Technology","Waynesboro","United States","NP-18325","Naresj Patel",0,"2014-06-21","US-2014-117135",22980,"TEC-PH-10002033","Konftel 250 Conference phone - Charcoal black",74.0532,1,"South",691,284.81998,"Consumer","2014-06-23","Second Class","Virginia","Phones" +"Technology","New York City","United States","TB-21280","Toby Braunhardt",0,"2014-06-21","CA-2014-130624",10024,"TEC-PH-10003963","GE 2-Jack Phone Line Splitter",160.67221,3,"East",4413,617.97003,"Consumer","2014-06-24","First Class","New York","Phones" +"Technology","Lakewood","United States","DB-13060","Dave Brooks",0,"2014-06-21","CA-2014-128146",8701,"TEC-PH-10004539","Wireless Extenders zBoost YX545 SOHO Signal Booster",357.1911,7,"East",1181,1322.93,"Consumer","2014-06-25","Standard Class","New Jersey","Phones" +"Furniture","Chester","United 
States","AA-10645","Anna Andreadi",0.3,"2014-06-22","CA-2014-154963",19013,"FUR-CH-10000454","Hon Deluxe Fabric Upholstered Stacking Chairs. Rounded Back",0,5,"East",4023,853.93005,"Consumer","2014-06-27","Standard Class","Pennsylvania","Chairs" +"Furniture","Chester","United States","AA-10645","Anna Andreadi",0.3,"2014-06-22","CA-2014-154963",19013,"FUR-CH-10004698","Padded Folding Chairs. Black. 4/Carton",-4.8588004,3,"East",4021,170.058,"Consumer","2014-06-27","Standard Class","Pennsylvania","Chairs" +"Office Supplies","Phoenix","United States","TB-21280","Toby Braunhardt",0.7,"2014-06-22","CA-2014-133389",85023,"OFF-BI-10001553","SpineVue Locking Slant-D Ring Binders by Cardinal",-6.0324,3,"West",9001,8.226,"Consumer","2014-06-22","Same Day","Arizona","Binders" +"Office Supplies","Minneapolis","United States","DV-13465","Dianna Vittorini",0,"2014-06-22","CA-2014-124646",55407,"OFF-ST-10001097","Office Impressions Heavy Duty Welded Shelving & Multimedia Storage Drawers",0,3,"Central",2157,501.81,"Consumer","2014-06-24","First Class","Minnesota","Storage" +"Office Supplies","Minneapolis","United States","DV-13465","Dianna Vittorini",0,"2014-06-22","CA-2014-124646",55407,"OFF-ST-10001469","Fellowes Bankers Box Recycled Super Stor/Drawer",9.716399,3,"Central",2158,161.93999,"Consumer","2014-06-24","First Class","Minnesota","Storage" +"Technology","Louisville","United States","JE-15745","Joel Eaton",0.2,"2014-06-22","CA-2014-142048",80027,"TEC-AC-10004114","KeyTronic 6101 Series - Keyboard - Black",56.5662,6,"West",422,196.752,"Consumer","2014-06-25","First Class","Colorado","Accessories" +"Technology","Chester","United States","AA-10645","Anna Andreadi",0.4,"2014-06-22","CA-2014-154963",19013,"TEC-PH-10004093","Panasonic Kx-TS550",-15.1767,3,"East",4022,82.782,"Consumer","2014-06-27","Standard Class","Pennsylvania","Phones" +"Office Supplies","Concord","United States","DL-13330","Denise Leinenbach",0.2,"2014-06-23","US-2014-130358",28027,"OFF-AR-10002766","Prang 
Drawing Pencil Set",1.7514,9,"South",4064,20.016,"Consumer","2014-06-26","First Class","North Carolina","Art" +"Office Supplies","Concord","United States","DL-13330","Denise Leinenbach",0.2,"2014-06-23","US-2014-130358",28027,"OFF-SU-10002522","Acme Kleen Earth Office Shears",0.3492,1,"South",4065,3.1039999,"Consumer","2014-06-26","First Class","North Carolina","Supplies" +"Technology","Philadelphia","United States","BS-11665","Brian Stugart",0.2,"2014-06-23","CA-2014-126032",19143,"TEC-AC-10000158","Sony 64GB Class 10 Micro SDHC R40 Memory Card",1.0797,3,"East",2127,86.376,"Consumer","2014-06-28","Standard Class","Pennsylvania","Accessories" +"Furniture","Tucson","United States","LP-17080","Liz Pelletier",0.2,"2014-06-24","CA-2014-159814",85705,"FUR-FU-10001731","Acrylic Self-Standing Desk Frames",0.96119994,2,"West",9402,4.272,"Consumer","2014-06-28","Standard Class","Arizona","Furnishings" +"Furniture","Los Angeles","United States","NS-18640","Noel Staavos",0.2,"2014-06-25","CA-2014-159338",90049,"FUR-TA-10004147","Hon 4060 Series Tables",11.196,5,"West",1098,447.84,"Corporate","2014-06-28","First Class","California","Tables" +"Office Supplies","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"OFF-AR-10000475","Hunt BOSTON Vista Battery-Operated Pencil Sharpener. Black",0.81619996,1,"West",4849,9.328001,"Corporate","2014-06-27","Second Class","Oregon","Art" +"Office Supplies","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"OFF-AR-10003478","Avery Hi-Liter EverBold Pen Style Fluorescent Highlighters. 
4/Pack",17.907999,11,"West",4848,71.631996,"Corporate","2014-06-27","Second Class","Oregon","Art" +"Technology","Salem","United States","GK-14620","Grace Kelly",0.2,"2014-06-25","CA-2014-164469",97301,"TEC-PH-10002115","Plantronics 81402",19.797,5,"West",4847,263.96,"Corporate","2014-06-27","Second Class","Oregon","Phones" +"Furniture","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"FUR-TA-10001039","KI Adjustable-Height Table",22.354797,1,"South",2278,85.98,"Consumer","2014-07-01","Standard Class","Mississippi","Tables" +"Office Supplies","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"OFF-ST-10004337","SAFCO Commercial Wire Shelving. 72h",0,5,"South",2277,306.2,"Consumer","2014-07-01","Standard Class","Mississippi","Storage" +"Technology","Southaven","United States","LM-17065","Liz MacKendrick",0,"2014-06-27","CA-2014-104283",38671,"TEC-AC-10000109","Sony Micro Vault Click 16 GB USB 2.0 Flash Drive",53.7504,4,"South",2279,223.95999,"Consumer","2014-07-01","Standard Class","Mississippi","Accessories" +"Furniture","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.3,"2014-06-28","CA-2014-140858",19140,"FUR-CH-10001394","Global Leather Executive Chair",0,5,"East",820,1228.4651,"Consumer","2014-07-02","Standard Class","Pennsylvania","Chairs" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.7,"2014-06-28","CA-2014-140858",19140,"OFF-BI-10003094","Self-Adhesive Ring Binder Labels",-2.4287999,3,"East",819,3.1679997,"Consumer","2014-07-02","Standard Class","Pennsylvania","Binders" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.7,"2014-06-28","CA-2014-140858",19140,"OFF-BI-10004230","GBC Recycled Grain Textured Covers",-22.796402,3,"East",821,31.086,"Consumer","2014-07-02","Standard Class","Pennsylvania","Binders" +"Office Supplies","Detroit","United States","RW-19630","Rob 
Williams",0,"2014-06-28","CA-2014-156993",48234,"OFF-FA-10003495","Staples",3.04,1,"Central",2924,6.08,"Corporate","2014-07-04","Standard Class","Michigan","Fasteners" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.2,"2014-06-28","CA-2014-140858",19140,"OFF-PA-10000304","Xerox 1995",14.515201,8,"East",818,41.472,"Consumer","2014-07-02","Standard Class","Pennsylvania","Paper" +"Office Supplies","Philadelphia","United States","CA-12775","Cynthia Arntzen",0.2,"2014-06-28","CA-2014-140858",19140,"OFF-PA-10003395","Xerox 1941",117.432,4,"East",822,335.52,"Consumer","2014-07-02","Standard Class","Pennsylvania","Paper" +"Office Supplies","Cincinnati","United States","DL-12865","Dan Lawera",0.2,"2014-06-29","CA-2014-165974",45231,"OFF-AR-10003405","Dixon My First Ticonderoga Pencil. #2",3.6855,7,"East",777,32.760002,"Consumer","2014-07-06","Standard Class","Ohio","Art" +"Office Supplies","New York City","United States","JK-15625","Jim Karlsson",0.2,"2014-06-29","CA-2014-111773",10024,"OFF-BI-10000174","Wilson Jones Clip & Carry Folder Binder Tool for Ring Binders. Clear",4.872,3,"East",5736,13.92,"Consumer","2014-07-03","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","CS-11860","Cari Schnelling",0,"2014-06-30","US-2014-121566",10011,"OFF-AR-10001026","Sanford Uni-Blazer View Highlighters. Chisel Tip. Yellow",2.904,3,"East",5154,6.6,"Consumer","2014-07-06","Standard Class","New York","Art" +"Office Supplies","Los Angeles","United States","AR-10825","Anthony Rawles",0,"2014-06-30","CA-2014-109218",90004,"OFF-AR-10001374","BIC Brite Liner Highlighters. 
Chisel Tip",10.368001,5,"West",3148,32.4,"Corporate","2014-07-03","Second Class","California","Art" +"Office Supplies","Chicago","United States","RA-19915","Russell Applegate",0.2,"2014-06-30","CA-2014-123064",60653,"OFF-AR-10004582","BIC Brite Liner Grip Highlighters",1.64,4,"Central",2690,5.2479997,"Consumer","2014-07-02","First Class","Illinois","Art" +"Office Supplies","New York City","United States","HA-14905","Helen Abelman",0.2,"2014-06-30","CA-2014-146640",10024,"OFF-BI-10002867","GBC Recycled Regency Composition Covers",108.7996,7,"East",1414,334.768,"Consumer","2014-07-05","Standard Class","New York","Binders" +"Office Supplies","New York City","United States","CS-11860","Cari Schnelling",0.2,"2014-06-30","US-2014-121566",10011,"OFF-BI-10004528","Cardinal Poly Pocket Divider Pockets for Ring Binders",0.84000003,1,"East",5153,2.6879997,"Consumer","2014-07-06","Standard Class","New York","Binders" +"Office Supplies","Houston","United States","MS-17980","Michael Stewart",0.2,"2014-06-30","CA-2014-116757",77095,"OFF-FA-10002815","Staples",7.1928,6,"Central",4503,21.312,"Corporate","2014-07-04","Standard Class","Texas","Fasteners" +"Office Supplies","Wilmington","United States","BD-11620","Brian DeCherney",0.2,"2014-06-30","CA-2014-152254",28403,"OFF-PA-10001144","Xerox 1913",108.7408,7,"South",8397,310.688,"Consumer","2014-06-30","Same Day","North Carolina","Paper" +"Office Supplies","Houston","United States","MS-17980","Michael Stewart",0.2,"2014-06-30","CA-2014-116757",77095,"OFF-PA-10002005","Xerox 225",9.072,5,"Central",4502,25.92,"Corporate","2014-07-04","Standard Class","Texas","Paper" +"Office Supplies","Deltona","United States","LW-16825","Laurel Workman",0.7,"2014-07-01","CA-2014-160773",32725,"OFF-BI-10000546","Avery Durable Binders",-3.6287997,6,"South",1320,5.184,"Corporate","2014-07-05","Standard Class","Florida","Binders" +"Office Supplies","Seattle","United States","Dl-13600","Dorris 
liebe",0.2,"2014-07-01","CA-2014-153150",98105,"OFF-BI-10003355","Cardinal Holdit Business Card Pockets",6.972,5,"West",716,19.92,"Corporate","2014-07-06","Second Class","Washington","Binders" +"Technology","Deltona","United States","LW-16825","Laurel Workman",0.2,"2014-07-01","CA-2014-160773",32725,"TEC-PH-10004586","Wilson SignalBoost 841262 DB PRO Amplifier Kit",71.99,2,"South",1319,575.92004,"Corporate","2014-07-05","Standard Class","Florida","Phones" +"Office Supplies","Plainfield","United States","EH-13945","Eric Hoffmann",0,"2014-07-02","CA-2014-107594",7060,"OFF-AR-10000716","DIXON Ticonderoga Erasable Checking Pencils",2.1762,1,"East",2734,5.58,"Consumer","2014-07-06","Standard Class","New Jersey","Art" +"Technology","Plainfield","United States","EH-13945","Eric Hoffmann",0,"2014-07-02","CA-2014-107594",7060,"TEC-PH-10002923","Logitech B530 USB Headset - headset - Full size. Binaural",19.9746,2,"East",2733,73.98,"Consumer","2014-07-06","Standard Class","New Jersey","Phones" +"Office Supplies","Aurora","United States","KE-16420","Katrina Edelman",0.2,"2014-07-04","CA-2014-120096",80013,"OFF-AP-10000692","Fellowes Mighty 8 Compact Surge Protector",3.2432,2,"West",6734,32.432,"Corporate","2014-07-07","First Class","Colorado","Appliances" +"Office Supplies","Richmond","United States","MS-17770","Maxwell Schwartz",0,"2014-07-04","CA-2014-138709",23223,"OFF-BI-10000145","Zipper Ring Binder Pockets",7.644,5,"South",7163,15.6,"Consumer","2014-07-09","Standard Class","Virginia","Binders" +"Office Supplies","Aurora","United States","KE-16420","Katrina Edelman",0.2,"2014-07-04","CA-2014-120096",80013,"OFF-PA-10001977","Xerox 194",62.137596,4,"West",6733,177.536,"Corporate","2014-07-07","First Class","Colorado","Paper" +"Office Supplies","Richmond","United States","MS-17770","Maxwell Schwartz",0,"2014-07-04","CA-2014-138709",23223,"OFF-PA-10004734","Southworth Structures Collection",10.92,3,"South",7162,21.84,"Consumer","2014-07-09","Standard Class","Virginia","Paper" 
+"Furniture","Palm Coast","United States","DM-13525","Don Miller",0.2,"2014-07-05","CA-2014-103317",32137,"FUR-FU-10001591","Advantus Panel Wall Certificate Holder - 8.5x11",5.368,2,"South",5437,19.52,"Corporate","2014-07-08","First Class","Florida","Furnishings" +"Furniture","Palm Coast","United States","DM-13525","Don Miller",0.2,"2014-07-05","CA-2014-103317",32137,"FUR-FU-10003192","Luxo Adjustable Task Clamp Lamp",15.9912,3,"South",5439,213.21599,"Corporate","2014-07-08","First Class","Florida","Furnishings" +"Office Supplies","Knoxville","United States","DW-13195","David Wiener",0.2,"2014-07-05","CA-2014-152849",37918,"OFF-AR-10002833","Newell 322",0.38220003,3,"South",3885,4.368,"Corporate","2014-07-12","Standard Class","Tennessee","Art" +"Office Supplies","Los Angeles","United States","DP-13390","Dennis Pardue",0.2,"2014-07-05","CA-2014-107139",90004,"OFF-BI-10001670","Vinyl Sectional Post Binders",67.86,6,"West",3791,180.95999,"Home Office","2014-07-11","Standard Class","California","Binders" +"Office Supplies","Palm Coast","United States","DM-13525","Don Miller",0.7,"2014-07-05","CA-2014-103317",32137,"OFF-BI-10001787","Wilson Jones Four-Pocket Poly Binders",-6.867,5,"South",5438,9.81,"Corporate","2014-07-08","First Class","Florida","Binders" +"Office Supplies","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"OFF-LA-10001934","Avery 516",6.8714004,2,"South",687,14.620001,"Consumer","2014-07-08","First Class","Mississippi","Labels" +"Office Supplies","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"OFF-PA-10000304","Xerox 1995",9.331201,3,"South",688,19.44,"Consumer","2014-07-08","First Class","Mississippi","Paper" +"Office Supplies","Houston","United States","DC-12850","Dan Campbell",0.2,"2014-07-05","US-2014-160444",77036,"OFF-ST-10000563","Fellowes Bankers Box Stor/Drawer Steel Plus",-35.177998,11,"Central",1357,281.42398,"Consumer","2014-07-05","Same 
Day","Texas","Storage" +"Office Supplies","Houston","United States","DC-12850","Dan Campbell",0.2,"2014-07-05","US-2014-160444",77036,"OFF-ST-10001522","Gould Plastics 18-Pocket Panel Bin. 34w x 5-1/4d x 20-1/2h",-44.155197,3,"Central",1356,220.776,"Consumer","2014-07-05","Same Day","Texas","Storage" +"Technology","Jackson","United States","MC-17845","Michael Chen",0,"2014-07-05","CA-2014-157784",39212,"TEC-AC-10003911","NETGEAR AC1750 Dual Band Gigabit Smart WiFi Router",163.1898,3,"South",686,479.97,"Consumer","2014-07-08","First Class","Mississippi","Accessories" +"Furniture","El Cajon","United States","BC-11125","Becky Castell",0.2,"2014-07-06","CA-2014-147543",92020,"FUR-CH-10000155","Global Comet Stacking Armless Chair",47.848,2,"West",6846,478.47998,"Home Office","2014-07-12","Standard Class","California","Chairs" +"Technology","New York City","United States","EP-13915","Emily Phan",0.2,"2014-07-06","CA-2014-134278",10011,"TEC-CO-10001046","Canon Imageclass D680 Copier / Fax",174.9975,1,"East",1550,559.99207,"Consumer","2014-07-08","First Class","New York","Copiers" +"Furniture","Philadelphia","United States","JL-15835","John Lee",0.3,"2014-07-07","US-2014-138758",19120,"FUR-CH-10002880","Global High-Back Leather Tilter. Burgundy",-46.7362,2,"East",4398,172.186,"Consumer","2014-07-11","Standard Class","Pennsylvania","Chairs" +"Furniture","Philadelphia","United States","JL-15835","John Lee",0.2,"2014-07-07","US-2014-138758",19120,"FUR-FU-10003039","Howard Miller 11-1/2"" Diameter Grantwood Wall Clock",12.076399,2,"East",4399,69.008,"Consumer","2014-07-11","Standard Class","Pennsylvania","Furnishings" +"Furniture","Buffalo","United States","MH-18025","Michelle Huthwaite",0.1,"2014-07-08","CA-2014-150301",14215,"FUR-CH-10002647","Situations Contoured Folding Chairs. 
4/Set",10.647,1,"East",4780,63.882,"Consumer","2014-07-10","First Class","New York","Chairs" +"Furniture","San Francisco","United States","EB-13705","Ed Braxton",0.2,"2014-07-08","CA-2014-100090",94122,"FUR-TA-10003715","Hon 2111 Invitation Series Corner Table",-87.935394,3,"West",6288,502.48798,"Corporate","2014-07-12","Standard Class","California","Tables" +"Office Supplies","San Francisco","United States","EB-13705","Ed Braxton",0.2,"2014-07-08","CA-2014-100090",94122,"OFF-BI-10001597","Wilson Jones Ledger-Size. Piano-Hinge Binder. 2"". Blue",68.8464,6,"West",6289,196.704,"Corporate","2014-07-12","Standard Class","California","Binders" +"Furniture","San Francisco","United States","DS-13030","Darrin Sayre",0.15,"2014-07-09","CA-2014-113271",94122,"FUR-BO-10004218","Bush Heritage Pine Collection 5-Shelf Bookcase. Albany Pine Finish. *Special Order",7.049,1,"West",9244,119.833,"Home Office","2014-07-14","Standard Class","California","Bookcases" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0,"2014-07-09","CA-2014-113271",94122,"OFF-AR-10003251","Prang Drawing Pencil Set",2.224,2,"West",9245,5.56,"Home Office","2014-07-14","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0.2,"2014-07-09","CA-2014-113271",94122,"OFF-BI-10002609","Avery Hidden Tab Dividers for Binding Systems",4.6488,6,"West",9243,14.304,"Home Office","2014-07-14","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0.2,"2014-07-09","CA-2014-114125",90049,"OFF-BI-10003291","Wilson Jones Leather-Like Binders with DublLock Round Rings",14.1426,6,"West",8174,41.904003,"Home Office","2014-07-13","Standard Class","California","Binders" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0,"2014-07-09","CA-2014-114125",90049,"OFF-LA-10004559","Avery 49",1.4112,1,"West",8173,2.8799999,"Home Office","2014-07-13","Standard 
Class","California","Labels" +"Office Supplies","Dallas","United States","BM-11650","Brian Moss",0.2,"2014-07-09","CA-2014-165379",75217,"OFF-PA-10002245","Xerox 1895",4.485,3,"Central",3368,14.352,"Corporate","2014-07-15","Standard Class","Texas","Paper" +"Office Supplies","San Francisco","United States","DS-13030","Darrin Sayre",0,"2014-07-09","CA-2014-113271",94122,"OFF-PA-10002365","Xerox 1967",15.552,5,"West",9246,32.4,"Home Office","2014-07-14","Standard Class","California","Paper" +"Office Supplies","Dallas","United States","BM-11650","Brian Moss",0.2,"2014-07-09","CA-2014-165379",75217,"OFF-PA-10003072","Eureka Recycled Copy Paper 8 1/2"" x 11"". Ream",3.6287997,2,"Central",3367,10.368001,"Corporate","2014-07-15","Standard Class","Texas","Paper" +"Office Supplies","Los Angeles","United States","GH-14410","Gary Hansen",0,"2014-07-09","CA-2014-114125",90049,"OFF-ST-10001505","Perma STOR-ALL Hanging File Box. 13 1/8""W x 12 1/4""D x 10 1/2""H",4.0664,4,"West",8175,23.92,"Home Office","2014-07-13","Standard Class","California","Storage" +"Furniture","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"FUR-CH-10003379","Global Commerce Series High-Back Swivel/Tilt Chairs",213.735,3,"East",1031,854.94,"Corporate","2014-07-15","Standard Class","New Jersey","Chairs" +"Furniture","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"FUR-FU-10000629","9-3/4 Diameter Round Wall Clock",52.126198,9,"East",1032,124.11,"Corporate","2014-07-15","Standard Class","New Jersey","Furnishings" +"Office Supplies","New York City","United States","MH-17455","Mark Hamilton",0.2,"2014-07-11","CA-2014-133305",10011,"OFF-BI-10002954","Newell 3-Hole Punched Plastic Slotted Magazine Holders for Binders",6.3980002,5,"East",6336,18.28,"Consumer","2014-07-15","Standard Class","New York","Binders" +"Office Supplies","Hackensack","United States","LA-16780","Laura 
Armstrong",0,"2014-07-11","CA-2014-163552",7601,"OFF-LA-10001175","Avery 514",7.056,5,"East",1033,14.4,"Corporate","2014-07-15","Standard Class","New Jersey","Labels" +"Office Supplies","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"OFF-PA-10000474","Easy-staple paper",83.284,5,"East",1029,177.2,"Corporate","2014-07-15","Standard Class","New Jersey","Paper" +"Office Supplies","El Paso","United States","MN-17935","Michael Nguyen",0.2,"2014-07-11","CA-2014-123225",79907,"OFF-PA-10000552","Xerox 200",3.6287997,2,"Central",5802,10.368001,"Consumer","2014-07-14","First Class","Texas","Paper" +"Office Supplies","New York City","United States","MH-17455","Mark Hamilton",0,"2014-07-11","CA-2014-133305",10011,"OFF-PA-10001970","Xerox 1881",23.086401,4,"East",6335,49.12,"Consumer","2014-07-15","Standard Class","New York","Paper" +"Technology","El Paso","United States","MN-17935","Michael Nguyen",0.2,"2014-07-11","CA-2014-123225",79907,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",43.197598,4,"Central",5801,575.968,"Consumer","2014-07-14","First Class","Texas","Phones" +"Technology","Hackensack","United States","LA-16780","Laura Armstrong",0,"2014-07-11","CA-2014-163552",7601,"TEC-PH-10003885","Cisco SPA508G",57.4113,3,"East",1030,197.97,"Corporate","2014-07-15","Standard Class","New Jersey","Phones" +"Technology","Niagara Falls","United States","JK-15205","Jamie Kunitz",0,"2014-07-11","CA-2014-166555",14304,"TEC-PH-10004912","Cisco SPA112 2 Port Phone Adapter",47.8065,3,"East",9934,164.85,"Consumer","2014-07-14","First Class","New York","Phones" +"Furniture","Seattle","United States","CL-12565","Clay Ludtke",0.2,"2014-07-12","CA-2014-131310",98115,"FUR-CH-10001797","Safco Chair Connectors. 
6/Carton",13.8528,4,"West",2982,123.135994,"Consumer","2014-07-18","Standard Class","Washington","Chairs" +"Furniture","League City","United States","PV-18985","Paul Van Hugh",0.3,"2014-07-12","CA-2014-161508",77573,"FUR-CH-10002126","Hon Deluxe Fabric Upholstered Stacking Chairs",-14.638801,3,"Central",8314,512.358,"Home Office","2014-07-16","Standard Class","Texas","Chairs" +"Furniture","San Francisco","United States","AC-10420","Alyssa Crouse",0.2,"2014-07-12","CA-2014-129924",94122,"FUR-TA-10004575","Hon 5100 Series Wood Tables",-17.458797,3,"West",339,698.352,"Corporate","2014-07-17","Standard Class","California","Tables" +"Office Supplies","League City","United States","PV-18985","Paul Van Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-AR-10003158","Fluorescent Highlighters by Dixon",3.9004002,7,"Central",8316,22.288,"Home Office","2014-07-16","Standard Class","Texas","Art" +"Office Supplies","Seattle","United States","CL-12565","Clay Ludtke",0.2,"2014-07-12","CA-2014-131310",98115,"OFF-BI-10003094","Self-Adhesive Ring Binder Labels",3.8016,4,"West",2983,11.264,"Consumer","2014-07-18","Standard Class","Washington","Binders" +"Office Supplies","San Francisco","United States","AC-10420","Alyssa Crouse",0.2,"2014-07-12","CA-2014-129924",94122,"OFF-BI-10003314","Tuff Stuff Recycled Round Ring Binders",2.7956002,2,"West",338,7.712,"Corporate","2014-07-17","Standard Class","California","Binders" +"Office Supplies","League City","United States","PV-18985","Paul Van Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-FA-10001561","Stockwell Push Pins",0.5668,2,"Central",8315,3.488,"Home Office","2014-07-16","Standard Class","Texas","Fasteners" +"Office Supplies","Chicago","United States","ME-17725","Max Engle",0.2,"2014-07-12","CA-2014-124807",60610,"OFF-PA-10001526","Xerox 1949",12.9978,9,"Central",7399,35.856,"Consumer","2014-07-15","Second Class","Illinois","Paper" +"Office Supplies","League City","United States","PV-18985","Paul Van 
Hugh",0.2,"2014-07-12","CA-2014-161508",77573,"OFF-PA-10001804","Xerox 195",5.6112,3,"Central",8317,16.032001,"Home Office","2014-07-16","Standard Class","Texas","Paper" +"Office Supplies","Los Angeles","United States","BF-11170","Ben Ferrer",0,"2014-07-12","CA-2014-110184",90036,"OFF-ST-10000107","Fellowes Super Stor/Drawer",44.955,9,"West",1221,249.75,"Home Office","2014-07-16","Standard Class","California","Storage" +"Technology","Chicago","United States","ME-17725","Max Engle",0.2,"2014-07-12","CA-2014-124807",60610,"TEC-AC-10002857","Verbatim 25 GB 6x Blu-ray Single Layer Recordable Disc. 1/Pack",3.2779999,4,"Central",7400,23.84,"Consumer","2014-07-15","Second Class","Illinois","Accessories" +"Technology","Los Angeles","United States","BF-11170","Ben Ferrer",0.2,"2014-07-12","CA-2014-110184",90036,"TEC-PH-10000439","GE DSL Phone Line Filter",28.7928,8,"West",1222,255.93599,"Home Office","2014-07-16","Standard Class","California","Phones" +"Furniture","Los Angeles","United States","GK-14620","Grace Kelly",0.2,"2014-07-13","US-2014-165862",90049,"FUR-TA-10002855","Bevis Round Conference Table Top & Single Column Base",4.3902,3,"West",4552,351.21603,"Corporate","2014-07-17","Standard Class","California","Tables" +"Furniture","Philadelphia","United States","ST-20530","Shui Tom",0.2,"2014-07-14","CA-2014-164182",19140,"FUR-FU-10001057","Tensor Track Tree Floor Lamp",1.1994,2,"East",6934,31.984,"Consumer","2014-07-18","Standard Class","Pennsylvania","Furnishings" +"Office Supplies","Newark","United States","SS-20515","Shirley Schmidt",0,"2014-07-14","CA-2014-124464",19711,"OFF-AP-10000576","Belkin 7 Outlet SurgeMaster II",11.0543995,1,"East",7624,39.480003,"Home Office","2014-07-20","Standard Class","Delaware","Appliances" +"Office Supplies","Philadelphia","United States","ST-20530","Shui Tom",0.2,"2014-07-14","CA-2014-164182",19140,"OFF-AR-10001044","BOSTON Ranger #55 Pencil Sharpener. 
Black",4.6782,2,"East",6935,41.584003,"Consumer","2014-07-18","Standard Class","Pennsylvania","Art" +"Office Supplies","New York City","United States","BF-10975","Barbara Fisher",0,"2014-07-14","CA-2014-109904",10009,"OFF-AR-10004999","Newell 315",4.485,3,"East",8053,17.94,"Corporate","2014-07-17","Second Class","New York","Art" +"Office Supplies","Aurora","United States","AW-10930","Arthur Wiediger",0.8,"2014-07-14","US-2014-103905",60505,"OFF-BI-10001098","Acco D-Ring Binder w/DublLock",-46.3946,7,"Central",2207,29.932001,"Home Office","2014-07-20","Standard Class","Illinois","Binders" +"Office Supplies","Phoenix","United States","PB-19150","Philip Brown",0.2,"2014-07-14","US-2014-150532",85023,"OFF-ST-10000760","Eldon Fold 'N Roll Cart System",6.291,5,"West",2996,55.920002,"Consumer","2014-07-21","Standard Class","Arizona","Storage" +"Technology","Aurora","United States","AW-10930","Arthur Wiediger",0.2,"2014-07-14","US-2014-103905",60505,"TEC-PH-10001552","I Need's 3d Hello Kitty Hybrid Silicone Case Cover for HTC One X 4g with 3d Hello Kitty Stylus Pen Green/pink",3.8272,4,"Central",2208,38.272,"Home Office","2014-07-20","Standard Class","Illinois","Phones" +"Technology","Philadelphia","United States","ST-20530","Shui Tom",0.4,"2014-07-14","CA-2014-164182",19140,"TEC-PH-10002070","Griffin GC36547 PowerJolt SE Lightning Charger",-2.249,1,"East",6932,13.4939995,"Consumer","2014-07-18","Standard Class","Pennsylvania","Phones" +"Technology","Philadelphia","United States","ST-20530","Shui Tom",0.4,"2014-07-14","CA-2014-164182",19140,"TEC-PH-10002583","iOttie HLCRIO102 Car Mount",-13.993,2,"East",6933,23.987999,"Consumer","2014-07-18","Standard Class","Pennsylvania","Phones" +"Office Supplies","Sioux Falls","United States","VW-21775","Victoria Wilson",0,"2014-07-15","CA-2014-115980",57103,"OFF-FA-10000304","Advantus Push Pins",2.6813998,3,"Central",3271,6.54,"Corporate","2014-07-19","Standard Class","South Dakota","Fasteners" +"Technology","Sioux Falls","United 
States","VW-21775","Victoria Wilson",0,"2014-07-15","CA-2014-115980",57103,"TEC-AC-10003709","Maxell 4.7GB DVD-R 5/Pack",1.3068,3,"Central",3270,2.97,"Corporate","2014-07-19","Standard Class","South Dakota","Accessories" +"Furniture","Tucson","United States","AH-10120","Adrian Hane",0.2,"2014-07-18","CA-2014-123295",85705,"FUR-CH-10002372","Office Star - Ergonomically Designed Knee Chair",-25.9136,4,"West",2103,259.13602,"Home Office","2014-07-18","Same Day","Arizona","Chairs" +"Office Supplies","New Rochelle","United States","JL-15130","Jack Lebron",0.2,"2014-07-18","CA-2014-138198",10801,"OFF-BI-10002103","Cardinal Slant-D Ring Binder. Heavy Gauge Vinyl",4.5188003,2,"East",9030,13.903999,"Consumer","2014-07-23","Standard Class","New York","Binders" +"Furniture","Bristol","United States","CA-12310","Christine Abelman",0.3,"2014-07-19","US-2014-150434",6010,"FUR-TA-10004152","Barricks 18"" x 48"" Non-Folding Utility Table with Bottom Storage Shelf",-4.032,1,"East",3355,70.56,"Corporate","2014-07-24","Standard Class","Connecticut","Tables" +"Office Supplies","Great Falls","United States","EM-14140","Eugene Moren",0.2,"2014-07-19","CA-2014-168158",59405,"OFF-BI-10001759","Acco Pressboard Covers with Storage Hooks. 14 7/8"" x 11"". Dark Blue",2.2098,2,"West",5589,6.096,"Home Office","2014-07-24","Standard Class","Montana","Binders" +"Office Supplies","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"OFF-BI-10002160","Acco Hanging Data Binders",1.8288,1,"East",3357,3.81,"Corporate","2014-07-24","Standard Class","Connecticut","Binders" +"Office Supplies","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"OFF-BI-10003694","Avery 3 1/2"" Diskette Storage Pages. 
10/Pack",9.6048,2,"East",3356,20.88,"Corporate","2014-07-24","Standard Class","Connecticut","Binders" +"Technology","Bristol","United States","CA-12310","Christine Abelman",0,"2014-07-19","US-2014-150434",6010,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",93.594795,2,"East",3354,359.97998,"Corporate","2014-07-24","Standard Class","Connecticut","Phones" +"Furniture","Dallas","United States","KM-16375","Katherine Murray",0.3,"2014-07-20","CA-2014-143903",75217,"FUR-CH-10002024","HON 5400 Series Task Chairs for Big and Tall",-140.196,2,"Central",3078,981.372,"Home Office","2014-07-24","Standard Class","Texas","Chairs" +"Furniture","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"FUR-FU-10003577","Nu-Dell Leatherette Frames",15.4872,3,"West",5972,43.02,"Consumer","2014-07-22","First Class","California","Furnishings" +"Furniture","Dallas","United States","KM-16375","Katherine Murray",0.6,"2014-07-20","CA-2014-143903",75217,"FUR-FU-10003724","Westinghouse Clip-On Gooseneck Lamps",-14.229,5,"Central",3077,16.740002,"Home Office","2014-07-24","Standard Class","Texas","Furnishings" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-AR-10003481","Newell 348",3.8047998,4,"West",8132,13.120001,"Corporate","2014-07-23","First Class","California","Art" +"Office Supplies","Springfield","United States","SW-20245","Scot Wooten",0.7,"2014-07-20","CA-2014-142965",45503,"OFF-BI-10000977","Ibico Plastic Spiral Binding Combs",-21.887999,3,"East",4396,27.359999,"Consumer","2014-07-20","Same Day","Ohio","Binders" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0.2,"2014-07-20","CA-2014-141726",92105,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",3.1552,2,"West",5970,8.7039995,"Consumer","2014-07-22","First Class","California","Binders" +"Office Supplies","San Francisco","United States","RD-19720","Roger 
Demir",0.2,"2014-07-20","CA-2014-157546",94122,"OFF-BI-10002498","Clear Mylar Reinforcing Strips",30.2778,6,"West",7229,89.712,"Consumer","2014-07-22","First Class","California","Binders" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"OFF-PA-10000418","Xerox 189",50.328,1,"West",5969,104.850006,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","San Diego","United States","CC-12145","Charles Crestani",0,"2014-07-20","CA-2014-141726",92105,"OFF-PA-10002230","Xerox 1897",9.7608,4,"West",5971,19.92,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-PA-10003971","Xerox 1965",5.8604,2,"West",8131,11.96,"Corporate","2014-07-23","First Class","California","Paper" +"Office Supplies","San Francisco","United States","RD-19720","Roger Demir",0,"2014-07-20","CA-2014-157546",94122,"OFF-PA-10004569","Wirebound Message Books. Two 4 1/4"" x 5"" Forms per Page",10.7301,3,"West",7230,22.83,"Consumer","2014-07-22","First Class","California","Paper" +"Office Supplies","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"OFF-ST-10001490","Hot File 7-Pocket. 
Floor Stand",160.62302,3,"West",8133,535.41003,"Corporate","2014-07-23","First Class","California","Storage" +"Office Supplies","Springfield","United States","SW-20245","Scot Wooten",0.2,"2014-07-20","CA-2014-142965",45503,"OFF-ST-10002583","Fellowes Neat Ideas Storage Cubes",-5.1968,1,"East",4395,25.984,"Consumer","2014-07-20","Same Day","Ohio","Storage" +"Office Supplies","Dallas","United States","KM-16375","Katherine Murray",0.2,"2014-07-20","CA-2014-143903",75217,"OFF-ST-10003306","Letter Size Cart",38.572197,3,"Central",3076,342.86398,"Home Office","2014-07-24","Standard Class","Texas","Storage" +"Technology","Riverside","United States","JD-15895","Jonathan Doherty",0,"2014-07-20","CA-2014-127866",92503,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 50/Pack",15.0984,2,"West",8130,41.940002,"Corporate","2014-07-23","First Class","California","Accessories" +"Furniture","San Francisco","United States","ME-18010","Michelle Ellison",0.2,"2014-07-21","CA-2014-116932",94122,"FUR-CH-10001215","Global Troy Executive Leather Low-Back Tilter",50.098,2,"West",1872,801.568,"Corporate","2014-07-25","Standard Class","California","Chairs" +"Furniture","Dallas","United States","HM-14860","Harry Marie",0.3,"2014-07-21","CA-2014-129189",75217,"FUR-CH-10004997","Hon Every-Day Series Multi-Task Chairs",-93.99,5,"Central",8024,657.93005,"Corporate","2014-07-25","Standard Class","Texas","Chairs" +"Furniture","San Francisco","United States","ME-18010","Michelle Ellison",0.2,"2014-07-21","CA-2014-116932",94122,"FUR-TA-10004175","Hon 30"" x 60"" Table with Locking Drawer",27.2848,1,"West",1873,272.848,"Corporate","2014-07-25","Standard Class","California","Tables" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.8,"2014-07-21","CA-2014-129189",75217,"OFF-AP-10000124","Acco 6 Outlet Guardian Basic Surge Suppressor",-12.979199,3,"Central",8022,4.992,"Corporate","2014-07-25","Standard Class","Texas","Appliances" +"Office Supplies","San Francisco","United 
States","ME-18010","Michelle Ellison",0,"2014-07-21","CA-2014-116932",94122,"OFF-AR-10002067","Newell 334",25.792002,5,"West",1871,99.2,"Corporate","2014-07-25","Standard Class","California","Art" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.8,"2014-07-21","CA-2014-129189",75217,"OFF-BI-10000494","Acco Economy Flexible Poly Round Ring Binder",-1.8269999,1,"Central",8025,1.0439999,"Corporate","2014-07-25","Standard Class","Texas","Binders" +"Office Supplies","Dallas","United States","HM-14860","Harry Marie",0.2,"2014-07-21","CA-2014-129189",75217,"OFF-EN-10003567","Inter-Office Recycled Envelopes. Brown Kraft. Button-String.10"" x 13"" . 100/Box",29.673,5,"Central",8023,87.92,"Corporate","2014-07-25","Standard Class","Texas","Envelopes" +"Technology","Omaha","United States","PG-18820","Patrick Gardner",0,"2014-07-21","CA-2014-109890",68104,"TEC-PH-10004100","Griffin GC17055 Auxiliary Audio Cable",10.074399,2,"Central",8481,35.980003,"Consumer","2014-07-27","Standard Class","Nebraska","Phones" +"Furniture","San Francisco","United States","NP-18325","Naresj Patel",0.2,"2014-07-22","CA-2014-117464",94122,"FUR-CH-10000155","Global Comet Stacking Armless Chair",71.771996,3,"West",7168,717.72003,"Consumer","2014-07-24","Second Class","California","Chairs" +"Furniture","San Francisco","United States","NP-18325","Naresj Patel",0.2,"2014-07-22","CA-2014-117464",94122,"FUR-TA-10004767","Safco Drafting Table",19.1646,3,"West",7170,170.352,"Consumer","2014-07-24","Second Class","California","Tables" +"Office Supplies","New York City","United States","EM-14065","Erin Mull",0,"2014-07-22","CA-2014-127691",10024,"OFF-AR-10002053","Premium Writing Pencils. Soft. 
#2 by Central Association for the Blind",1.6688,2,"East",483,5.96,"Consumer","2014-07-27","Standard Class","New York","Art" +"Office Supplies","San Francisco","United States","NP-18325","Naresj Patel",0,"2014-07-22","CA-2014-117464",94122,"OFF-AR-10003190","Newell 32",3.2256002,4,"West",7167,11.52,"Consumer","2014-07-24","Second Class","California","Art" +"Office Supplies","Los Angeles","United States","AB-10255","Alejandro Ballentine",0,"2014-07-22","CA-2014-122679",90008,"OFF-AR-10004757","Crayola Colored Pencils",6.4944005,6,"West",9484,19.68,"Home Office","2014-07-28","Standard Class","California","Art" +"Office Supplies","San Francisco","United States","NP-18325","Naresj Patel",0,"2014-07-22","CA-2014-117464",94122,"OFF-ST-10003058","Eldon Mobile Mega Data Cart Mega Stackable Add-On Trays",68.585,10,"West",7169,236.5,"Consumer","2014-07-24","Second Class","California","Storage" +"Office Supplies","Houston","United States","JL-15850","John Lucas",0.2,"2014-07-22","US-2014-164644",77095,"OFF-ST-10003123","Fellowes Bases and Tops For Staxonsteel/High-Stak Systems",1.3316,1,"Central",9675,26.632002,"Consumer","2014-07-24","Second Class","Texas","Storage" +"Technology","New York City","United States","EM-14065","Erin Mull",0,"2014-07-22","CA-2014-127691",10024,"TEC-AC-10002567","Logitech G602 Wireless Gaming Mouse",57.5928,2,"East",484,159.98,"Consumer","2014-07-27","Standard Class","New York","Accessories" +"Office Supplies","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"OFF-AR-10000658","Newell 324",0.92399997,1,"West",375,9.24,"Consumer","2014-07-27","Standard Class","Arizona","Art" +"Office Supplies","Tucson","United States","AG-10900","Arthur Gainer",0.7,"2014-07-23","US-2014-119137",85705,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",-5.712,5,"West",373,8.160001,"Consumer","2014-07-27","Standard Class","Arizona","Binders" +"Office Supplies","San Francisco","United States","NC-18535","Nick 
Crebassa",0,"2014-07-23","CA-2014-145254",94122,"OFF-SU-10004664","Acme Softgrip Scissors",11.803,5,"West",9423,40.7,"Corporate","2014-07-27","Standard Class","California","Supplies" +"Technology","New York City","United States","AG-10270","Alejandro Grove",0,"2014-07-23","CA-2014-103058",10011,"TEC-AC-10001314","Case Logic 2.4GHz Wireless Keyboard",7.9984,2,"East",7411,99.98,"Consumer","2014-07-24","First Class","New York","Accessories" +"Technology","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"TEC-AC-10002076","Microsoft Natural Keyboard Elite",-29.94,10,"West",376,479.04,"Consumer","2014-07-27","Standard Class","Arizona","Accessories" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0,"2014-07-23","CA-2014-145254",94122,"TEC-AC-10002167","Imation 8gb Micro Traveldrive Usb 2.0 Flash Drive",4.9500003,3,"West",9425,45,"Corporate","2014-07-27","Standard Class","California","Accessories" +"Technology","Tucson","United States","AG-10900","Arthur Gainer",0.2,"2014-07-23","US-2014-119137",85705,"TEC-AC-10003911","NETGEAR AC1750 Dual Band Gigabit Smart WiFi Router",179.1888,8,"West",374,1023.93604,"Consumer","2014-07-27","Standard Class","Arizona","Accessories" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0.2,"2014-07-23","CA-2014-145254",94122,"TEC-PH-10000441","VTech DS6151",60.4752,6,"West",9422,604.752,"Corporate","2014-07-27","Standard Class","California","Phones" +"Technology","San Francisco","United States","NC-18535","Nick Crebassa",0.2,"2014-07-23","CA-2014-145254",94122,"TEC-PH-10004531","AT&T CL2909",37.797,3,"West",9424,302.376,"Corporate","2014-07-27","Standard Class","California","Phones" +"Furniture","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"FUR-FU-10004351","Staple-based wall hangings",34.284805,8,"West",2507,77.92,"Consumer","2014-07-27","Second Class","California","Furnishings" +"Office Supplies","Los 
Angeles","United States","VF-21715","Vicky Freymann",0,"2014-07-25","CA-2014-146528",90045,"OFF-PA-10002195","Xerox 1966",3.1752,1,"West",3647,6.48,"Home Office","2014-07-27","Second Class","California","Paper" +"Office Supplies","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"OFF-ST-10001228","Fellowes Personal Hanging Folder Files. Navy",15.0416,4,"West",2505,53.72,"Consumer","2014-07-27","Second Class","California","Storage" +"Office Supplies","San Francisco","United States","KL-16645","Ken Lonsdale",0,"2014-07-25","CA-2014-143917",94122,"OFF-SU-10000151","High Speed Automatic Electric Letter Opener",327.506,5,"West",2506,8187.65,"Consumer","2014-07-27","Second Class","California","Supplies" +"Office Supplies","Los Angeles","United States","VF-21715","Vicky Freymann",0,"2014-07-25","CA-2014-146528",90045,"OFF-SU-10002522","Acme Kleen Earth Office Shears",4.5008,4,"West",3648,15.52,"Home Office","2014-07-27","Second Class","California","Supplies" +"Furniture","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"FUR-CH-10000553","Metal Folding Chairs. Beige. 4/Carton",18.3276,2,"South",7341,67.880005,"Consumer","2014-08-01","Standard Class","Georgia","Chairs" +"Furniture","North Las Vegas","United States","KM-16720","Kunst Miller",0.2,"2014-07-26","CA-2014-126760",89031,"FUR-CH-10003312","Hon 2090 “Pillow Soft” Series Mid Back Swivel/Tilt Chairs",-109.5822,3,"West",3839,674.352,"Consumer","2014-08-02","Standard Class","Nevada","Chairs" +"Furniture","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"FUR-FU-10000719","DAX Cubicle Frames. 
8-1/2 x 11",9.2556,3,"South",7343,25.710001,"Consumer","2014-08-01","Standard Class","Georgia","Furnishings" +"Furniture","North Las Vegas","United States","KM-16720","Kunst Miller",0,"2014-07-26","CA-2014-126760",89031,"FUR-FU-10004018","Tensor Computer Mounted Lamp",36.1827,9,"West",3840,134.01,"Consumer","2014-08-02","Standard Class","Nevada","Furnishings" +"Furniture","San Antonio","United States","LF-17185","Luke Foster",0.6,"2014-07-26","CA-2014-169019",78207,"FUR-FU-10004666","DAX Clear Channel Poster Frame",-10.060201,3,"Central",9776,17.496,"Consumer","2014-07-30","Standard Class","Texas","Furnishings" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-AP-10003281","Acco 6 Outlet Guardian Standard Surge Suppressor",-12.09,2,"Central",9780,4.8360004,"Consumer","2014-07-30","Standard Class","Texas","Appliances" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10001524","GBC Premium Transparent Covers with Diagonal Lined Pattern",-26.854399,4,"Central",9777,16.784,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10001679","GBC Instant Index System for Binding Systems",-13.320001,5,"Central",9779,8.88,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","San Antonio","United States","LF-17185","Luke Foster",0.8,"2014-07-26","CA-2014-169019",78207,"OFF-BI-10004995","GBC DocuBind P400 Electric Binding System",-3701.8928,8,"Central",9775,2177.584,"Consumer","2014-07-30","Standard Class","Texas","Binders" +"Office Supplies","Atlanta","United States","SG-20470","Sheri Gordon",0,"2014-07-26","CA-2014-116190",30318,"OFF-LA-10002762","Avery 485",76.558304,13,"South",7342,162.89,"Consumer","2014-08-01","Standard Class","Georgia","Labels" +"Office Supplies","Chicago","United 
States","CL-11890","Carl Ludwig",0.2,"2014-07-26","US-2014-155894",60623,"OFF-ST-10004804","Belkin 19"" Vented Equipment Shelf. Black",-29.343601,3,"Central",2344,123.552,"Consumer","2014-07-30","Second Class","Illinois","Storage" +"Technology","Draper","United States","JO-15145","Jack O'Briant",0,"2014-07-26","CA-2014-159121",84020,"TEC-AC-10002006","Memorex Micro Travel Drive 16 GB",34.6983,7,"West",6423,111.93,"Corporate","2014-08-01","Standard Class","Utah","Accessories" +"Technology","San Antonio","United States","LF-17185","Luke Foster",0.2,"2014-07-26","CA-2014-169019",78207,"TEC-AC-10002076","Microsoft Natural Keyboard Elite",-26.946001,9,"Central",9778,431.13602,"Consumer","2014-07-30","Standard Class","Texas","Accessories" +"Technology","North Las Vegas","United States","KM-16720","Kunst Miller",0,"2014-07-26","CA-2014-126760",89031,"TEC-AC-10004814","Logitech Illuminated Ultrathin Keyboard with Backlighting",70.0977,3,"West",3841,170.97,"Consumer","2014-08-02","Standard Class","Nevada","Accessories" +"Technology","North Las Vegas","United States","KM-16720","Kunst Miller",0.2,"2014-07-26","CA-2014-126760",89031,"TEC-PH-10001363","Apple iPhone 5S",113.998,2,"West",3838,911.9841,"Consumer","2014-08-02","Standard Class","Nevada","Phones" +"Office Supplies","New York City","United States","AS-10045","Aaron Smayling",0,"2014-07-27","US-2014-150126",10035,"OFF-PA-10002709","Xerox 1956",32.232197,11,"East",4580,65.78,"Corporate","2014-08-02","Standard Class","New York","Paper" +"Office Supplies","Los Angeles","United States","BB-10990","Barry Blumstein",0,"2014-07-27","CA-2014-169642",90036,"OFF-ST-10002574","SAFCO Commercial Wire Shelving. 
Black",0,2,"West",8888,276.28,"Corporate","2014-07-30","Second Class","California","Storage" +"Technology","San Francisco","United States","GW-14605","Giulietta Weimer",0,"2014-07-27","CA-2014-124709",94122,"TEC-AC-10002842","WD My Passport Ultra 2TB Portable External Hard Drive",38.08,2,"West",3866,238,"Consumer","2014-07-29","Second Class","California","Accessories" +"Furniture","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"FUR-FU-10003623","DataProducts Ampli Magnifier Task Lamp. Black.",12.9888,6,"South",5688,129.888,"Consumer","2014-07-28","Same Day","Florida","Furnishings" +"Office Supplies","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"OFF-EN-10000781","#10- 4 1/8"" x 9 1/2"" Recycled Envelopes",16.5186,7,"South",5689,48.944004,"Consumer","2014-07-28","Same Day","Florida","Envelopes" +"Office Supplies","Apopka","United States","CK-12205","Chloris Kastensmidt",0.2,"2014-07-28","CA-2014-131541",32712,"OFF-FA-10000621","OIC Colored Binder Clips. 
Assorted Sizes",5.191,5,"South",5687,14.320001,"Consumer","2014-07-28","Same Day","Florida","Fasteners" +"Furniture","Seattle","United States","TS-21430","Tom Stivers",0,"2014-07-30","CA-2014-111192",98103,"FUR-BO-10002916","Rush Hierlooms Collection 1"" Thick Stackable Bookcases",259.8896,8,"West",5162,1367.8401,"Corporate","2014-08-05","Standard Class","Washington","Bookcases" +"Furniture","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"FUR-FU-10000629","9-3/4 Diameter Round Wall Clock",12.1352005,4,"South",1839,44.128,"Corporate","2014-08-05","Standard Class","North Carolina","Furnishings" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-AP-10001005","Honeywell Quietcare HEPA Air Cleaner",10.2245,1,"South",1840,62.920002,"Corporate","2014-08-05","Standard Class","North Carolina","Appliances" +"Office Supplies","San Francisco","United States","IL-15100","Ivan Liston",0.2,"2014-08-01","CA-2014-152296",94122,"OFF-BI-10004506","Wilson Jones data.warehouse D-Ring Binders with DublLock",6.9132,3,"West",2173,19.752,"Consumer","2014-08-03","First Class","California","Binders" +"Office Supplies","Philadelphia","United States","AS-10630","Ann Steele",0.2,"2014-08-01","CA-2014-162684",19120,"OFF-FA-10000992","Acco Clips to Go Binder Clips. 
24 Clips in Two Sizes",1.9169999,2,"East",2411,5.68,"Home Office","2014-08-06","Standard Class","Pennsylvania","Fasteners" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-LA-10000240","Self-Adhesive Address Labels for Typewriters by Universal",5.9211,3,"South",1838,17.544,"Corporate","2014-08-05","Standard Class","North Carolina","Labels" +"Office Supplies","Charlotte","United States","BF-10975","Barbara Fisher",0.2,"2014-08-01","CA-2014-117345",28205,"OFF-PA-10002377","Xerox 1916",29.363998,2,"South",1841,78.304,"Corporate","2014-08-05","Standard Class","North Carolina","Paper" +"Office Supplies","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"OFF-BI-10001758","Wilson Jones 14 Line Acrylic Coated Pressboard Data Binders",12.549,5,"Central",6173,26.7,"Home Office","2014-08-08","Standard Class","Missouri","Binders" +"Office Supplies","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"OFF-ST-10002011","Smead Adjustable Mobile File Trolley with Lockable Top",226.3626,2,"Central",6175,838.38,"Home Office","2014-08-08","Standard Class","Missouri","Storage" +"Technology","Springfield","United States","NZ-18565","Nick Zandusky",0,"2014-08-02","US-2014-106299",65807,"TEC-AC-10003237","Memorex Micro Travel Drive 4 GB",9.116,2,"Central",6174,21.2,"Home Office","2014-08-08","Standard Class","Missouri","Accessories" +"Furniture","Denver","United States","BS-11755","Bruce Stewart",0.5,"2014-08-03","CA-2014-133690",80219,"FUR-TA-10004289","BoxOffice By Design Rectangular and Half-Moon Meeting Room Tables",-161.87502,2,"West",202,218.75,"Consumer","2014-08-05","First Class","Colorado","Tables" +"Office Supplies","Denver","United States","BS-11755","Bruce Stewart",0.2,"2014-08-03","CA-2014-133690",80219,"OFF-AP-10003622","Bravo II Megaboss 12-Amp Hard Body Upright. Replacement Belts. 
2 Belts per Pack",0.2925,1,"West",203,2.6,"Consumer","2014-08-05","First Class","Colorado","Appliances" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-PA-10001019","Xerox 1884",18.7812,2,"East",856,39.960003,"Consumer","2014-08-08","Standard Class","New York","Paper" +"Office Supplies","Glendale","United States","GH-14410","Gary Hansen",0.2,"2014-08-03","CA-2014-128986",85301,"OFF-PA-10001289","White Computer Printout Paper by Universal",33.7212,3,"West",3156,93.023994,"Home Office","2014-08-05","Second Class","Arizona","Paper" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-ST-10003221","Staple magnet",5.7671995,2,"East",858,21.359999,"Consumer","2014-08-08","Standard Class","New York","Storage" +"Office Supplies","New York City","United States","BK-11260","Berenike Kampe",0,"2014-08-03","CA-2014-125612",10035,"OFF-SU-10002537","Acme Box Cutter Scissors",26.598,10,"East",857,102.3,"Consumer","2014-08-08","Standard Class","New York","Supplies" +"Office Supplies","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"OFF-AP-10001271","Eureka The Boss Cordless Rechargeable Stick Vac",27.529202,2,"East",9157,101.96,"Corporate","2014-08-08","Standard Class","Maine","Appliances" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-AP-10003590","Hoover WindTunnel Plus Canister Vacuum",305.13,3,"West",1376,1089.75,"Consumer","2014-08-09","Second Class","Utah","Appliances" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-AR-10000896","Newell 329",4.264,5,"West",1378,16.4,"Consumer","2014-08-09","Second Class","Utah","Art" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie 
Dominguez",0.2,"2014-08-04","CA-2014-126361",84062,"OFF-BI-10002852","Ibico Standard Transparent Covers",4.7791996,1,"West",1381,13.184,"Consumer","2014-08-09","Second Class","Utah","Binders" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-PA-10000806","Xerox 1934",219.44159,8,"West",1377,447.84,"Consumer","2014-08-09","Second Class","Utah","Paper" +"Office Supplies","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"OFF-PA-10004353","Southworth 25% Cotton Premium Laser Paper and Envelopes",124.67519,13,"East",9158,259.74002,"Corporate","2014-08-08","Standard Class","Maine","Paper" +"Office Supplies","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0,"2014-08-04","CA-2014-126361",84062,"OFF-ST-10002289","Safco Wire Cube Shelving System. For Use as 4 or 5 14"" Cubes. Black",7.945,5,"West",1380,158.9,"Consumer","2014-08-09","Second Class","Utah","Storage" +"Technology","Bangor","United States","ML-17395","Marina Lichtenstein",0,"2014-08-04","CA-2014-134215",4401,"TEC-AC-10002473","Maxell 4.7GB DVD-R",104.72219,9,"East",9159,255.42,"Corporate","2014-08-08","Standard Class","Maine","Accessories" +"Technology","Pleasant Grove","United States","VD-21670","Valerie Dominguez",0.2,"2014-08-04","CA-2014-126361",84062,"TEC-PH-10002310","Panasonic KX T7731-B Digital phone",34.996502,5,"West",1379,399.96,"Consumer","2014-08-09","Second Class","Utah","Phones" +"Furniture","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"FUR-CH-10002335","Hon GuestStacker Chair",294.67102,5,"East",5867,1133.3501,"Corporate","2014-08-07","Second Class","Connecticut","Chairs" +"Furniture","Los Angeles","United States","CS-12130","Chad Sievert",0.2,"2014-08-05","CA-2014-118962",90004,"FUR-CH-10003817","Global Value Steno Chair. 
Gray",21.259,7,"West",174,340.144,"Consumer","2014-08-09","Standard Class","California","Chairs" +"Furniture","Texas City","United States","GZ-14470","Gary Zandusky",0.3,"2014-08-05","CA-2014-124723",77590,"FUR-TA-10001307","SAFCO PlanMaster Heigh-Adjustable Drafting Table Base. 43w x 30d x 30-37h. Black",41.934002,2,"Central",8894,489.23,"Consumer","2014-08-12","Standard Class","Texas","Tables" +"Office Supplies","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"OFF-AP-10002765","Fellowes Advanced Computer Series Surge Protectors",22.2516,3,"East",5865,79.47,"Corporate","2014-08-07","Second Class","Connecticut","Appliances" +"Office Supplies","Meriden","United States","RS-19420","Ricardo Sperren",0,"2014-08-05","US-2014-117968",6450,"OFF-AR-10004165","Binney & Smith inkTank Erasable Pocket Highlighter. Chisel Tip. Yellow",2.0064,2,"East",5866,4.56,"Corporate","2014-08-07","Second Class","Connecticut","Art" +"Office Supplies","New York City","United States","SP-20650","Stephanie Phelps",0,"2014-08-05","CA-2014-110065",10009,"OFF-AR-10004165","Binney & Smith inkTank Erasable Pocket Highlighter. Chisel Tip. Yellow",7.0224,7,"East",6458,15.96,"Corporate","2014-08-11","Standard Class","New York","Art" +"Office Supplies","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"OFF-AR-10004602","Boston KS Multi-Size Manual Pencil Sharpener",12.874399,2,"West",5360,45.980003,"Consumer","2014-08-11","Standard Class","California","Art" +"Office Supplies","Los Angeles","United States","CS-12130","Chad Sievert",0,"2014-08-05","CA-2014-118962",90004,"OFF-PA-10000659","Adams Phone Message Book. Professional. 400 Message Capacity. 
5 3/6” x 11”",9.841801,3,"West",172,20.94,"Consumer","2014-08-09","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","CS-12130","Chad Sievert",0,"2014-08-05","CA-2014-118962",90004,"OFF-PA-10001144","Xerox 1913",53.260803,2,"West",173,110.96,"Consumer","2014-08-09","Standard Class","California","Paper" +"Office Supplies","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"OFF-ST-10000321","Akro Stacking Bins",0.63119996,2,"West",5359,15.78,"Consumer","2014-08-11","Standard Class","California","Storage" +"Technology","San Francisco","United States","SS-20590","Sonia Sunley",0,"2014-08-05","CA-2014-150490",94122,"TEC-AC-10004510","Logitech Desktop MK120 Mouse and keyboard Combo",1.636,1,"West",5358,16.359999,"Consumer","2014-08-11","Standard Class","California","Accessories" +"Technology","New York City","United States","SP-20650","Stephanie Phelps",0,"2014-08-05","CA-2014-110065",10009,"TEC-PH-10002468","Plantronics CS 50-USB - headset - Convertible. Monaural",36.7173,1,"East",6457,135.98999,"Corporate","2014-08-11","Standard Class","New York","Phones" +"Technology","Columbia","United States","RF-19735","Roland Fjeld",0,"2014-08-06","CA-2014-115357",29203,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 
50/Pack",22.647602,3,"South",3773,62.91,"Consumer","2014-08-11","Second Class","South Carolina","Accessories" +"Technology","New York City","United States","TB-21625","Trudy Brown",0,"2014-08-06","CA-2014-137575",10035,"TEC-AC-10004571","Logitech G700s Rechargeable Gaming Mouse",83.9916,2,"East",8544,199.98,"Consumer","2014-08-11","Standard Class","New York","Accessories" +"Furniture","Jacksonville","United States","VG-21790","Vivek Gonzalez",0.2,"2014-08-08","CA-2014-156790",32216,"FUR-BO-10000468","O'Sullivan 2-Shelf Heavy-Duty Bookcases",-7.7728,4,"South",8955,155.456,"Consumer","2014-08-10","Second Class","Florida","Bookcases" +"Furniture","Glendale","United States","MB-17305","Maria Bertelson",0.2,"2014-08-08","CA-2014-151708",85301,"FUR-FU-10001602","Eldon Delta Triangular Chair Mat. 52"" x 58"". Clear",-3.0344,4,"West",1466,121.376,"Consumer","2014-08-14","Standard Class","Arizona","Furnishings" +"Furniture","San Diego","United States","LR-16915","Lena Radford",0,"2014-08-08","CA-2014-131450",92024,"FUR-FU-10001979","Dana Halogen Swing-Arm Architect Lamp",91.7728,8,"West",598,327.76,"Consumer","2014-08-15","Standard Class","California","Furnishings" +"Furniture","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"FUR-FU-10002088","Nu-Dell Float Frame 11 x 14 1/2",22.629602,6,"Central",5072,53.88,"Home Office","2014-08-12","Standard Class","Michigan","Furnishings" +"Office Supplies","Trenton","United States","MA-17560","Matt Abelman",0.1,"2014-08-08","CA-2014-124478",48183,"OFF-AP-10002495","Acco Smartsocket Table Surge Protector. 
6 Color-Coded Adapter Outlets",37.23,3,"Central",5070,167.535,"Home Office","2014-08-12","Standard Class","Michigan","Appliances" +"Office Supplies","San Diego","United States","LR-16915","Lena Radford",0,"2014-08-08","CA-2014-131450",92024,"OFF-AP-10004708","Fellowes Superior 10 Outlet Split Surge Protector",22.074799,2,"West",595,76.119995,"Consumer","2014-08-15","Standard Class","California","Appliances" +"Office Supplies","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"OFF-EN-10002500","Globe Weis Peel & Seel First Class Envelopes",17.253,3,"Central",5071,38.34,"Home Office","2014-08-12","Standard Class","Michigan","Envelopes" +"Office Supplies","Vacaville","United States","TM-21010","Tamara Manning",0,"2014-08-08","CA-2014-154669",95687,"OFF-ST-10000532","Advantus Rolling Drawer Organizers",110.0528,11,"West",2751,423.28,"Consumer","2014-08-11","Second Class","California","Storage" +"Technology","Glendale","United States","MB-17305","Maria Bertelson",0.2,"2014-08-08","CA-2014-151708",85301,"TEC-AC-10001767","SanDisk Ultra 64 GB MicroSDHC Class 10 Memory Card",-10.7973,3,"West",1467,95.976,"Consumer","2014-08-14","Standard Class","Arizona","Accessories" +"Technology","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"TEC-CO-10001571","Sharp 1540cs Digital Laser Copier",274.995,1,"Central",5069,549.99005,"Home Office","2014-08-12","Standard Class","Michigan","Copiers" +"Technology","San Diego","United States","LR-16915","Lena Radford",0.2,"2014-08-08","CA-2014-131450",92024,"TEC-CO-10004115","Sharp AL-1530CS Digital Copier",434.9913,3,"West",596,1199.976,"Consumer","2014-08-15","Standard Class","California","Copiers" +"Technology","Trenton","United States","MA-17560","Matt Abelman",0,"2014-08-08","CA-2014-124478",48183,"TEC-PH-10001128","Motorola Droid Maxx",83.99439,2,"Central",5073,299.97998,"Home Office","2014-08-12","Standard Class","Michigan","Phones" +"Technology","San 
Diego","United States","LR-16915","Lena Radford",0.2,"2014-08-08","CA-2014-131450",92024,"TEC-PH-10002398","AT&T 1070 Corded Phone",55.745003,5,"West",597,445.96,"Consumer","2014-08-15","Standard Class","California","Phones" +"Office Supplies","Phoenix","United States","RD-19720","Roger Demir",0.7,"2014-08-09","CA-2014-161249",85023,"OFF-BI-10001097","Avery Hole Reinforcements",-6.5415,5,"West",7434,9.345,"Consumer","2014-08-13","Standard Class","Arizona","Binders" +"Office Supplies","Seattle","United States","JH-15985","Joseph Holt",0.2,"2014-08-09","CA-2014-169726",98103,"OFF-BI-10004600","Ibico Ibimaster 300 Manual Binding System",643.98254,7,"West",3629,2060.744,"Consumer","2014-08-13","Standard Class","Washington","Binders" +"Office Supplies","Phoenix","United States","RD-19720","Roger Demir",0.2,"2014-08-09","CA-2014-161249",85023,"OFF-FA-10004838","Super Bands. 12/Pack",-0.9486,3,"West",7433,4.464,"Consumer","2014-08-13","Standard Class","Arizona","Fasteners" +"Office Supplies","San Diego","United States","ED-13885","Emily Ducich",0,"2014-08-09","CA-2014-110527",92037,"OFF-LA-10000262","Avery 494",9.6048,8,"West",1741,20.88,"Home Office","2014-08-16","Standard Class","California","Labels" +"Office Supplies","Fresno","United States","GM-14500","Gene McClure",0,"2014-08-09","CA-2014-141901",93727,"OFF-PA-10001667","Great White Multi-Use Recycled Paper (20Lb. 
and 84 Bright)",2.6909997,1,"West",7070,5.98,"Consumer","2014-08-14","Standard Class","California","Paper" +"Office Supplies","Saint Petersburg","United States","AG-10525","Andy Gerbode",0.2,"2014-08-09","CA-2014-167850",33710,"OFF-PA-10001937","Xerox 21",5.4431996,3,"South",316,15.552,"Corporate","2014-08-16","Standard Class","Florida","Paper" +"Technology","Seattle","United States","MG-17650","Matthew Grinstein",0.2,"2014-08-09","CA-2014-166471",98103,"TEC-PH-10000038","Jawbone MINI JAMBOX Wireless Bluetooth Speaker",-43.833595,2,"West",3393,219.168,"Home Office","2014-08-13","Standard Class","Washington","Phones" +"Technology","Seattle","United States","MG-17650","Matthew Grinstein",0.2,"2014-08-09","CA-2014-166471",98103,"TEC-PH-10001530","Cisco Unified IP Phone 7945G VoIP phone",68.198,4,"West",3392,1091.1681,"Home Office","2014-08-13","Standard Class","Washington","Phones" +"Technology","Saint Petersburg","United States","AG-10525","Andy Gerbode",0.2,"2014-08-09","CA-2014-167850",33710,"TEC-PH-10002398","AT&T 1070 Corded Phone",22.298,2,"South",315,178.384,"Corporate","2014-08-16","Standard Class","Florida","Phones" +"Furniture","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"FUR-FU-10003691","Eldon Image Series Desk Accessories. Ebony",5.4340005,1,"West",1239,12.35,"Consumer","2014-08-15","Standard Class","Washington","Furnishings" +"Office Supplies","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"OFF-AR-10003903","Sanford 52201 APSCO Electric Pencil Sharpener",10.652201,1,"West",1240,40.97,"Consumer","2014-08-15","Standard Class","Washington","Art" +"Office Supplies","Seattle","United States","GM-14680","Greg Matthias",0,"2014-08-11","CA-2014-127012",98105,"OFF-FA-10004854","Vinyl Coated Wire Paper Clips in Organizer Box. 
800/Box",10.791201,2,"West",1241,22.96,"Consumer","2014-08-15","Standard Class","Washington","Fasteners" +"Office Supplies","New York City","United States","KL-16645","Ken Lonsdale",0,"2014-08-11","CA-2014-154641",10035,"OFF-ST-10004459","Tennsco Single-Tier Lockers",18.767,1,"East",3230,375.34,"Consumer","2014-08-16","Standard Class","New York","Storage" +"Furniture","San Francisco","United States","BW-11200","Ben Wallace",0,"2014-08-12","CA-2014-109897",94122,"FUR-FU-10002878","Seth Thomas 14"" Day/Date Wall Clock",31.6128,3,"West",8245,85.44,"Consumer","2014-08-16","Standard Class","California","Furnishings" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-AP-10002403","Acco Smartsocket Color-Coded Six-Outlet AC Adapter Model Surge Protectors",15.8436,6,"South",8290,211.24799,"Home Office","2014-08-16","Standard Class","Florida","Appliances" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-AR-10004344","Bulldog Vacuum Base Pencil Sharpener",4.1965003,5,"South",8288,47.960003,"Home Office","2014-08-16","Standard Class","Florida","Art" +"Office Supplies","Woodstock","United States","LL-16840","Lauren Leatherbury",0,"2014-08-12","CA-2014-153927",30188,"OFF-BI-10000138","Acco Translucent Poly Ring Binders",6.7391996,3,"South",9882,14.04,"Consumer","2014-08-13","First Class","Georgia","Binders" +"Office Supplies","Lakewood","United States","CS-12175","Charles Sheldon",0,"2014-08-12","CA-2014-109302",8701,"OFF-BI-10002854","Performers Binder/Pad Holder. 
Black",98.104996,7,"East",6769,196.20999,"Corporate","2014-08-16","Standard Class","New Jersey","Binders" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-LA-10000443","Avery 501",0.9962999,1,"South",8292,2.9520001,"Home Office","2014-08-16","Standard Class","Florida","Labels" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-PA-10000295","Xerox 229",10.886399,6,"South",8287,31.104,"Home Office","2014-08-16","Standard Class","Florida","Paper" +"Office Supplies","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"OFF-SU-10000432","Acco Side-Punched Conventional Columnar Pads",-1.041,2,"South",8291,5.552,"Home Office","2014-08-16","Standard Class","Florida","Supplies" +"Technology","Woodstock","United States","LL-16840","Lauren Leatherbury",0,"2014-08-12","CA-2014-153927",30188,"TEC-AC-10000023","Maxell 74 Minute CD-R Spindle. 
50/Pack",98.1396,13,"South",9883,272.61002,"Consumer","2014-08-13","First Class","Georgia","Accessories" +"Technology","Miami","United States","DH-13675","Duane Huffman",0.2,"2014-08-12","CA-2014-156244",33180,"TEC-AC-10002473","Maxell 4.7GB DVD-R",41.718597,7,"South",8289,158.928,"Home Office","2014-08-16","Standard Class","Florida","Accessories" +"Technology","San Francisco","United States","BW-11200","Ben Wallace",0.2,"2014-08-12","CA-2014-109897",94122,"TEC-PH-10003691","BlackBerry Q10",50.396,8,"West",8244,806.336,"Consumer","2014-08-16","Standard Class","California","Phones" +"Furniture","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"FUR-CH-10003833","Novimex Fabric Task Chair",-12.196,4,"West",9827,195.136,"Consumer","2014-08-19","Standard Class","California","Chairs" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-08-15","US-2014-164406",94122,"OFF-AP-10003287","Tripp Lite TLP810NET Broadband Surge for Modem/Fax",42.8148,3,"West",9823,152.90999,"Consumer","2014-08-19","Standard Class","California","Appliances" +"Office Supplies","Hollywood","United States","CS-12355","Christine Sundaresam",0.2,"2014-08-15","CA-2014-109043",33021,"OFF-AP-10004708","Fellowes Superior 10 Outlet Split Surge Protector",17.127,5,"South",8996,152.23999,"Consumer","2014-08-17","First Class","Florida","Appliances" +"Office Supplies","Fairfield","United States","CS-12355","Christine Sundaresam",0,"2014-08-15","CA-2014-141005",6824,"OFF-BI-10001989","Premium Transparent Presentation Covers by GBC",30.2112,3,"East",6793,62.940002,"Consumer","2014-08-18","First Class","Connecticut","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"OFF-BI-10002309","Avery Heavy-Duty EZD Binder With Locking Rings",6.2495995,4,"West",9825,17.855999,"Consumer","2014-08-19","Standard Class","California","Binders" +"Office 
Supplies","Dallas","United States","KH-16510","Keith Herrera",0.8,"2014-08-15","CA-2014-138023",75081,"OFF-BI-10003638","GBC Durable Plastic Covers",-52.631996,8,"Central",3295,30.96,"Consumer","2014-08-18","First Class","Texas","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0.2,"2014-08-15","US-2014-164406",94122,"OFF-BI-10003638","GBC Durable Plastic Covers",15.093,3,"West",9826,46.440002,"Consumer","2014-08-19","Standard Class","California","Binders" +"Office Supplies","San Francisco","United States","BD-11605","Brian Dahlen",0,"2014-08-15","US-2014-164406",94122,"OFF-PA-10000167","Xerox 1925",41.822998,3,"West",9824,92.94,"Consumer","2014-08-19","Standard Class","California","Paper" +"Office Supplies","Hollywood","United States","CS-12355","Christine Sundaresam",0.2,"2014-08-15","CA-2014-109043",33021,"OFF-PA-10000312","Xerox 1955",29.692001,5,"South",8995,91.36,"Consumer","2014-08-17","First Class","Florida","Paper" +"Furniture","Philadelphia","United States","TS-21610","Troy Staebel",0.4,"2014-08-16","CA-2014-142839",19143,"FUR-TA-10001539","Chromcraft Rectangular Conference Tables",-227.4912,6,"East",1690,853.09204,"Consumer","2014-08-20","Standard Class","Pennsylvania","Tables" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-BI-10000829","Avery Non-Stick Binders",8.2616005,4,"South",2532,17.96,"Corporate","2014-08-21","Standard Class","Arkansas","Binders" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-FA-10002676","Colored Push Pins",4.5611997,7,"South",2533,12.67,"Corporate","2014-08-21","Standard Class","Arkansas","Fasteners" +"Office Supplies","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"OFF-PA-10000595","Xerox 1929",52.531998,5,"South",2531,114.2,"Corporate","2014-08-21","Standard Class","Arkansas","Paper" +"Office 
Supplies","Houston","United States","KB-16585","Ken Black",0.2,"2014-08-17","CA-2014-129168",77095,"OFF-PA-10001639","Xerox 203",5.4431996,3,"Central",2045,15.552,"Corporate","2014-08-23","Standard Class","Texas","Paper" +"Technology","Little Rock","United States","DJ-13510","Don Jones",0,"2014-08-17","CA-2014-111500",72209,"TEC-AC-10000844","Logitech Gaming G510s - Keyboard",122.385605,4,"South",2534,339.96,"Corporate","2014-08-21","Standard Class","Arkansas","Accessories" +"Furniture","Jonesboro","United States","LH-16900","Lena Hernandez",0,"2014-08-19","US-2014-156559",72401,"FUR-BO-10000711","Hon Metal Bookcases. Gray",172.4814,9,"South",4089,638.82,"Consumer","2014-08-26","Standard Class","Arkansas","Bookcases" +"Furniture","Los Angeles","United States","DW-13195","David Wiener",0,"2014-08-19","CA-2014-133158",90045,"FUR-FU-10000723","Deflect-o EconoMat Studded. No Bevel Mat for Low Pile Carpeting",26.0316,7,"West",7003,289.24002,"Corporate","2014-08-21","Second Class","California","Furnishings" +"Office Supplies","Columbus","United States","JK-15625","Jim Karlsson",0.2,"2014-08-19","CA-2014-121727",43229,"OFF-AR-10004930","Turquoise Lead Holder with Pocket Clip",1.742,2,"East",2340,10.72,"Consumer","2014-08-24","Standard Class","Ohio","Art" +"Office Supplies","Los Angeles","United States","DW-13195","David Wiener",0.2,"2014-08-19","CA-2014-133158",90045,"OFF-BI-10000632","Satellite Sectional Post Binders",22.5732,2,"West",7004,69.456,"Corporate","2014-08-21","Second Class","California","Binders" +"Office Supplies","Columbus","United States","MS-17770","Maxwell Schwartz",0.7,"2014-08-19","US-2014-164616",43229,"OFF-BI-10001718","GBC DocuBind P50 Personal Binding Machine",-58.8616,4,"East",2377,76.77599,"Consumer","2014-08-21","Second Class","Ohio","Binders" +"Office Supplies","Columbus","United States","BS-11365","Bill Shonely",0,"2014-08-19","US-2014-143581",31907,"OFF-ST-10000991","Space Solutions HD Industrial Steel 
Shelving.",10.347301,3,"South",8246,344.91,"Corporate","2014-08-23","Standard Class","Georgia","Storage" +"Office Supplies","Columbus","United States","MS-17770","Maxwell Schwartz",0.2,"2014-08-19","US-2014-164616",43229,"OFF-SU-10004768","Acme Kleencut Forged Steel Scissors",1.148,2,"East",2378,9.184,"Consumer","2014-08-21","Second Class","Ohio","Supplies" +"Furniture","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"FUR-CH-10001797","Safco Chair Connectors. 6/Carton",145.0696,13,"South",9733,500.24002,"Corporate","2014-08-25","Standard Class","Virginia","Chairs" +"Furniture","Chicago","United States","CR-12730","Craig Reiter",0.3,"2014-08-20","CA-2014-166716",60610,"FUR-CH-10004495","Global Leather and Oak Executive Chair. Black",-6.0196,2,"Central",5800,421.372,"Consumer","2014-08-25","Second Class","Illinois","Chairs" +"Office Supplies","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"OFF-BI-10001359","GBC DocuBind TL300 Electric Binding System",421.5853,1,"South",9735,896.99005,"Corporate","2014-08-25","Standard Class","Virginia","Binders" +"Office Supplies","Hampton","United States","NC-18535","Nick Crebassa",0,"2014-08-20","CA-2014-114321",23666,"OFF-PA-10000246","Riverleaf Stik-Withit Designer Note Cubes",9.2552,2,"South",9734,20.12,"Corporate","2014-08-25","Standard Class","Virginia","Paper" +"Office Supplies","Lakewood","United States","DE-13255","Deanra Eno",0.2,"2014-08-22","CA-2014-123253",44107,"OFF-AR-10002804","Faber Castell Col-Erase Pencils",1.0269,1,"East",4303,3.9120002,"Home Office","2014-08-25","Second Class","Ohio","Art" +"Office Supplies","Saint Charles","United States","JF-15415","Jennifer Ferguson",0,"2014-08-22","US-2014-166828",63301,"OFF-PA-10001846","Xerox 1899",5.6644,2,"Central",9366,11.56,"Consumer","2014-08-25","First Class","Missouri","Paper" +"Office Supplies","North Miami","United States","JP-15520","Jeremy 
Pistek",0.2,"2014-08-22","CA-2014-130918",33161,"OFF-SU-10003936","Acme Serrated Blade Letter Opener",-1.8125999,3,"South",8941,7.632,"Consumer","2014-08-24","Second Class","Florida","Supplies" +"Furniture","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"FUR-FU-10003274","Regeneration Desk Collection",1.2672,3,"West",7022,4.224,"Consumer","2014-08-27","Standard Class","Colorado","Furnishings" +"Office Supplies","Los Angeles","United States","RB-19645","Robert Barroso",0.2,"2014-08-23","CA-2014-154837",90032,"OFF-BI-10001575","GBC Linen Binding Covers",17.348799,2,"West",6672,49.568,"Corporate","2014-08-27","Second Class","California","Binders" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10000019","Xerox 1931",10.886399,6,"West",7024,31.104,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10001125","Xerox 1988",69.704994,9,"West",7025,223.056,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-PA-10001837","Xerox 1976",5.4431996,3,"West",7020,15.552,"Consumer","2014-08-27","Standard Class","Colorado","Paper" +"Office Supplies","Grand Rapids","United States","CR-12625","Corey Roper",0,"2014-08-23","US-2014-112795",49505,"OFF-PA-10001934","Xerox 1993",9.5256,3,"Central",8695,19.44,"Home Office","2014-08-28","Second Class","Michigan","Paper" +"Office Supplies","New York City","United States","RH-19495","Rick Hansen",0,"2014-08-23","CA-2014-110639",10009,"OFF-PA-10003936","Xerox 1994",12.4416,4,"East",2338,25.92,"Consumer","2014-08-23","Same Day","New York","Paper" +"Office Supplies","New York City","United States","RH-19495","Rick 
Hansen",0,"2014-08-23","CA-2014-110639",10009,"OFF-PA-10004530","Personal Creations Ink Jet Cards and Labels",22.5008,4,"East",2339,45.920002,"Consumer","2014-08-23","Same Day","New York","Paper" +"Office Supplies","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"OFF-SU-10004661","Acme Titanium Bonded Scissors",0.51,1,"West",7021,6.8,"Consumer","2014-08-27","Standard Class","Colorado","Supplies" +"Technology","Denver","United States","AP-10915","Arthur Prichep",0.2,"2014-08-23","CA-2014-124737",80219,"TEC-PH-10000149","Cisco SPA525G2 IP Phone - Wireless",10.773,9,"West",7023,143.64,"Consumer","2014-08-27","Standard Class","Colorado","Phones" +"Furniture","Oceanside","United States","JP-15460","Jennifer Patt",0,"2014-08-24","CA-2014-107916",11572,"FUR-FU-10004586","G.E. Longer-Life Indoor Recessed Floodlight Bulbs",6.3744006,2,"East",3133,13.28,"Corporate","2014-08-26","First Class","New York","Furnishings" +"Office Supplies","Oceanside","United States","JP-15460","Jennifer Patt",0.2,"2014-08-24","CA-2014-107916",11572,"OFF-BI-10001116","Wilson Jones 1"" Hanging DublLock Ring Binders",4.4352,3,"East",3134,12.672,"Corporate","2014-08-26","First Class","New York","Binders" +"Office Supplies","Billings","United States","RB-19645","Robert Barroso",0.2,"2014-08-24","CA-2014-106719",59102,"OFF-BI-10002799","SlimView Poly Binder. 3/8""",2.6936,2,"West",3028,8.288,"Corporate","2014-08-24","Same Day","Montana","Binders" +"Office Supplies","Owensboro","United States","DW-13585","Dorothy Wardle",0,"2014-08-24","CA-2014-120432",42301,"OFF-SU-10004661","Acme Titanium Bonded Scissors",6.63,3,"South",5913,25.5,"Corporate","2014-08-26","Second Class","Kentucky","Supplies" +"Furniture","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"FUR-FU-10000771","Eldon 200 Class Desk Accessories. 
Smoke",2.6376,1,"West",5944,6.2799997,"Corporate","2014-08-29","Standard Class","California","Furnishings" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10001982","Wilson Jones Custom Binder Spines & Labels",-4.5695996,4,"East",8885,6.528,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10002103","Cardinal Slant-D Ring Binder. Heavy Gauge Vinyl",-16.6848,8,"East",8887,20.855999,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Houston","United States","JE-15715","Joe Elijah",0.8,"2014-08-25","CA-2014-126200",77070,"OFF-BI-10002133","Wilson Jones Elliptical Ring 3 1/2"" Capacity Binders. 800 sheets",-39.804,3,"Central",8459,25.68,"Consumer","2014-08-29","Standard Class","Texas","Binders" +"Office Supplies","Houston","United States","JE-15715","Joe Elijah",0.8,"2014-08-25","CA-2014-126200",77070,"OFF-BI-10002225","Square Ring Data Binders. Rigid 75 Pt. Covers. 11"" x 14-7/8""",-19.8144,3,"Central",8460,12.384,"Consumer","2014-08-29","Standard Class","Texas","Binders" +"Office Supplies","Columbus","United States","BS-11590","Brendan Sweed",0.7,"2014-08-25","CA-2014-107398",43229,"OFF-BI-10004141","Insertable Tab Indexes For Data Binders",-2.2896001,3,"East",8886,2.862,"Corporate","2014-08-30","Standard Class","Ohio","Binders" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-EN-10002504","Tyvek Top-Opening Peel & Seel Envelopes. Plain White",44.031597,6,"East",117,130.46399,"Consumer","2014-08-27","Second Class","Ohio","Envelopes" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-EN-10002600","Redi-Strip #10 Envelopes. 
4 1/8 x 9 1/2",1.652,2,"East",115,4.72,"Consumer","2014-08-27","Second Class","Ohio","Envelopes" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-FA-10000621","OIC Colored Binder Clips. Assorted Sizes",14.5348,14,"East",114,40.096,"Consumer","2014-08-27","Second Class","Ohio","Fasteners" +"Office Supplies","Columbus","United States","RC-19960","Ryan Crowe",0.2,"2014-08-25","CA-2014-115259",43229,"OFF-PA-10004965","Xerox 1921",7.4925003,3,"East",116,23.976,"Consumer","2014-08-27","Second Class","Ohio","Paper" +"Office Supplies","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"OFF-PA-10004983","Xerox 23",12.4416,4,"West",5946,25.92,"Corporate","2014-08-29","Standard Class","California","Paper" +"Office Supplies","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"OFF-ST-10004258","Portable Personal File Box",13.1868,4,"West",5947,48.84,"Corporate","2014-08-29","Standard Class","California","Storage" +"Technology","Los Angeles","United States","JM-15265","Janet Molinari",0,"2014-08-25","CA-2014-104178",90036,"TEC-AC-10002399","SanDisk Cruzer 32 GB USB Flash Drive",30.432001,5,"West",5945,95.1,"Corporate","2014-08-29","Standard Class","California","Accessories" +"Technology","Seattle","United States","MW-18220","Mitch Webber",0.2,"2014-08-25","CA-2014-103660",98103,"TEC-PH-10000895","Polycom VVX 310 VoIP phone",75.595795,7,"West",6579,1007.94403,"Consumer","2014-08-30","Standard Class","Washington","Phones" +"Furniture","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"FUR-FU-10001731","Acrylic Self-Standing Desk Frames",4.0584,4,"East",5334,10.68,"Consumer","2014-09-01","Standard Class","Delaware","Furnishings" +"Office Supplies","Newark","United States","PP-18955","Paul Prost",0,"2014-08-26","US-2014-115413",19711,"OFF-AR-10003770","Newell 
340",2.5056002,3,"East",7823,8.64,"Home Office","2014-09-01","Standard Class","Delaware","Art" +"Office Supplies","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"OFF-PA-10002160","Xerox 1978",8.496599,3,"East",5335,17.34,"Consumer","2014-09-01","Standard Class","Delaware","Paper" +"Office Supplies","Newark","United States","KB-16315","Karl Braun",0,"2014-08-26","CA-2014-103331",19711,"OFF-PA-10002659","Avoid Verbal Orders Carbonless Minifold Book",1.5547999,1,"East",5336,3.3799999,"Consumer","2014-09-01","Standard Class","Delaware","Paper" +"Technology","Los Angeles","United States","FM-14290","Frank Merwin",0,"2014-08-26","CA-2014-123260",90032,"TEC-AC-10002323","SanDisk Ultra 32 GB MicroSDHC Class 10 Memory Card",22.984,8,"West",187,176.8,"Home Office","2014-08-30","Standard Class","California","Accessories" +"Technology","Newark","United States","PP-18955","Paul Prost",0,"2014-08-26","US-2014-115413",19711,"TEC-AC-10002800","Plantronics Audio 478 Stereo USB Headset",52.489502,3,"East",7824,149.97,"Home Office","2014-09-01","Standard Class","Delaware","Accessories" +"Furniture","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"FUR-FU-10002456","Master Caster Door Stop. 
Large Neon Orange",12.521601,4,"South",3484,29.12,"Corporate","2014-08-29","First Class","Virginia","Furnishings" +"Furniture","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"FUR-TA-10003569","Bretford CR8500 Series Meeting Room Furniture",300.73502,3,"South",3485,1202.9401,"Corporate","2014-08-29","First Class","Virginia","Tables" +"Office Supplies","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0,"2014-08-27","CA-2014-143336",94109,"OFF-AR-10003056","Newell 341",2.4824002,2,"West",19,8.56,"Consumer","2014-09-01","Second Class","California","Art" +"Office Supplies","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0.2,"2014-08-27","CA-2014-143336",94109,"OFF-BI-10002215","Wilson Jones Hanging View Binder. White. 1""",7.3840003,4,"West",21,22.720001,"Consumer","2014-09-01","Second Class","California","Binders" +"Office Supplies","Springfield","United States","MM-17920","Michael Moore",0,"2014-08-27","CA-2014-101266",22153,"OFF-PA-10002986","Xerox 1898",6.4128003,2,"South",6170,13.360001,"Consumer","2014-08-30","Second Class","Virginia","Paper" +"Technology","Springfield","United States","CC-12610","Corey Catlett",0,"2014-08-27","CA-2014-124688",22153,"TEC-PH-10000455","GE 30522EE2",168.18552,5,"South",3483,579.95,"Corporate","2014-08-29","First Class","Virginia","Phones" +"Technology","San Francisco","United States","ZD-21925","Zuschuss Donatelli",0.2,"2014-08-27","CA-2014-143336",94109,"TEC-PH-10001949","Cisco SPA 501G IP Phone",16.011,3,"West",20,213.48,"Consumer","2014-09-01","Second Class","California","Phones" +"Furniture","Miami","United States","RA-19945","Ryan Akin",0.45000002,"2014-08-29","CA-2014-169775",33178,"FUR-TA-10001857","Balt Solid Wood Rectangular Table",-110.764496,3,"South",1900,174.0585,"Consumer","2014-09-02","Second Class","Florida","Tables" +"Office Supplies","Miami","United States","RA-19945","Ryan 
Akin",0.7,"2014-08-29","CA-2014-169775",33178,"OFF-BI-10004390","GBC DocuBind 200 Manual Binding Machine",-336.784,4,"South",1899,505.17603,"Consumer","2014-09-02","Second Class","Florida","Binders" +"Office Supplies","Miami","United States","RA-19945","Ryan Akin",0.2,"2014-08-29","CA-2014-169775",33178,"OFF-EN-10001749","Jiffy Padded Mailers with Self-Seal Closure",10.8054,2,"South",1898,29.807999,"Consumer","2014-09-02","Second Class","Florida","Envelopes" +"Office Supplies","San Francisco","United States","HH-15010","Hilary Holden",0,"2014-08-29","CA-2014-135699",94110,"OFF-PA-10003001","Xerox 1986",6.4128003,2,"West",1652,13.360001,"Corporate","2014-08-29","Same Day","California","Paper" +"Office Supplies","San Francisco","United States","HH-15010","Hilary Holden",0,"2014-08-29","CA-2014-135699",94110,"OFF-PA-10004475","Xerox 1940",53.8608,2,"West",1651,109.92,"Corporate","2014-08-29","Same Day","California","Paper" +"Office Supplies","Bristol","United States","AT-10735","Annie Thurman",0,"2014-08-30","CA-2014-146500",6010,"OFF-BI-10002432","Wilson Jones Standard D-Ring Binders",11.891001,5,"East",8461,25.3,"Consumer","2014-09-04","Standard Class","Connecticut","Binders" +"Office Supplies","Bristol","United States","AT-10735","Annie Thurman",0,"2014-08-30","CA-2014-146500",6010,"OFF-ST-10000563","Fellowes Bankers Box Stor/Drawer Steel Plus",9.594,3,"East",8462,95.94,"Consumer","2014-09-04","Standard Class","Connecticut","Storage" +"Technology","Santa Fe","United States","SJ-20500","Shirley Jackson",0,"2014-08-31","CA-2014-143385",87505,"TEC-AC-10001635","KeyTronic KT400U2 - Keyboard - Black",18.504,9,"West",3478,92.52,"Consumer","2014-09-05","Standard Class","New Mexico","Accessories" +"Office Supplies","Escondido","United States","LT-16765","Larry Tron",0,"2014-09-01","CA-2014-126522",92025,"OFF-AR-10004042","BOSTON Model 1800 Electric Pencil Sharpeners. 
Putty/Woodgrain",15.6426,3,"West",1162,53.940002,"Consumer","2014-09-05","Second Class","California","Art" +"Office Supplies","Houston","United States","JL-15130","Jack Lebron",0.8,"2014-09-01","CA-2014-165428",77036,"OFF-BI-10002949","Prestige Round Ring Binders",-6.0192,3,"Central",3194,3.6479998,"Consumer","2014-09-04","First Class","Texas","Binders" +"Office Supplies","New York City","United States","LH-16900","Lena Hernandez",0.2,"2014-09-01","CA-2014-109855",10009,"OFF-BI-10004716","Wilson Jones Hanging Recycled Pressboard Data Binders",8.3104,2,"East",8242,23.744001,"Consumer","2014-09-05","Standard Class","New York","Binders" +"Office Supplies","Houston","United States","JL-15130","Jack Lebron",0.2,"2014-09-01","CA-2014-165428",77036,"OFF-PA-10004100","Xerox 216",10.886399,6,"Central",3195,31.104,"Consumer","2014-09-04","First Class","Texas","Paper" +"Technology","New York City","United States","LH-16900","Lena Hernandez",0,"2014-09-01","CA-2014-109855",10009,"TEC-AC-10002842","WD My Passport Ultra 2TB Portable External Hard Drive",57.12,3,"East",8243,357,"Consumer","2014-09-05","Standard Class","New York","Accessories" +"Furniture","Watertown","United States","JM-16195","Justin MacKendrick",0,"2014-09-02","CA-2014-157721",13601,"FUR-FU-10002116","Tenex Carpeted. Granite-Look or Clear Contemporary Contour Shape Chair Mats",4.9497,1,"East",2926,70.71,"Consumer","2014-09-05","First Class","New York","Furnishings" +"Office Supplies","Watertown","United States","JM-16195","Justin MacKendrick",0,"2014-09-02","CA-2014-157721",13601,"OFF-AP-10001303","Holmes Cool Mist Humidifier for the Whole House with 8-Gallon Output per Day. 
Extended Life Filter",8.955,1,"East",2925,19.9,"Consumer","2014-09-05","First Class","New York","Appliances" +"Office Supplies","New York City","United States","KD-16345","Katherine Ducich",0,"2014-09-02","US-2014-138828",10009,"OFF-AR-10000658","Newell 324",16.17,5,"East",4930,57.75,"Consumer","2014-09-03","First Class","New York","Art" +"Office Supplies","New York City","United States","CL-12565","Clay Ludtke",0,"2014-09-02","CA-2014-127936",10009,"OFF-AR-10002445","SANFORD Major Accent Highlighters",8.0712,3,"East",4404,21.240002,"Consumer","2014-09-04","First Class","New York","Art" +"Office Supplies","Fayetteville","United States","SC-20095","Sanjit Chand",0,"2014-09-02","CA-2014-152268",72701,"OFF-BI-10001359","GBC DocuBind TL300 Electric Binding System",843.1706,2,"South",9034,1793.98,"Consumer","2014-09-07","Standard Class","Arkansas","Binders" +"Office Supplies","New York City","United States","KD-16345","Katherine Ducich",0,"2014-09-02","US-2014-138828",10009,"OFF-PA-10000349","Easy-staple paper",7.0218,3,"East",4931,14.94,"Consumer","2014-09-03","First Class","New York","Paper" +"Technology","Buffalo Grove","United States","BM-11785","Bryan Mills",0.2,"2014-09-02","CA-2014-106971",60089,"TEC-AC-10000844","Logitech Gaming G510s - Keyboard",95.188805,7,"Central",9140,475.944,"Consumer","2014-09-08","Standard Class","Illinois","Accessories" +"Technology","Chicago","United States","CV-12295","Christina VanderZanden",0.2,"2014-09-02","CA-2014-151792",60653,"TEC-AC-10001606","Logitech Wireless Performance Mouse MX for PC and Mac",53.994602,3,"Central",5029,239.976,"Consumer","2014-09-07","Second Class","Illinois","Accessories" +"Technology","Houston","United States","AG-10525","Andy Gerbode",0.4,"2014-09-02","CA-2014-158281",77095,"TEC-MA-10002210","Epson TM-T88V Direct Thermal Printer - Monochrome - Desktop",-121.2705,3,"Central",7289,559.71,"Corporate","2014-09-07","Standard Class","Texas","Machines" +"Office Supplies","Houston","United 
States","MR-17545","Mathew Reese",0.8,"2014-09-03","US-2014-120236",77095,"OFF-BI-10004099","GBC VeloBinder Strips",-11.52,5,"Central",8398,7.68,"Home Office","2014-09-04","First Class","Texas","Binders" +"Office Supplies","New York City","United States","AG-10390","Allen Goldenen",0,"2014-09-03","CA-2014-125171",10009,"OFF-LA-10001175","Avery 514",7.056,5,"East",4986,14.4,"Consumer","2014-09-03","Same Day","New York","Labels" +"Furniture","Saint Petersburg","United States","PR-18880","Patrick Ryan",0.2,"2014-09-05","CA-2014-136861",33710,"FUR-FU-10001967","Telescoping Adjustable Floor Lamp",1.999,2,"South",5430,31.984,"Consumer","2014-09-07","First Class","Florida","Furnishings" +"Office Supplies","Bowling Green","United States","NP-18700","Nora Preis",0.2,"2014-09-05","CA-2014-106572",43402,"OFF-ST-10000585","Economy Rollaway Files",19.824,2,"East",2516,264.31998,"Consumer","2014-09-10","Standard Class","Ohio","Storage" +"Furniture","San Francisco","United States","VP-21760","Victoria Pisteka",0,"2014-09-06","CA-2014-130449",94109,"FUR-FU-10001487","Eldon Expressions Wood and Plastic Desk Accessories. Cherry Wood",12.145201,6,"West",6623,41.88,"Corporate","2014-09-09","First Class","California","Furnishings" +"Office Supplies","San Francisco","United States","VP-21760","Victoria Pisteka",0,"2014-09-06","CA-2014-130449",94109,"OFF-LA-10001934","Avery 516",27.4856,8,"West",6624,58.480003,"Corporate","2014-09-09","First Class","California","Labels" +"Furniture","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"FUR-CH-10004698","Padded Folding Chairs. Black. 
4/Carton",45.3488,2,"Central",5981,161.95999,"Home Office","2014-09-13","Standard Class","Oklahoma","Chairs" +"Furniture","Philadelphia","United States","SB-20290","Sean Braxton",0.2,"2014-09-07","CA-2014-122882",19134,"FUR-FU-10000758","DAX Natural Wood-Tone Poster Frame",8.473599,2,"East",1681,42.368,"Corporate","2014-09-13","Standard Class","Pennsylvania","Furnishings" +"Furniture","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2014-09-07","CA-2014-131002",74133,"FUR-FU-10004270","Executive Impressions 13"" Clairmont Wall Clock",23.6529,3,"Central",2646,57.69,"Consumer","2014-09-12","Second Class","Oklahoma","Furnishings" +"Furniture","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2014-09-07","CA-2014-131002",74133,"FUR-FU-10004665","3M Polarizing Task Lamp with Clamp Arm. Light Gray",213.6888,6,"Central",2649,821.88,"Consumer","2014-09-12","Second Class","Oklahoma","Furnishings" +"Furniture","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"FUR-TA-10001039","KI Adjustable-Height Table",111.773994,5,"Central",5979,429.9,"Home Office","2014-09-13","Standard Class","Oklahoma","Tables" +"Furniture","Houston","United States","SZ-20035","Sam Zeldin",0.3,"2014-09-07","CA-2014-105165",77036,"FUR-TA-10004154","Riverside Furniture Oval Coffee Table. Oval End Table. End Table with Drawer",-22.948,1,"Central",5263,200.795,"Home Office","2014-09-10","First Class","Texas","Tables" +"Office Supplies","Houston","United States","SZ-20035","Sam Zeldin",0.2,"2014-09-07","CA-2014-105165",77036,"OFF-AR-10003179","Dixon Ticonderoga Core-Lock Colored Pencils",3.5529,3,"Central",5265,21.863998,"Home Office","2014-09-10","First Class","Texas","Art" +"Office Supplies","Houston","United States","SZ-20035","Sam Zeldin",0.8,"2014-09-07","CA-2014-105165",77036,"OFF-BI-10000050","Angle-D Binders with Locking Rings. 
Label Holders",-4.8180003,2,"Central",5262,2.92,"Home Office","2014-09-10","First Class","Texas","Binders" +"Office Supplies","Tulsa","United States","RB-19465","Rick Bensley",0,"2014-09-07","CA-2014-117765",74133,"OFF-BI-10000474","Avery Recycled Flexi-View Covers for Binding Systems",15.388801,2,"Central",5980,32.06,"Home Office","2014-09-13","Standard Class","Oklahoma","Binders" +"Office Supplies","Tulsa","United States","TB-21400","Tom Boeckenhauer",0,"2017-12-30","CA-2014-131002",74133,"OFF-BI-10000948","GBC Laser Imprintable Binding System Covers. Desert Sand",20.1207,3,"Central",2647,42.809998,"Consumer","2017-12-30","Second Class","Oklahoma","Binders" \ No newline at end of file diff --git a/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/001_prepare_db.sql b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/001_prepare_db.sql new file mode 100755 index 000000000..6cb5cd952 --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/001_prepare_db.sql @@ -0,0 +1,26 @@ +CREATE DATABASE IF NOT EXISTS test_data; + +DROP TABLE IF EXISTS test_data.sample; +CREATE TABLE IF NOT EXISTS test_data.sample ( + Category String, + City String, + Country String, + `Customer ID` String, + `Customer Name` String, + Discount Float32, + `Order Date` Date, + `Order ID` String, + `Postal Code` Int32, + `Product ID` String, + `Product Name` String, + Profit Float32, + Quantity Int32, + Region String, + `Row ID` Int32, + Sales Float32, + Segment String, + `Ship Date` Date, + `Ship Mode` String, + State String, + `Sub-Category` String +) ENGINE = MergeTree() ORDER BY `Order Date`; diff --git a/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/011_load_test_data.sh b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/011_load_test_data.sh new file mode 100644 index 000000000..1e7ae7966 --- /dev/null 
+++ b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/docker-entrypoint-initdb.d/011_load_test_data.sh @@ -0,0 +1 @@ +cat /common-data/sample.csv | clickhouse-client --query="INSERT INTO test_data.sample FORMAT CSV" diff --git a/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/users.xml b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/users.xml new file mode 100644 index 000000000..8e81aea2c --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose/db-clickhouse/users.xml @@ -0,0 +1,42 @@ + + + + + + 8 + + + + + + + + 3600 + + 0 + 0 + 0 + 0 + 0 + + + + + + + ::/0 + + + default + default + + + + ::/0 + + qwerty + default + default + + + diff --git a/lib/dl_connector_bundle_chs3/docker-compose/tests/entrypoint.sh b/lib/dl_connector_bundle_chs3/docker-compose/tests/entrypoint.sh new file mode 100644 index 000000000..5fc44481d --- /dev/null +++ b/lib/dl_connector_bundle_chs3/docker-compose/tests/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +exec "$@" diff --git a/lib/dl_connector_bundle_chs3/pyproject.toml b/lib/dl_connector_bundle_chs3/pyproject.toml index a15fc0e8f..e9e6672a8 100644 --- a/lib/dl_connector_bundle_chs3/pyproject.toml +++ b/lib/dl_connector_bundle_chs3/pyproject.toml @@ -53,10 +53,13 @@ minversion = "6.0" addopts = "-ra" testpaths = [] +[datalens.pytest.db_file] +root_dir = "dl_connector_bundle_chs3_tests/db" +target_path = "file" - -[datalens_ci] -skip_test = true +[datalens.pytest.db_gsheets_v2] +root_dir = "dl_connector_bundle_chs3_tests/db" +target_path = "gsheets_v2" [tool.mypy] warn_unused_configs = true @@ -64,6 +67,10 @@ disallow_untyped_defs = true check_untyped_defs = true strict_optional = true +[[tool.mypy.overrides]] +module = ["clickhouse_sqlalchemy.*"] +ignore_missing_imports = true + [datalens.i18n.domains] dl_connector_bundle_chs3 = [ {path = "dl_connector_bundle_chs3/chs3_base/api"}, diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py 
b/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py index 09a9c8e82..d2390ec32 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/api/api_schema/connection.py @@ -16,6 +16,7 @@ ) from dl_api_connector.api_schema.connection_sql import DBHostField from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_chyt.core.us_connection import ConnectionCHYTToken diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/connection_form/form_config.py b/lib/dl_connector_chyt/dl_connector_chyt/api/connection_form/form_config.py index 55a54c210..545f7c66f 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/connection_form/form_config.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/api/connection_form/form_config.py @@ -21,6 +21,7 @@ import dl_api_connector.form_config.models.rows as C from dl_api_connector.form_config.models.shortcuts.rows import RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_chyt.api.connection_info import CHYTConnectionInfoProvider from dl_connector_chyt.api.i18n.localizer import Translatable from dl_connector_chyt.core.settings import CHYTConnectorSettings diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/connection_info.py b/lib/dl_connector_chyt/dl_connector_chyt/api/connection_info.py index a47d9c2f6..7c53158d0 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/connection_info.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/api/connection_info.py @@ -1,6 +1,7 @@ from __future__ import annotations from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_chyt.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/connector.py b/lib/dl_connector_chyt/dl_connector_chyt/api/connector.py index ec8943068..e6542dbb0 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/connector.py +++ 
b/lib/dl_connector_chyt/dl_connector_chyt/api/connector.py @@ -11,6 +11,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_chyt.api.api_schema.connection import CHYTConnectionSchema from dl_connector_chyt.api.api_schema.source import ( CHYTTableListDataSourceSchema, diff --git a/lib/dl_connector_chyt/dl_connector_chyt/api/i18n/localizer.py b/lib/dl_connector_chyt/dl_connector_chyt/api/i18n/localizer.py index d08772f63..933351114 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/api/i18n/localizer.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_chyt as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_chyt as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/adapters.py b/lib/dl_connector_chyt/dl_connector_chyt/core/adapters.py index 59bdc5c93..05a7bf6d5 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/adapters.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/adapters.py @@ -11,6 +11,17 @@ from aiochclient.http_clients import aiohttp import attr +from dl_constants.enums import IndexKind +from dl_core import exc +from dl_core.connection_executors.models.db_adapter_data import RawIndexInfo +from dl_core.connection_models import TableIdent +from dl_core.connectors.base.error_transformer import DBExcKWArgs +from dl_core.utils import ( + get_current_w3c_tracing_headers, + make_url, +) +from dl_utils.aio import await_sync + from dl_connector_chyt.core.constants import CONNECTION_TYPE_CHYT from dl_connector_chyt.core.target_dto import ( BaseCHYTConnTargetDTO, @@ -25,16 +36,6 @@ ensure_db_message, get_ch_settings, ) -from dl_constants.enums import IndexKind -from dl_core import exc -from dl_core.connection_executors.models.db_adapter_data import RawIndexInfo -from dl_core.connection_models import TableIdent -from 
dl_core.connectors.base.error_transformer import DBExcKWArgs -from dl_core.utils import ( - get_current_w3c_tracing_headers, - make_url, -) -from dl_utils.aio import await_sync class CHYTConnLineConstructor(BaseClickHouseConnLineConstructor): diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/async_adapters.py b/lib/dl_connector_chyt/dl_connector_chyt/core/async_adapters.py index c71d4ee8f..fd1603b85 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/async_adapters.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/async_adapters.py @@ -5,11 +5,12 @@ import attr from clickhouse_sqlalchemy.drivers.http.transport import _get_type # noqa +from dl_core.utils import get_current_w3c_tracing_headers + from dl_connector_chyt.core.constants import CONNECTION_TYPE_CHYT from dl_connector_chyt.core.target_dto import BaseCHYTConnTargetDTO from dl_connector_chyt.core.utils import CHYTUtils from dl_connector_clickhouse.core.clickhouse_base.adapters import BaseAsyncClickHouseAdapter -from dl_core.utils import get_current_w3c_tracing_headers LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/connection_executors.py b/lib/dl_connector_chyt/dl_connector_chyt/core/connection_executors.py index 82449fe5b..73b5b65d2 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/connection_executors.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/connection_executors.py @@ -5,6 +5,8 @@ import attr +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + from dl_connector_chyt.core.adapters import ( BaseCHYTAdapter, CHYTAdapter, @@ -19,7 +21,6 @@ BaseCHYTConnTargetDTO, CHYTConnTargetDTO, ) -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/connector.py b/lib/dl_connector_chyt/dl_connector_chyt/core/connector.py index 6ac5452dd..6e5d208bc 100644 --- 
a/lib/dl_connector_chyt/dl_connector_chyt/core/connector.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/connector.py @@ -1,5 +1,11 @@ from clickhouse_sqlalchemy.orm.query import Query as CHQuery +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) + from dl_connector_chyt.core.adapters import CHYTAdapter from dl_connector_chyt.core.async_adapters import AsyncCHYTAdapter from dl_connector_chyt.core.connection_executors import ( @@ -37,11 +43,6 @@ ) from dl_connector_chyt.core.type_transformer import CHYTTypeTransformer from dl_connector_chyt.core.us_connection import ConnectionCHYTToken -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, - CoreSourceDefinition, -) class CHYTCoreConnectionDefinition(CoreConnectionDefinition): diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/constants.py b/lib/dl_connector_chyt/dl_connector_chyt/core/constants.py index 12477c018..1ab51dda7 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/constants.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/constants.py @@ -1,14 +1,15 @@ -from dl_connector_clickhouse.core.clickhouse_base.constants import BACKEND_TYPE_CLICKHOUSE from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, ) +from dl_connector_clickhouse.core.clickhouse_base.constants import BACKEND_TYPE_CLICKHOUSE + BACKEND_TYPE_CHYT = BACKEND_TYPE_CLICKHOUSE CONNECTION_TYPE_CHYT = ConnectionType.declare("chyt") -SOURCE_TYPE_CHYT_YTSAURUS_TABLE = CreateDSFrom.declare("CHYT_YTSAURUS_TABLE") -SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT = CreateDSFrom.declare("CHYT_YTSAURUS_SUBSELECT") -SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST = CreateDSFrom.declare("CHYT_YTSAURUS_TABLE_LIST") -SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE = CreateDSFrom.declare("CHYT_YTSAURUS_TABLE_RANGE") +SOURCE_TYPE_CHYT_YTSAURUS_TABLE = DataSourceType.declare("CHYT_YTSAURUS_TABLE") +SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT = 
DataSourceType.declare("CHYT_YTSAURUS_SUBSELECT") +SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST = DataSourceType.declare("CHYT_YTSAURUS_TABLE_LIST") +SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE = DataSourceType.declare("CHYT_YTSAURUS_TABLE_RANGE") diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/data_source.py b/lib/dl_connector_chyt/dl_connector_chyt/core/data_source.py index 479cb9e89..3dc63504d 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/data_source.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/data_source.py @@ -14,25 +14,9 @@ import attr import sqlalchemy as sa -from dl_connector_chyt.core.constants import ( - CONNECTION_TYPE_CHYT, - SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE, -) -from dl_connector_chyt.core.data_source_spec import ( - CHYTTableDataSourceSpec, - CHYTTableListDataSourceSpec, - CHYTTableRangeDataSourceSpec, -) -from dl_connector_clickhouse.core.clickhouse_base.data_source import ( - ClickHouseBaseMixin, - CommonClickHouseSubselectDataSource, -) from dl_constants.enums import ( - BIType, - CreateDSFrom, + DataSourceType, + UserDataType, ) from dl_core import exc from dl_core.connection_models.common_models import ( @@ -56,6 +40,23 @@ CHYTTableSubselect, ) +from dl_connector_chyt.core.constants import ( + CONNECTION_TYPE_CHYT, + SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE, +) +from dl_connector_chyt.core.data_source_spec import ( + CHYTTableDataSourceSpec, + CHYTTableListDataSourceSpec, + CHYTTableRangeDataSourceSpec, +) +from dl_connector_clickhouse.core.clickhouse_base.data_source import ( + ClickHouseBaseMixin, + CommonClickHouseSubselectDataSource, +) + if TYPE_CHECKING: from dl_core.connection_executors.sync_base import SyncConnExecutorBase @@ -140,7 +141,7 @@ def get_schema_info(self, conn_executor_factory: 
Callable[[], SyncConnExecutorBa SchemaColumn( name=key, title=key, - user_type=BIType.string, + user_type=UserDataType.string, native_type=ClickHouseNativeType.normalize_name_and_create( conn_type=self.conn_type, name="string" ), @@ -271,7 +272,7 @@ class CHYTTokenAuthDataSourceMixin: conn_type = CONNECTION_TYPE_CHYT @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in { SOURCE_TYPE_CHYT_YTSAURUS_TABLE, SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/data_source_migration.py b/lib/dl_connector_chyt/dl_connector_chyt/core/data_source_migration.py index aa856793e..d08f47169 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/data_source_migration.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/data_source_migration.py @@ -6,6 +6,15 @@ import attr +from dl_constants.enums import DataSourceType +from dl_core.connectors.base.data_source_migration import ( + DataSourceMigrationInterface, + MigrationKeyMappingItem, + MigrationSpec, +) +from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator +from dl_core.data_source_spec.base import DataSourceSpec + from dl_connector_chyt.core.constants import ( SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, SOURCE_TYPE_CHYT_YTSAURUS_TABLE, @@ -18,14 +27,6 @@ CHYTTableListDataSourceSpec, CHYTTableRangeDataSourceSpec, ) -from dl_constants.enums import CreateDSFrom -from dl_core.connectors.base.data_source_migration import ( - DataSourceMigrationInterface, - MigrationKeyMappingItem, - MigrationSpec, -) -from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator -from dl_core.data_source_spec.base import DataSourceSpec @attr.s(frozen=True) @@ -41,10 +42,10 @@ class CHYTTableRangeDSMI(DataSourceMigrationInterface): class BaseCHYTDataSourceMigrator(DefaultSQLDataSourceMigrator): - table_list_source_type: 
ClassVar[Optional[CreateDSFrom]] + table_list_source_type: ClassVar[Optional[DataSourceType]] table_list_dsrc_spec_cls: ClassVar[Optional[Type[DataSourceSpec]]] - table_range_source_type: ClassVar[Optional[CreateDSFrom]] + table_range_source_type: ClassVar[Optional[DataSourceType]] table_range_dsrc_spec_cls: ClassVar[Optional[Type[DataSourceSpec]]] def get_migration_specs(self) -> list[MigrationSpec]: diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/connection.py b/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/connection.py index 0e5ad8d49..256d77d43 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/connection.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/connection.py @@ -5,16 +5,17 @@ from marshmallow import fields as ma_fields -from dl_connector_chyt.core.us_connection import ( - BaseConnectionCHYT, - ConnectionCHYTToken, -) from dl_core.us_manager.storage_schemas.connection import ( CacheableConnectionDataSchemaMixin, ConnectionBaseDataStorageSchema, SubselectConnectionDataSchemaMixin, ) +from dl_connector_chyt.core.us_connection import ( + BaseConnectionCHYT, + ConnectionCHYTToken, +) + _CHYT_CONN_DATA_TV = TypeVar("_CHYT_CONN_DATA_TV", bound=BaseConnectionCHYT.DataModel) diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/data_source_spec.py b/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/data_source_spec.py index 5c1498bea..a7b5287f2 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/data_source_spec.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/storage_schemas/data_source_spec.py @@ -1,16 +1,17 @@ import marshmallow.fields as ma_fields +from dl_core.us_manager.storage_schemas.data_source_spec_base import ( + BaseSQLDataSourceSpecStorageSchema, + SubselectDataSourceSpecStorageSchema, + TableSQLDataSourceSpecStorageSchemaMixin, +) + from dl_connector_chyt.core.data_source_spec import ( 
CHYTSubselectDataSourceSpec, CHYTTableDataSourceSpec, CHYTTableListDataSourceSpec, CHYTTableRangeDataSourceSpec, ) -from dl_core.us_manager.storage_schemas.data_source_spec_base import ( - BaseSQLDataSourceSpecStorageSchema, - SubselectDataSourceSpecStorageSchema, - TableSQLDataSourceSpecStorageSchemaMixin, -) class CHYTTableDataSourceSpecStorageSchema(TableSQLDataSourceSpecStorageSchemaMixin): diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/type_transformer.py b/lib/dl_connector_chyt/dl_connector_chyt/core/type_transformer.py index 41e8b52bf..f2fb2089a 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/type_transformer.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/type_transformer.py @@ -1,9 +1,7 @@ from typing import ClassVar -from dl_connector_chyt.core.constants import CONNECTION_TYPE_CHYT -from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ClickHouseTypeTransformer -from dl_constants.enums import BIType from dl_constants.enums import ConnectionType as CT +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( TypeCaster, YTBooleanTypeCaster, @@ -12,24 +10,27 @@ from dl_core.db.native_type import GenericNativeType from dl_sqlalchemy_chyt import types as chyt_types +from dl_connector_chyt.core.constants import CONNECTION_TYPE_CHYT +from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ClickHouseTypeTransformer + -def make_chyt_native_to_user_map(conn_type: CT) -> dict[GenericNativeType, BIType]: +def make_chyt_native_to_user_map(conn_type: CT) -> dict[GenericNativeType, UserDataType]: return { **{ make_native_type(conn_type, native_type.name): bi_type for native_type, bi_type in ClickHouseTypeTransformer.native_to_user_map.items() }, - make_native_type(conn_type, chyt_types.YtBoolean): BIType.boolean, + make_native_type(conn_type, chyt_types.YtBoolean): UserDataType.boolean, } -def make_chyt_user_to_native_map(conn_type: CT) -> dict[BIType, GenericNativeType]: +def 
make_chyt_user_to_native_map(conn_type: CT) -> dict[UserDataType, GenericNativeType]: return { **{ bi_type: make_native_type(conn_type, native_type.name) for bi_type, native_type in ClickHouseTypeTransformer.user_to_native_map.items() }, - BIType.boolean: make_native_type(conn_type, chyt_types.YtBoolean), + UserDataType.boolean: make_native_type(conn_type, chyt_types.YtBoolean), } @@ -39,7 +40,7 @@ class CHYTTypeTransformer(ClickHouseTypeTransformer): native_to_user_map = make_chyt_native_to_user_map(CONNECTION_TYPE_CHYT) user_to_native_map = make_chyt_user_to_native_map(CONNECTION_TYPE_CHYT) - casters: ClassVar[dict[BIType, TypeCaster]] = { + casters: ClassVar[dict[UserDataType, TypeCaster]] = { **ClickHouseTypeTransformer.casters, - BIType.boolean: YTBooleanTypeCaster(), + UserDataType.boolean: YTBooleanTypeCaster(), } diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/us_connection.py b/lib/dl_connector_chyt/dl_connector_chyt/core/us_connection.py index b8fb4fdf0..9cac3901e 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/us_connection.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/us_connection.py @@ -12,18 +12,7 @@ import attr import marshmallow as ma -from dl_connector_chyt.api.i18n.localizer import Translatable -from dl_connector_chyt.core.conn_options import CHYTConnectOptions -from dl_connector_chyt.core.constants import ( - SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST, - SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE, -) -from dl_connector_chyt.core.dto import CHYTDTO -from dl_connector_chyt.core.settings import CHYTConnectorSettings -from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.base_models import ( ConnCacheableDataModelMixin, ConnSubselectDataModelMixin, @@ -40,6 +29,18 @@ from dl_i18n.localizer_base import Localizer from 
dl_utils.utils import DataKey +from dl_connector_chyt.api.i18n.localizer import Translatable +from dl_connector_chyt.core.conn_options import CHYTConnectOptions +from dl_connector_chyt.core.constants import ( + SOURCE_TYPE_CHYT_YTSAURUS_SUBSELECT, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE_LIST, + SOURCE_TYPE_CHYT_YTSAURUS_TABLE_RANGE, +) +from dl_connector_chyt.core.dto import CHYTDTO +from dl_connector_chyt.core.settings import CHYTConnectorSettings +from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase + if TYPE_CHECKING: from dl_core.services_registry.top_level import ServicesRegistry @@ -56,10 +57,10 @@ class BaseConnectionCHYT( is_always_user_source: ClassVar[bool] = True settings_type = CHYTConnectorSettings - chyt_table_source_type: ClassVar[CreateDSFrom] - chyt_table_list_source_type: ClassVar[CreateDSFrom] - chyt_table_range_source_type: ClassVar[CreateDSFrom] - chyt_subselect_source_type: ClassVar[CreateDSFrom] + chyt_table_source_type: ClassVar[DataSourceType] + chyt_table_list_source_type: ClassVar[DataSourceType] + chyt_table_range_source_type: ClassVar[DataSourceType] + chyt_subselect_source_type: ClassVar[DataSourceType] @attr.s(kw_only=True) class DataModel(ConnCacheableDataModelMixin, ConnSubselectDataModelMixin, ConnectionBase.DataModel): diff --git a/lib/dl_connector_chyt/dl_connector_chyt/core/utils.py b/lib/dl_connector_chyt/dl_connector_chyt/core/utils.py index d4b00cff6..735951522 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/core/utils.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/core/utils.py @@ -5,13 +5,14 @@ Type, ) +from dl_core import exc +from dl_core.connection_executors.models.scoped_rci import DBAdapterScopedRCI + from dl_connector_chyt.core import exc as chyt_exc from dl_connector_clickhouse.core.clickhouse_base.ch_commons import ( ClickHouseBaseUtils, ParsedErrorMsg, ) -from dl_core import exc -from dl_core.connection_executors.models.scoped_rci import 
DBAdapterScopedRCI LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_chyt/dl_connector_chyt/db_testing/connector.py b/lib/dl_connector_chyt/dl_connector_chyt/db_testing/connector.py index 165cab18d..1103bf9e5 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt/db_testing/connector.py +++ b/lib/dl_connector_chyt/dl_connector_chyt/db_testing/connector.py @@ -1,6 +1,7 @@ -from dl_connector_chyt.db_testing.engine_wrapper import CHYTEngineWrapperBase from dl_db_testing.connectors.base.connector import DbTestingConnector +from dl_connector_chyt.db_testing.engine_wrapper import CHYTEngineWrapperBase + class CHYTDbTestingConnector(DbTestingConnector): engine_wrapper_classes = (CHYTEngineWrapperBase,) diff --git a/lib/dl_connector_chyt/dl_connector_chyt_tests/unit/test_exc_codes.py b/lib/dl_connector_chyt/dl_connector_chyt_tests/unit/test_exc_codes.py index a897775b1..2756a8229 100644 --- a/lib/dl_connector_chyt/dl_connector_chyt_tests/unit/test_exc_codes.py +++ b/lib/dl_connector_chyt/dl_connector_chyt_tests/unit/test_exc_codes.py @@ -1,9 +1,10 @@ -from dl_connector_chyt.core import exc from dl_constants.exc import ( DEFAULT_ERR_CODE_API_PREFIX, GLOBAL_ERR_PREFIX, ) +from dl_connector_chyt.core import exc + def exc_cls_to_code(exc_cls: exc.CHYTQueryError | exc.CHYTCliqueError) -> str: return ".".join([GLOBAL_ERR_PREFIX, DEFAULT_ERR_CODE_API_PREFIX] + exc_cls.err_code) diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py index 82fab8175..93de45cb4 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/api_schema/connection.py @@ -7,9 +7,10 @@ ) from dl_api_connector.api_schema.connection_sql import ClassicSQLConnectionSchema from dl_api_connector.api_schema.extras import FieldExtra -from 
dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse import dl_core.marshmallow as core_ma_fields +from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse + class ClickHouseConnectionSchema( ConnectionMetaMixin, RawSQLLevelMixin, DataExportForbiddenMixin, ClassicSQLConnectionSchema diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_form/form_config.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_form/form_config.py index 07b9d70fe..c896a34ce 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_form/form_config.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_form/form_config.py @@ -21,6 +21,7 @@ from dl_api_connector.form_config.models.rows.base import FormRow from dl_api_connector.form_config.models.shortcuts.rows import RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_clickhouse.api.connection_info import ClickHouseConnectionInfoProvider from dl_connector_clickhouse.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_info.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_info.py index af33bb9c7..d8548ad8f 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_info.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_clickhouse.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connector.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connector.py index 1be279c74..552e7702d 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connector.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/connector.py @@ -9,6 
+9,10 @@ ApiConnector, ApiSourceDefinition, ) +from dl_api_lib.query.registry import MQMFactorySettingItem +from dl_constants.enums import QueryProcessingMode +from dl_query_processing.multi_query.factory import NoCompengMultiQueryMutatorFactory + from dl_connector_clickhouse.api.api_schema.connection import ClickHouseConnectionSchema from dl_connector_clickhouse.api.connection_form.form_config import ClickHouseConnectionFormFactory from dl_connector_clickhouse.api.connection_info import ClickHouseConnectionInfoProvider @@ -19,7 +23,10 @@ ClickHouseSubselectCoreSourceDefinition, ClickHouseTableCoreSourceDefinition, ) -from dl_connector_clickhouse.formula.constants import DIALECT_NAME_CLICKHOUSE +from dl_connector_clickhouse.formula.constants import ( + DIALECT_NAME_CLICKHOUSE, + ClickHouseDialect, +) class ClickHouseApiTableSourceDefinition(ApiSourceDefinition): @@ -50,3 +57,10 @@ class ClickHouseApiConnector(ApiConnector): ) formula_dialect_name = DIALECT_NAME_CLICKHOUSE translation_configs = frozenset(CONFIGS) + multi_query_mutation_factories = ApiConnector.multi_query_mutation_factories + ( + MQMFactorySettingItem( + query_proc_mode=QueryProcessingMode.native_wf, + dialects=ClickHouseDialect.and_above(ClickHouseDialect.CLICKHOUSE_22_10).to_list(), + factory_cls=NoCompengMultiQueryMutatorFactory, + ), + ) diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/i18n/localizer.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/i18n/localizer.py index 96810cd67..3c705c1af 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/i18n/localizer.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_clickhouse as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_clickhouse as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git 
a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/connector.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/connector.py index a2d1ba1c0..cbc1498ee 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/connector.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/connector.py @@ -1,3 +1,11 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreSourceDefinition, +) +from dl_core.connectors.sql_base.connector import SQLSubselectCoreSourceDefinitionBase +from dl_core.data_source_spec.sql import StandardSQLDataSourceSpec +from dl_core.us_manager.storage_schemas.data_source_spec_base import SQLDataSourceSpecStorageSchema + from dl_connector_clickhouse.core.clickhouse.constants import ( SOURCE_TYPE_CH_SUBSELECT, SOURCE_TYPE_CH_TABLE, @@ -10,19 +18,12 @@ from dl_connector_clickhouse.core.clickhouse.storage_schemas.connection import ConnectionClickhouseDataStorageSchema from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse from dl_connector_clickhouse.core.clickhouse_base.connection_executors import ( - ClickHouseAsyncAdapterConnExecutor, - ClickHouseSyncAdapterConnExecutor, + AsyncClickHouseConnExecutor, + ClickHouseConnExecutor, ) from dl_connector_clickhouse.core.clickhouse_base.connector import ClickHouseCoreConnectorBase from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ClickHouseTypeTransformer -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreSourceDefinition, -) -from dl_core.connectors.sql_base.connector import SQLSubselectCoreSourceDefinitionBase -from dl_core.data_source_spec.sql import StandardSQLDataSourceSpec -from dl_core.us_manager.storage_schemas.data_source_spec_base import SQLDataSourceSpecStorageSchema class 
ClickHouseCoreConnectionDefinition(CoreConnectionDefinition): @@ -30,8 +31,8 @@ class ClickHouseCoreConnectionDefinition(CoreConnectionDefinition): connection_cls = ConnectionClickhouse us_storage_schema_cls = ConnectionClickhouseDataStorageSchema type_transformer_cls = ClickHouseTypeTransformer - sync_conn_executor_cls = ClickHouseSyncAdapterConnExecutor - async_conn_executor_cls = ClickHouseAsyncAdapterConnExecutor + sync_conn_executor_cls = ClickHouseConnExecutor + async_conn_executor_cls = AsyncClickHouseConnExecutor dialect_string = "bi_clickhouse" data_source_migrator_cls = ClickHouseDataSourceMigrator diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/constants.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/constants.py index d20d17ee5..95a6fa73b 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/constants.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/constants.py @@ -1,5 +1,5 @@ -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType -SOURCE_TYPE_CH_TABLE = CreateDSFrom.declare("CH_TABLE") -SOURCE_TYPE_CH_SUBSELECT = CreateDSFrom.declare("CH_SUBSELECT") +SOURCE_TYPE_CH_TABLE = DataSourceType.declare("CH_TABLE") +SOURCE_TYPE_CH_SUBSELECT = DataSourceType.declare("CH_SUBSELECT") diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source.py index 250a68e10..2c67ed7f5 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dl_constants.enums import DataSourceType + from dl_connector_clickhouse.core.clickhouse.constants import ( SOURCE_TYPE_CH_SUBSELECT, SOURCE_TYPE_CH_TABLE, @@ -10,14 +12,13 @@ 
ClickHouseDataSourceBase, CommonClickHouseSubselectDataSource, ) -from dl_constants.enums import CreateDSFrom class ClickHouseDataSource(ClickHouseDataSourceBase): conn_type = CONNECTION_TYPE_CLICKHOUSE @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in { SOURCE_TYPE_CH_TABLE, SOURCE_TYPE_CH_SUBSELECT, @@ -30,7 +31,7 @@ class ClickHouseSubselectDataSource(ActualClickHouseBaseMixin, CommonClickHouseS conn_type = CONNECTION_TYPE_CLICKHOUSE @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in { SOURCE_TYPE_CH_TABLE, SOURCE_TYPE_CH_SUBSELECT, diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source_migration.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source_migration.py index b63b0fb34..a8bc99dfc 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source_migration.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/data_source_migration.py @@ -1,8 +1,9 @@ +from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator + from dl_connector_clickhouse.core.clickhouse.constants import ( SOURCE_TYPE_CH_SUBSELECT, SOURCE_TYPE_CH_TABLE, ) -from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator class ClickHouseDataSourceMigrator(DefaultSQLDataSourceMigrator): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/connection.py index d73e7ca0f..8fc4aa9ed 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/connection.py +++ 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/connection.py @@ -4,11 +4,12 @@ ) import uuid -from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_constants.enums import RawSQLLevel from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE + def make_clickhouse_saved_connection( sync_usm: SyncUSManager, diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/exec_factory.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/exec_factory.py index 671173512..0e1dd2e72 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/exec_factory.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/testing/exec_factory.py @@ -3,12 +3,13 @@ Type, ) -from dl_connector_clickhouse.core.clickhouse_base.adapters import ClickHouseAdapter -from dl_connector_clickhouse.core.clickhouse_base.target_dto import ClickHouseConnTargetDTO from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter from dl_core.connection_executors.models.connection_target_dto_base import BaseSQLConnTargetDTO from dl_core_testing.executors import ExecutorFactoryBase +from dl_connector_clickhouse.core.clickhouse_base.adapters import ClickHouseAdapter +from dl_connector_clickhouse.core.clickhouse_base.target_dto import ClickHouseConnTargetDTO + class ClickHouseExecutorFactory(ExecutorFactoryBase): def get_dto_class(self) -> Type[BaseSQLConnTargetDTO]: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py index b8725157b..0960b4cac 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse/us_connection.py @@ -2,13 +2,14 @@ from typing import ClassVar +from dl_core.us_connection_base import DataSourceTemplate +from dl_i18n.localizer_base import Localizer + from dl_connector_clickhouse.core.clickhouse.constants import ( SOURCE_TYPE_CH_SUBSELECT, SOURCE_TYPE_CH_TABLE, ) from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase -from dl_core.us_connection_base import DataSourceTemplate -from dl_i18n.localizer_base import Localizer class ConnectionClickhouse(ConnectionClickhouseBase): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/adapters.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/adapters.py index 1c692b8f6..a9f0a7a71 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/adapters.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/adapters.py @@ -33,13 +33,6 @@ from sqlalchemy.sql.type_api import TypeEngine from dl_app_tools.profiling_base import generic_profiler_async -from dl_connector_clickhouse.core.clickhouse_base.ch_commons import ( - ClickHouseBaseUtils, - ClickHouseUtils, - get_ch_settings, -) -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_connector_clickhouse.core.clickhouse_base.exc import CHRowTooLarge from dl_constants.enums import IndexKind from dl_core import exc from dl_core.connection_executors.adapters.adapters_base_sa_classic import ( @@ -72,12 +65,16 @@ ) from dl_core.utils import make_url +from dl_connector_clickhouse.core.clickhouse_base.ch_commons import ( + ClickHouseBaseUtils, + ClickHouseUtils, + get_ch_settings, +) 
+from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE +from dl_connector_clickhouse.core.clickhouse_base.exc import CHRowTooLarge + if TYPE_CHECKING: - from dl_connector_clickhouse.core.clickhouse_base.target_dto import ( # noqa: F401 - BaseClickHouseConnTargetDTO, - ClickHouseConnTargetDTO, - ) from dl_constants.types import TBIChunksGen from dl_core.connection_executors.models.scoped_rci import DBAdapterScopedRCI from dl_core.connection_models.common_models import ( @@ -85,6 +82,11 @@ SchemaIdent, ) + from dl_connector_clickhouse.core.clickhouse_base.target_dto import ( # noqa: F401 + BaseClickHouseConnTargetDTO, + ClickHouseConnTargetDTO, + ) + LOGGER = logging.getLogger(__name__) _DBA_TV = TypeVar("_DBA_TV", bound="BaseAsyncClickHouseAdapter") diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/ch_commons.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/ch_commons.py index 99a19e22d..126134198 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/ch_commons.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/ch_commons.py @@ -15,6 +15,13 @@ from sqlalchemy.sql import ddl as sa_ddl from sqlalchemy.sql import schema as sa_schema +from dl_core import exc +from dl_core.connection_executors.models.scoped_rci import DBAdapterScopedRCI +from dl_core.db import ( + SchemaColumn, + make_sa_type, +) + from dl_connector_clickhouse.core.clickhouse_base.exc import ( CannotInsertNullInOrdinaryColumn, CHIncorrectData, @@ -24,12 +31,6 @@ InvalidSplitSeparator, TooManyColumns, ) -from dl_core import exc -from dl_core.connection_executors.models.scoped_rci import DBAdapterScopedRCI -from dl_core.db import ( - SchemaColumn, - make_sa_type, -) if TYPE_CHECKING: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connection_executors.py 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connection_executors.py index cf809b054..ec59aa00d 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connection_executors.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connection_executors.py @@ -7,6 +7,9 @@ import attr +from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + from dl_connector_clickhouse.core.clickhouse_base.adapters import ( AsyncClickHouseAdapter, ClickHouseAdapter, @@ -14,15 +17,13 @@ from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions from dl_connector_clickhouse.core.clickhouse_base.dto import ClickHouseConnDTO from dl_connector_clickhouse.core.clickhouse_base.target_dto import ClickHouseConnTargetDTO -from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor _BASE_CLICKHOUSE_ADAPTER_TV = TypeVar("_BASE_CLICKHOUSE_ADAPTER_TV", bound=CommonBaseDirectAdapter) @attr.s(cmp=False, hash=False) -class BaseClickHouseConnExecutor(DefaultSqlAlchemyConnExecutor[_BASE_CLICKHOUSE_ADAPTER_TV]): +class _BaseClickHouseConnExecutor(DefaultSqlAlchemyConnExecutor[_BASE_CLICKHOUSE_ADAPTER_TV]): _conn_dto: ClickHouseConnDTO = attr.ib() _conn_options: CHConnectOptions = attr.ib() @@ -65,10 +66,10 @@ def mutate_for_dashsql(self, db_params: Optional[dict[str, str]] = None): # typ @attr.s(cmp=False, hash=False) -class ClickHouseSyncAdapterConnExecutor(BaseClickHouseConnExecutor[ClickHouseAdapter]): +class ClickHouseConnExecutor(_BaseClickHouseConnExecutor[ClickHouseAdapter]): TARGET_ADAPTER_CLS = ClickHouseAdapter @attr.s(cmp=False, hash=False) -class ClickHouseAsyncAdapterConnExecutor(BaseClickHouseConnExecutor[AsyncClickHouseAdapter]): +class 
AsyncClickHouseConnExecutor(_BaseClickHouseConnExecutor[AsyncClickHouseAdapter]): TARGET_ADAPTER_CLS = AsyncClickHouseAdapter diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connector.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connector.py index c1af939c6..499c93853 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connector.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/connector.py @@ -1,5 +1,7 @@ from clickhouse_sqlalchemy.orm.query import Query as CHQuery +from dl_core.connectors.base.connector import CoreConnector + from dl_connector_clickhouse.core.clickhouse_base.adapters import ( AsyncClickHouseAdapter, ClickHouseAdapter, @@ -7,7 +9,6 @@ from dl_connector_clickhouse.core.clickhouse_base.constants import BACKEND_TYPE_CLICKHOUSE from dl_connector_clickhouse.core.clickhouse_base.query_compiler import ClickHouseQueryCompiler from dl_connector_clickhouse.core.clickhouse_base.sa_types import SQLALCHEMY_CLICKHOUSE_TYPES -from dl_core.connectors.base.connector import CoreConnector class ClickHouseCoreConnectorBase(CoreConnector): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/data_source.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/data_source.py index 61831726c..9a30a53fe 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/data_source.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/data_source.py @@ -7,8 +7,6 @@ ClassVar, ) -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_connector_clickhouse.core.clickhouse_base.query_compiler import ClickHouseQueryCompiler from dl_constants.enums import JoinType from dl_core.data_source.sql import ( BaseSQLDataSource, @@ -16,6 +14,9 @@ SubselectDataSource, ) +from 
dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE +from dl_connector_clickhouse.core.clickhouse_base.query_compiler import ClickHouseQueryCompiler + if TYPE_CHECKING: from dl_core import us_connection # noqa diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/dto.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/dto.py index 39bb2c4b2..9a9b6368c 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/dto.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/dto.py @@ -4,12 +4,13 @@ import attr -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_core.connection_models.dto_defs import ( ConnDTO, DefaultSQLDTO, ) +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE + @attr.s(frozen=True) class ClickHouseBaseDTO(ConnDTO): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/sa_types.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/sa_types.py index fa0a8bc75..bbf9d6945 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/sa_types.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/sa_types.py @@ -11,12 +11,6 @@ import sqlalchemy as sa from sqlalchemy.types import TypeEngine -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_connector_clickhouse.core.clickhouse_base.type_transformer import ( - CH_TYPES_DATE, - CH_TYPES_FLOAT, - CH_TYPES_INT, -) from dl_constants.enums import ConnectionType from dl_core.db.native_type import ( ClickHouseDateTime64NativeType, @@ -28,6 +22,13 @@ ) from dl_core.db.sa_types_base import make_native_type +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE +from 
dl_connector_clickhouse.core.clickhouse_base.type_transformer import ( + CH_TYPES_DATE, + CH_TYPES_FLOAT, + CH_TYPES_INT, +) + def _make_ch_type(nt: GenericNativeType, typeobj: TypeEngine) -> TypeEngine: nullable = True @@ -124,7 +125,7 @@ def _generate_complex_ch_types( make_native_type(conn_type, ch_types.Array(ch_types.String)): partial( _make_ch_array, inner_typecls=ch_types.String ), - # For the `BIType.unsupported`; should only be filled with `NULL`s in materialization. + # For the `UserDataType.unsupported`; should only be filled with `NULL`s in materialization. # See also: `dl_core.data_source.sql.BaseSQLDataSource._make_raw_column_select` make_native_type(conn_type, sa.sql.sqltypes.NullType): ch_fallback_type_gen, } diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/storage_schemas/connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/storage_schemas/connection.py index b482d58cd..5e3591b28 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/storage_schemas/connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/storage_schemas/connection.py @@ -5,9 +5,10 @@ from marshmallow import fields as ma_fields -from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase from dl_core.us_manager.storage_schemas.connection import ConnectionSQLDataStorageSchema +from dl_connector_clickhouse.core.clickhouse_base.us_connection import ConnectionClickhouseBase + _CH_CONN_DATA_TV = TypeVar("_CH_CONN_DATA_TV", bound=ConnectionClickhouseBase.DataModel) diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/type_transformer.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/type_transformer.py index 6d0e649d5..6fba68be7 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/type_transformer.py +++ 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/type_transformer.py @@ -3,8 +3,7 @@ from clickhouse_sqlalchemy import types as ch_types import sqlalchemy as sa -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.backend_types import get_backend_type from dl_core.db.conversion_base import ( TypeTransformer, @@ -12,6 +11,8 @@ ) from dl_core.db.elements import GenericNativeType +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE + CH_TYPES_INT = frozenset( ( @@ -33,54 +34,56 @@ class ClickHouseTypeTransformer(TypeTransformer): conn_type = CONNECTION_TYPE_CLICKHOUSE native_to_user_map = { - **{make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): BIType.integer for typecls in CH_TYPES_INT}, # type: ignore # TODO: fix + **{make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): UserDataType.integer for typecls in CH_TYPES_INT}, # type: ignore # TODO: fix **{ - make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): BIType.string + make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): UserDataType.string for typecls in (ch_types.String,) # TODO: FixedString }, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Enum8): BIType.string, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Enum16): BIType.string, - **{make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): BIType.float for typecls in CH_TYPES_FLOAT}, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date): BIType.date, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date32): BIType.date, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Bool): BIType.boolean, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64): BIType.genericdatetime, - 
make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTimeWithTZ): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64WithTZ): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.UUID): BIType.uuid, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Enum8): UserDataType.string, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Enum16): UserDataType.string, + **{make_native_type(CONNECTION_TYPE_CLICKHOUSE, typecls): UserDataType.float for typecls in CH_TYPES_FLOAT}, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date): UserDataType.date, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date32): UserDataType.date, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Bool): UserDataType.boolean, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTimeWithTZ): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime64WithTZ): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.UUID): UserDataType.uuid, **{ - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(typecls)): BIType.array_int + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(typecls)): UserDataType.array_int for typecls in CH_TYPES_INT }, **{ - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(typecls)): BIType.array_float + make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(typecls)): UserDataType.array_float for typecls in CH_TYPES_FLOAT }, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.String())): BIType.array_str, - make_native_type(CONNECTION_TYPE_CLICKHOUSE, sa.sql.sqltypes.NullType): BIType.unsupported, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, 
ch_types.Array(ch_types.String())): UserDataType.array_str, + make_native_type(CONNECTION_TYPE_CLICKHOUSE, sa.sql.sqltypes.NullType): UserDataType.unsupported, } user_to_native_map = { - BIType.integer: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Int64), - BIType.float: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Float64), - BIType.boolean: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Bool), - BIType.string: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), - BIType.date: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date), - BIType.datetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime), # TODO: DateTime64 - BIType.genericdatetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime), # TODO: DateTime64 + UserDataType.integer: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Int64), + UserDataType.float: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Float64), + UserDataType.boolean: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Bool), + UserDataType.string: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), + UserDataType.date: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Date), + UserDataType.datetime: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime), # TODO: DateTime64 + UserDataType.genericdatetime: make_native_type( + CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTime + ), # TODO: DateTime64 # WARNING: underparametrized - BIType.datetimetz: make_native_type( + UserDataType.datetimetz: make_native_type( CONNECTION_TYPE_CLICKHOUSE, ch_types.DateTimeWithTZ ), # TODO: DateTime64WithTZ - BIType.geopoint: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), - BIType.geopolygon: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), - BIType.uuid: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.UUID), - BIType.markup: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), - BIType.array_int: 
make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.Int64)), - BIType.array_float: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.Float64)), - BIType.array_str: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.String)), - BIType.tree_str: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.String)), - BIType.unsupported: make_native_type(CONNECTION_TYPE_CLICKHOUSE, sa.sql.sqltypes.NullType), + UserDataType.geopoint: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), + UserDataType.geopolygon: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), + UserDataType.uuid: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.UUID), + UserDataType.markup: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.String), + UserDataType.array_int: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.Int64)), + UserDataType.array_float: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.Float64)), + UserDataType.array_str: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.String)), + UserDataType.tree_str: make_native_type(CONNECTION_TYPE_CLICKHOUSE, ch_types.Array(ch_types.String)), + UserDataType.unsupported: make_native_type(CONNECTION_TYPE_CLICKHOUSE, sa.sql.sqltypes.NullType), } def make_foreign_native_type_conversion(self, native_t: GenericNativeType) -> GenericNativeType: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/us_connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/us_connection.py index 6a9151291..822a13981 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/us_connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/core/clickhouse_base/us_connection.py @@ -9,8 +9,6 @@ import attr -from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions -from 
dl_connector_clickhouse.core.clickhouse_base.dto import ClickHouseConnDTO from dl_core.connection_executors.common_base import ConnExecutorQuery from dl_core.connection_executors.sync_base import SyncConnExecutorBase from dl_core.connection_models import ConnectOptions @@ -19,6 +17,9 @@ ConnectionBase, ) +from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions +from dl_connector_clickhouse.core.clickhouse_base.dto import ClickHouseConnDTO + @attr.s class SubselectTemplate: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/db_testing/connector.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/db_testing/connector.py index 1b85e4e30..7a4b1eba6 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/db_testing/connector.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/db_testing/connector.py @@ -1,8 +1,9 @@ +from dl_db_testing.connectors.base.connector import DbTestingConnector + from dl_connector_clickhouse.db_testing.engine_wrapper import ( BiClickHouseEngineWrapper, ClickHouseEngineWrapper, ) -from dl_db_testing.connectors.base.connector import DbTestingConnector class ClickHouseDbTestingConnector(DbTestingConnector): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/connector.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/connector.py index 248b34c97..8875e0a38 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/connector.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/connector.py @@ -1,10 +1,11 @@ from clickhouse_sqlalchemy.drivers.base import ClickHouseDialect as SAClickHouseDialect +from dl_formula.connectors.base.connector import FormulaConnector + from dl_connector_clickhouse.formula.constants import ClickHouseDialect as ClickHouseDialectNS from dl_connector_clickhouse.formula.definitions.all import DEFINITIONS from dl_connector_clickhouse.formula.literal import ClickHouseLiteralizer from 
dl_connector_clickhouse.formula.type_constructor import ClickHouseTypeConstructor -from dl_formula.connectors.base.connector import FormulaConnector class ClickHouseFormulaConnector(FormulaConnector): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/conditional_blocks.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/conditional_blocks.py index 16b104e98..9f7a1943f 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/conditional_blocks.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/conditional_blocks.py @@ -1,12 +1,13 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, ) import dl_formula.definitions.conditional_blocks as base +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_aggregation.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_aggregation.py index d21536c2b..4e27dc24e 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_aggregation.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_aggregation.py @@ -2,7 +2,6 @@ import sqlalchemy as sa from sqlalchemy.sql import ClauseElement -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common import quantile_value import dl_formula.definitions.functions_aggregation as base @@ -12,6 +11,8 @@ ) from dl_formula.shortcuts import n +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make 
diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_array.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_array.py index b2019f94e..1f39bae68 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_array.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_array.py @@ -2,11 +2,12 @@ from clickhouse_sqlalchemy.ext.clauses import Lambda import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_array as base from dl_formula.definitions.literals import un_literal +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make @@ -180,6 +181,8 @@ V(D.CLICKHOUSE, sa.func.has), ] ), + # notcontains + base.FuncArrayNotContains.for_dialect(D.CLICKHOUSE), # contains_all base.FuncArrayContainsAll( variants=[ diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_datetime.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_datetime.py index 2cb060e29..efabdb7bf 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_datetime.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_datetime.py @@ -9,7 +9,6 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -22,6 +21,8 @@ import dl_formula.definitions.functions_datetime as base from dl_formula.definitions.literals import un_literal +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + if TYPE_CHECKING: from 
dl_formula.translation.context import TranslationCtx diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_logical.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_logical.py index 345c9d425..c8d196c5c 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_logical.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_logical.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -8,6 +7,8 @@ import dl_formula.definitions.functions_logical as base from dl_formula.shortcuts import n +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_markup.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_markup.py index 2f74705b5..9a4b0b5b3 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_markup.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D import dl_formula.definitions.functions_markup as base +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + DEFINITIONS_MARKUP = [ # + diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_math.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_math.py index b21695eae..71c7ca2bd 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_math.py +++ 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_math.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import ( @@ -10,6 +9,8 @@ import dl_formula.definitions.functions_math as base from dl_formula.shortcuts import n +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_special.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_special.py index 75e00e81a..c73d11452 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_special.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_special.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_special as base +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_string.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_string.py index 40fd459b5..04df2d2e2 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_string.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_string.py @@ -1,10 +1,11 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import TranslationVariant import 
dl_formula.definitions.functions_string as base from dl_formula.shortcuts import n +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make @@ -48,6 +49,10 @@ ] ), base.FuncContainsNonString.for_dialect(D.CLICKHOUSE), + # notcontains + base.FuncNotContainsConst.for_dialect(D.CLICKHOUSE), + base.FuncNotContainsNonConst.for_dialect(D.CLICKHOUSE), + base.FuncNotContainsNonString.for_dialect(D.CLICKHOUSE), # endswith base.FuncEndswithNonConst( variants=[ diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_type.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_type.py index 87847abb7..2b07e739f 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_type.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_type.py @@ -4,7 +4,6 @@ import sqlalchemy as sa from sqlalchemy.types import TypeEngine -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.core.datatype import DataType from dl_formula.core.dialect import DialectCombo from dl_formula.definitions.args import ArgTypeSequence @@ -23,6 +22,8 @@ from dl_formula.shortcuts import n from dl_formula.translation.context import TranslationCtx +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_window.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_window.py index a2e599f7b..20627c311 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_window.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/functions_window.py @@ -11,7 +11,6 @@ ) from sqlalchemy.sql.functions import Function as SAFunction 
-from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import ( FuncTranslationImplementationBase, TranslateCallback, @@ -21,6 +20,8 @@ from dl_formula.translation.context import TranslationCtx from dl_formula.translation.env import TranslationEnvironment +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_binary.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_binary.py index f67c0b278..370085184 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_binary.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_binary.py @@ -4,7 +4,6 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.core.datatype import DataType from dl_formula.core.dialect import DialectCombo from dl_formula.definitions.args import ArgTypeSequence @@ -22,6 +21,8 @@ from dl_formula.translation.context import TranslationCtx from dl_formula.translation.env import TranslationEnvironment +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_ternary.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_ternary.py index 8c1266e53..001ca7acb 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_ternary.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_ternary.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D import 
dl_formula.definitions.operators_ternary as base +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + DEFINITIONS_TERNARY = [ # between diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_unary.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_unary.py index ab66fe8fb..5e59781cc 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_unary.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/definitions/operators_unary.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.operators_unary as base +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/literal.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/literal.py index b812e191c..504fd1d72 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/literal.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula/literal.py @@ -3,13 +3,14 @@ import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_formula.connectors.base.literal import ( Literal, Literalizer, ) from dl_formula.core.dialect import DialectCombo +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + class ClickHouseLiteralizer(Literalizer): __slots__ = () diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/human_dialects.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/human_dialects.py index a8f51896a..a0e7b5622 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/human_dialects.py +++ 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/human_dialects.py @@ -1,6 +1,7 @@ +from dl_formula_ref.texts import StyledDialect + from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_connector_clickhouse.formula_ref.i18n import Translatable -from dl_formula_ref.texts import StyledDialect HUMAN_DIALECTS = { diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/i18n.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/i18n.py index 64429913d..f5255bab5 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/i18n.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/i18n.py @@ -2,10 +2,11 @@ import attr -import dl_connector_clickhouse as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_clickhouse as package + DOMAIN = f"dl_formula_ref_{package.__name__}" diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/plugin.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/plugin.py index 6eae63088..ee2ebe928 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/plugin.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse/formula_ref/plugin.py @@ -1,9 +1,10 @@ +from dl_formula_ref.functions.type_conversion import DbCastExtension +from dl_formula_ref.plugins.base.plugin import FormulaRefPlugin + from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_connector_clickhouse.formula.definitions.functions_type import FuncDbCastClickHouseBase from dl_connector_clickhouse.formula_ref.human_dialects import HUMAN_DIALECTS from dl_connector_clickhouse.formula_ref.i18n import CONFIGS -from dl_formula_ref.functions.type_conversion import DbCastExtension -from dl_formula_ref.plugins.base.plugin import FormulaRefPlugin class ClickHouseFormulaRefPlugin(FormulaRefPlugin): diff 
--git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/base.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/base.py index e5c6c9460..47b713f45 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/base.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/base.py @@ -4,6 +4,7 @@ from dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import StandardizedDataApiTestBase from dl_api_lib_testing.dataset_base import DatasetTestBase + from dl_connector_clickhouse.core.clickhouse.constants import SOURCE_TYPE_CH_TABLE from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_connector_clickhouse_tests.db.config import ( diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_complex_queries.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_complex_queries.py new file mode 100644 index 000000000..cd051c759 --- /dev/null +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_complex_queries.py @@ -0,0 +1,8 @@ +from dl_api_lib_testing.connector.complex_queries import DefaultBasicComplexQueryTestSuite +from dl_constants.enums import QueryProcessingMode + +from dl_connector_clickhouse_tests.db.api.base import ClickHouseDataApiTestBase + + +class TestClickHouseBasicComplexQueries(ClickHouseDataApiTestBase, DefaultBasicComplexQueryTestSuite): + query_processing_mode = QueryProcessingMode.native_wf diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_connection.py index db4913029..c52e7ce8a 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_connection.py @@ -1,4 +1,5 @@ from 
dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + from dl_connector_clickhouse_tests.db.api.base import ClickHouseConnectionTestBase diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_data.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_data.py index 1bbd78a63..21f4dfd60 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_data.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_data.py @@ -5,6 +5,7 @@ DefaultConnectorDataRangeTestSuite, DefaultConnectorDataResultTestSuite, ) + from dl_connector_clickhouse_tests.db.api.base import ClickHouseDataApiTestBase diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_dataset.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_dataset.py index e4def5324..c5670e586 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_dataset.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/api/test_dataset.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + from dl_connector_clickhouse_tests.db.api.base import ClickHouseDatasetTestBase diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/config.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/config.py index a4b5b5840..42aa437e2 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/config.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/config.py @@ -2,10 +2,11 @@ from typing import ClassVar from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_core_testing.configuration import DefaultCoreTestConfiguration from dl_testing.containers import get_test_container_hostport +from 
dl_connector_clickhouse.formula.constants import ClickHouseDialect as D + # Infra settings SSL_CA_FILENAME = "marsnet_ca.crt" diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/conftest.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/conftest.py index eddf68dee..44a02b2fc 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/conftest.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/conftest.py @@ -1,7 +1,8 @@ from dl_api_lib_testing.initialization import initialize_api_lib_test -from dl_connector_clickhouse_tests.db.config import API_TEST_CONFIG from dl_formula_testing.forced_literal import forced_literal_use +from dl_connector_clickhouse_tests.db.config import API_TEST_CONFIG + pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/base.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/base.py index ef797605b..4f5e45ca1 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/base.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/base.py @@ -6,13 +6,14 @@ import pytest import dl_configs.utils as bi_configs_utils +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.testcases.connection import BaseConnectionTestClass + from dl_connector_clickhouse.core.clickhouse.testing.connection import make_clickhouse_saved_connection from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig import dl_connector_clickhouse_tests.db.config as test_config -from dl_core.us_manager.us_manager_sync import SyncUSManager -from dl_core_testing.testcases.connection import 
BaseConnectionTestClass class BaseClickHouseTestClass(BaseConnectionTestClass[ConnectionClickhouse]): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection.py index be79096c7..941e6d9ae 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection.py @@ -4,13 +4,14 @@ import pytest +from dl_core.us_connection_base import DataSourceTemplate +from dl_core_testing.testcases.connection import DefaultConnectionTestClass + from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse from dl_connector_clickhouse_tests.db.core.base import ( BaseClickHouseTestClass, BaseSslClickHouseTestClass, ) -from dl_core.us_connection_base import DataSourceTemplate -from dl_core_testing.testcases.connection import DefaultConnectionTestClass class TestClickHouseConnection( diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection_executor.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection_executor.py index d7e3ddc06..5b106765c 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection_executor.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_connection_executor.py @@ -4,12 +4,6 @@ import pytest -from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse -from dl_connector_clickhouse_tests.db.config import CoreConnectionSettings -from dl_connector_clickhouse_tests.db.core.base import ( - BaseClickHouseTestClass, - BaseSslClickHouseTestClass, -) from dl_core.connection_executors import ( AsyncConnExecutorBase, SyncConnExecutorBase, @@ -22,6 +16,13 @@ ) from dl_testing.regulated_test import RegulatedTestParams +from 
dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse +from dl_connector_clickhouse_tests.db.config import CoreConnectionSettings +from dl_connector_clickhouse_tests.db.core.base import ( + BaseClickHouseTestClass, + BaseSslClickHouseTestClass, +) + class ClickHouseSyncAsyncConnectionExecutorCheckBase( BaseClickHouseTestClass, diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_data_source.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_data_source.py index 8b4501bd0..b025feb3d 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_data_source.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_data_source.py @@ -1,5 +1,16 @@ import pytest +from dl_constants.enums import ( + RawSQLLevel, + UserDataType, +) +from dl_core.data_source_spec.sql import ( + StandardSQLDataSourceSpec, + SubselectDataSourceSpec, +) +from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE +from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass + from dl_connector_clickhouse.core.clickhouse.constants import ( SOURCE_TYPE_CH_SUBSELECT, SOURCE_TYPE_CH_TABLE, @@ -10,16 +21,6 @@ ) from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse from dl_connector_clickhouse_tests.db.core.base import BaseClickHouseTestClass -from dl_constants.enums import ( - BIType, - RawSQLLevel, -) -from dl_core.data_source_spec.sql import ( - StandardSQLDataSourceSpec, - SubselectDataSourceSpec, -) -from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE -from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass class TestClickHouseTableDataSource( @@ -41,7 +42,7 @@ def initial_data_source_spec(self, sample_table) -> StandardSQLDataSourceSpec: ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def 
get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) @@ -65,5 +66,5 @@ def initial_data_source_spec(self, sample_table) -> SubselectDataSourceSpec: ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_dataset.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_dataset.py index 2f2639463..fed5062e6 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_dataset.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/core/test_dataset.py @@ -1,7 +1,8 @@ +from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite + from dl_connector_clickhouse.core.clickhouse.constants import SOURCE_TYPE_CH_TABLE from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse from dl_connector_clickhouse_tests.db.core.base import BaseClickHouseTestClass -from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite class TestClickHouseDataset(BaseClickHouseTestClass, DefaultDatasetTestSuite[ConnectionClickhouse]): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/base.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/base.py index 48a54e1c2..4d3a965b9 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/base.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/base.py @@ -3,10 +3,11 @@ import pytest import pytz +from dl_formula_testing.testcases.base import FormulaConnectorTestBase + from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D 
from dl_connector_clickhouse_tests.db.config import DB_URLS -from dl_formula_testing.testcases.base import FormulaConnectorTestBase class ClickHouseTestBase(FormulaConnectorTestBase): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_conditional_blocks.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_conditional_blocks.py index 6ea033fbc..f530bd321 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_conditional_blocks.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_conditional_blocks.py @@ -1,8 +1,9 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.conditional_blocks import DefaultConditionalBlockFormulaConnectorTestSuite from dl_formula_testing.util import to_str +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class ConditionalBlockClickHouseTestSuite(DefaultConditionalBlockFormulaConnectorTestSuite): def test_case_block_returning_null(self, dbe: DbEvaluator): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_aggregation.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_aggregation.py index 0e00650de..f20d16156 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_aggregation.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_aggregation.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_aggregation import DefaultMainAggFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase 
+ class MainAggFunctionClickHouseTestSuite(DefaultMainAggFunctionFormulaConnectorTestSuite): supports_countd_approx = True diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_array.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_array.py index 597574d13..9fed6d78c 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_array.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_array.py @@ -3,10 +3,11 @@ import pytest import sqlalchemy as sa -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_array import DefaultArrayFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class ArrayFunctionClickHouseTestSuite(DefaultArrayFunctionFormulaConnectorTestSuite): def test_startswith_string_array(self, dbe: DbEvaluator, data_table: sa.Table) -> None: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_datetime.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_datetime.py index 92ad2fc9d..48afaf8ed 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_datetime.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_datetime.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite + from dl_connector_clickhouse_tests.db.formula.base import ( ClickHouse_21_8TestBase, ClickHouse_22_10TestBase, ) -from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite class DateTimeFunctionClickHouseTestSuite(DefaultDateTimeFunctionFormulaConnectorTestSuite): 
diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_logical.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_logical.py index 33cb520a8..23f892175 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_logical.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_logical.py @@ -1,9 +1,10 @@ import pytest -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_logical import DefaultLogicalFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class LogicalFunctionClickHouseTestSuite(DefaultLogicalFunctionFormulaConnectorTestSuite): supports_nan_funcs = True diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_markup.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_markup.py index 0eb2fe11a..4c1e3a3bd 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_markup.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.testcases.functions_markup import DefaultMarkupFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class TestMarkupFunctionClickHouse_21_8(ClickHouse_21_8TestBase, DefaultMarkupFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_math.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_math.py index 5667ca1c9..f8fedd731 100644 --- 
a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_math.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_math.py @@ -1,7 +1,8 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_math import DefaultMathFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class MathFunctionClickHouseTestSuite(DefaultMathFunctionFormulaConnectorTestSuite): def test_compare(self, dbe: DbEvaluator) -> None: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_string.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_string.py index e5dcd3554..3695772a8 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_string.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_string.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula.core import nodes from dl_formula.core.datatype import DataType from dl_formula.translation import ext_nodes @@ -8,6 +7,8 @@ from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_string import DefaultStringFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class StringFunctionClickHouseTestSuite(DefaultStringFunctionFormulaConnectorTestSuite): def test_utf8(self, dbe: DbEvaluator) -> None: diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_type_conversion.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_type_conversion.py index 959870f03..d449c1ac9 100644 
--- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_type_conversion.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_type_conversion.py @@ -5,8 +5,6 @@ import pytz import sqlalchemy as sa -from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase import dl_formula.core.exc as exc from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.base import FormulaConnectorTestBase @@ -29,6 +27,9 @@ utc_ts, ) +from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + # STR diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_window.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_window.py index 404564467..ef9ac4582 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_window.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_functions_window.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_22_10TestBase from dl_formula_testing.testcases.functions_window import DefaultWindowFunctionFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_22_10TestBase + class TestWindowFunctionClickHouse_22_10(ClickHouse_22_10TestBase, DefaultWindowFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_literals.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_literals.py index 293af8295..c8e63e680 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_literals.py +++ 
b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_literals.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.testcases.literals import DefaultLiteralFormulaConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class LiteralFunctionClickHouseTestSuite(DefaultLiteralFormulaConnectorTestSuite): supports_microseconds = False diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_misc_funcs.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_misc_funcs.py index e42c0c152..7d48fa38e 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_misc_funcs.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_misc_funcs.py @@ -1,6 +1,7 @@ -from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase from dl_formula_testing.testcases.misc_funcs import DefaultMiscFunctionalityConnectorTestSuite +from dl_connector_clickhouse_tests.db.formula.base import ClickHouse_21_8TestBase + class TestMiscFunctionalityClickHouse_21_8(ClickHouse_21_8TestBase, DefaultMiscFunctionalityConnectorTestSuite): pass diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_operators.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_operators.py index f28c1e81a..94eb1580c 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_operators.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/db/formula/test_operators.py @@ -1,11 +1,12 @@ import datetime +from dl_formula_testing.evaluator import DbEvaluator +from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite + from dl_connector_clickhouse_tests.db.formula.base import ( ClickHouse_21_8TestBase, ClickHouse_22_10TestBase, ) -from 
dl_formula_testing.evaluator import DbEvaluator -from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite class TestOperatorClickHouse_21_8(ClickHouse_21_8TestBase, DefaultOperatorFormulaConnectorTestSuite): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/formula/test_dialect.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/formula/test_dialect.py index 209c529c8..083b9c294 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/formula/test_dialect.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/formula/test_dialect.py @@ -1,6 +1,7 @@ +from dl_formula_testing.testcases.dialect import DefaultDialectFormulaConnectorTestSuite + from dl_connector_clickhouse.formula.constants import ClickHouseDialect as D from dl_connector_clickhouse.formula.constants import DIALECT_NAME_CLICKHOUSE -from dl_formula_testing.testcases.dialect import DefaultDialectFormulaConnectorTestSuite class DialectClickHouseTestSuite(DefaultDialectFormulaConnectorTestSuite): diff --git a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/test_connection_form.py b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/test_connection_form.py index aa06a36b0..a1511e3f7 100644 --- a/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/test_connection_form.py +++ b/lib/dl_connector_clickhouse/dl_connector_clickhouse_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from dl_connector_clickhouse.api.connection_form.form_config import ClickHouseConnectionFormFactory from dl_connector_clickhouse.api.i18n.localizer import CONFIGS as BI_API_LIB_CONFIGS diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_form/form_config.py 
b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_form/form_config.py index f99d2a8cb..90c7a377c 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_form/form_config.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_form/form_config.py @@ -21,6 +21,7 @@ from dl_api_connector.form_config.models.rows.base import FormRow from dl_api_connector.form_config.models.shortcuts.rows import RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_greenplum.api.connection_info import GreenplumConnectionInfoProvider diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_info.py b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_info.py index cc281cb7c..542c41b45 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_info.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_greenplum.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connector.py b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connector.py index 53346201b..5fbded38e 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/api/connector.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/api/connector.py @@ -9,6 +9,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_greenplum.api.api_schema.connection import GreenplumConnectionSchema from dl_connector_greenplum.api.connection_form.form_config import GreenplumConnectionFormFactory from dl_connector_greenplum.api.connection_info import GreenplumConnectionInfoProvider diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/api/i18n/localizer.py b/lib/dl_connector_greenplum/dl_connector_greenplum/api/i18n/localizer.py index acd592125..a3ad383c0 100644 --- 
a/lib/dl_connector_greenplum/dl_connector_greenplum/api/i18n/localizer.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_greenplum as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_greenplum as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/core/connector.py b/lib/dl_connector_greenplum/dl_connector_greenplum/core/connector.py index 62dd1bd97..4e4575a62 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/core/connector.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/core/connector.py @@ -1,3 +1,12 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, +) +from dl_core.connectors.sql_base.connector import ( + SQLSubselectCoreSourceDefinitionBase, + SQLTableCoreSourceDefinitionBase, +) + from dl_connector_greenplum.core.constants import ( BACKEND_TYPE_GREENPLUM, CONNECTION_TYPE_GREENPLUM, @@ -19,14 +28,6 @@ ) from dl_connector_postgresql.core.postgresql_base.sa_types import SQLALCHEMY_POSTGRES_TYPES from dl_connector_postgresql.core.postgresql_base.type_transformer import PostgreSQLTypeTransformer -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, -) -from dl_core.connectors.sql_base.connector import ( - SQLSubselectCoreSourceDefinitionBase, - SQLTableCoreSourceDefinitionBase, -) class GreenplumCoreConnectionDefinition(CoreConnectionDefinition): diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/core/constants.py b/lib/dl_connector_greenplum/dl_connector_greenplum/core/constants.py index 47b951451..31c4cfb33 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/core/constants.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/core/constants.py @@ -1,11 +1,11 @@ from dl_constants.enums 
import ( ConnectionType, - CreateDSFrom, + DataSourceType, SourceBackendType, ) BACKEND_TYPE_GREENPLUM = SourceBackendType.declare("GREENPLUM") CONNECTION_TYPE_GREENPLUM = ConnectionType.declare("greenplum") -SOURCE_TYPE_GP_TABLE = CreateDSFrom.declare("GP_TABLE") -SOURCE_TYPE_GP_SUBSELECT = CreateDSFrom.declare("GP_SUBSELECT") +SOURCE_TYPE_GP_TABLE = DataSourceType.declare("GP_TABLE") +SOURCE_TYPE_GP_SUBSELECT = DataSourceType.declare("GP_SUBSELECT") diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source.py b/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source.py index 05e2b63fb..8087c83f6 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source.py @@ -1,17 +1,18 @@ from __future__ import annotations +from dl_constants.enums import DataSourceType +from dl_core.data_source.sql import ( + BaseSQLDataSource, + StandardSchemaSQLDataSource, + SubselectDataSource, +) + from dl_connector_greenplum.core.constants import ( CONNECTION_TYPE_GREENPLUM, SOURCE_TYPE_GP_SUBSELECT, SOURCE_TYPE_GP_TABLE, ) from dl_connector_postgresql.core.postgresql_base.query_compiler import PostgreSQLQueryCompiler -from dl_constants.enums import CreateDSFrom -from dl_core.data_source.sql import ( - BaseSQLDataSource, - StandardSchemaSQLDataSource, - SubselectDataSource, -) class GreenplumDataSourceMixin(BaseSQLDataSource): @@ -20,7 +21,7 @@ class GreenplumDataSourceMixin(BaseSQLDataSource): conn_type = CONNECTION_TYPE_GREENPLUM @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in (SOURCE_TYPE_GP_TABLE, SOURCE_TYPE_GP_SUBSELECT) diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source_migration.py b/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source_migration.py index 05d03bf61..d85ef86bd 100644 
--- a/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source_migration.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/core/data_source_migration.py @@ -1,8 +1,9 @@ +from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator + from dl_connector_greenplum.core.constants import ( SOURCE_TYPE_GP_SUBSELECT, SOURCE_TYPE_GP_TABLE, ) -from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator class GreenPlumDataSourceMigrator(DefaultSQLDataSourceMigrator): diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum/core/us_connection.py b/lib/dl_connector_greenplum/dl_connector_greenplum/core/us_connection.py index c1b28d194..04dfc38d7 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum/core/us_connection.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum/core/us_connection.py @@ -2,14 +2,15 @@ from typing import ClassVar +from dl_core.us_connection_base import DataSourceTemplate +from dl_i18n.localizer_base import Localizer + from dl_connector_greenplum.core.constants import ( SOURCE_TYPE_GP_SUBSELECT, SOURCE_TYPE_GP_TABLE, ) from dl_connector_greenplum.core.dto import GreenplumConnDTO from dl_connector_postgresql.core.postgresql_base.us_connection import ConnectionPostgreSQLBase -from dl_core.us_connection_base import DataSourceTemplate -from dl_i18n.localizer_base import Localizer class GreenplumConnection(ConnectionPostgreSQLBase): diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/base.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/base.py index 11229731a..c6b314831 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/base.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/base.py @@ -4,6 +4,10 @@ from dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import StandardizedDataApiTestBase from dl_api_lib_testing.dataset_base 
import DatasetTestBase +from dl_constants.enums import RawSQLLevel +from dl_core_testing.database import DbTable +from dl_core_testing.testcases.service_base import ServiceFixtureTextClass + from dl_connector_greenplum.core.constants import ( CONNECTION_TYPE_GREENPLUM, SOURCE_TYPE_GP_TABLE, @@ -14,9 +18,6 @@ CORE_TEST_CONFIG, DB_CORE_URL, ) -from dl_constants.enums import RawSQLLevel -from dl_core_testing.database import DbTable -from dl_core_testing.testcases.service_base import ServiceFixtureTextClass class GreenplumConnectionTestBase(ConnectionTestBase, ServiceFixtureTextClass): diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_connection.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_connection.py index eb2bb5ebe..4649178ab 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_connection.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_connection.py @@ -1,5 +1,6 @@ from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + from dl_connector_greenplum_tests.db.api.base import GreenplumConnectionTestBase diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dashsql.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dashsql.py index 620ecdd4e..a9df57090 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dashsql.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dashsql.py @@ -2,6 +2,7 @@ import pytest from dl_api_lib_testing.connector.dashsql_suite import DefaultDashSQLTestSuite + from dl_connector_greenplum_tests.db.api.base import GreenplumDashSQLConnectionTest from dl_connector_greenplum_tests.db.config import DASHSQL_QUERY diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_data.py 
b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_data.py index 8452c7d5b..502f4ee6a 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_data.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_data.py @@ -5,16 +5,13 @@ DefaultConnectorDataRangeTestSuite, DefaultConnectorDataResultTestSuite, ) -from dl_connector_greenplum_tests.db.api.base import GreenplumDataApiTestBase from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_greenplum_tests.db.api.base import GreenplumDataApiTestBase + class TestGreenplumDataResult(GreenplumDataApiTestBase, DefaultConnectorDataResultTestSuite): - test_params = RegulatedTestParams( - mark_tests_failed={ - DefaultConnectorDataResultTestSuite.test_array_not_contains_filter: "BI-4951", # TODO: FIXME - } - ) + pass class TestGreenplumDataGroupBy(GreenplumDataApiTestBase, DefaultConnectorDataGroupByFormulaTestSuite): diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dataset.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dataset.py index 00aa5a071..624886378 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dataset.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/api/test_dataset.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + from dl_connector_greenplum_tests.db.api.base import GreenplumDatasetTestBase diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/conftest.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/conftest.py index 5068bb1cd..d30b6a439 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/conftest.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/db/conftest.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.initialization import initialize_api_lib_test + from dl_connector_greenplum_tests.db.config import 
API_TEST_CONFIG diff --git a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/unit/test_connection_form.py b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/unit/test_connection_form.py index b2a464c23..6ed7a5db4 100644 --- a/lib/dl_connector_greenplum/dl_connector_greenplum_tests/unit/test_connection_form.py +++ b/lib/dl_connector_greenplum/dl_connector_greenplum_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from dl_connector_greenplum.api.connection_form.form_config import GreenplumConnectionFormFactory from dl_connector_greenplum.api.i18n.localizer import CONFIGS as BI_CONNECTOR_GREENPLUM_CONFIGS diff --git a/lib/dl_connector_metrica/LICENSE b/lib/dl_connector_metrica/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_connector_metrica/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_connector_metrica/README.md b/lib/dl_connector_metrica/README.md new file mode 100644 index 000000000..d906cac4b --- /dev/null +++ b/lib/dl_connector_metrica/README.md @@ -0,0 +1 @@ +# dl_connector_metrica diff --git a/lib/dl_connector_metrica/dl_connector_metrica/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/api_handler.py b/lib/dl_connector_metrica/dl_connector_metrica/api/api_handler.py new file mode 100644 index 000000000..74124f7a2 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/api_handler.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from marshmallow import fields as ma_fields + +from dl_api_lib import exc +from dl_api_lib.api_decorators import schematic_request +from dl_api_lib.app.control_api.resources.base import BIResource +from dl_api_lib.app.control_api.resources.connections import ns +from dl_api_lib.enums import USPermissionKind +from dl_api_lib.utils import need_permission_on_entry +from dl_app_tools.profiling_base import generic_profiler +from dl_core.exc import ConnectionConfigurationError +from 
dl_core.us_connection_base import ConnectionBase +from dl_model_tools.schema.base import BaseSchema +from dl_sqlalchemy_metrica_api.exceptions import MetrikaApiAccessDeniedException + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) +from dl_connector_metrica.core.us_connection import MetrikaBaseMixin + + +class MetricaConnectionAvailableCountersSchema(BaseSchema): + class CounterInfo(BaseSchema): + id = ma_fields.String() + name = ma_fields.String() + + counters = ma_fields.Nested(CounterInfo, many=True) + + +@ns.route("//metrica_available_counters") +class MetricaConnectionAvailableCounters(BIResource): + @schematic_request(ns=ns, responses={200: ("Success", MetricaConnectionAvailableCountersSchema())}) + @generic_profiler("metrica-available-counters") + def get(self, connection_id: str) -> dict: + conn = self.get_us_manager().get_by_id(connection_id, expected_type=ConnectionBase) + need_permission_on_entry(conn, USPermissionKind.edit) + if conn.conn_type not in (CONNECTION_TYPE_METRICA_API, CONNECTION_TYPE_APPMETRICA_API): + raise exc.UnsupportedForEntityType("Unsupported connection type") + assert isinstance(conn, MetrikaBaseMixin) + try: + counters = conn.get_available_counters() + except MetrikaApiAccessDeniedException as ex: + raise ConnectionConfigurationError( + "Unable to load available counters. Possibly caused by invalid OAuth token." 
+ ) from ex + else: + return dict(counters=counters) diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/api_schema/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/api/api_schema/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/api_schema/connection.py b/lib/dl_connector_metrica/dl_connector_metrica/api/api_schema/connection.py new file mode 100644 index 000000000..14ec0604d --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/api_schema/connection.py @@ -0,0 +1,59 @@ +from typing import ( + Any, + Optional, +) + +from marshmallow import ValidationError +from marshmallow import fields as ma_fields +from marshmallow import validates_schema + +from dl_api_connector.api_schema.connection_base import ( + ConnectionMetaMixin, + ConnectionSchema, +) +from dl_api_connector.api_schema.connection_base_fields import secret_string_field +from dl_api_connector.api_schema.extras import FieldExtra + +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, + parse_metrica_ids, +) + + +class ConnectionMetrikaLikeAPI(ConnectionMetaMixin, ConnectionSchema): + token = secret_string_field(attribute="data.token") + counter_id = ma_fields.String(attribute="data.counter_id", required=True, bi_extra=FieldExtra(editable=True)) + accuracy = ma_fields.Float( + attribute="data.accuracy", + allow_none=True, + dump_default=None, + load_default=None, + bi_extra=FieldExtra(editable=True), + ) + + @validates_schema + def validate_counter_id(self, data: Optional[dict[str, Any]], *args: Any, **kwargs: Any) -> None: + if data is None or "data" not in data or "counter_id" not in data["data"]: + return + + ids_orig = data["data"]["counter_id"] + ids = list(filter(lambda t: t, parse_metrica_ids(ids_orig))) + if ids: + for id_str in ids: + try: + id_value = int(id_str) + except Exception as ex: + raise ValidationError(f"Unable to parse id: 
{id_str!r}") from ex + if id_value <= 0: + raise ValidationError(f"Value should be positive: {id_str!r}") + else: + raise ValidationError(f"Unable to parse id: {ids_orig!r}") + + +class ConnectionMetrikaAPISchema(ConnectionMetrikaLikeAPI): + TARGET_CLS = MetrikaApiConnection + + +class ConnectionAppMetricaAPISchema(ConnectionMetrikaLikeAPI): + TARGET_CLS = AppMetricaApiConnection diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/form_config.py b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/form_config.py new file mode 100644 index 000000000..2f5f8cc9a --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/form_config.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +import abc +from enum import unique +from typing import ( + ClassVar, + Optional, +) + +from dl_api_commons.base_models import TenantDef +from dl_api_connector.form_config.models.api_schema import ( + FormActionApiSchema, + FormApiSchema, + FormFieldApiSchema, +) +from dl_api_connector.form_config.models.base import ( + ConnectionForm, + ConnectionFormFactory, + ConnectionFormMode, +) +from dl_api_connector.form_config.models.common import ( + CommonFieldName, + FormFieldName, + OAuthApplication, +) +import dl_api_connector.form_config.models.rows as C +from dl_api_connector.form_config.models.rows.base import FormRow +from dl_api_connector.form_config.models.shortcuts.rows import RowConstructor +from dl_configs.connectors_settings import ConnectorSettingsBase + +from dl_connector_metrica.api.connection_form.rows import ( + AccuracyRow, + AppMetricaCounterRowItem, + CounterRow, + MetricaCounterRowItem, +) +from dl_connector_metrica.api.connection_info import ( + AppMetricaConnectionInfoProvider, + 
MetricaConnectionInfoProvider, +) +from dl_connector_metrica.api.i18n.localizer import Translatable +from dl_connector_metrica.core.settings import ( + AppmetricaConnectorSettings, + MetricaConnectorSettings, +) + + +class MetricaOAuthApplication(OAuthApplication): + metrika_api = "metrika_api" + appmetrica_api = "appmetrica_api" + + +@unique +class MetricaFieldName(FormFieldName): + counter_id = "counter_id" + accuracy = "accuracy" + + +class MetricaLikeBaseFormFactory(ConnectionFormFactory, metaclass=abc.ABCMeta): + template_name: ClassVar[str] + oauth_application: ClassVar[OAuthApplication] + + @abc.abstractmethod + def _title(self) -> str: + raise NotImplementedError + + @abc.abstractmethod + def _counter_row(self, manual_input: bool) -> CounterRow: + raise NotImplementedError + + @abc.abstractmethod + def _allow_manual_counter_input(self, connector_settings: ConnectorSettingsBase) -> bool: + raise NotImplementedError + + @abc.abstractmethod + def _allow_auto_dash_creation(self, connector_settings: ConnectorSettingsBase) -> bool: + raise NotImplementedError + + def get_form_config( + self, + connector_settings: Optional[ConnectorSettingsBase], + tenant: Optional[TenantDef], + ) -> ConnectionForm: + assert connector_settings is not None + rc = RowConstructor(localizer=self._localizer) + + rows: list[FormRow] = [ + C.OAuthTokenRow( + name=CommonFieldName.token, + fake_value="******" if self.mode == ConnectionFormMode.edit else None, + application=self.oauth_application, + label_text=self._localizer.translate(Translatable("field_oauth-token")), + button_text=self._localizer.translate(Translatable("button_get-token")), + ), + self._counter_row(manual_input=self._allow_manual_counter_input(connector_settings)), + AccuracyRow(name=MetricaFieldName.accuracy), + ] + + edit_api_schema = ( + FormActionApiSchema( + items=[ + FormFieldApiSchema(name=MetricaFieldName.counter_id, required=True), + FormFieldApiSchema(name=CommonFieldName.token), + 
FormFieldApiSchema(name=MetricaFieldName.accuracy, nullable=True), + ] + ) + if self.mode == ConnectionFormMode.edit + else None + ) + + create_api_schema = ( + FormActionApiSchema( + items=[ + FormFieldApiSchema(name=MetricaFieldName.counter_id, required=True), + FormFieldApiSchema(name=CommonFieldName.token, required=True), + FormFieldApiSchema(name=MetricaFieldName.accuracy, nullable=True), + *self._get_top_level_create_api_schema_items(), + ] + ) + if self.mode == ConnectionFormMode.create + else None + ) + + if self.mode == ConnectionFormMode.create and self._allow_auto_dash_creation(connector_settings): + rows.append(rc.auto_create_dash_row()) + + return ConnectionForm( + title=self._title(), + template_name=self.template_name, + rows=rows, + api_schema=FormApiSchema(create=create_api_schema, edit=edit_api_schema), + ) + + +class MetricaAPIConnectionFormFactory(MetricaLikeBaseFormFactory): + template_name = "metrica_api" + oauth_application = MetricaOAuthApplication.metrika_api + + def _title(self) -> str: + return MetricaConnectionInfoProvider.get_title(self._localizer) + + def _counter_row(self, manual_input: bool) -> MetricaCounterRowItem: + return MetricaCounterRowItem( + name=MetricaFieldName.counter_id, + allow_manual_input=manual_input, + ) + + def _allow_manual_counter_input(self, connector_settings: ConnectorSettingsBase) -> bool: + assert isinstance(connector_settings, MetricaConnectorSettings) + return connector_settings.COUNTER_ALLOW_MANUAL_INPUT + + def _allow_auto_dash_creation(self, connector_settings: ConnectorSettingsBase) -> bool: + assert isinstance(connector_settings, MetricaConnectorSettings) + return connector_settings.ALLOW_AUTO_DASH_CREATION + + +class AppMetricaAPIConnectionFormFactory(MetricaLikeBaseFormFactory): + template_name = "appmetrica_api" + oauth_application = MetricaOAuthApplication.appmetrica_api + + def _title(self) -> str: + return AppMetricaConnectionInfoProvider.get_title(self._localizer) + + def _counter_row(self, 
manual_input: bool) -> AppMetricaCounterRowItem: + return AppMetricaCounterRowItem( + name=MetricaFieldName.counter_id, + allow_manual_input=manual_input, + ) + + def _allow_manual_counter_input(self, connector_settings: ConnectorSettingsBase) -> bool: + assert isinstance(connector_settings, AppmetricaConnectorSettings) + return connector_settings.COUNTER_ALLOW_MANUAL_INPUT + + def _allow_auto_dash_creation(self, connector_settings: ConnectorSettingsBase) -> bool: + assert isinstance(connector_settings, AppmetricaConnectorSettings) + return connector_settings.ALLOW_AUTO_DASH_CREATION diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/rows.py b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/rows.py new file mode 100644 index 000000000..7f7a3eee7 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_form/rows.py @@ -0,0 +1,34 @@ +from typing import Optional + +import attr + +from dl_api_connector.form_config.models.common import remap_skip_if_null +from dl_api_connector.form_config.models.rows.base import FormFieldMixin +from dl_api_connector.form_config.models.rows.prepared.base import ( + DisabledMixin, + PreparedRow, +) + + +@attr.s(kw_only=True, frozen=True) +class CounterRow(PreparedRow, FormFieldMixin, DisabledMixin): + label_text: Optional[str] = attr.ib(default=None, metadata=remap_skip_if_null("labelText")) + allow_manual_input: Optional[bool] = attr.ib(default=None, metadata=remap_skip_if_null("allowManualInput")) + + class Inner(PreparedRow.Inner): + counter_input_method = "counter_input_method" + + +@attr.s(kw_only=True, frozen=True) +class MetricaCounterRowItem(CounterRow): + type = "metrica_counter" + + +@attr.s(kw_only=True, frozen=True) +class AppMetricaCounterRowItem(CounterRow): + type = "appmetrica_counter" + + +@attr.s(kw_only=True, frozen=True) +class AccuracyRow(PreparedRow, FormFieldMixin, DisabledMixin): + type = "metrica_accuracy" diff --git 
a/lib/dl_connector_metrica/dl_connector_metrica/api/connection_info.py b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_info.py new file mode 100644 index 000000000..d4adbb9b0 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/connection_info.py @@ -0,0 +1,13 @@ +from dl_api_connector.connection_info import ConnectionInfoProvider + +from dl_connector_metrica.api.i18n.localizer import Translatable + + +class MetricaConnectionInfoProvider(ConnectionInfoProvider): + title_translatable = Translatable("label_connector-metrica") + alias = "metrica" + + +class AppMetricaConnectionInfoProvider(ConnectionInfoProvider): + title_translatable = Translatable("label_connector-appmetrica") + alias = "appmetrica" diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/connector.py b/lib/dl_connector_metrica/dl_connector_metrica/api/connector.py new file mode 100644 index 000000000..db86331c4 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/connector.py @@ -0,0 +1,79 @@ +from dl_api_connector.api_schema.source_base import ( + SQLDataSourceSchema, + SQLDataSourceTemplateSchema, +) +from dl_api_connector.connector import ( + ApiConnectionDefinition, + ApiConnector, + ApiSourceDefinition, +) + +from dl_connector_metrica.api.api_schema.connection import ( + ConnectionAppMetricaAPISchema, + ConnectionMetrikaAPISchema, +) +from dl_connector_metrica.api.connection_form.form_config import ( + AppMetricaAPIConnectionFormFactory, + MetricaAPIConnectionFormFactory, +) +from dl_connector_metrica.api.connection_info import ( + AppMetricaConnectionInfoProvider, + MetricaConnectionInfoProvider, +) +from dl_connector_metrica.api.filter_compiler import MetricaApiFilterFormulaCompiler +from dl_connector_metrica.api.i18n.localizer import CONFIGS +from dl_connector_metrica.core.connector import ( + AppMetricaApiCoreConnectionDefinition, + AppMetricaApiCoreConnector, + AppMetricaApiCoreSourceDefinition, + MetricaApiCoreConnectionDefinition, + 
MetricaApiCoreConnector, + MetricaApiCoreSourceDefinition, +) +from dl_connector_metrica.formula.constants import DIALECT_NAME_METRICAAPI + + +class MetricaApiFilteredApiTableSourceDefinition(ApiSourceDefinition): + core_source_def_cls = MetricaApiCoreSourceDefinition + api_schema_cls = SQLDataSourceSchema + template_api_schema_cls = SQLDataSourceTemplateSchema + + +class MetricaApiApiConnectionDefinition(ApiConnectionDefinition): + core_conn_def_cls = MetricaApiCoreConnectionDefinition + api_generic_schema_cls = ConnectionMetrikaAPISchema + alias = "metrica" + info_provider_cls = MetricaConnectionInfoProvider + form_factory_cls = MetricaAPIConnectionFormFactory + + +class MetricaApiApiConnector(ApiConnector): + core_connector_cls = MetricaApiCoreConnector + connection_definitions = (MetricaApiApiConnectionDefinition,) + source_definitions = (MetricaApiFilteredApiTableSourceDefinition,) + filter_formula_compiler_cls = MetricaApiFilterFormulaCompiler + formula_dialect_name = DIALECT_NAME_METRICAAPI + translation_configs = frozenset(CONFIGS) + + +class AppMetricaApiFilteredApiTableSourceDefinition(ApiSourceDefinition): + core_source_def_cls = AppMetricaApiCoreSourceDefinition + api_schema_cls = SQLDataSourceSchema + template_api_schema_cls = SQLDataSourceTemplateSchema + + +class AppMetricaApiApiConnectionDefinition(ApiConnectionDefinition): + core_conn_def_cls = AppMetricaApiCoreConnectionDefinition + api_generic_schema_cls = ConnectionAppMetricaAPISchema + alias = "appmetrica" + info_provider_cls = AppMetricaConnectionInfoProvider + form_factory_cls = AppMetricaAPIConnectionFormFactory + + +class AppMetricaApiApiConnector(ApiConnector): + core_connector_cls = AppMetricaApiCoreConnector + connection_definitions = (AppMetricaApiApiConnectionDefinition,) + source_definitions = (AppMetricaApiFilteredApiTableSourceDefinition,) + filter_formula_compiler_cls = MetricaApiFilterFormulaCompiler + formula_dialect_name = DIALECT_NAME_METRICAAPI + translation_configs = 
frozenset(CONFIGS) diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/filter_compiler.py b/lib/dl_connector_metrica/dl_connector_metrica/api/filter_compiler.py new file mode 100644 index 000000000..0cea6ce29 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/filter_compiler.py @@ -0,0 +1,11 @@ +from dl_query_processing.compilation.filter_compiler import ( + FilterParams, + MainFilterFormulaCompiler, +) + + +class MetricaApiFilterFormulaCompiler(MainFilterFormulaCompiler): + """Does not support datetime casting used for most other sources""" + + def _mangle_date_filter(self, filter_params: FilterParams) -> FilterParams: + return filter_params diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/i18n/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/api/i18n/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/api/i18n/localizer.py b/lib/dl_connector_metrica/dl_connector_metrica/api/i18n/localizer.py new file mode 100644 index 000000000..eb61f49e5 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/api/i18n/localizer.py @@ -0,0 +1,28 @@ +import os + +import attr + +from dl_i18n.localizer_base import Translatable as BaseTranslatable +from dl_i18n.localizer_base import TranslationConfig + +import dl_connector_metrica as package + + +DOMAIN = f"{package.__name__}" +CONFIGS = [ + TranslationConfig( + path=os.path.relpath(os.path.join(os.path.dirname(__file__), "../../locales")), + domain=DOMAIN, + locale="en", + ), + TranslationConfig( + path=os.path.relpath(os.path.join(os.path.dirname(__file__), "../../locales")), + domain=DOMAIN, + locale="ru", + ), +] + + +@attr.s +class Translatable(BaseTranslatable): + domain: str = attr.ib(default=DOMAIN) diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/lib/dl_connector_metrica/dl_connector_metrica/core/adapters_metrica_x.py b/lib/dl_connector_metrica/dl_connector_metrica/core/adapters_metrica_x.py new file mode 100644 index 000000000..b0e6fd80b --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/adapters_metrica_x.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from typing import ( + Any, + Dict, + Optional, + Tuple, + Type, + TypeVar, +) +from urllib.parse import ( + quote_plus, + urlencode, +) + +import sqlalchemy as sa +from sqlalchemy.engine import Engine + +from dl_core import exc +from dl_core.connection_executors.adapters.adapters_base_sa import BaseSAAdapter +from dl_core.connection_models import DBIdent +from dl_core.connectors.base.error_transformer import DBExcKWArgs +from dl_sqlalchemy_metrica_api import exceptions as sqla_metrika_exc # type: ignore + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) +from dl_connector_metrica.core.exc import MetricaAPIDatabaseQueryError +from dl_connector_metrica.core.target_dto import ( + AppMetricaAPIConnTargetDTO, + MetricaAPIConnTargetDTO, +) + + +_M_CONN_T_DTO_TV = TypeVar("_M_CONN_T_DTO_TV", bound=MetricaAPIConnTargetDTO) + + +class MetricaAPIDefaultAdapter(BaseSAAdapter[_M_CONN_T_DTO_TV]): + conn_type = CONNECTION_TYPE_METRICA_API + + def _get_db_engine(self, db_name: str, disable_streaming: bool = False) -> Engine: + if disable_streaming: + raise Exception("`disable_streaming` is not applicable here") + dsn = "{dialect}://:{token}@/{db_name}".format( + dialect=self.get_dialect_str(), + token=quote_plus(self._target_dto.token), + db_name=db_name, + ) + dsn_params: Dict[str, Any] = {} + if self._target_dto.accuracy is not None: + dsn_params.update(accuracy=self._target_dto.accuracy) + + if dsn_params: + dsn += "?" 
+ urlencode(dsn_params) + + return sa.create_engine(dsn).execution_options(compiled_cache=None) + + @classmethod + def make_exc( # TODO: Move to ErrorTransformer + cls, wrapper_exc: Exception, orig_exc: Optional[Exception], debug_compiled_query: Optional[str] + ) -> Tuple[Type[exc.DatabaseQueryError], DBExcKWArgs]: + exc_cls, kw = super().make_exc(wrapper_exc, orig_exc, debug_compiled_query) + + if isinstance( + orig_exc, + ( + sqla_metrika_exc.MetrikaHttpApiException, + sqla_metrika_exc.NotSupportedError, + sqla_metrika_exc.ProgrammingError, + ), + ): + exc_cls = MetricaAPIDatabaseQueryError + + return exc_cls, kw + + def get_default_db_name(self) -> Optional[str]: + return None + + def _get_db_version(self, db_ident: DBIdent) -> Optional[str]: + return "" + + +class AppMetricaAPIDefaultAdapter(MetricaAPIDefaultAdapter[AppMetricaAPIConnTargetDTO]): # type: ignore + conn_type = CONNECTION_TYPE_APPMETRICA_API diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/connection_executors.py b/lib/dl_connector_metrica/dl_connector_metrica/core/connection_executors.py new file mode 100644 index 000000000..1e6e03bbb --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/connection_executors.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +import attr + +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + +from dl_connector_metrica.core.adapters_metrica_x import ( + AppMetricaAPIDefaultAdapter, + MetricaAPIDefaultAdapter, +) +from dl_connector_metrica.core.dto import ( + AppMetricaAPIConnDTO, + MetricaAPIConnDTO, +) +from dl_connector_metrica.core.target_dto import ( + AppMetricaAPIConnTargetDTO, + MetricaAPIConnTargetDTO, +) + + +@attr.s(cmp=False, hash=False) +class MetricaAPIConnExecutor(DefaultSqlAlchemyConnExecutor[MetricaAPIDefaultAdapter]): + TARGET_ADAPTER_CLS = MetricaAPIDefaultAdapter + + _conn_dto: MetricaAPIConnDTO = attr.ib() + + async def _make_target_conn_dto_pool(self) -> 
list[MetricaAPIConnTargetDTO]: + return [ + MetricaAPIConnTargetDTO( + conn_id=self._conn_dto.conn_id, + pass_db_messages_to_user=self._conn_options.pass_db_messages_to_user, + pass_db_query_to_user=self._conn_options.pass_db_query_to_user, + accuracy=self._conn_dto.accuracy, + token=self._conn_dto.token, + ) + ] + + +@attr.s(cmp=False, hash=False) +class AppMetricaAPIConnExecutor(DefaultSqlAlchemyConnExecutor[AppMetricaAPIDefaultAdapter]): + TARGET_ADAPTER_CLS = AppMetricaAPIDefaultAdapter + + _conn_dto: AppMetricaAPIConnDTO = attr.ib() + + async def _make_target_conn_dto_pool(self) -> list[AppMetricaAPIConnTargetDTO]: + return [ + AppMetricaAPIConnTargetDTO( + conn_id=self._conn_dto.conn_id, + pass_db_messages_to_user=self._conn_options.pass_db_messages_to_user, + pass_db_query_to_user=self._conn_options.pass_db_query_to_user, + accuracy=self._conn_dto.accuracy, + token=self._conn_dto.token, + ) + ] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/connector.py b/lib/dl_connector_metrica/dl_connector_metrica/core/connector.py new file mode 100644 index 000000000..bf6c86233 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/connector.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +from dl_core.connections_security.base import ( + ConnSecuritySettings, + NonUserInputConnectionSafetyChecker, +) +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) +from dl_core.data_source_spec.sql import StandardSQLDataSourceSpec +from dl_core.us_manager.storage_schemas.data_source_spec_base import SQLDataSourceSpecStorageSchema + +from dl_connector_metrica.core.adapters_metrica_x import ( + AppMetricaAPIDefaultAdapter, + MetricaAPIDefaultAdapter, +) +from dl_connector_metrica.core.connection_executors import ( + AppMetricaAPIConnExecutor, + MetricaAPIConnExecutor, +) +from dl_connector_metrica.core.constants import ( + BACKEND_TYPE_APPMETRICA_API, + 
BACKEND_TYPE_METRICA_API, + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, + SOURCE_TYPE_APPMETRICA_API, + SOURCE_TYPE_METRICA_API, +) +from dl_connector_metrica.core.data_source import ( + AppMetrikaApiDataSource, + MetrikaApiDataSource, +) +from dl_connector_metrica.core.data_source_migration import ( + AppMetricaApiDataSourceMigrator, + MetricaApiDataSourceMigrator, +) +from dl_connector_metrica.core.dto import ( + AppMetricaAPIConnDTO, + MetricaAPIConnDTO, +) +from dl_connector_metrica.core.lifecycle import MetricaConnectionLifecycleManager +from dl_connector_metrica.core.settings import ( + AppMetricaSettingDefinition, + MetricaSettingDefinition, +) +from dl_connector_metrica.core.storage_schemas.connection import ( + ConnectionAppMetricaApiDataStorageSchema, + ConnectionMetrikaApiDataStorageSchema, +) +from dl_connector_metrica.core.type_transformer import MetrikaApiTypeTransformer +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, +) + + +class MetricaApiCoreConnectionDefinition(CoreConnectionDefinition): + conn_type = CONNECTION_TYPE_METRICA_API + connection_cls = MetrikaApiConnection + us_storage_schema_cls = ConnectionMetrikaApiDataStorageSchema + type_transformer_cls = MetrikaApiTypeTransformer + sync_conn_executor_cls = MetricaAPIConnExecutor + async_conn_executor_cls = MetricaAPIConnExecutor + lifecycle_manager_cls = MetricaConnectionLifecycleManager + dialect_string = "metrika_api" + settings_definition = MetricaSettingDefinition + data_source_migrator_cls = MetricaApiDataSourceMigrator + + +class MetricaApiCoreSourceDefinition(CoreSourceDefinition): + source_type = SOURCE_TYPE_METRICA_API + source_cls = MetrikaApiDataSource + source_spec_cls = StandardSQLDataSourceSpec + us_storage_schema_cls = SQLDataSourceSpecStorageSchema + + +class MetricaApiCoreConnector(CoreConnector): + backend_type = BACKEND_TYPE_METRICA_API + connection_definitions = (MetricaApiCoreConnectionDefinition,) 
+ source_definitions = (MetricaApiCoreSourceDefinition,) + rqe_adapter_classes = frozenset({MetricaAPIDefaultAdapter}) + conn_security = frozenset( + { + ConnSecuritySettings(NonUserInputConnectionSafetyChecker, frozenset({MetricaAPIConnDTO})), + } + ) + + +class AppMetricaApiCoreConnectionDefinition(CoreConnectionDefinition): + conn_type = CONNECTION_TYPE_APPMETRICA_API + connection_cls = AppMetricaApiConnection + us_storage_schema_cls = ConnectionAppMetricaApiDataStorageSchema + type_transformer_cls = MetrikaApiTypeTransformer + sync_conn_executor_cls = AppMetricaAPIConnExecutor + async_conn_executor_cls = AppMetricaAPIConnExecutor + lifecycle_manager_cls = MetricaConnectionLifecycleManager + dialect_string = "appmetrica_api" + settings_definition = AppMetricaSettingDefinition + data_source_migrator_cls = AppMetricaApiDataSourceMigrator + + +class AppMetricaApiCoreSourceDefinition(CoreSourceDefinition): + source_type = SOURCE_TYPE_APPMETRICA_API + source_cls = AppMetrikaApiDataSource + source_spec_cls = StandardSQLDataSourceSpec + us_storage_schema_cls = SQLDataSourceSpecStorageSchema + + +class AppMetricaApiCoreConnector(CoreConnector): + backend_type = BACKEND_TYPE_APPMETRICA_API + connection_definitions = (AppMetricaApiCoreConnectionDefinition,) + source_definitions = (AppMetricaApiCoreSourceDefinition,) + rqe_adapter_classes = frozenset({AppMetricaAPIDefaultAdapter}) + conn_security = frozenset( + { + ConnSecuritySettings(NonUserInputConnectionSafetyChecker, frozenset({AppMetricaAPIConnDTO})), + } + ) diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/constants.py b/lib/dl_connector_metrica/dl_connector_metrica/core/constants.py new file mode 100644 index 000000000..588e7b10d --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/constants.py @@ -0,0 +1,14 @@ +from dl_constants.enums import ( + ConnectionType, + DataSourceType, + SourceBackendType, +) + + +BACKEND_TYPE_METRICA_API = SourceBackendType.declare("METRICA_API") 
+CONNECTION_TYPE_METRICA_API = ConnectionType.declare("metrika_api") # Note the K in the value +SOURCE_TYPE_METRICA_API = DataSourceType.declare("METRIKA_API") # Note the K in the value + +BACKEND_TYPE_APPMETRICA_API = SourceBackendType.declare("APPMETRICA_API") +CONNECTION_TYPE_APPMETRICA_API = ConnectionType.declare("appmetrica_api") +SOURCE_TYPE_APPMETRICA_API = DataSourceType.declare("APPMETRICA_API") diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/data_source.py b/lib/dl_connector_metrica/dl_connector_metrica/core/data_source.py new file mode 100644 index 000000000..17221965b --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/data_source.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +import datetime +import logging +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Optional, + Tuple, +) + +from dl_constants.enums import UserDataType +from dl_core import exc +from dl_core.data_source.sql import PseudoSQLDataSource +from dl_core.db import ( + SchemaColumn, + SchemaInfo, +) + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) +from dl_connector_metrica.core.us_connection import MetrikaApiConnection + + +if TYPE_CHECKING: + from dl_core.connection_executors.async_base import AsyncConnExecutorBase + from dl_core.connection_executors.sync_base import SyncConnExecutorBase + + +LOGGER = logging.getLogger(__name__) + + +class MetrikaApiDataSource(PseudoSQLDataSource): + store_raw_schema: ClassVar[bool] = False + preview_enabled: ClassVar[bool] = False + supports_schema_update: ClassVar[bool] = False + + conn_type = CONNECTION_TYPE_METRICA_API + + def get_schema_info(self, conn_executor_factory: Callable[[], SyncConnExecutorBase]) -> SchemaInfo: + assert self.saved_raw_schema is not None + return SchemaInfo.from_schema(self.saved_raw_schema) + + def _check_existence(self, conn_executor_factory: Callable[[], SyncConnExecutorBase]) -> 
bool: + return True + + async def _check_existence_async(self, conn_executor_factory: Callable[[], AsyncConnExecutorBase]) -> bool: + return True + + @property + def saved_raw_schema(self) -> Optional[list[SchemaColumn]]: + assert self.conn_type is not None + db_name = self.db_name + assert db_name is not None + conn_cls = self.get_connection_cls() + assert issubclass(conn_cls, MetrikaApiConnection) + return [ + sch_column.clone(source_id=self.id) + for sch_column in conn_cls.get_raw_schema( + metrica_namespace=db_name, + actual_conn_type=self.conn_type, + ) + ] + + def get_expression_value_range(self, col_name: str) -> Tuple[Any, Any]: + """Date/datetime column value ranges are defined as ``(, )``""" + try: + assert self.saved_raw_schema is not None + column = next(col for col in self.saved_raw_schema if col.name == col_name) + except StopIteration: + raise exc.InvalidColumnError("Invalid field name") + + if column.user_type not in (UserDataType.date, UserDataType.datetime, UserDataType.genericdatetime): + raise exc.InvalidColumnError("Invalid field for value range") + + creation_date = self.connection.data.counter_creation_date + now = datetime.datetime.utcnow() + if column.user_type in (UserDataType.datetime, UserDataType.genericdatetime): + min_value = datetime.datetime(creation_date.year, creation_date.month, creation_date.day) + max_value = now + else: + min_value = creation_date + max_value = now.date() # type: ignore # TODO: fix + + return min_value, max_value + + +class AppMetrikaApiDataSource(MetrikaApiDataSource): + conn_type = CONNECTION_TYPE_APPMETRICA_API diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/data_source_migration.py b/lib/dl_connector_metrica/dl_connector_metrica/core/data_source_migration.py new file mode 100644 index 000000000..91228675b --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/data_source_migration.py @@ -0,0 +1,15 @@ +from dl_core.connectors.sql_base.data_source_migration import 
DefaultSQLDataSourceMigrator + +from dl_connector_metrica.core.constants import ( + SOURCE_TYPE_APPMETRICA_API, + SOURCE_TYPE_METRICA_API, +) + + +class MetricaApiDataSourceMigrator(DefaultSQLDataSourceMigrator): + table_source_type = SOURCE_TYPE_METRICA_API + + +class AppMetricaApiDataSourceMigrator(DefaultSQLDataSourceMigrator): + table_source_type = SOURCE_TYPE_APPMETRICA_API + with_db_name = True diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/dto.py b/lib/dl_connector_metrica/dl_connector_metrica/core/dto.py new file mode 100644 index 000000000..541f48808 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/dto.py @@ -0,0 +1,21 @@ +import attr + +from dl_core.connection_models.dto_defs import ConnDTO + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) + + +@attr.s(frozen=True) +class MetricaAPIConnDTO(ConnDTO): # noqa + conn_type = CONNECTION_TYPE_METRICA_API + + token: str = attr.ib(repr=False, kw_only=True) + accuracy: float = attr.ib(kw_only=True) + + +@attr.s(frozen=True) +class AppMetricaAPIConnDTO(MetricaAPIConnDTO): + conn_type = CONNECTION_TYPE_APPMETRICA_API diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/exc.py b/lib/dl_connector_metrica/dl_connector_metrica/core/exc.py new file mode 100644 index 000000000..048d5f5a4 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/exc.py @@ -0,0 +1,6 @@ +from dl_core.exc import DatabaseQueryError + + +class MetricaAPIDatabaseQueryError(DatabaseQueryError): + err_code = DatabaseQueryError.err_code + ["METRICA"] + default_message = "Metrica API error." 
diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/lifecycle.py b/lib/dl_connector_metrica/dl_connector_metrica/core/lifecycle.py new file mode 100644 index 000000000..cc6099772 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/lifecycle.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +import logging + +from dl_core import exc +from dl_core.connectors.base.lifecycle import ConnectionLifecycleManager +from dl_sqlalchemy_metrica_api.exceptions import MetrikaApiAccessDeniedException + +from dl_connector_metrica.core.us_connection import MetrikaBaseMixin + + +LOGGER = logging.getLogger(__name__) + + +class MetricaConnectionLifecycleManager(ConnectionLifecycleManager[MetrikaBaseMixin]): + ENTRY_CLS = MetrikaBaseMixin + + # TODO FIX: split into sync and async hooks + def pre_save_hook(self) -> None: + super().pre_save_hook() + + if self.entry.counter_id != self.entry._initial_counter_id or not self.entry.data.counter_creation_date: # type: ignore # TODO: fix + LOGGER.info( + "initial counter_id = %s, current counter_id = %s. " + "Retrieving current counter creation date from Metrika API", + self.entry._initial_counter_id, + self.entry.counter_id, # noqa + ) + try: + self.entry.data.counter_creation_date = self.entry.get_counter_creation_date() # type: ignore # TODO: fix + except MetrikaApiAccessDeniedException as ex: + raise exc.ConnectionConfigurationError("No access to counter info. 
Check your OAuth token.") from ex diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/settings.py b/lib/dl_connector_metrica/dl_connector_metrica/core/settings.py new file mode 100644 index 000000000..553698b36 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/settings.py @@ -0,0 +1,38 @@ +import attr + +from dl_configs.connectors_settings import ( + ConnectorsConfigType, + ConnectorSettingsBase, +) +from dl_configs.settings_loaders.meta_definition import s_attrib +from dl_core.connectors.settings.primitives import ConnectorSettingsDefinition + + +@attr.s(frozen=True) +class MetricaConnectorSettings(ConnectorSettingsBase): + COUNTER_ALLOW_MANUAL_INPUT: bool = s_attrib("COUNTER_ALLOW_MANUAL_INPUT", missing=False) # type: ignore + ALLOW_AUTO_DASH_CREATION: bool = s_attrib("ALLOW_AUTO_DASH_CREATION", missing=False) # type: ignore + + +@attr.s(frozen=True) +class AppmetricaConnectorSettings(ConnectorSettingsBase): + COUNTER_ALLOW_MANUAL_INPUT: bool = s_attrib("COUNTER_ALLOW_MANUAL_INPUT", missing=False) # type: ignore + ALLOW_AUTO_DASH_CREATION: bool = s_attrib("ALLOW_AUTO_DASH_CREATION", missing=False) # type: ignore + + +def metrica_settings_fallback(full_cfg: ConnectorsConfigType) -> dict[str, ConnectorSettingsBase]: + return dict(METRIKA_API=MetricaConnectorSettings()) + + +class MetricaSettingDefinition(ConnectorSettingsDefinition): + settings_class = MetricaConnectorSettings + fallback = metrica_settings_fallback + + +def appmetrica_settings_fallback(full_cfg: ConnectorsConfigType) -> dict[str, ConnectorSettingsBase]: + return dict(APPMETRICA_API=AppmetricaConnectorSettings()) + + +class AppMetricaSettingDefinition(ConnectorSettingsDefinition): + settings_class = AppmetricaConnectorSettings + fallback = appmetrica_settings_fallback diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/storage_schemas/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/core/storage_schemas/__init__.py new file mode 100644 index 
000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/storage_schemas/connection.py b/lib/dl_connector_metrica/dl_connector_metrica/core/storage_schemas/connection.py new file mode 100644 index 000000000..7caca0cc9 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/storage_schemas/connection.py @@ -0,0 +1,21 @@ +from marshmallow import fields as ma_fields + +from dl_core.us_manager.storage_schemas.connection import ConnectionBaseDataStorageSchema + +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, +) + + +class ConnectionMetrikaApiDataStorageSchema(ConnectionBaseDataStorageSchema[MetrikaApiConnection.DataModel]): + TARGET_CLS = MetrikaApiConnection.DataModel + + token = ma_fields.String(required=True, allow_none=False) + counter_id = ma_fields.String(required=True, allow_none=False) + counter_creation_date = ma_fields.Date(required=False, allow_none=True, load_default=None, dump_default=None) + accuracy = ma_fields.Float(required=False, allow_none=True, load_default=None, dump_default=None) + + +class ConnectionAppMetricaApiDataStorageSchema(ConnectionMetrikaApiDataStorageSchema): + TARGET_CLS = AppMetricaApiConnection.DataModel diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/target_dto.py b/lib/dl_connector_metrica/dl_connector_metrica/core/target_dto.py new file mode 100644 index 000000000..c1c4f47d5 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/target_dto.py @@ -0,0 +1,25 @@ +from typing import Optional + +import attr + +from dl_core.connection_executors.models.connection_target_dto_base import ConnTargetDTO +from dl_core.utils import secrepr + + +@attr.s(frozen=True) +class BaseMetricaAPIConnTargetDTO(ConnTargetDTO): + token: str = attr.ib(repr=secrepr) + accuracy: float = attr.ib() + + def get_effective_host(self) -> Optional[str]: + return None + + +@attr.s(frozen=True) +class 
MetricaAPIConnTargetDTO(BaseMetricaAPIConnTargetDTO): + pass + + +@attr.s(frozen=True) +class AppMetricaAPIConnTargetDTO(BaseMetricaAPIConnTargetDTO): + pass diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/testing/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/core/testing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/testing/connection.py b/lib/dl_connector_metrica/dl_connector_metrica/core/testing/connection.py new file mode 100644 index 000000000..ef53d25cb --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/testing/connection.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from typing import Any +import uuid + +from dl_core.us_manager.us_manager_sync import SyncUSManager + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, +) + + +def make_saved_metrika_api_connection( + sync_usm: SyncUSManager, + counter_id: str, + token: str, + **kwargs: Any, +) -> MetrikaApiConnection: + conn_name = "metrica api test_revision_id conn %s" % uuid.uuid4() + conn = MetrikaApiConnection.create_from_dict( + MetrikaApiConnection.DataModel( + token=token, + counter_id=counter_id, + name=conn_name, + ), + ds_key=conn_name, + type_=CONNECTION_TYPE_METRICA_API.name, + us_manager=sync_usm, + **kwargs, + ) + sync_usm.save(conn) + return conn + + +def make_saved_appmetrica_api_connection( + sync_usm: SyncUSManager, counter_id: str, token: str, **kwargs: Any +) -> AppMetricaApiConnection: + conn_name = "appmetrica api test_revision_id conn %s" % uuid.uuid4() + conn = AppMetricaApiConnection.create_from_dict( + AppMetricaApiConnection.DataModel( + token=token, + counter_id=counter_id, + name=conn_name, + ), + ds_key=conn_name, + type_=CONNECTION_TYPE_APPMETRICA_API.name, + 
us_manager=sync_usm, + **kwargs, + ) + sync_usm.save(conn) + return conn diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/type_transformer.py b/lib/dl_connector_metrica/dl_connector_metrica/core/type_transformer.py new file mode 100644 index 000000000..1f4dc9eee --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/core/type_transformer.py @@ -0,0 +1,27 @@ +from dl_constants.enums import UserDataType +from dl_core.db import TypeTransformer +from dl_core.db.conversion_base import make_native_type + +from dl_connector_metrica.core.constants import CONNECTION_TYPE_METRICA_API + + +class MetrikaApiTypeTransformer(TypeTransformer): + conn_type = CONNECTION_TYPE_METRICA_API + + native_to_user_map = { + make_native_type(CONNECTION_TYPE_METRICA_API, "string"): UserDataType.string, + make_native_type(CONNECTION_TYPE_METRICA_API, "integer"): UserDataType.integer, + make_native_type(CONNECTION_TYPE_METRICA_API, "float"): UserDataType.float, + make_native_type(CONNECTION_TYPE_METRICA_API, "date"): UserDataType.date, + make_native_type(CONNECTION_TYPE_METRICA_API, "datetime"): UserDataType.genericdatetime, + } + + user_to_native_map = { + UserDataType.string: make_native_type(CONNECTION_TYPE_METRICA_API, "string"), + UserDataType.integer: make_native_type(CONNECTION_TYPE_METRICA_API, "integer"), + UserDataType.float: make_native_type(CONNECTION_TYPE_METRICA_API, "float"), + UserDataType.date: make_native_type(CONNECTION_TYPE_METRICA_API, "date"), + UserDataType.datetime: make_native_type(CONNECTION_TYPE_METRICA_API, "datetime"), + UserDataType.genericdatetime: make_native_type(CONNECTION_TYPE_METRICA_API, "datetime"), + UserDataType.datetimetz: make_native_type(CONNECTION_TYPE_METRICA_API, "datetime"), + } diff --git a/lib/dl_connector_metrica/dl_connector_metrica/core/us_connection.py b/lib/dl_connector_metrica/dl_connector_metrica/core/us_connection.py new file mode 100644 index 000000000..e29549078 --- /dev/null +++ 
b/lib/dl_connector_metrica/dl_connector_metrica/core/us_connection.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +from datetime import date +import logging +from typing import ( + TYPE_CHECKING, + Callable, + ClassVar, + Optional, + Sequence, +) + +import attr + +from dl_constants.enums import ( + ConnectionType, + UserDataType, +) +from dl_core import exc +from dl_core.db import ( + SchemaColumn, + get_type_transformer, +) +from dl_core.us_connection_base import ( + ConnectionBase, + DataSourceTemplate, + ExecutorBasedMixin, +) +from dl_core.utils import secrepr +import dl_sqlalchemy_metrica_api +from dl_sqlalchemy_metrica_api import api_client as metrika_api_client +from dl_sqlalchemy_metrica_api.api_info.appmetrica import AppMetricaFieldsNamespaces +from dl_sqlalchemy_metrica_api.api_info.metrika import MetrikaApiCounterSource +from dl_utils.utils import DataKey + +from dl_connector_metrica.core.constants import ( + SOURCE_TYPE_APPMETRICA_API, + SOURCE_TYPE_METRICA_API, +) +from dl_connector_metrica.core.dto import ( + AppMetricaAPIConnDTO, + MetricaAPIConnDTO, +) + + +if TYPE_CHECKING: + from dl_core.connection_executors import SyncConnExecutorBase + from dl_core.services_registry.top_level import ServicesRegistry + + +LOGGER = logging.getLogger(__name__) + + +def parse_metrica_ids(ids_str: str) -> Sequence[str]: + if not ids_str: + return [] + return [id_.strip() for id_ in ids_str.split(",")] + + +class MetrikaBaseMixin(ConnectionBase): + metrica_host: Optional[str] = None + + def __init__(self, *args, **kwargs): # type: ignore # TODO: fix + super().__init__(*args, **kwargs) # type: ignore # TODO: fix + self._initial_counter_id = self.data.counter_id if self._data is not None else None # type: ignore # TODO: fix + + @property + def allow_public_usage(self) -> bool: + return False + + @property + def metrika_oauth(self): # type: ignore # TODO: fix + return self.data.token # type: ignore # TODO: fix + + @property + def counter_id(self): # type: 
ignore # TODO: fix + return self.data.counter_id # type: ignore # TODO: fix + + def get_metrica_api_cli(self) -> metrika_api_client.MetrikaApiClient: + return metrika_api_client.MetrikaApiClient(oauth_token=self.metrika_oauth, host=self.metrica_host) + + def get_counter_creation_date(self): # type: ignore # TODO: fix + assert isinstance(self.counter_id, str) + ids = list(filter(lambda t: t, parse_metrica_ids(self.counter_id))) + min_date = min([self.get_metrica_api_cli().get_counter_creation_date(cid) for cid in ids]) + return min_date + + @property + def counter_creation_date(self): # type: ignore # TODO: fix + return self.data.counter_creation_date # type: ignore # TODO: fix + + def get_available_counters(self) -> list[dict]: + return self.get_metrica_api_cli().get_available_counters() + + async def validate_new_data( + self, + services_registry: ServicesRegistry, + changes: Optional[dict] = None, + original_version: Optional[ConnectionBase] = None, + ) -> None: + await super().validate_new_data( # type: ignore # TODO: fix # mixin + services_registry=services_registry, + changes=changes, + original_version=original_version, + ) + if original_version is None: + return # only validating edits here + assert isinstance(changes, dict) + data_changes = changes.get("data") or {} + if data_changes.get("token"): + return # token provided, nothing to check + current_counter_id = self.data.counter_id # type: ignore # TODO: fix # mixin + if str(data_changes.get("counter_id") or "") == str(current_counter_id): + return # no counter_id change + raise exc.ConnectionConfigurationError('"token" must be specified if "counter_id" is changing.') + + +class MetrikaApiConnection(MetrikaBaseMixin, ExecutorBasedMixin, ConnectionBase): # type: ignore # TODO: fix + is_always_internal_source: ClassVar[bool] = True + allow_cache: ClassVar[bool] = True + + metrica_host = metrika_api_client.METRIKA_API_HOST + source_type = SOURCE_TYPE_METRICA_API + + @attr.s(kw_only=True) + class 
DataModel(ConnectionBase.DataModel): + token: str = attr.ib(repr=secrepr) + counter_id: str = attr.ib() # single counter id or comma-separated counters list + counter_creation_date: Optional[date] = attr.ib(default=None) # minimal date in case of multiple counters + accuracy: Optional[float] = attr.ib(default=None) # sample share (0; 1] + + @classmethod + def get_secret_keys(cls) -> set[DataKey]: + return { + *super().get_secret_keys(), + DataKey(parts=("token",)), + } + + @property + def metrika_oauth(self): # type: ignore # TODO: fix + return self.data.token + + @property + def table_name(self): # type: ignore # TODO: fix + return self.data.counter_id + + def get_conn_dto(self) -> MetricaAPIConnDTO: + return MetricaAPIConnDTO( + conn_id=self.uuid, + token=self.metrika_oauth, + accuracy=self.data.accuracy, + ) + + @property + def cache_ttl_sec_override(self) -> Optional[int]: + return None + + @classmethod + def get_api_fields_info(cls): # type: ignore # TODO: fix + return dl_sqlalchemy_metrica_api.api_info.metrika + + @classmethod + def get_raw_schema(cls, metrica_namespace: str, actual_conn_type: ConnectionType) -> Sequence[SchemaColumn]: + fields_info = cls.get_api_fields_info().fields_by_namespace.get( + cls.get_api_fields_info().metrica_fields_namespaces[metrica_namespace], [] + ) + + def user_type_converter(type_name: str) -> UserDataType: + return UserDataType[type_name] if type_name != "datetime" else UserDataType.genericdatetime + + raw_schema = tuple( + SchemaColumn( + name=field["name"], + title=field["title"], + user_type=user_type_converter(field["type"]), + nullable=True, + native_type=get_type_transformer(actual_conn_type).type_user_to_native( + user_t=user_type_converter(field["type"]) + ), + description=field.get("description", ""), + has_auto_aggregation=not field["is_dim"], + lock_aggregation=True, + ) + for field in fields_info + ) + + return raw_schema + + def get_parameter_combinations( + self, + conn_executor_factory: 
Callable[[ConnectionBase], SyncConnExecutorBase], + ) -> list[dict]: + return [dict(db_name=item.name) for item in MetrikaApiCounterSource] + + def get_data_source_templates( + self, + conn_executor_factory: Callable[[ConnectionBase], SyncConnExecutorBase], + ) -> list[DataSourceTemplate]: + return [ + DataSourceTemplate( + title=parameters["db_name"], + group=[], + source_type=self.source_type, + connection_id=self.uuid, # type: ignore # TODO: fix + parameters=parameters, + ) + for parameters in self.get_parameter_combinations(conn_executor_factory=conn_executor_factory) + ] + + @property + def allow_public_usage(self) -> bool: + return False + + +class AppMetricaApiConnection(MetrikaApiConnection): + allow_cache: ClassVar[bool] = True + + metrica_host = metrika_api_client.APPMETRICA_API_HOST + source_type = SOURCE_TYPE_APPMETRICA_API + + def get_conn_dto(self) -> AppMetricaAPIConnDTO: + return AppMetricaAPIConnDTO( + conn_id=self.uuid, + token=self.metrika_oauth, + accuracy=self.data.accuracy, + ) + + @classmethod + def get_api_fields_info(cls): # type: ignore # TODO: fix + return dl_sqlalchemy_metrica_api.api_info.appmetrica + + def get_parameter_combinations( + self, + conn_executor_factory: Callable[[ConnectionBase], SyncConnExecutorBase], + ) -> list[dict]: + return [dict(db_name=item.name) for item in AppMetricaFieldsNamespaces] + + @property + def allow_public_usage(self) -> bool: + return False diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/connector.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/connector.py new file mode 100644 index 000000000..eb8d2584f --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/connector.py @@ -0,0 +1,15 @@ +from dl_formula.connectors.base.column import UnprefixedColumnRenderer 
+from dl_formula.connectors.base.connector import FormulaConnector +from dl_sqlalchemy_metrica_api.base import MetrikaApiDialect as SAMetrikaApiDialect + +from dl_connector_metrica.formula.constants import MetricaDialect as MetricaDialectNS +from dl_connector_metrica.formula.definitions.all import DEFINITIONS + + +class MetricaFormulaConnector(FormulaConnector): + dialect_ns_cls = MetricaDialectNS + dialects = MetricaDialectNS.METRIKAAPI + default_dialect = MetricaDialectNS.METRIKAAPI + op_definitions = DEFINITIONS + sa_dialect = SAMetrikaApiDialect() + column_renderer_cls = UnprefixedColumnRenderer diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/constants.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/constants.py new file mode 100644 index 000000000..9d812fdde --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/constants.py @@ -0,0 +1,12 @@ +from dl_formula.core.dialect import ( + DialectName, + DialectNamespace, + simple_combo, +) + + +DIALECT_NAME_METRICAAPI = DialectName.declare("METRICAAPI") + + +class MetricaDialect(DialectNamespace): + METRIKAAPI = simple_combo(name=DIALECT_NAME_METRICAAPI) diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/all.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/all.py new file mode 100644 index 000000000..4049b87fb --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/all.py @@ -0,0 +1,18 @@ +from dl_connector_metrica.formula.definitions.functions_datetime import DEFINITIONS_DATETIME +from dl_connector_metrica.formula.definitions.functions_markup import DEFINITIONS_MARKUP +from dl_connector_metrica.formula.definitions.functions_string import DEFINITIONS_STRING +from 
dl_connector_metrica.formula.definitions.functions_type import DEFINITIONS_TYPE +from dl_connector_metrica.formula.definitions.operators_binary import DEFINITIONS_BINARY +from dl_connector_metrica.formula.definitions.operators_ternary import DEFINITIONS_TERNARY +from dl_connector_metrica.formula.definitions.operators_unary import DEFINITIONS_UNARY + + +DEFINITIONS = [ + *DEFINITIONS_DATETIME, + *DEFINITIONS_MARKUP, + *DEFINITIONS_STRING, + *DEFINITIONS_TYPE, + *DEFINITIONS_UNARY, + *DEFINITIONS_BINARY, + *DEFINITIONS_TERNARY, +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_datetime.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_datetime.py new file mode 100644 index 000000000..60336eec9 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_datetime.py @@ -0,0 +1,11 @@ +import dl_formula.definitions.functions_datetime as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_DATETIME = [ + # dateadd + base.FuncDateadd1.for_dialect(D.METRIKAAPI), + base.FuncDateadd2Unit.for_dialect(D.METRIKAAPI), + base.FuncDateadd2Number.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_markup.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_markup.py new file mode 100644 index 000000000..9da212b14 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_markup.py @@ -0,0 +1,10 @@ +import dl_formula.definitions.functions_markup as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_MARKUP = [ + # __str + base.FuncInternalStrConst.for_dialect(D.METRIKAAPI), + base.FuncInternalStr.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_string.py 
b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_string.py new file mode 100644 index 000000000..69bef5d8d --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_string.py @@ -0,0 +1,31 @@ +from dl_formula.definitions.base import TranslationVariant +import dl_formula.definitions.functions_string as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +V = TranslationVariant.make + + +DEFINITIONS_STRING = [ + # contains + base.FuncContainsConst( + variants=[ + V(D.METRIKAAPI, lambda x, y: x.contains(y)), + ] + ), + # notcontains + base.FuncNotContainsConst.for_dialect(D.METRIKAAPI), + # endswith + base.FuncEndswithConst( + variants=[ + V(D.METRIKAAPI, lambda x, y: x.endswith(y)), + ] + ), + # startswith + base.FuncStartswithConst( + variants=[ + V(D.METRIKAAPI, lambda x, y: x.startswith(y)), + ] + ), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_type.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_type.py new file mode 100644 index 000000000..1943bc1b9 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/functions_type.py @@ -0,0 +1,13 @@ +import dl_formula.definitions.functions_type as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_TYPE = [ + # datetime + base.FuncDatetime1FromDatetime.for_dialect(D.METRIKAAPI), + # datetimetz + base.FuncDatetimeTZConst.for_dialect(D.METRIKAAPI), + # genericdatetime + base.FuncGenericDatetime1FromDatetime.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_binary.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_binary.py new file mode 100644 index 000000000..160601354 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_binary.py @@ 
-0,0 +1,42 @@ +import dl_formula.definitions.operators_binary as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_BINARY = [ + # != + base.BinaryNotEqual.for_dialect(D.METRIKAAPI), + # * + base.BinaryMultNumbers.for_dialect(D.METRIKAAPI), + # + + base.BinaryPlusNumbers.for_dialect(D.METRIKAAPI), + # - + base.BinaryMinusNumbers.for_dialect(D.METRIKAAPI), + # / + base.BinaryDivInt.for_dialect(D.METRIKAAPI), + base.BinaryDivFloat.for_dialect(D.METRIKAAPI), + # < + base.BinaryLessThan.for_dialect(D.METRIKAAPI), + # <= + base.BinaryLessThanOrEqual.for_dialect(D.METRIKAAPI), + # == + base.BinaryEqual.for_dialect(D.METRIKAAPI), + # > + base.BinaryGreaterThan.for_dialect(D.METRIKAAPI), + # >= + base.BinaryGreaterThanOrEqual.for_dialect(D.METRIKAAPI), + # _!= + base.BinaryNotEqualInternal.for_dialect(D.METRIKAAPI), + # _== + base.BinaryEqualInternal.for_dialect(D.METRIKAAPI), + # _dneq + base.BinaryEqualDenullified.for_dialect(D.METRIKAAPI), + # and + base.BinaryAnd.for_dialect(D.METRIKAAPI), + # in + base.BinaryIn.for_dialect(D.METRIKAAPI), + # notin + base.BinaryNotIn.for_dialect(D.METRIKAAPI), + # or + base.BinaryOr.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_ternary.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_ternary.py new file mode 100644 index 000000000..1dd894eaa --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_ternary.py @@ -0,0 +1,9 @@ +import dl_formula.definitions.operators_ternary as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_TERNARY = [ + # between + base.TernaryBetween.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_unary.py b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_unary.py new file mode 100644 
index 000000000..b3f50dc6c --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula/definitions/operators_unary.py @@ -0,0 +1,9 @@ +import dl_formula.definitions.operators_unary as base + +from dl_connector_metrica.formula.constants import MetricaDialect as D + + +DEFINITIONS_UNARY = [ + # not + base.UnaryNotBool.for_dialect(D.METRIKAAPI), +] diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/human_dialects.py b/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/human_dialects.py new file mode 100644 index 000000000..d46a9ebee --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/human_dialects.py @@ -0,0 +1,12 @@ +from dl_formula_ref.texts import StyledDialect + +from dl_connector_metrica.formula.constants import MetricaDialect + + +HUMAN_DIALECTS = { + MetricaDialect.METRIKAAPI: StyledDialect( + "`Yandex Metrica`", + "`Yandex Metrica`", + "`Yandex Metrica`", + ), +} diff --git a/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/plugin.py b/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/plugin.py new file mode 100644 index 000000000..79a6acd0a --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/formula_ref/plugin.py @@ -0,0 +1,9 @@ +from dl_formula_ref.plugins.base.plugin import FormulaRefPlugin + +from dl_connector_metrica.formula.constants import MetricaDialect +from dl_connector_metrica.formula_ref.human_dialects import HUMAN_DIALECTS + + +class MetricaFormulaRefPlugin(FormulaRefPlugin): + any_dialects = frozenset((*MetricaDialect.METRIKAAPI.to_list(),)) + human_dialects = HUMAN_DIALECTS diff --git a/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.mo 
b/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.mo new file mode 100644 index 000000000..7d0419ba0 Binary files /dev/null and b/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.mo differ diff --git a/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.po b/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.po new file mode 100644 index 000000000..518e638d4 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/locales/en/LC_MESSAGES/dl_connector_metrica.po @@ -0,0 +1,22 @@ +# Copyright (c) 2023 YANDEX LLC +# This file is distributed under the same license as the DataLens package. +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: datalens-opensource@yandex-team.ru\n" +"POT-Creation-Date: 2023-09-22 08:09+0000\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "label_connector-metrica" +msgstr "Metrica" + +msgid "label_connector-appmetrica" +msgstr "AppMetrica" + +msgid "field_oauth-token" +msgstr "OAuth token" + +msgid "button_get-token" +msgstr "Get token" diff --git a/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.mo b/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.mo new file mode 100644 index 000000000..810dec8f8 Binary files /dev/null and b/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.mo differ diff --git a/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.po b/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.po new file mode 100644 index 000000000..21a4d7632 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica/locales/ru/LC_MESSAGES/dl_connector_metrica.po @@ -0,0 +1,22 
@@ +# Copyright (c) 2023 YANDEX LLC +# This file is distributed under the same license as the DataLens package. +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: datalens-opensource@yandex-team.ru\n" +"POT-Creation-Date: 2023-09-22 08:09+0000\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "label_connector-metrica" +msgstr "Metrica" + +msgid "label_connector-appmetrica" +msgstr "AppMetrica" + +msgid "field_oauth-token" +msgstr "OAuth-токен" + +msgid "button_get-token" +msgstr "Получить токен" diff --git a/lib/dl_connector_metrica/dl_connector_metrica/py.typed b/lib/dl_connector_metrica/dl_connector_metrica/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/conftest.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/conftest.py new file mode 100644 index 000000000..24e2d3dac --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/base.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/base.py new file mode 100644 index 000000000..f6554747f --- /dev/null +++ 
b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/base.py @@ -0,0 +1,97 @@ +import pytest + +from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_api_lib_testing.connection_base import ConnectionTestBase +from dl_api_lib_testing.data_api_base import ( + DataApiTestParams, + StandardizedDataApiTestBase, +) +from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_sqlalchemy_metrica_api.api_info.appmetrica import AppMetricaFieldsNamespaces +from dl_sqlalchemy_metrica_api.api_info.metrika import MetrikaApiCounterSource + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, + SOURCE_TYPE_APPMETRICA_API, + SOURCE_TYPE_METRICA_API, +) +from dl_connector_metrica_tests.ext.config import ( + API_TEST_CONFIG, + APPMETRICA_SAMPLE_COUNTER_ID, + METRIKA_SAMPLE_COUNTER_ID, +) +from dl_connector_metrica_tests.ext.core.base import ( + BaseAppMetricaTestClass, + BaseMetricaTestClass, +) + + +class MetricaConnectionTestBase(BaseMetricaTestClass, ConnectionTestBase): + conn_type = CONNECTION_TYPE_METRICA_API + bi_compeng_pg_on = False + + @pytest.fixture(scope="class") + def bi_test_config(self) -> ApiTestEnvironmentConfiguration: + return API_TEST_CONFIG + + @pytest.fixture(scope="class") + def connection_params(self, metrica_token: str) -> dict: + return dict( + counter_id=METRIKA_SAMPLE_COUNTER_ID, + token=metrica_token, + accuracy=0.01, + ) + + +class MetricaDatasetTestBase(MetricaConnectionTestBase, DatasetTestBase): + @pytest.fixture(scope="class") + def dataset_params(self) -> dict: + return dict( + source_type=SOURCE_TYPE_METRICA_API.name, + parameters=dict( + db_name=MetrikaApiCounterSource.hits.name, + ), + ) + + +class MetricaDataApiTestBase(MetricaDatasetTestBase, StandardizedDataApiTestBase): + mutation_caches_on = False + + @pytest.fixture(scope="class") + def data_api_test_params(self) -> DataApiTestParams: + return DataApiTestParams( + 
two_dims=("Домен страницы", "Просмотров в минуту"), + summable_field="Просмотров в минуту", + range_field="Дата и время просмотра", + distinct_field="Адрес страницы", + date_field="Дата просмотра", + ) + + +class AppMetricaConnectionTestBase(BaseAppMetricaTestClass, ConnectionTestBase): + conn_type = CONNECTION_TYPE_APPMETRICA_API + bi_compeng_pg_on = False + + @pytest.fixture(scope="class") + def bi_test_config(self) -> ApiTestEnvironmentConfiguration: + return API_TEST_CONFIG + + @pytest.fixture(scope="class") + def connection_params(self, metrica_token: str) -> dict: + return dict( + counter_id=APPMETRICA_SAMPLE_COUNTER_ID, + token=metrica_token, + accuracy=0.01, + ) + + +class AppMetricaDatasetTestBase(AppMetricaConnectionTestBase, DatasetTestBase): + @pytest.fixture(scope="class") + def dataset_params(self) -> dict: + return dict( + source_type=SOURCE_TYPE_APPMETRICA_API.name, + parameters=dict( + db_name=AppMetricaFieldsNamespaces.installs.name, + ), + ) diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_connection.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_connection.py new file mode 100644 index 000000000..5c1404f03 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_connection.py @@ -0,0 +1,67 @@ +from copy import deepcopy +import json +import uuid + +import pytest + +from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase +from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_metrica_tests.ext.api.base import ( + AppMetricaConnectionTestBase, + MetricaConnectionTestBase, +) +from dl_connector_metrica_tests.ext.config import METRIKA_SAMPLE_COUNTER_ID + + +class TestMetricaConnection(MetricaConnectionTestBase, DefaultConnectorConnectionTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + 
DefaultConnectorConnectionTestSuite.test_test_connection: "Doesn't work for Metrica", + DefaultConnectorConnectionTestSuite.test_cache_ttl_sec_override: "Unavailable for Metrica", + } + ) + + @pytest.mark.parametrize("bad_counter", ["44147844,-44147844", "44147844,asdf"]) + def test_invalid_counter( + self, control_api_sync_client: SyncHttpClientBase, connection_params: dict, bad_counter: str + ) -> None: + params = deepcopy(connection_params) + params["counter_id"] = bad_counter + params["name"] = f"metrica_{uuid.uuid4().hex}" + params["type"] = self.conn_type.name + resp = control_api_sync_client.post( + "/api/v1/connections", data=json.dumps(params), content_type="application/json" + ) + assert resp.status_code == 400, resp.json + + def test_update_connection( + self, + control_api_sync_client: SyncHttpClientBase, + saved_connection_id: str, + metrica_token: str, + ) -> None: + new_counter_id = f"{METRIKA_SAMPLE_COUNTER_ID},{METRIKA_SAMPLE_COUNTER_ID}" + update_resp = control_api_sync_client.put( + f"/api/v1/connections/{saved_connection_id}", + data=json.dumps({"counter_id": new_counter_id, "token": metrica_token}), + content_type="application/json", + ) + assert update_resp.status_code == 200, update_resp.json + + update_resp = control_api_sync_client.put( + f"/api/v1/connections/{saved_connection_id}", + data=json.dumps({"token": "asdf"}), + content_type="application/json", + ) + assert update_resp.status_code == 200, update_resp.json + + +class TestAppMetricaConnection(AppMetricaConnectionTestBase, DefaultConnectorConnectionTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorConnectionTestSuite.test_test_connection: "Doesn't work for AppMetrica", + DefaultConnectorConnectionTestSuite.test_cache_ttl_sec_override: "Unavailable for AppMetrica", + } + ) diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_data.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_data.py new file mode 
100644 index 000000000..fb0be1461 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_data.py @@ -0,0 +1,85 @@ +import datetime + +from dl_api_client.dsmaker.api.data_api import SyncHttpDataApiV2 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_client.dsmaker.shortcuts.result_data import get_data_rows +from dl_api_lib_testing.connector.data_api_suites import ( + DefaultConnectorDataDistinctTestSuite, + DefaultConnectorDataGroupByFormulaTestSuite, + DefaultConnectorDataRangeTestSuite, + DefaultConnectorDataResultTestSuite, +) +from dl_api_lib_testing.data_api_base import DataApiTestParams +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_metrica_tests.ext.api.base import MetricaDataApiTestBase + + +class TestMetricaDataResult(MetricaDataApiTestBase, DefaultConnectorDataResultTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorDataResultTestSuite.test_basic_result: "Metrica doesn't support SUM", + DefaultConnectorDataResultTestSuite.test_dates: "Metrica doesn't support DATE", + DefaultConnectorDataResultTestSuite.test_get_result_with_string_filter_operations_for_numbers: "Metrica doesn't support ICONTAINS", + }, + mark_features_skipped={ + DefaultConnectorDataResultTestSuite.array_support: "Metrica doesn't support arrays", + }, + ) + + def test_metrica_result( + self, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + data_api: SyncHttpDataApiV2, + ) -> None: + ds = saved_dataset + result_resp = data_api.get_result( + dataset=ds, + fields=[ + ds.find_field(title="Дата просмотра"), + ds.find_field(title="Просмотров в минуту"), + ], + filters=[ + ds.find_field(title="Дата просмотра").filter("BETWEEN", values=["2019-12-01", "2019-12-07 12:00:00"]) + ], + order_by=[ds.find_field(title="Просмотров в минуту")], + limit=3, + ) + data_rows = get_data_rows(result_resp) + assert len(data_rows) == 3 + + +class 
TestMetricaDataGroupBy(MetricaDataApiTestBase, DefaultConnectorDataGroupByFormulaTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorDataGroupByFormulaTestSuite.test_complex_result: "Metrica doesn't support LEN" + } + ) + + +class TestMetricaDataRange(MetricaDataApiTestBase, DefaultConnectorDataRangeTestSuite): + def test_basic_range( + self, + saved_dataset: Dataset, + data_api_test_params: DataApiTestParams, + data_api: SyncHttpDataApiV2, + ) -> None: + ds = saved_dataset + + range_resp = self.get_range(ds, data_api, field_name=data_api_test_params.range_field) + range_rows = get_data_rows(range_resp) + min_val, max_val = map(datetime.datetime.fromisoformat, range_rows[0]) + assert min_val <= max_val + assert max_val - min_val < datetime.timedelta(seconds=1) + + +class TestMetricaDataDistinct(MetricaDataApiTestBase, DefaultConnectorDataDistinctTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorDataDistinctTestSuite.test_date_filter_distinct: "Metrica doesn't support ICONTAINS" + } + ) + + +# preview is not available for Metrica diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_dataset.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_dataset.py new file mode 100644 index 000000000..0733a812b --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/api/test_dataset.py @@ -0,0 +1,86 @@ +import abc +from copy import deepcopy +from typing import ClassVar + +from dl_api_client.dsmaker.api.dataset_api import SyncHttpDatasetApiV1 +from dl_api_client.dsmaker.primitives import Dataset +from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite +from dl_constants.enums import ( + AggregationFunction, + FieldType, + UserDataType, +) +from dl_sqlalchemy_metrica_api.api_info.appmetrica.installs import installs_fields +from dl_sqlalchemy_metrica_api.api_info.metrika.hits import hits_fields + +from 
dl_connector_metrica_tests.ext.api.base import ( + AppMetricaDatasetTestBase, + MetricaDatasetTestBase, +) + + +class MetricaDatasetChecker(DefaultConnectorDatasetTestSuite, metaclass=abc.ABCMeta): + expected_fields: ClassVar[dict] + + def check_basic_dataset(self, ds: Dataset) -> None: + assert ds.id + assert len(ds.result_schema) + assert all(field.aggregation == AggregationFunction.none for field in ds.result_schema) + + fields = [ + { + "is_dim": field.type == FieldType.DIMENSION, + "name": field.source, + "title": field.title, + "type": ( + UserDataType.datetime.name + if field.initial_data_type == UserDataType.genericdatetime + else field.initial_data_type.name + ), + } + for field in ds.result_schema + ] + + expected = deepcopy(self.expected_fields) + for elem in expected: + elem.pop("description") + elem.pop("src_key", None) + assert fields == expected + + +class TestMetricaDataset(MetricaDatasetTestBase, MetricaDatasetChecker): + expected_fields = hits_fields + + def test_add_field_to_dataset( + self, + saved_dataset: Dataset, + control_api: SyncHttpDatasetApiV1, + ) -> None: + ds = saved_dataset + + ds.result_schema["new field"] = ds.field( + avatar_id=ds.source_avatars[0].id, + source="ym:pv:pageviewsPerMinute", + aggregation=AggregationFunction.none, + title="new field", + ) + ds_resp = control_api.apply_updates(dataset=ds) + resp_result_schema = ds_resp.json["dataset"]["result_schema"] + assert resp_result_schema[0]["title"] == "new field" + assert resp_result_schema[0]["type"] == FieldType.MEASURE.name + + def test_concat_validation( + self, + saved_dataset: Dataset, + control_api: SyncHttpDatasetApiV1, + ) -> None: + ds = saved_dataset + + ds.result_schema["Test"] = ds.field(formula='CONCAT("TEST1 ", "test2")') + ds_resp = control_api.apply_updates(dataset=ds, fail_ok=True) + assert ds_resp.status_code == 400, ds_resp.json + assert ds_resp.json["code"] == "ERR.DS_API.VALIDATION.ERROR" + + +class TestAppMetricaDataset(AppMetricaDatasetTestBase, 
MetricaDatasetChecker): + expected_fields = installs_fields diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/config.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/config.py new file mode 100644 index 000000000..23d3d29b1 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/config.py @@ -0,0 +1,23 @@ +from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration +from dl_core_testing.configuration import DefaultCoreTestConfiguration +from dl_testing.containers import get_test_container_hostport + + +METRIKA_SAMPLE_COUNTER_ID = "44147844" +APPMETRICA_SAMPLE_COUNTER_ID = "1111" + +# Infra settings +CORE_TEST_CONFIG = DefaultCoreTestConfiguration( + host_us_http=get_test_container_hostport("us", fallback_port=51911).host, + port_us_http=get_test_container_hostport("us", fallback_port=51911).port, + host_us_pg=get_test_container_hostport("pg-us", fallback_port=51910).host, + port_us_pg_5432=get_test_container_hostport("pg-us", fallback_port=51910).port, + us_master_token="AC1ofiek8coB", + core_connector_ep_names=["metrica_api", "appmetrica_api", "testing"], +) + +API_TEST_CONFIG = ApiTestEnvironmentConfiguration( + api_connector_ep_names=["metrica_api", "appmetrica_api"], + core_test_config=CORE_TEST_CONFIG, + ext_query_executer_secret_key="_some_test_secret_key_", +) diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/conftest.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/conftest.py new file mode 100644 index 000000000..4879f7c2c --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/conftest.py @@ -0,0 +1,23 @@ +import os + +import pytest + +from dl_api_lib_testing.initialization import initialize_api_lib_test +from dl_testing.env_params.generic import GenericEnvParamGetter + +from dl_connector_metrica_tests.ext.config import API_TEST_CONFIG + + +def pytest_configure(config): # noqa + initialize_api_lib_test(pytest_config=config, 
api_test_config=API_TEST_CONFIG) + + +@pytest.fixture(scope="session") +def env_param_getter() -> GenericEnvParamGetter: + filepath = os.path.join(os.path.dirname(__file__), "params.yml") + return GenericEnvParamGetter.from_yaml_file(filepath) + + +@pytest.fixture(scope="session") +def metrica_token(env_param_getter: GenericEnvParamGetter) -> str: + return env_param_getter.get_str_value("METRIKA_OAUTH") diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/base.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/base.py new file mode 100644 index 000000000..9644ff1c9 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/base.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import asyncio +from typing import ( + Generator, + TypeVar, +) + +import pytest + +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.database import ( + CoreDbConfig, + Db, +) +from dl_core_testing.engine_wrapper import TestingEngineWrapper +from dl_core_testing.testcases.connection import BaseConnectionTestClass +import dl_sqlalchemy_metrica_api + +from dl_connector_metrica.core.constants import ( + CONNECTION_TYPE_APPMETRICA_API, + CONNECTION_TYPE_METRICA_API, +) +from dl_connector_metrica.core.testing.connection import ( + make_saved_appmetrica_api_connection, + make_saved_metrika_api_connection, +) +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, +) +import dl_connector_metrica_tests.ext.config as test_config + + +_CONN_TV = TypeVar("_CONN_TV", MetrikaApiConnection, AppMetricaApiConnection) + + +class MetricaTestSetup(BaseConnectionTestClass[_CONN_TV]): + @pytest.fixture(autouse=True) + # FIXME: This fixture is a temporary 
solution for failing core tests when they are run together with api tests + def loop(self, event_loop: asyncio.AbstractEventLoop) -> Generator[asyncio.AbstractEventLoop, None, None]: + asyncio.set_event_loop(event_loop) + yield event_loop + # Attempt to cover an old version of pytest-asyncio: + # https://github.com/pytest-dev/pytest-asyncio/commit/51d986cec83fdbc14fa08015424c79397afc7ad9 + asyncio.set_event_loop_policy(None) + + @pytest.fixture(scope="class") + def db_url(self) -> str: + return "" + + @pytest.fixture(scope="class") + def db(self, db_config: CoreDbConfig) -> Db: + engine_wrapper = TestingEngineWrapper(config=db_config.engine_config) + return Db(config=db_config, engine_wrapper=engine_wrapper) + + @pytest.fixture(scope="function", autouse=True) + def shrink_metrika_default_date_period(self, monkeypatch): + """ + To reduce load for Metrika API and tests run time. + """ + monkeypatch.setattr(dl_sqlalchemy_metrica_api.base, "DEFAULT_DATE_PERIOD", 3) + + +class BaseMetricaTestClass(MetricaTestSetup[MetrikaApiConnection]): + conn_type = CONNECTION_TYPE_METRICA_API + core_test_config = test_config.CORE_TEST_CONFIG + + @pytest.fixture(scope="function") + def connection_creation_params(self, metrica_token: str) -> dict: + return dict( + counter_id=test_config.METRIKA_SAMPLE_COUNTER_ID, + token=metrica_token, + ) + + @pytest.fixture(scope="function") + def saved_connection( + self, + sync_us_manager: SyncUSManager, + connection_creation_params: dict, + ) -> MetrikaApiConnection: + return make_saved_metrika_api_connection(sync_usm=sync_us_manager, **connection_creation_params) + + +class BaseAppMetricaTestClass(MetricaTestSetup[AppMetricaApiConnection]): + conn_type = CONNECTION_TYPE_APPMETRICA_API + core_test_config = test_config.CORE_TEST_CONFIG + + @pytest.fixture(scope="function") + def connection_creation_params(self, metrica_token: str) -> dict: + return dict( + counter_id=test_config.APPMETRICA_SAMPLE_COUNTER_ID, + token=metrica_token, + ) + + 
@pytest.fixture(scope="function") + def saved_connection( + self, + sync_us_manager: SyncUSManager, + connection_creation_params: dict, + ) -> AppMetricaApiConnection: + return make_saved_appmetrica_api_connection(sync_usm=sync_us_manager, **connection_creation_params) diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_connection.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_connection.py new file mode 100644 index 000000000..c7d3d9145 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_connection.py @@ -0,0 +1,60 @@ +from dl_core.us_connection_base import DataSourceTemplate +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.testcases.connection import DefaultConnectionTestClass +from dl_sqlalchemy_metrica_api.api_info.metrika import MetrikaApiCounterSource +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_metrica.core.constants import SOURCE_TYPE_METRICA_API +from dl_connector_metrica.core.us_connection import MetrikaApiConnection +from dl_connector_metrica_tests.ext.core.base import BaseMetricaTestClass + + +class TestMetricaConnection(BaseMetricaTestClass, DefaultConnectionTestClass[MetrikaApiConnection]): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectionTestClass.test_connection_test: "in Metrica, you can't run constant queries like SELECT 1", + } + ) + + def check_saved_connection(self, conn: MetrikaApiConnection, params: dict) -> None: + pass + + def test_make_connection( + self, saved_connection: MetrikaApiConnection, conn_default_sync_us_manager: SyncUSManager + ) -> None: + conn = saved_connection + usm = conn_default_sync_us_manager + assert conn.uuid is not None + assert conn.data.counter_creation_date + + usm_conn = usm.get_by_id(conn.uuid) + assert conn.data.counter_creation_date == usm_conn.data.counter_creation_date + revision_id_after_save = usm_conn.revision_id + assert 
isinstance(revision_id_after_save, str) + assert revision_id_after_save + + conn.data.name = "{} (changed)".format(conn.data.name) + usm.save(conn) + revision_id_after_modify = conn.revision_id + assert isinstance(revision_id_after_modify, str) + assert revision_id_after_modify + assert revision_id_after_modify != revision_id_after_save + + loaded_conn = usm.get_by_id(conn.uuid) + assert loaded_conn.revision_id == revision_id_after_modify + + def check_data_source_templates(self, conn: MetrikaApiConnection, dsrc_templates: list[DataSourceTemplate]) -> None: + expected_templates = sorted( + [ + DataSourceTemplate( + title=ns.name, + group=[], + connection_id=conn.uuid, + source_type=SOURCE_TYPE_METRICA_API, + parameters={"db_name": ns.name}, + ) + for ns in MetrikaApiCounterSource + ], + key=lambda el: el.title, + ) + assert expected_templates == sorted(dsrc_templates, key=lambda el: el.title) diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_data_source.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_data_source.py new file mode 100644 index 000000000..11e5fdbb2 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_data_source.py @@ -0,0 +1,46 @@ +import datetime + +import pytest + +from dl_core.data_source_spec.sql import StandardSQLDataSourceSpec +from dl_core_testing.testcases.data_source import BaseDataSourceTestClass +from dl_sqlalchemy_metrica_api.api_info.metrika import MetrikaApiCounterSource + +from dl_connector_metrica.core.constants import SOURCE_TYPE_METRICA_API +from dl_connector_metrica.core.data_source import MetrikaApiDataSource +from dl_connector_metrica.core.us_connection import MetrikaApiConnection +from dl_connector_metrica_tests.ext.core.base import BaseMetricaTestClass + + +class TestMetricaDataSource( + BaseMetricaTestClass, + BaseDataSourceTestClass[ + MetrikaApiConnection, + StandardSQLDataSourceSpec, + MetrikaApiDataSource, + ], +): + DSRC_CLS = 
MetrikaApiDataSource + + @pytest.fixture(scope="class") + def initial_data_source_spec(self) -> StandardSQLDataSourceSpec: + return StandardSQLDataSourceSpec( + source_type=SOURCE_TYPE_METRICA_API, + db_name=MetrikaApiCounterSource.hits.name, + ) + + def test_expression_value_range(self, data_source: MetrikaApiDataSource, saved_connection: MetrikaApiConnection): + conn = saved_connection + dsrc = data_source + + # datetime + min_value, max_value = dsrc.get_expression_value_range(col_name="ym:pv:dateTime") + now = datetime.datetime.utcnow() + assert min_value.date() == conn.data.counter_creation_date + assert max_value - now < datetime.timedelta(seconds=1) + + # date + min_value, max_value = dsrc.get_expression_value_range(col_name="ym:pv:startOfQuarter") + now = datetime.datetime.utcnow() + assert min_value == conn.data.counter_creation_date + assert max_value == now.date() diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_dataset.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_dataset.py new file mode 100644 index 000000000..e4bb24b4b --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/core/test_dataset.py @@ -0,0 +1,307 @@ +from typing import Optional + +import pytest +import sqlalchemy as sa + +from dl_constants.enums import ( + DataSourceRole, + UserDataType, +) +from dl_core.dataset_capabilities import DatasetCapabilities +from dl_core.query.bi_query import BIQuery +from dl_core.query.expression import ( + ExpressionCtx, + OrderByExpressionCtx, +) +from dl_core.services_registry.top_level import ServicesRegistry +from dl_core.us_dataset import Dataset +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.connection import make_saved_connection +from dl_core_testing.connector import ( + CONNECTION_TYPE_TESTING, + SOURCE_TYPE_TESTING, +) +from dl_core_testing.dataset_wrappers import DatasetTestWrapper +from dl_core_testing.testcases.dataset import 
DefaultDatasetTestSuite +from dl_sqlalchemy_metrica_api.api_info.appmetrica import AppMetricaFieldsNamespaces +from dl_sqlalchemy_metrica_api.api_info.metrika import MetrikaApiCounterSource +from dl_testing.regulated_test import RegulatedTestParams + +from dl_connector_metrica.core.constants import ( + SOURCE_TYPE_APPMETRICA_API, + SOURCE_TYPE_METRICA_API, +) +from dl_connector_metrica.core.us_connection import ( + AppMetricaApiConnection, + MetrikaApiConnection, +) +from dl_connector_metrica_tests.ext.core.base import ( + BaseAppMetricaTestClass, + BaseMetricaTestClass, +) + + +class TestMetricaDataset(BaseMetricaTestClass, DefaultDatasetTestSuite[MetrikaApiConnection]): + source_type = SOURCE_TYPE_METRICA_API + + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultDatasetTestSuite.test_simple_select_from_subquery: "Not allowed for Metrica", + }, + mark_tests_failed={ + DefaultDatasetTestSuite.test_get_param_hash: "", # TODO: FIXME + }, + ) + + @pytest.fixture(scope="function") + def dsrc_params(self) -> dict: + return dict( + db_name=MetrikaApiCounterSource.hits.name, + ) + + def _check_simple_select( + self, + dataset_wrapper: DatasetTestWrapper, + saved_dataset: Dataset, + async_service_registry: ServicesRegistry, + sync_us_manager: SyncUSManager, + result_cnt: int, + limit: Optional[int] = None, + from_subquery: bool = False, + subquery_limit: Optional[int] = None, + ) -> None: + assert limit is not None or (from_subquery and subquery_limit is not None) + avatar_id = dataset_wrapper.get_root_avatar_strict().id + bi_query = BIQuery( + select_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), # is a dimension + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:pageviewsPerMinute", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + 
alias="col2", + user_type=UserDataType.integer, + ), # is a measure + ], + group_by_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), + ], + limit=limit, + ) + + data = self.fetch_data( + saved_dataset=saved_dataset, + service_registry=async_service_registry, + sync_us_manager=sync_us_manager, + bi_query=bi_query, + from_subquery=from_subquery, + subquery_limit=subquery_limit, + ) + assert len(list(data.data)) == result_cnt + + def test_select_data_distinct( + self, + dataset_wrapper: DatasetTestWrapper, + saved_dataset: Dataset, + conn_async_service_registry: ServicesRegistry, + sync_us_manager: SyncUSManager, + ) -> None: + avatar_id = dataset_wrapper.get_root_avatar_strict().id + bi_query = BIQuery( + select_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), # is a dimension + ], + order_by_expressions=[ + OrderByExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), + ], + distinct=True, + ) + + data = self.fetch_data( + saved_dataset=saved_dataset, + service_registry=conn_async_service_registry, + sync_us_manager=sync_us_manager, + bi_query=bi_query, + ) + values = [row[0] for row in data.data] + assert values == sorted(set(values)) + + def test_select_with_quotes( + self, + dataset_wrapper: DatasetTestWrapper, + saved_dataset: Dataset, + conn_async_service_registry: ServicesRegistry, + sync_us_manager: SyncUSManager, + ) -> None: + avatar_id = dataset_wrapper.get_root_avatar_strict().id + bi_query = BIQuery( + select_expressions=[ + ExpressionCtx( + 
expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), # is a dimension + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:pageviewsPerMinute", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col2", + user_type=UserDataType.integer, + ), # is a measure + ], + group_by_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:pv:startOfHour", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.datetime, + ), + ], + dimension_filters=[ + ExpressionCtx( + expression=sa.literal_column("ym:pv:openstatCampaign").in_( + ("Nizhny Novgorod Oblast'", "'m'a'n'y'q'u'o't'e's'") + ), + avatar_ids=[avatar_id], + user_type=UserDataType.boolean, + ), + ], + ) + + data = self.fetch_data( + saved_dataset=saved_dataset, + service_registry=conn_async_service_registry, + sync_us_manager=sync_us_manager, + bi_query=bi_query, + ) + assert not list(data.data) # not expecting any data, just checking that the request was successful + + def test_source_cannot_be_added( + self, + dataset_wrapper: DatasetTestWrapper, + saved_dataset: Dataset, + saved_connection: MetrikaApiConnection, + sync_us_manager: SyncUSManager, + ) -> None: + testing_conn = make_saved_connection(sync_us_manager, conn_type=CONNECTION_TYPE_TESTING) + try: + capabilities = DatasetCapabilities( + dataset=saved_dataset, dsrc_coll_factory=dataset_wrapper.dsrc_coll_factory + ) + assert not capabilities.source_can_be_added( + connection_id=saved_connection.uuid, created_from=SOURCE_TYPE_METRICA_API + ) + assert not capabilities.source_can_be_added( + connection_id=testing_conn.uuid, created_from=SOURCE_TYPE_TESTING + ) + finally: + sync_us_manager.delete(testing_conn) + + +class TestAppMetricaDataset(BaseAppMetricaTestClass, 
DefaultDatasetTestSuite[AppMetricaApiConnection]): + source_type = SOURCE_TYPE_APPMETRICA_API + + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultDatasetTestSuite.test_simple_select_from_subquery: "Not allowed for AppMetrica", + }, + mark_tests_failed={ + DefaultDatasetTestSuite.test_get_param_hash: "", # TODO: FIXME + }, + ) + + @pytest.fixture(scope="function") + def dsrc_params(self) -> dict: + return dict( + db_name=AppMetricaFieldsNamespaces.installs.name, + ) + + def _check_simple_select( + self, + dataset_wrapper: DatasetTestWrapper, + saved_dataset: Dataset, + async_service_registry: ServicesRegistry, + sync_us_manager: SyncUSManager, + result_cnt: int, + limit: Optional[int] = None, + from_subquery: bool = False, + subquery_limit: Optional[int] = None, + ) -> None: + assert limit is not None or (from_subquery and subquery_limit is not None) + avatar_id = dataset_wrapper.get_root_avatar_strict().id + bi_query = BIQuery( + select_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:ts:regionCityName", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.string, + ), # is a dimension + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:ts:advInstallDevices", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col2", + user_type=UserDataType.integer, + ), # is a measure + ], + group_by_expressions=[ + ExpressionCtx( + expression=sa.literal_column( + dataset_wrapper.quote("ym:ts:regionCityName", role=DataSourceRole.origin) + ), + avatar_ids=[avatar_id], + alias="col1", + user_type=UserDataType.string, + ), + ], + limit=limit, + ) + + data = self.fetch_data( + saved_dataset=saved_dataset, + service_registry=async_service_registry, + sync_us_manager=sync_us_manager, + bi_query=bi_query, + from_subquery=from_subquery, + subquery_limit=subquery_limit, + ) + assert len(list(data.data)) == result_cnt diff --git 
a/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/params.yml b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/params.yml new file mode 100644 index 000000000..02536435c --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/ext/params.yml @@ -0,0 +1,2 @@ +params: + METRIKA_OAUTH: {getter: $osenv, key: METRIKA_OAUTH} diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/__init__.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/conftest.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/test_connection_form.py b/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/test_connection_form.py new file mode 100644 index 000000000..f3a6486b4 --- /dev/null +++ b/lib/dl_connector_metrica/dl_connector_metrica_tests/unit/test_connection_form.py @@ -0,0 +1,62 @@ +import pytest + +from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS +from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + +from dl_connector_metrica.api.connection_form.form_config import ( + AppMetricaAPIConnectionFormFactory, + MetricaAPIConnectionFormFactory, +) +from dl_connector_metrica.api.i18n.localizer import CONFIGS as BI_CONNECTOR_METRICA_CONFIGS +from dl_connector_metrica.core.settings import ( + AppmetricaConnectorSettings, + MetricaConnectorSettings, +) + + +class MetricaLikeConnectionFormTestBase(ConnectionFormTestBase): + @pytest.fixture( + params=(True, False), + ids=("auto_dash_True", "auto_dash_False"), + ) + def allow_auto_dash_creation(self, request) -> bool: + return request.param + + @pytest.fixture( + params=(True, False), + ids=("manual_counter_True", "manual_counter_False"), + ) + def 
allow_counter_manual_input(self, request) -> bool: + return request.param + + +class TestMetricaAPIConnectionForm(MetricaLikeConnectionFormTestBase): + CONN_FORM_FACTORY_CLS = MetricaAPIConnectionFormFactory + TRANSLATION_CONFIGS = BI_API_CONNECTOR_CONFIGS + BI_CONNECTOR_METRICA_CONFIGS + + @pytest.fixture + def connectors_settings( # noqa + self, + allow_auto_dash_creation, + allow_counter_manual_input, + ) -> MetricaConnectorSettings: + return MetricaConnectorSettings( + COUNTER_ALLOW_MANUAL_INPUT=allow_counter_manual_input, + ALLOW_AUTO_DASH_CREATION=allow_auto_dash_creation, + ) + + +class TestAppMetricaAPIConnectionForm(MetricaLikeConnectionFormTestBase): + CONN_FORM_FACTORY_CLS = AppMetricaAPIConnectionFormFactory + TRANSLATION_CONFIGS = BI_API_CONNECTOR_CONFIGS + BI_CONNECTOR_METRICA_CONFIGS + + @pytest.fixture + def connectors_settings( # noqa + self, + allow_auto_dash_creation, + allow_counter_manual_input, + ) -> AppmetricaConnectorSettings: + return AppmetricaConnectorSettings( + COUNTER_ALLOW_MANUAL_INPUT=allow_counter_manual_input, + ALLOW_AUTO_DASH_CREATION=allow_auto_dash_creation, + ) diff --git a/lib/dl_connector_metrica/docker-compose.yml b/lib/dl_connector_metrica/docker-compose.yml new file mode 100644 index 000000000..c2bd6769d --- /dev/null +++ b/lib/dl_connector_metrica/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3.7' + +x-constants: + US_MASTER_TOKEN: &c-us-master-token "AC1ofiek8coB" + +services: + # INFRA + pg-us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.pg-us + environment: + POSTGRES_DB: us-db-ci_purgeable + POSTGRES_USER: us + POSTGRES_PASSWORD: us + ports: + - "51910:5432" + + us: + build: + context: ../testenv-common/images + dockerfile: Dockerfile.us + depends_on: + - pg-us + environment: + POSTGRES_DSN_LIST: "postgres://us:us@pg-us:5432/us-db-ci_purgeable" + AUTH_POLICY: "required" + MASTER_TOKEN: *c-us-master-token + ports: + - "51911:80" diff --git a/lib/dl_connector_metrica/pyproject.toml 
b/lib/dl_connector_metrica/pyproject.toml new file mode 100644 index 000000000..a6d528fe1 --- /dev/null +++ b/lib/dl_connector_metrica/pyproject.toml @@ -0,0 +1,78 @@ +[tool.poetry] +name = "datalens-connector-metrica" +version = "0.0.1" +description = "" +authors = ["DataLens Team "] +packages = [{include = "dl_connector_metrica"}] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +attrs = ">=22.2.0" +marshmallow = ">=3.19.0" +python = ">=3.10, <3.12" +sqlalchemy = ">=1.4.46, <2.0" +datalens-api-commons = {path = "../dl_api_commons"} +datalens-utils = {path = "../dl_utils"} +datalens-constants = {path = "../dl_constants"} +datalens-formula-ref = {path = "../dl_formula_ref"} +datalens-i18n = {path = "../dl_i18n"} +datalens-formula = {path = "../dl_formula"} +datalens-configs = {path = "../dl_configs"} +datalens-api-connector = {path = "../dl_api_connector"} +datalens-core = {path = "../dl_core"} +datalens-sqlalchemy-metrica-api = {path = "../dl_sqlalchemy_metrica_api"} + +[tool.poetry.plugins] +[tool.poetry.plugins."dl_api_lib.connectors"] +appmetrica_api = "dl_connector_metrica.api.connector:AppMetricaApiApiConnector" +metrica_api = "dl_connector_metrica.api.connector:MetricaApiApiConnector" + +[tool.poetry.plugins."dl_core.connectors"] +appmetrica_api = "dl_connector_metrica.core.connector:AppMetricaApiCoreConnector" +metrica_api = "dl_connector_metrica.core.connector:MetricaApiCoreConnector" + +[tool.poetry.plugins."dl_formula.connectors"] +metrica = "dl_connector_metrica.formula.connector:MetricaFormulaConnector" + +[tool.poetry.plugins."dl_formula_ref.plugins"] +metrica = "dl_connector_metrica.formula_ref.plugin:MetricaFormulaRefPlugin" + +[tool.poetry.group.tests.dependencies] +pytest = ">=7.2.2" +datalens-formula-testing = {path = "../dl_formula_testing"} +datalens-testing = {path = "../dl_testing"} +datalens-core-testing = {path = "../dl_core_testing"} + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + 
"poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens.pytest.ext] +root_dir = "dl_connector_metrica_tests/" +target_path = "ext" +labels = ["ext_public"] + +[datalens.pytest.unit] +root_dir = "dl_connector_metrica_tests/" +target_path = "unit" +skip_compose = "true" + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true + +[datalens.i18n.domains] +dl_connector_metrica = [ + {path = "dl_connector_metrica/api"}, + {path = "dl_connector_metrica/core"}, +] diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/api/api_schema/connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/api/api_schema/connection.py index c3e885d9f..867c534f4 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/api/api_schema/connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/api/api_schema/connection.py @@ -9,9 +9,10 @@ ) from dl_api_connector.api_schema.connection_sql import ClassicSQLConnectionSchema from dl_api_connector.api_schema.extras import FieldExtra +import dl_core.marshmallow as core_ma_fields + from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode -import dl_core.marshmallow as core_ma_fields class PostgreSQLConnectionSchema( diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_form/form_config.py b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_form/form_config.py index 8e095f7df..251dfdea4 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_form/form_config.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_form/form_config.py @@ -25,6 +25,7 @@ from dl_api_connector.form_config.models.rows.base import FormRow from dl_api_connector.form_config.models.shortcuts.rows import 
RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_postgresql.api.connection_info import PostgreSQLConnectionInfoProvider from dl_connector_postgresql.api.i18n.localizer import Translatable from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_info.py b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_info.py index 4939cd5d4..d837f87ca 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_info.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_postgresql.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connector.py b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connector.py index 2ebf050ec..5c8439f9c 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/api/connector.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/api/connector.py @@ -9,6 +9,10 @@ ApiConnector, ApiSourceDefinition, ) +from dl_api_lib.query.registry import MQMFactorySettingItem +from dl_constants.enums import QueryProcessingMode +from dl_query_processing.multi_query.factory import NoCompengMultiQueryMutatorFactory + from dl_connector_postgresql.api.api_schema.connection import PostgreSQLConnectionSchema from dl_connector_postgresql.api.connection_form.form_config import PostgreSQLConnectionFormFactory from dl_connector_postgresql.api.connection_info import PostgreSQLConnectionInfoProvider @@ -54,3 +58,10 @@ class PostgreSQLApiConnector(ApiConnector): formula_dialect_name = DIALECT_NAME_POSTGRESQL translation_configs = frozenset(CONFIGS) compeng_dialect = PostgreSQLDialect.COMPENG + multi_query_mutation_factories = ApiConnector.multi_query_mutation_factories + ( + 
MQMFactorySettingItem( + query_proc_mode=QueryProcessingMode.native_wf, + dialects=PostgreSQLDialect.and_above(PostgreSQLDialect.POSTGRESQL_9_4).to_list(), + factory_cls=NoCompengMultiQueryMutatorFactory, + ), + ) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/api/i18n/localizer.py b/lib/dl_connector_postgresql/dl_connector_postgresql/api/i18n/localizer.py index afa5cf012..9acef8d9e 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/api/i18n/localizer.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_postgresql as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_postgresql as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/connector.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/connector.py index b854f2ef6..4b326bb22 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/connector.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/connector.py @@ -1,3 +1,12 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, +) +from dl_core.connectors.sql_base.connector import ( + SQLSubselectCoreSourceDefinitionBase, + SQLTableCoreSourceDefinitionBase, +) + from dl_connector_postgresql.core.postgresql.constants import ( BACKEND_TYPE_POSTGRES, CONNECTION_TYPE_POSTGRES, @@ -19,14 +28,6 @@ ) from dl_connector_postgresql.core.postgresql_base.sa_types import SQLALCHEMY_POSTGRES_TYPES from dl_connector_postgresql.core.postgresql_base.type_transformer import PostgreSQLTypeTransformer -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, -) -from dl_core.connectors.sql_base.connector import ( - 
SQLSubselectCoreSourceDefinitionBase, - SQLTableCoreSourceDefinitionBase, -) class PostgreSQLCoreConnectionDefinition(CoreConnectionDefinition): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/constants.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/constants.py index 549f47993..3db40a7b0 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/constants.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/constants.py @@ -1,11 +1,11 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, SourceBackendType, ) BACKEND_TYPE_POSTGRES = SourceBackendType.declare("POSTGRES") CONNECTION_TYPE_POSTGRES = ConnectionType.declare("postgres") -SOURCE_TYPE_PG_TABLE = CreateDSFrom.declare("PG_TABLE") -SOURCE_TYPE_PG_SUBSELECT = CreateDSFrom.declare("PG_SUBSELECT") +SOURCE_TYPE_PG_TABLE = DataSourceType.declare("PG_TABLE") +SOURCE_TYPE_PG_SUBSELECT = DataSourceType.declare("PG_SUBSELECT") diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source.py index 05598d173..9e983f0c6 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source.py @@ -5,14 +5,8 @@ FrozenSet, ) -from dl_connector_postgresql.core.postgresql.constants import ( - CONNECTION_TYPE_POSTGRES, - SOURCE_TYPE_PG_SUBSELECT, - SOURCE_TYPE_PG_TABLE, -) -from dl_connector_postgresql.core.postgresql_base.query_compiler import PostgreSQLQueryCompiler from dl_constants.enums import ( - CreateDSFrom, + DataSourceType, JoinType, ) from dl_core.data_source.sql import ( @@ -21,6 +15,13 @@ SubselectDataSource, ) +from dl_connector_postgresql.core.postgresql.constants import ( + CONNECTION_TYPE_POSTGRES, + SOURCE_TYPE_PG_SUBSELECT, + SOURCE_TYPE_PG_TABLE, +) 
+from dl_connector_postgresql.core.postgresql_base.query_compiler import PostgreSQLQueryCompiler + class PostgreSQLDataSourceMixin(BaseSQLDataSource): compiler_cls = PostgreSQLQueryCompiler @@ -36,7 +37,7 @@ class PostgreSQLDataSourceMixin(BaseSQLDataSource): conn_type = CONNECTION_TYPE_POSTGRES @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in (SOURCE_TYPE_PG_TABLE, SOURCE_TYPE_PG_SUBSELECT) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source_migration.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source_migration.py index bda682373..c243107fa 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source_migration.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/data_source_migration.py @@ -1,8 +1,9 @@ +from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator + from dl_connector_postgresql.core.postgresql.constants import ( SOURCE_TYPE_PG_SUBSELECT, SOURCE_TYPE_PG_TABLE, ) -from dl_core.connectors.sql_base.data_source_migration import DefaultSQLDataSourceMigrator class PostgreSQLDataSourceMigrator(DefaultSQLDataSourceMigrator): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/connection.py index f443c6d75..00116dfe9 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/connection.py @@ -4,11 +4,12 @@ ) import uuid -from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES -from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL from dl_constants.enums import 
RawSQLLevel from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES +from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL + def make_postgresql_saved_connection( sync_usm: SyncUSManager, diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/exec_factory.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/exec_factory.py index 38964609b..853063811 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/exec_factory.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/testing/exec_factory.py @@ -1,11 +1,12 @@ from typing import Type -from dl_connector_postgresql.core.postgresql_base.adapters_postgres import PostgresAdapter -from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter from dl_core.connection_executors.models.connection_target_dto_base import BaseSQLConnTargetDTO from dl_core_testing.executors import ExecutorFactoryBase +from dl_connector_postgresql.core.postgresql_base.adapters_postgres import PostgresAdapter +from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO + class PostgresExecutorFactory(ExecutorFactoryBase): def get_dto_class(self) -> Type[BaseSQLConnTargetDTO]: diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/us_connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/us_connection.py index c2e6c4813..b23ed5b83 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/us_connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql/us_connection.py @@ -2,14 +2,15 @@ from typing import ClassVar +from dl_core.us_connection_base import 
DataSourceTemplate +from dl_i18n.localizer_base import Localizer + from dl_connector_postgresql.core.postgresql.constants import ( SOURCE_TYPE_PG_SUBSELECT, SOURCE_TYPE_PG_TABLE, ) from dl_connector_postgresql.core.postgresql.dto import PostgresConnDTO from dl_connector_postgresql.core.postgresql_base.us_connection import ConnectionPostgreSQLBase -from dl_core.us_connection_base import DataSourceTemplate -from dl_i18n.localizer_base import Localizer class ConnectionPostgreSQL(ConnectionPostgreSQLBase): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_base_postgres.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_base_postgres.py index b612b84be..30b3f36f5 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_base_postgres.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_base_postgres.py @@ -9,11 +9,12 @@ import attr import sqlalchemy.dialects.postgresql as sa_pg +from dl_core.connectors.ssl_common.adapter import BaseSSLCertAdapter +from dl_core.db.native_type import SATypeSpec + from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO -from dl_core.connectors.ssl_common.adapter import BaseSSLCertAdapter -from dl_core.db.native_type import SATypeSpec # One way to obtain this data: diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_postgres.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_postgres.py index fc5ef984d..2cb3c0eef 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_postgres.py +++ 
b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/adapters_postgres.py @@ -15,6 +15,9 @@ import attr import sqlalchemy as sa +from dl_core.connection_executors.adapters.adapters_base_sa_classic import BaseClassicAdapter +from dl_core.connection_models.common_models import TableIdent + from dl_connector_postgresql.core.postgresql_base.adapters_base_postgres import ( OID_KNOWLEDGE, PG_LIST_SOURCES_ALL_SCHEMAS_SQL, @@ -22,8 +25,6 @@ ) from dl_connector_postgresql.core.postgresql_base.error_transformer import sync_pg_db_error_transformer from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO -from dl_core.connection_executors.adapters.adapters_base_sa_classic import BaseClassicAdapter -from dl_core.connection_models.common_models import TableIdent if TYPE_CHECKING: diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/async_adapters_postgres.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/async_adapters_postgres.py index 356e23a8f..e71e6df05 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/async_adapters_postgres.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/async_adapters_postgres.py @@ -26,14 +26,6 @@ import sqlalchemy as sa from dl_app_tools.profiling_base import generic_profiler_async -from dl_connector_postgresql.core.postgresql_base.adapters_base_postgres import ( - OID_KNOWLEDGE, - PG_LIST_SOURCES_ALL_SCHEMAS_SQL, - BasePostgresAdapter, -) -from dl_connector_postgresql.core.postgresql_base.error_transformer import make_async_pg_error_transformer -from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO -from dl_connector_postgresql.core.postgresql_base.utils import compile_pg_query from dl_constants.types import ( TBIChunksGen, TBIDataRow, @@ -68,6 +60,15 @@ from dl_sqlalchemy_postgres import AsyncBIPGDialect from 
dl_sqlalchemy_postgres.asyncpg import DBAPIMock +from dl_connector_postgresql.core.postgresql_base.adapters_base_postgres import ( + OID_KNOWLEDGE, + PG_LIST_SOURCES_ALL_SCHEMAS_SQL, + BasePostgresAdapter, +) +from dl_connector_postgresql.core.postgresql_base.error_transformer import make_async_pg_error_transformer +from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO +from dl_connector_postgresql.core.postgresql_base.utils import compile_pg_query + if TYPE_CHECKING: from dl_core.connection_models.common_models import SchemaIdent diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/connection_executors.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/connection_executors.py index 345999dd0..724ce5ff3 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/connection_executors.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/connection_executors.py @@ -8,13 +8,14 @@ import attr +from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + from dl_connector_postgresql.core.postgresql_base.adapters_postgres import PostgresAdapter from dl_connector_postgresql.core.postgresql_base.async_adapters_postgres import AsyncPostgresAdapter from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_connector_postgresql.core.postgresql_base.dto import PostgresConnDTOBase from dl_connector_postgresql.core.postgresql_base.target_dto import PostgresConnTargetDTO -from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor _BASE_POSTGRES_ADAPTER_TV = TypeVar("_BASE_POSTGRES_ADAPTER_TV", bound=CommonBaseDirectAdapter) diff --git 
a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/dto.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/dto.py index 74d9ddc17..3c3902fd6 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/dto.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/dto.py @@ -4,9 +4,10 @@ import attr -from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_core.connection_models.dto_defs import DefaultSQLDTO +from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode + @attr.s(frozen=True) class PostgresConnDTOBase(DefaultSQLDTO): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/error_transformer.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/error_transformer.py index 3b9f26b12..7162523d9 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/error_transformer.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/error_transformer.py @@ -1,10 +1,6 @@ import asyncpg.exceptions as asyncpg_exc import psycopg2.errors -from dl_connector_postgresql.core.postgresql_base.exc import ( - PgDoublePrecisionRoundError, - PostgresSourceDoesNotExistError, -) from dl_core.connectors.base.error_transformer import ( ChainedDbErrorTransformer, DbErrorTransformer, @@ -17,6 +13,11 @@ from dl_core.connectors.base.error_transformer import ErrorTransformerRule as Rule import dl_core.exc as exc +from dl_connector_postgresql.core.postgresql_base.exc import ( + PgDoublePrecisionRoundError, + PostgresSourceDoesNotExistError, +) + sync_pg_db_error_transformer: DbErrorTransformer = ChainedDbErrorTransformer( [ diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/sa_types.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/sa_types.py index 
c70817237..e6656c2a7 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/sa_types.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/sa_types.py @@ -2,12 +2,6 @@ from sqlalchemy.dialects import postgresql as pg_types -from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES -from dl_connector_postgresql.core.postgresql_base.type_transformer import ( - PG_TYPES_FLOAT, - PG_TYPES_INT, - PG_TYPES_STRING, -) from dl_core.db.sa_types_base import ( make_native_type, simple_instantiator, @@ -15,6 +9,13 @@ typed_instantiator, ) +from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES +from dl_connector_postgresql.core.postgresql_base.type_transformer import ( + PG_TYPES_FLOAT, + PG_TYPES_INT, + PG_TYPES_STRING, +) + SQLALCHEMY_POSTGRES_BASE_TYPES = ( *PG_TYPES_INT, diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/storage_schemas/connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/storage_schemas/connection.py index f5dca7942..09025cd4a 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/storage_schemas/connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/storage_schemas/connection.py @@ -1,8 +1,9 @@ from marshmallow import fields as ma_fields +from dl_core.us_manager.storage_schemas.connection import ConnectionSQLDataStorageSchema + from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_connector_postgresql.core.postgresql_base.us_connection import ConnectionPostgreSQLBase -from dl_core.us_manager.storage_schemas.connection import ConnectionSQLDataStorageSchema class ConnectionPostgreSQLBaseDataStorageSchema( diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/target_dto.py 
b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/target_dto.py index 122bd6df1..2724016ec 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/target_dto.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/target_dto.py @@ -9,9 +9,10 @@ import attr -from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_core.connection_executors.models.connection_target_dto_base import BaseSQLConnTargetDTO +from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode + if TYPE_CHECKING: from dl_constants.types import TJSONLike diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/type_transformer.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/type_transformer.py index 024ea0abe..56f5f5110 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/type_transformer.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/type_transformer.py @@ -1,8 +1,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import postgresql as pg_types -from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( TypeTransformer, UTCDatetimeTypeCaster, @@ -11,6 +10,8 @@ ) from dl_sqlalchemy_postgres.base import CITEXT +from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES + PG_TYPES_INT = frozenset((pg_types.SMALLINT, pg_types.INTEGER, pg_types.BIGINT)) PG_TYPES_FLOAT = frozenset((pg_types.REAL, pg_types.DOUBLE_PRECISION, pg_types.NUMERIC)) @@ -23,47 +24,47 @@ class PostgreSQLTypeTransformer(TypeTransformer): **TypeTransformer.casters, # type: ignore # TODO: fix # Preliminary asyncpg-related hack: before inserting, make all datetimes UTC-naive. 
# A correct fix would require different BI-types for naive/aware datetimes. - BIType.datetime: UTCDatetimeTypeCaster(), - BIType.genericdatetime: UTCTimezoneDatetimeTypeCaster(), + UserDataType.datetime: UTCDatetimeTypeCaster(), + UserDataType.genericdatetime: UTCTimezoneDatetimeTypeCaster(), } native_to_user_map = { - **{make_native_type(CONNECTION_TYPE_POSTGRES, t): BIType.integer for t in PG_TYPES_INT}, # type: ignore # TODO: fix - **{make_native_type(CONNECTION_TYPE_POSTGRES, t): BIType.float for t in PG_TYPES_FLOAT}, - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BOOLEAN): BIType.boolean, - **{make_native_type(CONNECTION_TYPE_POSTGRES, t): BIType.string for t in PG_TYPES_STRING}, - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DATE): BIType.date, - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.UUID): BIType.uuid, - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ENUM): BIType.string, + **{make_native_type(CONNECTION_TYPE_POSTGRES, t): UserDataType.integer for t in PG_TYPES_INT}, # type: ignore # TODO: fix + **{make_native_type(CONNECTION_TYPE_POSTGRES, t): UserDataType.float for t in PG_TYPES_FLOAT}, + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BOOLEAN): UserDataType.boolean, + **{make_native_type(CONNECTION_TYPE_POSTGRES, t): UserDataType.string for t in PG_TYPES_STRING}, + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DATE): UserDataType.date, + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.UUID): UserDataType.uuid, + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ENUM): UserDataType.string, **{ - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): BIType.array_int + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): UserDataType.array_int for typecls in PG_TYPES_INT }, **{ - 
make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): BIType.array_float + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): UserDataType.array_float for typecls in PG_TYPES_FLOAT }, **{ - make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): BIType.array_str + make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(typecls)): UserDataType.array_str for typecls in PG_TYPES_STRING }, - make_native_type(CONNECTION_TYPE_POSTGRES, sa.sql.sqltypes.NullType): BIType.unsupported, + make_native_type(CONNECTION_TYPE_POSTGRES, sa.sql.sqltypes.NullType): UserDataType.unsupported, } user_to_native_map = { - BIType.integer: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BIGINT), - BIType.float: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DOUBLE_PRECISION), - BIType.boolean: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BOOLEAN), - BIType.string: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), - BIType.date: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DATE), - BIType.datetime: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP), - BIType.genericdatetime: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP), - BIType.geopoint: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), - BIType.geopolygon: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), - BIType.uuid: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.UUID), - BIType.markup: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), - BIType.array_int: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.BIGINT)), - BIType.array_float: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.DOUBLE_PRECISION)), - BIType.array_str: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.TEXT)), - BIType.tree_str: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.TEXT)), - BIType.unsupported: make_native_type(CONNECTION_TYPE_POSTGRES, 
sa.sql.sqltypes.NullType), + UserDataType.integer: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BIGINT), + UserDataType.float: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DOUBLE_PRECISION), + UserDataType.boolean: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.BOOLEAN), + UserDataType.string: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), + UserDataType.date: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.DATE), + UserDataType.datetime: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP), + UserDataType.genericdatetime: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TIMESTAMP), + UserDataType.geopoint: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), + UserDataType.geopolygon: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), + UserDataType.uuid: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.UUID), + UserDataType.markup: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.TEXT), + UserDataType.array_int: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.BIGINT)), + UserDataType.array_float: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.DOUBLE_PRECISION)), + UserDataType.array_str: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.TEXT)), + UserDataType.tree_str: make_native_type(CONNECTION_TYPE_POSTGRES, pg_types.ARRAY(pg_types.TEXT)), + UserDataType.unsupported: make_native_type(CONNECTION_TYPE_POSTGRES, sa.sql.sqltypes.NullType), } diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py index 7a056d9e5..9ae7b4292 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/core/postgresql_base/us_connection.py @@ -7,13 +7,14 @@ import attr -from 
dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode from dl_core.connection_executors.sync_base import SyncConnExecutorBase from dl_core.us_connection_base import ( ClassicConnectionSQL, ConnectionBase, ) +from dl_connector_postgresql.core.postgresql_base.constants import PGEnforceCollateMode + class ConnectionPostgreSQLBase(ClassicConnectionSQL): has_schema = True diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/db_testing/connector.py b/lib/dl_connector_postgresql/dl_connector_postgresql/db_testing/connector.py index 1157be5ab..dc371e42d 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/db_testing/connector.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/db_testing/connector.py @@ -1,8 +1,9 @@ +from dl_db_testing.connectors.base.connector import DbTestingConnector + from dl_connector_postgresql.db_testing.engine_wrapper import ( BiPGEngineWrapper, PGEngineWrapper, ) -from dl_db_testing.connectors.base.connector import DbTestingConnector class PostgreSQLDbTestingConnector(DbTestingConnector): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/connector.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/connector.py index 5d55dd6ba..e7c752662 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/connector.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/connector.py @@ -1,10 +1,11 @@ from sqlalchemy.dialects.postgresql.base import PGDialect +from dl_formula.connectors.base.connector import FormulaConnector + from dl_connector_postgresql.formula.constants import PostgreSQLDialect from dl_connector_postgresql.formula.definitions.all import DEFINITIONS from dl_connector_postgresql.formula.literal import GenericPostgreSQLLiteralizer from dl_connector_postgresql.formula.type_constructor import PostgreSQLTypeConstructor -from dl_formula.connectors.base.connector import FormulaConnector class 
PostgreSQLFormulaConnector(FormulaConnector): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/conditional_blocks.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/conditional_blocks.py index 930eaeaef..5bf9701d8 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/conditional_blocks.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/conditional_blocks.py @@ -1,6 +1,7 @@ -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D import dl_formula.definitions.conditional_blocks as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + DEFINITIONS_COND_BLOCKS = [ # _case_block_ diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_aggregation.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_aggregation.py index 590826f4e..aa9362d6d 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_aggregation.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_aggregation.py @@ -1,7 +1,6 @@ import sqlalchemy as sa from sqlalchemy.sql import ClauseElement -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common import ( quantile_value, @@ -10,6 +9,8 @@ import dl_formula.definitions.functions_aggregation as base from dl_formula.definitions.literals import un_literal +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_array.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_array.py index cbb9c38a6..beb653936 100644 --- 
a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_array.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_array.py @@ -7,8 +7,6 @@ ) from sqlalchemy.sql.type_api import TypeEngine -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D -from dl_connector_postgresql.formula.definitions.common import PG_INT_64_TO_CHAR_FMT from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_array as base from dl_formula.definitions.literals import ( @@ -17,6 +15,9 @@ ) from dl_formula.shortcuts import n +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D +from dl_connector_postgresql.formula.definitions.common import PG_INT_64_TO_CHAR_FMT + V = TranslationVariant.make @@ -32,6 +33,19 @@ def _array_contains(array: ClauseElement, value: ClauseElement) -> ClauseElement ) +def _array_notcontains(array: ClauseElement, value: ClauseElement) -> ClauseElement: + if isinstance(value, Null): + return array == sa.func.array_remove(array, None) + elif is_literal(value): + return value != sa.func.ALL(sa.func.array_remove(array, None)) + else: + return n.func.IF( + n.func.ISNULL(value.self_group()), + array == sa.func.array_remove(array, None), + value != sa.func.ALL(sa.func.array_remove(array, None)), + ) + + DEFINITIONS_ARRAY = [ # arr_remove base.FuncArrayRemoveLiteralNull( @@ -137,6 +151,12 @@ def _array_contains(array: ClauseElement, value: ClauseElement) -> ClauseElement V(D.POSTGRESQL, _array_contains), ] ), + # notcontains + base.FuncArrayNotContains( + variants=[ + V(D.POSTGRESQL, _array_notcontains), + ] + ), # contains_all base.FuncArrayContainsAll( variants=[ diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_datetime.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_datetime.py index b3cd12e42..25a876b4d 100644 --- 
a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_datetime.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_datetime.py @@ -5,7 +5,6 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -19,6 +18,8 @@ from dl_formula.definitions.literals import un_literal from dl_formula.shortcuts import n +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + if TYPE_CHECKING: from dl_formula.translation.context import TranslationCtx diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_logical.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_logical.py index dd46f8c95..72f0e078b 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_logical.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_logical.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -8,6 +7,8 @@ import dl_formula.definitions.functions_logical as base from dl_formula.shortcuts import n +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_markup.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_markup.py index 468eb1233..353c6a16f 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_markup.py +++ 
b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D import dl_formula.definitions.functions_markup as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + DEFINITIONS_MARKUP = [ # + diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_math.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_math.py index 8a9ce2d2c..ccb477a7f 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_math.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_math.py @@ -3,7 +3,6 @@ import sqlalchemy as sa from sqlalchemy.sql.elements import ClauseElement -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import TranslationVariant @@ -13,6 +12,8 @@ un_literal, ) +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_special.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_special.py index fb468d720..0255a9192 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_special.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_special.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_special as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = 
TranslationVariant.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_string.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_string.py index 2d2dacc7d..802a658d9 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_string.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_string.py @@ -1,7 +1,6 @@ import sqlalchemy as sa import sqlalchemy.dialects.postgresql as sa_postgresql -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -10,6 +9,8 @@ import dl_formula.definitions.functions_string as base from dl_formula.shortcuts import n +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make @@ -48,6 +49,10 @@ ] ), base.FuncContainsNonString.for_dialect(D.POSTGRESQL), + # notcontains + base.FuncNotContainsConst.for_dialect(D.POSTGRESQL), + base.FuncNotContainsNonConst.for_dialect(D.POSTGRESQL), + base.FuncNotContainsNonString.for_dialect(D.POSTGRESQL), # endswith base.FuncEndswithConst.for_dialect(D.POSTGRESQL), base.FuncEndswithNonConst( diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_type.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_type.py index c43df3df4..c44808190 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_type.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_type.py @@ -1,8 +1,6 @@ import sqlalchemy as sa import sqlalchemy.dialects.postgresql as sa_postgresql -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D -from dl_connector_postgresql.formula.definitions.common import PG_INT_64_TO_CHAR_FMT 
from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import ( @@ -16,6 +14,9 @@ from dl_formula.definitions.scope import Scope from dl_formula.shortcuts import n +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D +from dl_connector_postgresql.formula.definitions.common import PG_INT_64_TO_CHAR_FMT + V = TranslationVariant.make VW = TranslationVariantWrapped.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_window.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_window.py index f0f714fca..135564083 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_window.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/functions_window.py @@ -1,6 +1,7 @@ -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D import dl_formula.definitions.functions_window as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + SUPPORTED_DIALECTS = D.COMPENG | D.and_above(D.POSTGRESQL_9_4) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_binary.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_binary.py index 2e505be0f..9182a28d3 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_binary.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_binary.py @@ -1,11 +1,12 @@ import sqlalchemy as sa import sqlalchemy.dialects.postgresql as sa_postgresql -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common_datetime import DAY_SEC import dl_formula.definitions.operators_binary as base +from 
dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_ternary.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_ternary.py index 049625cf8..0e7637ebf 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_ternary.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_ternary.py @@ -1,6 +1,7 @@ -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D import dl_formula.definitions.operators_ternary as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + DEFINITIONS_TERNARY = [ # between diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_unary.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_unary.py index 13e943e4e..aa02b9e8f 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_unary.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/definitions/operators_unary.py @@ -1,6 +1,7 @@ -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D import dl_formula.definitions.operators_unary as base +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + DEFINITIONS_UNARY = [ # isfalse diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/literal.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/literal.py index daff0f39c..2ebef96db 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula/literal.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula/literal.py @@ -9,7 +9,6 @@ import sqlalchemy.dialects.postgresql as sa_postgresql from sqlalchemy.sql.sqltypes import Integer -from dl_connector_postgresql.formula.constants import 
PostgreSQLDialect as D from dl_formula.connectors.base.literal import ( Literal, Literalizer, @@ -17,6 +16,8 @@ ) from dl_formula.core.dialect import DialectCombo +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + PG_SMALLINT_MIN = -32768 PG_SMALLINT_MAX = 32767 diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/human_dialects.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/human_dialects.py index a4329374d..aa37a859b 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/human_dialects.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/human_dialects.py @@ -1,6 +1,7 @@ +from dl_formula_ref.texts import StyledDialect + from dl_connector_postgresql.formula.constants import PostgreSQLDialect from dl_connector_postgresql.formula_ref.i18n import Translatable -from dl_formula_ref.texts import StyledDialect HUMAN_DIALECTS = { diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/i18n.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/i18n.py index c5d6e9772..7df52ccad 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/i18n.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/i18n.py @@ -2,10 +2,11 @@ import attr -import dl_connector_postgresql as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_postgresql as package + DOMAIN = f"dl_formula_ref_{package.__name__}" diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/plugin.py b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/plugin.py index 41f242664..0a9c91256 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/plugin.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql/formula_ref/plugin.py @@ -1,3 +1,6 @@ +from 
dl_formula_ref.functions.type_conversion import DbCastExtension +from dl_formula_ref.plugins.base.plugin import FormulaRefPlugin + from dl_connector_postgresql.formula.constants import PostgreSQLDialect from dl_connector_postgresql.formula.definitions.functions_type import FuncDbCastPostgreSQLBase from dl_connector_postgresql.formula_ref.human_dialects import HUMAN_DIALECTS @@ -5,8 +8,6 @@ CONFIGS, Translatable, ) -from dl_formula_ref.functions.type_conversion import DbCastExtension -from dl_formula_ref.plugins.base.plugin import FormulaRefPlugin class PostgresSQLFormulaRefPlugin(FormulaRefPlugin): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/base.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/base.py index 2f56f80ef..77c5de051 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/base.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/base.py @@ -4,6 +4,8 @@ from dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import StandardizedDataApiTestBase from dl_api_lib_testing.dataset_base import DatasetTestBase +from dl_constants.enums import RawSQLLevel + from dl_connector_postgresql.core.postgresql.constants import ( CONNECTION_TYPE_POSTGRES, SOURCE_TYPE_PG_TABLE, @@ -13,7 +15,6 @@ CoreConnectionSettings, ) from dl_connector_postgresql_tests.db.core.base import BasePostgreSQLTestClass -from dl_constants.enums import RawSQLLevel class PostgreSQLConnectionTestBase(BasePostgreSQLTestClass, ConnectionTestBase): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_complex_queries.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_complex_queries.py new file mode 100644 index 000000000..3f52e7ea4 --- /dev/null +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_complex_queries.py @@ -0,0 +1,8 @@ +from dl_api_lib_testing.connector.complex_queries 
import DefaultBasicComplexQueryTestSuite +from dl_constants.enums import QueryProcessingMode + +from dl_connector_postgresql_tests.db.api.base import PostgreSQLDataApiTestBase + + +class TestPostgreSQLBasicComplexQueries(PostgreSQLDataApiTestBase, DefaultBasicComplexQueryTestSuite): + query_processing_mode = QueryProcessingMode.native_wf diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_connection.py index 18044d8ee..45bbd7247 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_connection.py @@ -1,5 +1,6 @@ from dl_api_client.dsmaker.api.http_sync_base import SyncHttpClientBase from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + from dl_connector_postgresql_tests.db.api.base import PostgreSQLConnectionTestBase diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dashsql.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dashsql.py index 9b2c5b7c0..40350fcc7 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dashsql.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dashsql.py @@ -2,13 +2,14 @@ import pytest from dl_api_lib_testing.connector.dashsql_suite import DefaultDashSQLTestSuite +from dl_testing.test_data.sql_queries import DASHSQL_EXAMPLE_PARAMS + from dl_connector_postgresql_tests.db.api.base import PostgreSQLDashSQLConnectionTest from dl_connector_postgresql_tests.db.config import ( DASHSQL_QUERY, QUERY_WITH_PARAMS, SUBSELECT_QUERY_FULL, ) -from dl_testing.test_data.sql_queries import DASHSQL_EXAMPLE_PARAMS class TestPostgresDashSQL(PostgreSQLDashSQLConnectionTest, DefaultDashSQLTestSuite): diff --git 
a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_data.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_data.py index 653ccf6e9..aba9b2bec 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_data.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_data.py @@ -13,9 +13,8 @@ DefaultConnectorDataResultTestSuite, ) from dl_api_lib_testing.data_api_base import DataApiTestParams -from dl_connector_postgresql_tests.db.api.base import PostgreSQLDataApiTestBase from dl_constants.enums import ( - BIType, + UserDataType, WhereClauseOperation, ) from dl_core_testing.database import ( @@ -29,14 +28,10 @@ from dl_sqlalchemy_postgres.base import CITEXT from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_postgresql_tests.db.api.base import PostgreSQLDataApiTestBase + class TestPostgreSQLDataResult(PostgreSQLDataApiTestBase, DefaultConnectorDataResultTestSuite): - test_params = RegulatedTestParams( - mark_tests_failed={ - DefaultConnectorDataResultTestSuite.test_array_not_contains_filter: "BI-4951", # TODO: FIXME - } - ) - def test_isnull( self, saved_dataset: Dataset, data_api_test_params: DataApiTestParams, data_api: SyncHttpDataApiV2 ) -> None: @@ -155,7 +150,7 @@ def test_bigint_literal( data_api: SyncHttpDataApiV2, ) -> None: columns = [ - C(name="bigint_value", user_type=BIType.integer, vg=lambda rn, **kwargs: 10000002877 + rn), + C(name="bigint_value", user_type=UserDataType.integer, vg=lambda rn, **kwargs: 10000002877 + rn), ] db_table = make_table(db, columns=columns) params = self.get_dataset_params(dataset_params, db_table) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dataset.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dataset.py index af9387ec6..f901d1fbf 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dataset.py +++ 
b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/api/test_dataset.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + from dl_connector_postgresql_tests.db.api.base import PostgreSQLDatasetTestBase diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/config.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/config.py index 0f6182337..5688057e4 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/config.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/config.py @@ -2,10 +2,11 @@ from typing import ClassVar from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration -from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_core_testing.configuration import DefaultCoreTestConfiguration from dl_testing.containers import get_test_container_hostport +from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D + # Infra settings CORE_TEST_CONFIG = DefaultCoreTestConfiguration( diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/conftest.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/conftest.py index 8434b8c53..11ba5d3df 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/conftest.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/conftest.py @@ -1,7 +1,8 @@ from dl_api_lib_testing.initialization import initialize_api_lib_test -from dl_connector_postgresql_tests.db.config import API_TEST_CONFIG from dl_formula_testing.forced_literal import forced_literal_use +from dl_connector_postgresql_tests.db.config import API_TEST_CONFIG + pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/base.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/base.py 
index 8e1da979a..35e33a92f 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/base.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/base.py @@ -6,13 +6,14 @@ import shortuuid import dl_configs.utils as bi_configs_utils +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.database import Db +from dl_core_testing.testcases.connection import BaseConnectionTestClass + from dl_connector_postgresql.core.postgresql.constants import CONNECTION_TYPE_POSTGRES from dl_connector_postgresql.core.postgresql.testing.connection import make_postgresql_saved_connection from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL import dl_connector_postgresql_tests.db.config as test_config -from dl_core.us_manager.us_manager_sync import SyncUSManager -from dl_core_testing.database import Db -from dl_core_testing.testcases.connection import BaseConnectionTestClass class BasePostgreSQLTestClass(BaseConnectionTestClass[ConnectionPostgreSQL]): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection.py index 636c717c3..0636bc74d 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection.py @@ -5,17 +5,18 @@ import pytest -from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL -from dl_connector_postgresql_tests.db.core.base import ( - BasePostgreSQLTestClass, - BaseSslPostgreSQLTestClass, -) from dl_core.us_connection_base import ( ConnectionSQL, DataSourceTemplate, ) from dl_core_testing.testcases.connection import DefaultConnectionTestClass +from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL +from dl_connector_postgresql_tests.db.core.base import ( + 
BasePostgreSQLTestClass, + BaseSslPostgreSQLTestClass, +) + _CONN_TV = TypeVar("_CONN_TV", bound=ConnectionSQL) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection_executor.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection_executor.py index c0bbc053e..cdd3886c1 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection_executor.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_connection_executor.py @@ -10,13 +10,7 @@ import sqlalchemy as sa from sqlalchemy.types import TypeEngine -from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL -from dl_connector_postgresql_tests.db.config import CoreConnectionSettings -from dl_connector_postgresql_tests.db.core.base import ( - BasePostgreSQLTestClass, - BaseSslPostgreSQLTestClass, -) -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.connection_executors import AsyncConnExecutorBase from dl_core.connection_executors.sync_base import SyncConnExecutorBase from dl_core.connection_models.common_models import ( @@ -32,6 +26,13 @@ from dl_sqlalchemy_postgres.base import CITEXT from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL +from dl_connector_postgresql_tests.db.config import CoreConnectionSettings +from dl_connector_postgresql_tests.db.core.base import ( + BasePostgreSQLTestClass, + BaseSslPostgreSQLTestClass, +) + class PostgreSQLSyncAsyncConnectionExecutorCheckBase( BasePostgreSQLTestClass, @@ -51,15 +52,15 @@ def check_db_version(self, db_version: Optional[str]) -> None: assert db_version is not None assert "." 
in db_version - def get_schemas_for_type_recognition(self) -> dict[str, Sequence[tuple[TypeEngine, BIType]]]: + def get_schemas_for_type_recognition(self) -> dict[str, Sequence[tuple[TypeEngine, UserDataType]]]: return { "types_postgres": [ - (sa.Integer(), BIType.integer), - (sa.Float(), BIType.float), - (sa.String(length=256), BIType.string), - (sa.Date(), BIType.date), - (sa.DateTime(), BIType.genericdatetime), - (CITEXT(), BIType.string), + (sa.Integer(), UserDataType.integer), + (sa.Float(), UserDataType.float), + (sa.String(length=256), UserDataType.string), + (sa.Date(), UserDataType.date), + (sa.DateTime(), UserDataType.genericdatetime), + (CITEXT(), UserDataType.string), ], } diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_data_source.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_data_source.py index 74edfce07..6546004a3 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_data_source.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_data_source.py @@ -1,19 +1,8 @@ import pytest -from dl_connector_postgresql.core.postgresql.constants import ( - SOURCE_TYPE_PG_SUBSELECT, - SOURCE_TYPE_PG_TABLE, -) -from dl_connector_postgresql.core.postgresql.data_source import ( - PostgreSQLDataSource, - PostgreSQLSubselectDataSource, -) -from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL -from dl_connector_postgresql_tests.db.config import SUBSELECT_QUERY_FULL -from dl_connector_postgresql_tests.db.core.base import BasePostgreSQLTestClass from dl_constants.enums import ( - BIType, RawSQLLevel, + UserDataType, ) from dl_core.data_source_spec.sql import ( StandardSchemaSQLDataSourceSpec, @@ -25,6 +14,18 @@ DefaultDataSourceTestClass, ) +from dl_connector_postgresql.core.postgresql.constants import ( + SOURCE_TYPE_PG_SUBSELECT, + SOURCE_TYPE_PG_TABLE, +) +from dl_connector_postgresql.core.postgresql.data_source 
import ( + PostgreSQLDataSource, + PostgreSQLSubselectDataSource, +) +from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL +from dl_connector_postgresql_tests.db.config import SUBSELECT_QUERY_FULL +from dl_connector_postgresql_tests.db.core.base import BasePostgreSQLTestClass + class TestPostgreSQLTableDataSource( BasePostgreSQLTestClass, @@ -46,7 +47,7 @@ def initial_data_source_spec(self, sample_table) -> StandardSchemaSQLDataSourceS ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) @@ -70,7 +71,7 @@ def initial_data_source_spec(self, sample_table) -> SubselectDataSourceSpec: ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return list(TABLE_SPEC_SAMPLE_SUPERSTORE.table_schema) diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_dataset.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_dataset.py index 9c067aa88..410e659cb 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_dataset.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/core/test_dataset.py @@ -1,8 +1,9 @@ +from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite +from dl_testing.regulated_test import RegulatedTestParams + from dl_connector_postgresql.core.postgresql.constants import SOURCE_TYPE_PG_TABLE from dl_connector_postgresql.core.postgresql.us_connection import ConnectionPostgreSQL from dl_connector_postgresql_tests.db.core.base import BasePostgreSQLTestClass -from dl_core_testing.testcases.dataset import DefaultDatasetTestSuite -from dl_testing.regulated_test import RegulatedTestParams class TestPostgreSQLDataset(BasePostgreSQLTestClass, 
DefaultDatasetTestSuite[ConnectionPostgreSQL]): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/base.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/base.py index 96004fd2b..76f24d4bd 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/base.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/base.py @@ -1,8 +1,9 @@ import pytest +from dl_formula_testing.testcases.base import FormulaConnectorTestBase + from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D from dl_connector_postgresql_tests.db.config import DB_URLS -from dl_formula_testing.testcases.base import FormulaConnectorTestBase class PostgreSQLTestBase(FormulaConnectorTestBase): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_conditional_blocks.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_conditional_blocks.py index 36805fb7a..a3ac1470a 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_conditional_blocks.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_conditional_blocks.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.conditional_blocks import DefaultConditionalBlockFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.conditional_blocks import DefaultConditionalBlockFormulaConnectorTestSuite class TestConditionalBlockPostgreSQL_9_3(PostgreSQL_9_3TestBase, DefaultConditionalBlockFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_aggregation.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_aggregation.py index fd6ee3bf6..efa24fca3 100644 --- 
a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_aggregation.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_aggregation.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_aggregation import DefaultMainAggFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_aggregation import DefaultMainAggFunctionFormulaConnectorTestSuite class MainAggFunctionPostgreTestSuite(DefaultMainAggFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_array.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_array.py index 61be0f3b2..0163150ad 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_array.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_array.py @@ -1,11 +1,12 @@ import sqlalchemy as sa +from dl_formula_testing.evaluator import DbEvaluator +from dl_formula_testing.testcases.functions_array import DefaultArrayFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.evaluator import DbEvaluator -from dl_formula_testing.testcases.functions_array import DefaultArrayFunctionFormulaConnectorTestSuite class ArrayFunctionPostgreTestSuite(DefaultArrayFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_datetime.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_datetime.py index 414f997e5..4fe3c5e93 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_datetime.py +++ 
b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_datetime.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite class DateTimeFunctionPostgreSQLTestSuite(DefaultDateTimeFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_logical.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_logical.py index a25f3d8ed..d2bb8392c 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_logical.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_logical.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_logical import DefaultLogicalFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_logical import DefaultLogicalFunctionFormulaConnectorTestSuite class LogicalFunctionPostgreTestSuite(DefaultLogicalFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_markup.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_markup.py index 04078d55f..910c183ae 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_markup.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_markup.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_markup import DefaultMarkupFunctionFormulaConnectorTestSuite + from 
dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_markup import DefaultMarkupFunctionFormulaConnectorTestSuite class TestMarkupFunctionPostgreSQL_9_3(PostgreSQL_9_3TestBase, DefaultMarkupFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_math.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_math.py index db2cad1e0..2617cf56a 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_math.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_math.py @@ -1,11 +1,12 @@ import pytest +from dl_formula_testing.evaluator import DbEvaluator +from dl_formula_testing.testcases.functions_math import DefaultMathFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.evaluator import DbEvaluator -from dl_formula_testing.testcases.functions_math import DefaultMathFunctionFormulaConnectorTestSuite class PostgreSQLMatchTestMixin: diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_string.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_string.py index 7e4c9e55d..035b8e0f7 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_string.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_string.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_string import DefaultStringFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_string import 
DefaultStringFunctionFormulaConnectorTestSuite class TestStringFunctionPostgreSQL_9_3(PostgreSQL_9_3TestBase, DefaultStringFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_type_conversion.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_type_conversion.py index 72f893a9e..6643c634a 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_type_conversion.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_type_conversion.py @@ -6,10 +6,6 @@ import sqlalchemy as sa import sqlalchemy.exc as sa_exc -from dl_connector_postgresql_tests.db.formula.base import ( - PostgreSQL_9_3TestBase, - PostgreSQL_9_4TestBase, -) import dl_formula.core.exc as exc from dl_formula_testing.evaluator import DbEvaluator from dl_formula_testing.testcases.functions_type_conversion import ( @@ -30,6 +26,11 @@ to_str, ) +from dl_connector_postgresql_tests.db.formula.base import ( + PostgreSQL_9_3TestBase, + PostgreSQL_9_4TestBase, +) + # STR diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_window.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_window.py index 3702e0e7d..8aadc06aa 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_window.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_functions_window.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.functions_window import DefaultWindowFunctionFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( CompengTestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.functions_window import DefaultWindowFunctionFormulaConnectorTestSuite class TestWindowFunctionPostgreSQL_9_4(PostgreSQL_9_4TestBase, 
DefaultWindowFunctionFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_literals.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_literals.py index 5be2fa094..3003098ce 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_literals.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_literals.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.literals import DefaultLiteralFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.literals import DefaultLiteralFormulaConnectorTestSuite class ConditionalBlockPostgreSQLTestSuite(DefaultLiteralFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_misc_funcs.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_misc_funcs.py index 32af30b68..6aac2ac43 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_misc_funcs.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_misc_funcs.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.misc_funcs import DefaultMiscFunctionalityConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.misc_funcs import DefaultMiscFunctionalityConnectorTestSuite class TestMiscFunctionalityPostgreSQL_9_3(PostgreSQL_9_3TestBase, DefaultMiscFunctionalityConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_operators.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_operators.py index 40b17e926..fa3a755e2 100644 --- 
a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_operators.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/db/formula/test_operators.py @@ -1,8 +1,9 @@ +from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite + from dl_connector_postgresql_tests.db.formula.base import ( PostgreSQL_9_3TestBase, PostgreSQL_9_4TestBase, ) -from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite class OperatorPostgreSQLTestSuite(DefaultOperatorFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/formula/test_dialect.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/formula/test_dialect.py index 4e3ad0fbb..1157805bb 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/formula/test_dialect.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/formula/test_dialect.py @@ -1,6 +1,7 @@ +from dl_formula_testing.testcases.dialect import DefaultDialectFormulaConnectorTestSuite + from dl_connector_postgresql.formula.constants import DIALECT_NAME_POSTGRESQL from dl_connector_postgresql.formula.constants import PostgreSQLDialect as D -from dl_formula_testing.testcases.dialect import DefaultDialectFormulaConnectorTestSuite class DialectPostgreSQLTestSuite(DefaultDialectFormulaConnectorTestSuite): diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_connection_form.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_connection_form.py index 525fa2cb4..6bea68897 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_connection_form.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from 
dl_connector_postgresql.api.connection_form.form_config import PostgreSQLConnectionFormFactory from dl_connector_postgresql.api.i18n.localizer import CONFIGS as BI_CONNECTOR_POSTGRESQL_CONFIGS diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_error_transformer.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_error_transformer.py index 20add6e2b..6d75eeaa1 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_error_transformer.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_error_transformer.py @@ -3,11 +3,12 @@ from _socket import gaierror import psycopg2 +from dl_core.exc import SourceHostNotKnownError + from dl_connector_postgresql.core.postgresql_base.error_transformer import ( make_async_pg_error_transformer, sync_pg_db_error_transformer, ) -from dl_core.exc import SourceHostNotKnownError NAME_OR_SERVICE_NOT_KNOWN_MSG = """ diff --git a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_type_transformer.py b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_type_transformer.py index 451f7a6ff..3cb05f73a 100644 --- a/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_type_transformer.py +++ b/lib/dl_connector_postgresql/dl_connector_postgresql_tests/unit/test_type_transformer.py @@ -1,11 +1,11 @@ import pytest -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_connector_postgresql.core.postgresql_base.type_transformer import PostgreSQLTypeTransformer -@pytest.mark.parametrize("array_type", (BIType.array_int, BIType.array_float, BIType.array_str)) +@pytest.mark.parametrize("array_type", (UserDataType.array_int, UserDataType.array_float, UserDataType.array_str)) def test_null_array_conversion(array_type): tt = PostgreSQLTypeTransformer assert tt.cast_for_output(None, user_t=array_type) is None diff --git 
a/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py b/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py index 0908f2a36..a337be411 100644 --- a/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/api_schema/connection.py @@ -6,6 +6,7 @@ from dl_api_connector.api_schema.connection_base_fields import secret_string_field from dl_api_connector.api_schema.connection_sql import ClassicSQLConnectionSchema from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_promql.core.us_connection import PromQLConnection diff --git a/lib/dl_connector_promql/dl_connector_promql/api/connection_form/form_config.py b/lib/dl_connector_promql/dl_connector_promql/api/connection_form/form_config.py index e41020aa8..38597f093 100644 --- a/lib/dl_connector_promql/dl_connector_promql/api/connection_form/form_config.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/connection_form/form_config.py @@ -18,6 +18,7 @@ import dl_api_connector.form_config.models.rows as C from dl_api_connector.form_config.models.shortcuts.rows import RowConstructor from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_promql.api.connection_info import PromQLConnectionInfoProvider diff --git a/lib/dl_connector_promql/dl_connector_promql/api/connection_info.py b/lib/dl_connector_promql/dl_connector_promql/api/connection_info.py index a406a69ee..b9e0fe4d5 100644 --- a/lib/dl_connector_promql/dl_connector_promql/api/connection_info.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_promql.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_promql/dl_connector_promql/api/connector.py b/lib/dl_connector_promql/dl_connector_promql/api/connector.py index 55d176641..5436d5772 100644 --- 
a/lib/dl_connector_promql/dl_connector_promql/api/connector.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/connector.py @@ -9,6 +9,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_promql.api.api_schema.connection import PromQLConnectionSchema from dl_connector_promql.api.connection_form.form_config import PromQLConnectionFormFactory from dl_connector_promql.api.connection_info import PromQLConnectionInfoProvider diff --git a/lib/dl_connector_promql/dl_connector_promql/api/i18n/localizer.py b/lib/dl_connector_promql/dl_connector_promql/api/i18n/localizer.py index 1ccccbe47..372feeab9 100644 --- a/lib/dl_connector_promql/dl_connector_promql/api/i18n/localizer.py +++ b/lib/dl_connector_promql/dl_connector_promql/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_promql as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_promql as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_promql/dl_connector_promql/core/adapter.py b/lib/dl_connector_promql/dl_connector_promql/core/adapter.py index e408cf412..424deff73 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/adapter.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/adapter.py @@ -20,7 +20,6 @@ import sqlalchemy as sa from dl_app_tools.profiling_base import generic_profiler_async -from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL from dl_constants.enums import ConnectionType from dl_core.connection_executors.adapters.adapters_base_sa_classic import ( BaseClassicAdapter, @@ -32,9 +31,10 @@ from dl_core.exc import DatabaseQueryError from dl_core.utils import make_url +from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL + if TYPE_CHECKING: - from dl_connector_promql.core.target_dto import PromQLConnTargetDTO from dl_constants.types import TBIChunksGen from 
dl_core.connection_executors.models.db_adapter_data import ( DBAdapterQuery, @@ -47,6 +47,8 @@ TableIdent, ) + from dl_connector_promql.core.target_dto import PromQLConnTargetDTO + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_promql/dl_connector_promql/core/connection_executors.py b/lib/dl_connector_promql/dl_connector_promql/core/connection_executors.py index 56b9f783b..447034d51 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/connection_executors.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/connection_executors.py @@ -7,12 +7,13 @@ import attr +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor + from dl_connector_promql.core.adapter import ( AsyncPromQLAdapter, PromQLAdapter, ) from dl_connector_promql.core.target_dto import PromQLConnTargetDTO -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor if TYPE_CHECKING: diff --git a/lib/dl_connector_promql/dl_connector_promql/core/connector.py b/lib/dl_connector_promql/dl_connector_promql/core/connector.py index 62b30ad09..30369090d 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/connector.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/connector.py @@ -1,3 +1,9 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) + from dl_connector_promql.core.adapter import ( AsyncPromQLAdapter, PromQLAdapter, @@ -15,11 +21,6 @@ from dl_connector_promql.core.storage_schemas.connection import PromQLConnectionDataStorageSchema from dl_connector_promql.core.type_transformer import PromQLTypeTransformer from dl_connector_promql.core.us_connection import PromQLConnection -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, - CoreSourceDefinition, -) class PromQLCoreConnectionDefinition(CoreConnectionDefinition): diff --git 
a/lib/dl_connector_promql/dl_connector_promql/core/constants.py b/lib/dl_connector_promql/dl_connector_promql/core/constants.py index 8bc4e9e67..30deea251 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/constants.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/constants.py @@ -1,10 +1,10 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, SourceBackendType, ) BACKEND_TYPE_PROMQL = SourceBackendType.declare("PROMQL") CONNECTION_TYPE_PROMQL = ConnectionType.declare("promql") -SOURCE_TYPE_PROMQL = CreateDSFrom.declare("PROMQL") +SOURCE_TYPE_PROMQL = DataSourceType.declare("PROMQL") diff --git a/lib/dl_connector_promql/dl_connector_promql/core/data_source.py b/lib/dl_connector_promql/dl_connector_promql/core/data_source.py index 5513cd3e1..be6659844 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/data_source.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/data_source.py @@ -5,9 +5,10 @@ Optional, ) -from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL from dl_core.data_source.sql import PseudoSQLDataSource +from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL + class PromQLDataSource(PseudoSQLDataSource): preview_enabled: ClassVar[bool] = False diff --git a/lib/dl_connector_promql/dl_connector_promql/core/dto.py b/lib/dl_connector_promql/dl_connector_promql/core/dto.py index 74dde3bcc..d2383dcee 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/dto.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/dto.py @@ -2,9 +2,10 @@ import attr -from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL from dl_core.connection_models.dto_defs import DefaultSQLDTO +from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL + @attr.s(frozen=True) class PromQLConnDTO(DefaultSQLDTO): diff --git a/lib/dl_connector_promql/dl_connector_promql/core/storage_schemas/connection.py 
b/lib/dl_connector_promql/dl_connector_promql/core/storage_schemas/connection.py index 89be90ae0..c57324982 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/storage_schemas/connection.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/storage_schemas/connection.py @@ -1,8 +1,9 @@ from marshmallow import fields as ma_fields -from dl_connector_promql.core.us_connection import PromQLConnection from dl_core.us_manager.storage_schemas.connection import ConnectionSQLDataStorageSchema +from dl_connector_promql.core.us_connection import PromQLConnection + class PromQLConnectionDataStorageSchema(ConnectionSQLDataStorageSchema[PromQLConnection.DataModel]): TARGET_CLS = PromQLConnection.DataModel diff --git a/lib/dl_connector_promql/dl_connector_promql/core/type_transformer.py b/lib/dl_connector_promql/dl_connector_promql/core/type_transformer.py index 50f8aab16..4108a876c 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/type_transformer.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/type_transformer.py @@ -1,23 +1,24 @@ from __future__ import annotations -from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( TypeTransformer, make_native_type, ) +from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL + class PromQLTypeTransformer(TypeTransformer): conn_type = CONNECTION_TYPE_PROMQL native_to_user_map = { - make_native_type(CONNECTION_TYPE_PROMQL, "unix_timestamp"): BIType.genericdatetime, - make_native_type(CONNECTION_TYPE_PROMQL, "float64"): BIType.float, - make_native_type(CONNECTION_TYPE_PROMQL, "string"): BIType.string, + make_native_type(CONNECTION_TYPE_PROMQL, "unix_timestamp"): UserDataType.genericdatetime, + make_native_type(CONNECTION_TYPE_PROMQL, "float64"): UserDataType.float, + make_native_type(CONNECTION_TYPE_PROMQL, "string"): UserDataType.string, } 
user_to_native_map = dict( [(bi_type, native_type) for native_type, bi_type in native_to_user_map.items()] + [ - (BIType.datetime, make_native_type(CONNECTION_TYPE_PROMQL, "unix_timestamp")), + (UserDataType.datetime, make_native_type(CONNECTION_TYPE_PROMQL, "unix_timestamp")), ] ) diff --git a/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py b/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py index a25cbe2b7..9a77b4aea 100644 --- a/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py +++ b/lib/dl_connector_promql/dl_connector_promql/core/us_connection.py @@ -4,9 +4,10 @@ import attr -from dl_connector_promql.core.dto import PromQLConnDTO from dl_core.us_connection_base import ClassicConnectionSQL +from dl_connector_promql.core.dto import PromQLConnDTO + class PromQLConnection(ClassicConnectionSQL): allow_cache: ClassVar[bool] = True diff --git a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/base.py b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/base.py index eed9d3b69..5fd335729 100644 --- a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/base.py +++ b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/base.py @@ -2,11 +2,6 @@ from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration from dl_api_lib_testing.connection_base import ConnectionTestBase -from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL -from dl_connector_promql_tests.db.config import ( - API_CONNECTION_SETTINGS, - API_TEST_CONFIG, -) from dl_constants.enums import ( ConnectionType, RawSQLLevel, @@ -17,6 +12,12 @@ ) from dl_core_testing.engine_wrapper import TestingEngineWrapper +from dl_connector_promql.core.constants import CONNECTION_TYPE_PROMQL +from dl_connector_promql_tests.db.config import ( + API_CONNECTION_SETTINGS, + API_TEST_CONFIG, +) + class PromQLConnectionTestBase(ConnectionTestBase): bi_compeng_pg_on = False diff --git 
a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_connection.py b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_connection.py index 8fcc85688..5005b9972 100644 --- a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_connection.py +++ b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_connection.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.connector.connection_suite import DefaultConnectorConnectionTestSuite + from dl_connector_promql_tests.db.api.base import PromQLConnectionTestBase diff --git a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_dashsql.py b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_dashsql.py index 1891990e9..0bc1a01a4 100644 --- a/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_dashsql.py +++ b/lib/dl_connector_promql/dl_connector_promql_tests/db/api/test_dashsql.py @@ -2,6 +2,7 @@ import pytest from dl_api_lib_testing.connector.dashsql_suite import DefaultDashSQLTestSuite + from dl_connector_promql_tests.db.api.base import PromQLConnectionTestBase diff --git a/lib/dl_connector_promql/dl_connector_promql_tests/db/conftest.py b/lib/dl_connector_promql/dl_connector_promql_tests/db/conftest.py index 92e53af1d..dab890872 100644 --- a/lib/dl_connector_promql/dl_connector_promql_tests/db/conftest.py +++ b/lib/dl_connector_promql/dl_connector_promql_tests/db/conftest.py @@ -1,4 +1,5 @@ from dl_api_lib_testing.initialization import initialize_api_lib_test + from dl_connector_promql_tests.db.config import API_TEST_CONFIG diff --git a/lib/dl_connector_promql/dl_connector_promql_tests/unit/test_connection_form.py b/lib/dl_connector_promql/dl_connector_promql_tests/unit/test_connection_form.py index 2fbe438f4..81f11904b 100644 --- a/lib/dl_connector_promql/dl_connector_promql_tests/unit/test_connection_form.py +++ b/lib/dl_connector_promql/dl_connector_promql_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS 
as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from dl_connector_promql.api.connection_form.form_config import PromQLConnectionFormFactory from dl_connector_promql.api.i18n.localizer import CONFIGS as BI_CONNECTOR_PROMQL_CONFIGS diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py index 07e473af5..9411a7c3f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/api/api_schema/connection.py @@ -4,6 +4,7 @@ from dl_api_connector.api_schema.connection_base_fields import secret_string_field from dl_api_connector.api_schema.connection_mixins import RawSQLLevelMixin from dl_api_connector.api_schema.extras import FieldExtra + from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_form/form_config.py b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_form/form_config.py index c7df0882d..4ca435ba3 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_form/form_config.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_form/form_config.py @@ -30,6 +30,7 @@ ) from dl_api_connector.form_config.models.rows.prepared.base import PreparedRow from dl_configs.connectors_settings import ConnectorSettingsBase + from dl_connector_snowflake.api.connection_info import SnowflakeConnectionInfoProvider from dl_connector_snowflake.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_info.py b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_info.py index 6b67de06f..92beed0dc 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_info.py +++ 
b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connection_info.py @@ -1,4 +1,5 @@ from dl_api_connector.connection_info import ConnectionInfoProvider + from dl_connector_snowflake.api.i18n.localizer import Translatable diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connector.py b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connector.py index 895fc80d3..6494b62df 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/connector.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/api/connector.py @@ -3,6 +3,7 @@ ApiConnector, ApiSourceDefinition, ) + from dl_connector_snowflake.api.api_schema.connection import SnowFlakeConnectionSchema from dl_connector_snowflake.api.api_schema.source import ( # type: ignore SnowFlakeTableDataSourceSchema, diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/api/i18n/localizer.py b/lib/dl_connector_snowflake/dl_connector_snowflake/api/i18n/localizer.py index bf09e715d..7a6fae4d0 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/api/i18n/localizer.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/api/i18n/localizer.py @@ -2,10 +2,11 @@ import attr -import dl_connector_snowflake as package from dl_i18n.localizer_base import Translatable as BaseTranslatable from dl_i18n.localizer_base import TranslationConfig +import dl_connector_snowflake as package + DOMAIN = f"{package.__name__}" CONFIGS = [ diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/auth.py b/lib/dl_connector_snowflake/dl_connector_snowflake/auth.py index bb1866f36..6ed9066b1 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/auth.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/auth.py @@ -15,6 +15,7 @@ PredefinedIntervalsRetrier, THeaders, ) + from dl_connector_snowflake.core.dto import SnowFlakeConnDTO from dl_connector_snowflake.core.exc import SnowflakeGetAccessTokenError diff --git 
a/lib/dl_connector_snowflake/dl_connector_snowflake/core/adapters.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/adapters.py index 3ca5e4067..594485eff 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/adapters.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/adapters.py @@ -13,9 +13,6 @@ from sqlalchemy.engine import Engine from sqlalchemy.sql.type_api import TypeEngine -from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE -from dl_connector_snowflake.core.error_transformer import snowflake_error_transformer -from dl_connector_snowflake.core.target_dto import SnowFlakeConnTargetDTO from dl_core.connection_executors.adapters.adapters_base_sa import BaseSAAdapter from dl_core.connection_executors.adapters.adapters_base_sa_classic import BaseClassicAdapter from dl_core.connection_executors.models.db_adapter_data import ( @@ -28,6 +25,10 @@ ) from dl_core.db.native_type import SATypeSpec +from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE +from dl_connector_snowflake.core.error_transformer import snowflake_error_transformer +from dl_connector_snowflake.core.target_dto import SnowFlakeConnTargetDTO + def construct_creator_func(target_dto: SnowFlakeConnTargetDTO) -> Callable: def get_connection() -> Any: diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/connection_executors.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/connection_executors.py index c35db2dcb..ba2f3f658 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/connection_executors.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/connection_executors.py @@ -5,15 +5,16 @@ import attr +from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter +from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor +from dl_core.reporting.notifications import get_notification_record + from 
dl_connector_snowflake.auth import SFAuthProvider from dl_connector_snowflake.core.adapters import SnowFlakeDefaultAdapter from dl_connector_snowflake.core.constants import NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE from dl_connector_snowflake.core.dto import SnowFlakeConnDTO from dl_connector_snowflake.core.exc import SnowflakeRefreshTokenInvalid from dl_connector_snowflake.core.target_dto import SnowFlakeConnTargetDTO -from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter -from dl_core.connection_executors.async_sa_executors import DefaultSqlAlchemyConnExecutor -from dl_core.reporting.notifications import get_notification_record LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/connector.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/connector.py index e0f6a88f5..305373991 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/connector.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/connector.py @@ -1,3 +1,9 @@ +from dl_core.connectors.base.connector import ( + CoreConnectionDefinition, + CoreConnector, + CoreSourceDefinition, +) + from dl_connector_snowflake.core.adapters import SnowFlakeDefaultAdapter from dl_connector_snowflake.core.connection_executors import SnowFlakeSyncConnExecutor from dl_connector_snowflake.core.constants import ( @@ -23,11 +29,6 @@ ) from dl_connector_snowflake.core.type_transformer import SnowFlakeTypeTransformer from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake -from dl_core.connectors.base.connector import ( - CoreConnectionDefinition, - CoreConnector, - CoreSourceDefinition, -) class SnowFlakeCoreConnectionDefinition(CoreConnectionDefinition): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/constants.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/constants.py index f0ba2d6d0..c1b8e7bdf 100644 --- 
a/lib/dl_connector_snowflake/dl_connector_snowflake/core/constants.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/constants.py @@ -1,6 +1,6 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, NotificationType, SourceBackendType, ) @@ -10,8 +10,8 @@ CONNECTION_TYPE_SNOWFLAKE = ConnectionType.declare("snowflake") # FIXME: Move the declaration here -SOURCE_TYPE_SNOWFLAKE_TABLE = CreateDSFrom.declare("SNOWFLAKE_TABLE") -SOURCE_TYPE_SNOWFLAKE_SUBSELECT = CreateDSFrom.declare("SNOWFLAKE_SUBSELECT") +SOURCE_TYPE_SNOWFLAKE_TABLE = DataSourceType.declare("SNOWFLAKE_TABLE") +SOURCE_TYPE_SNOWFLAKE_SUBSELECT = DataSourceType.declare("SNOWFLAKE_SUBSELECT") NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE = NotificationType.declare("snowflake_refresh_token_soon_to_expire") diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/data_source.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/data_source.py index 48a4f7c9b..1e48b664c 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/data_source.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/data_source.py @@ -8,17 +8,7 @@ Type, ) -from dl_connector_snowflake.core.constants import ( - CONNECTION_TYPE_SNOWFLAKE, - SOURCE_TYPE_SNOWFLAKE_SUBSELECT, - SOURCE_TYPE_SNOWFLAKE_TABLE, -) -from dl_connector_snowflake.core.data_source_spec import ( - SnowFlakeSubselectDataSourceSpec, - SnowFlakeTableDataSourceSpec, -) -from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.connection_models import ( TableDefinition, TableIdent, @@ -33,6 +23,17 @@ from dl_core.db import SchemaInfo from dl_core.utils import sa_plain_text +from dl_connector_snowflake.core.constants import ( + CONNECTION_TYPE_SNOWFLAKE, + SOURCE_TYPE_SNOWFLAKE_SUBSELECT, + SOURCE_TYPE_SNOWFLAKE_TABLE, +) +from dl_connector_snowflake.core.data_source_spec 
import ( + SnowFlakeSubselectDataSourceSpec, + SnowFlakeTableDataSourceSpec, +) +from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake + if TYPE_CHECKING: from dl_core.connection_executors.sync_base import SyncConnExecutorBase @@ -46,7 +47,7 @@ def get_connection_cls(cls) -> Type[ConnectionSQLSnowFlake]: return ConnectionSQLSnowFlake @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return source_type in (SOURCE_TYPE_SNOWFLAKE_TABLE, SOURCE_TYPE_SNOWFLAKE_SUBSELECT) diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/dto.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/dto.py index f49d1af96..b7931144b 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/dto.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/dto.py @@ -5,10 +5,11 @@ import attr -from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE from dl_core.connection_models.dto_defs import ConnDTO from dl_core.utils import secrepr +from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE + @attr.s(frozen=True) class SnowFlakeConnDTO(ConnDTO): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/error_transformer.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/error_transformer.py index 2defc18fc..6c6a57ae7 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/error_transformer.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/error_transformer.py @@ -1,6 +1,5 @@ import sqlalchemy -from dl_connector_snowflake.core.exc import SnowflakeAccessTokenError from dl_core.connectors.base.error_transformer import ( ChainedDbErrorTransformer, DbErrorTransformer, @@ -8,6 +7,8 @@ from dl_core.connectors.base.error_transformer import ErrorTransformerRule as Rule from dl_core.connectors.base.error_transformer import ExcMatchCondition +from 
dl_connector_snowflake.core.exc import SnowflakeAccessTokenError + def is_access_token_error() -> ExcMatchCondition: def func(exc: Exception) -> bool: diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/lifecycle.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/lifecycle.py index 2bbfa59cf..390b49af6 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/lifecycle.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/lifecycle.py @@ -1,8 +1,9 @@ import logging -from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_core.connectors.base.lifecycle import ConnectionLifecycleManager +from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/notifications.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/notifications.py index 3b0787639..2b7ab766a 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/notifications.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/notifications.py @@ -1,7 +1,8 @@ -from dl_connector_snowflake.core.constants import NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE from dl_constants.enums import NotificationLevel from dl_core.reporting.notifications import BaseNotification +from dl_connector_snowflake.core.constants import NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE + class SnowflakeRefreshTokenSoonToExpire(BaseNotification): type = NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/connection.py index e4769a9a4..fb4843188 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/connection.py @@ -1,14 +1,15 @@ from marshmallow import 
fields as ma_fields from marshmallow.validate import Regexp -from dl_connector_snowflake.core.constants import ACCOUNT_NAME_RE -from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_core.us_manager.storage_schemas.connection import ( BaseConnectionDataStorageSchema, CacheableConnectionDataSchemaMixin, SubselectConnectionDataSchemaMixin, ) +from dl_connector_snowflake.core.constants import ACCOUNT_NAME_RE +from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake + class SnowFlakeConnectionDataStorageSchema( BaseConnectionDataStorageSchema[ConnectionSQLSnowFlake.DataModel], diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/data_source_spec.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/data_source_spec.py index a76e2507e..d345a910a 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/data_source_spec.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/storage_schemas/data_source_spec.py @@ -1,13 +1,14 @@ -from dl_connector_snowflake.core.data_source_spec import ( - SnowFlakeSubselectDataSourceSpec, - SnowFlakeTableDataSourceSpec, -) from dl_core.us_manager.storage_schemas.data_source_spec_base import ( BaseSQLDataSourceSpecStorageSchema, SubselectSQLDataSourceSpecStorageSchemaMixin, TableSQLDataSourceSpecStorageSchemaMixin, ) +from dl_connector_snowflake.core.data_source_spec import ( + SnowFlakeSubselectDataSourceSpec, + SnowFlakeTableDataSourceSpec, +) + class SnowFlakeTableDataSourceSpecStorageSchema( TableSQLDataSourceSpecStorageSchemaMixin, diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/target_dto.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/target_dto.py index a7d92e49e..21fcc3f42 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/target_dto.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/target_dto.py @@ -4,10 +4,11 @@ import attr -from 
dl_connector_snowflake.core.dto import SnowFlakeConnDTO from dl_core.connection_executors.models.connection_target_dto_base import ConnTargetDTO from dl_core.utils import secrepr +from dl_connector_snowflake.core.dto import SnowFlakeConnDTO + @attr.s(frozen=True, kw_only=True) class SnowFlakeConnTargetDTO(ConnTargetDTO): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/testing/connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/testing/connection.py index 2315d6faf..8e1d1ac2f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/testing/connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/testing/connection.py @@ -2,11 +2,12 @@ from typing import Optional import uuid -from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE -from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_constants.enums import RawSQLLevel from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE +from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake + def make_snowflake_saved_connection( sync_usm: SyncUSManager, diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/type_transformer.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/type_transformer.py index 7b2d5606e..e9464b8b1 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/type_transformer.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/type_transformer.py @@ -1,12 +1,13 @@ from snowflake import sqlalchemy as ssa -from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.conversion_base import ( TypeTransformer, make_native_type, ) +from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE + SNOW_TYPES_INT = frozenset( ( @@ 
-42,25 +43,25 @@ class SnowFlakeTypeTransformer(TypeTransformer): conn_type = CONNECTION_TYPE_SNOWFLAKE native_to_user_map = { - make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATE): BIType.date, - make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATETIME): BIType.genericdatetime, - **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): BIType.integer for t in SNOW_TYPES_INT}, - **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): BIType.float for t in SNOW_TYPES_FLOAT}, - **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): BIType.string for t in SNOW_TYPES_STRING}, - make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.BOOLEAN): BIType.boolean, + make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATE): UserDataType.date, + make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATETIME): UserDataType.genericdatetime, + **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): UserDataType.integer for t in SNOW_TYPES_INT}, + **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): UserDataType.float for t in SNOW_TYPES_FLOAT}, + **{make_native_type(CONNECTION_TYPE_SNOWFLAKE, t): UserDataType.string for t in SNOW_TYPES_STRING}, + make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.BOOLEAN): UserDataType.boolean, # todo: review datetime/genericdatime/datetimetz/timestamp # todo: array, geo } user_to_native_map = { - BIType.date: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATE), - BIType.genericdatetime: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATETIME), - BIType.integer: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.INT), - BIType.string: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.STRING), - BIType.boolean: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.BOOLEAN), + UserDataType.date: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATE), + UserDataType.genericdatetime: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.DATETIME), + UserDataType.integer: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.INT), + UserDataType.string: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.STRING), + 
UserDataType.boolean: make_native_type(CONNECTION_TYPE_SNOWFLAKE, ssa.BOOLEAN), } casters = { **TypeTransformer.casters, - # BIType.date: GSheetsDateTypeCaster(), - # BIType.datetime: GSheetsDatetimeTypeCaster(), - # BIType.genericdatetime: GSheetsGenericDatetimeTypeCaster(), + # UserDataType.date: GSheetsDateTypeCaster(), + # UserDataType.datetime: GSheetsDatetimeTypeCaster(), + # UserDataType.genericdatetime: GSheetsGenericDatetimeTypeCaster(), } diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/core/us_connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake/core/us_connection.py index 801f15ed1..5ffd27a46 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/core/us_connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/core/us_connection.py @@ -13,15 +13,6 @@ import marshmallow as ma from dl_api_commons.reporting.models import NotificationReportingRecord -from dl_connector_snowflake.auth import SFAuthProvider -from dl_connector_snowflake.core.constants import ( - ACCOUNT_NAME_RE, - CONNECTION_TYPE_SNOWFLAKE, - NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE, - SOURCE_TYPE_SNOWFLAKE_SUBSELECT, - SOURCE_TYPE_SNOWFLAKE_TABLE, -) -from dl_connector_snowflake.core.dto import SnowFlakeConnDTO from dl_core.base_models import ( ConnCacheableDataModelMixin, ConnectionDataModelBase, @@ -37,6 +28,16 @@ from dl_i18n.localizer_base import Localizer from dl_utils.utils import DataKey +from dl_connector_snowflake.auth import SFAuthProvider +from dl_connector_snowflake.core.constants import ( + ACCOUNT_NAME_RE, + CONNECTION_TYPE_SNOWFLAKE, + NOTIF_TYPE_SF_REFRESH_TOKEN_SOON_TO_EXPIRE, + SOURCE_TYPE_SNOWFLAKE_SUBSELECT, + SOURCE_TYPE_SNOWFLAKE_TABLE, +) +from dl_connector_snowflake.core.dto import SnowFlakeConnDTO + if TYPE_CHECKING: from dl_core.services_registry import ServicesRegistry diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/db_testing/connector.py 
b/lib/dl_connector_snowflake/dl_connector_snowflake/db_testing/connector.py index ac1b643b4..534bb609f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/db_testing/connector.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/db_testing/connector.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake.db_testing.engine_wrapper import SnowFlakeEngineWrapper from dl_db_testing.connectors.base.connector import DbTestingConnector +from dl_connector_snowflake.db_testing.engine_wrapper import SnowFlakeEngineWrapper + class SnowFlakeDbTestingConnector(DbTestingConnector): engine_wrapper_classes = (SnowFlakeEngineWrapper,) diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/connector.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/connector.py index 40d2af8e4..c3306a7bc 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/connector.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/connector.py @@ -1,9 +1,10 @@ from snowflake.sqlalchemy.snowdialect import SnowflakeDialect as SASnowflakeDialect +from dl_formula.connectors.base.connector import FormulaConnector + from dl_connector_snowflake.formula.constants import SnowFlakeDialect as SnowFlakeDialectNS from dl_connector_snowflake.formula.definitions.all import DEFINITIONS from dl_connector_snowflake.formula.literal import SnowFlakeLiteralizer -from dl_formula.connectors.base.connector import FormulaConnector class SnowFlakeFormulaConnector(FormulaConnector): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/conditional_blocks.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/conditional_blocks.py index da0e5f061..2d7d25055 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/conditional_blocks.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/conditional_blocks.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from 
dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.conditional_blocks as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_aggregation.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_aggregation.py index 071660863..2b3c5e532 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_aggregation.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_aggregation.py @@ -1,7 +1,6 @@ import sqlalchemy as sa from sqlalchemy import within_group -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import TranslationVariant @@ -9,6 +8,8 @@ import dl_formula.definitions.functions_aggregation as base from dl_formula.definitions.literals import un_literal +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_datetime.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_datetime.py index d1801b689..c0d652d6f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_datetime.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_datetime.py @@ -1,10 +1,11 @@ import sqlalchemy as sa -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common import raw_sql import dl_formula.definitions.functions_datetime as base 
+from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_logical.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_logical.py index e4963b460..18d3fefec 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_logical.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_logical.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_logical as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_markup.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_markup.py index e08246a19..cfd1651ee 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_markup.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D import dl_formula.definitions.functions_markup as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + DEFINITIONS_MARKUP = [ # + diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_math.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_math.py index 7831c7784..99b20d4c2 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_math.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_math.py @@ -2,12 +2,13 @@ import sqlalchemy as sa 
-from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.functions_math as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_string.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_string.py index f070e37d6..91e3e766f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_string.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_string.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import ( TranslationVariant, TranslationVariantWrapped, @@ -8,6 +7,8 @@ import dl_formula.definitions.functions_string as base from dl_formula.shortcuts import n +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make VW = TranslationVariantWrapped.make @@ -43,6 +44,9 @@ class FuncIEndswithNonConstSF(base.FuncIEndswithNonConst): ] ), base.FuncContainsNonString.for_dialect(D.SNOWFLAKE), + # notcontains + base.FuncNotContainsNonConst.for_dialect(D.SNOWFLAKE), + base.FuncNotContainsNonString.for_dialect(D.SNOWFLAKE), # endswith base.FuncEndswithNonString.for_dialect(D.SNOWFLAKE), base.FuncEndswithNonConst(variants=[V(D.SNOWFLAKE, sa.func.ENDSWITH)]), diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_type.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_type.py index e375c507d..c3895a1d7 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_type.py +++ 
b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/functions_type.py @@ -1,6 +1,5 @@ import sqlalchemy as sa -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.core.datatype import DataType from dl_formula.definitions.args import ArgTypeSequence from dl_formula.definitions.base import ( @@ -12,6 +11,8 @@ from dl_formula.definitions.scope import Scope from dl_formula.definitions.type_strategy import Fixed +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_binary.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_binary.py index 4df838cad..352a64e18 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_binary.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_binary.py @@ -1,10 +1,11 @@ import sqlalchemy as sa -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import TranslationVariant from dl_formula.definitions.common_datetime import DAY_SEC import dl_formula.definitions.operators_binary as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_ternary.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_ternary.py index 431c9d953..e14350755 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_ternary.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_ternary.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D import dl_formula.definitions.operators_ternary as base +from 
dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + DEFINITIONS_TERNARY = [ # between diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_unary.py b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_unary.py index fcd1f02ab..81b64e0ed 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_unary.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake/formula/definitions/operators_unary.py @@ -1,7 +1,8 @@ -from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D from dl_formula.definitions.base import TranslationVariant import dl_formula.definitions.operators_unary as base +from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D + V = TranslationVariant.make diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/base.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/base.py index f9b20bf0f..01f1a09f0 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/base.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/base.py @@ -6,6 +6,7 @@ from dl_api_lib_testing.connection_base import ConnectionTestBase from dl_api_lib_testing.data_api_base import StandardizedDataApiTestBase from dl_api_lib_testing.dataset_base import DatasetTestBase + from dl_connector_snowflake.core.constants import ( CONNECTION_TYPE_SNOWFLAKE, SOURCE_TYPE_SNOWFLAKE_TABLE, diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_connection.py index 3d54fee4b..38642affe 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_connection.py @@ -2,10 +2,18 @@ import uuid from dl_api_lib_testing.connector.connection_suite import 
DefaultConnectorConnectionTestSuite +from dl_testing.regulated_test import RegulatedTestParams + from dl_connector_snowflake_tests.ext.api.base import SnowFlakeConnectionTestBase class TestSnowFlakeConnection(SnowFlakeConnectionTestBase, DefaultConnectorConnectionTestSuite): + test_params = RegulatedTestParams( + mark_tests_skipped={ + DefaultConnectorConnectionTestSuite.test_cache_ttl_sec_override: "Unavailable for SnowFlake", + } + ) + def test_connection_parameters_tester(self, client, connection_params): params = { "type": "snowflake", diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_data.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_data.py index cb2a54143..349cd2665 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_data.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_data.py @@ -5,9 +5,10 @@ DefaultConnectorDataRangeTestSuite, DefaultConnectorDataResultTestSuite, ) -from dl_connector_snowflake_tests.ext.api.base import SnowFlakeDataApiTestBase from dl_testing.regulated_test import RegulatedTestParams +from dl_connector_snowflake_tests.ext.api.base import SnowFlakeDataApiTestBase + class TestSnowFlakeDataResult(SnowFlakeDataApiTestBase, DefaultConnectorDataResultTestSuite): test_params = RegulatedTestParams( diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_dataset.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_dataset.py index a94baa74d..0f0138156 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_dataset.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/api/test_dataset.py @@ -1,5 +1,6 @@ from dl_api_client.dsmaker.primitives import Dataset from dl_api_lib_testing.connector.dataset_suite import DefaultConnectorDatasetTestSuite + from dl_connector_snowflake_tests.ext.api.base import SnowFlakeDatasetTestBase diff --git 
a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/config.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/config.py index 35c1a9790..168f8c077 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/config.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/config.py @@ -1,5 +1,5 @@ from dl_api_lib_testing.configuration import ApiTestEnvironmentConfiguration -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core_testing.configuration import DefaultCoreTestConfiguration from dl_testing.containers import get_test_container_hostport @@ -19,27 +19,27 @@ # todo: maybe support also certificates and store them in sec storage, to avoid refresh token updates SAMPLE_TABLE_SIMPLIFIED_SCHEMA = [ - ("Category", BIType.string), - ("City", BIType.string), - ("Country", BIType.string), - ("Customer ID", BIType.string), - ("Customer Name", BIType.string), - ("Discount", BIType.float), - ("Order Date", BIType.date), - ("Order ID", BIType.string), - ("Postal Code", BIType.integer), - ("Product ID", BIType.string), - ("Product Name", BIType.string), - ("Profit", BIType.float), - ("Quantity", BIType.integer), - ("Region", BIType.string), - ("Row ID", BIType.integer), - ("Sales", BIType.float), - ("Segment", BIType.string), - ("Ship Date", BIType.date), - ("Ship Mode", BIType.string), - ("State", BIType.string), - ("Sub-Category", BIType.string), + ("Category", UserDataType.string), + ("City", UserDataType.string), + ("Country", UserDataType.string), + ("Customer ID", UserDataType.string), + ("Customer Name", UserDataType.string), + ("Discount", UserDataType.float), + ("Order Date", UserDataType.date), + ("Order ID", UserDataType.string), + ("Postal Code", UserDataType.integer), + ("Product ID", UserDataType.string), + ("Product Name", UserDataType.string), + ("Profit", UserDataType.float), + ("Quantity", UserDataType.integer), + ("Region", UserDataType.string), + ("Row ID", 
UserDataType.integer), + ("Sales", UserDataType.float), + ("Segment", UserDataType.string), + ("Ship Date", UserDataType.date), + ("Ship Mode", UserDataType.string), + ("State", UserDataType.string), + ("Sub-Category", UserDataType.string), ] diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/conftest.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/conftest.py index 6ac12fc7c..528956376 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/conftest.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/conftest.py @@ -4,11 +4,12 @@ from dl_api_lib.loader import load_api_lib from dl_api_lib_testing.initialization import initialize_api_lib_test -from dl_connector_snowflake.core.testing.secrets import SnowFlakeSecretReader -from dl_connector_snowflake_tests.ext.config import API_TEST_CONFIG from dl_formula_testing.forced_literal import forced_literal_use from dl_testing.env_params.generic import GenericEnvParamGetter +from dl_connector_snowflake.core.testing.secrets import SnowFlakeSecretReader +from dl_connector_snowflake_tests.ext.config import API_TEST_CONFIG + pytest_plugins = ("aiohttp.pytest_plugin",) # and it, in turn, includes 'pytest_asyncio.plugin' diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/base.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/base.py index 419c6d3df..6488d64a1 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/base.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/base.py @@ -3,14 +3,15 @@ import pytest +from dl_core.us_manager.us_manager_sync import SyncUSManager +from dl_core_testing.database import CoreDbConfig +from dl_core_testing.testcases.connection import BaseConnectionTestClass + from dl_connector_snowflake.core.constants import CONNECTION_TYPE_SNOWFLAKE from dl_connector_snowflake.core.testing.connection import make_snowflake_saved_connection from 
dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_connector_snowflake.db_testing.engine_wrapper import SnowFlakeDbEngineConfig import dl_connector_snowflake_tests.ext.config as test_config # noqa -from dl_core.us_manager.us_manager_sync import SyncUSManager -from dl_core_testing.database import CoreDbConfig -from dl_core_testing.testcases.connection import BaseConnectionTestClass class BaseSnowFlakeTestClass(BaseConnectionTestClass[ConnectionSQLSnowFlake]): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_connection.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_connection.py index 06b40a09e..1e03e07b9 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_connection.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_connection.py @@ -3,6 +3,11 @@ from mock import MagicMock import pytest +from dl_core.connection_executors.common_base import ConnExecutorQuery +from dl_core.services_registry.top_level import ServicesRegistry +from dl_core.us_connection_base import DataSourceTemplate +from dl_core_testing.testcases.connection import DefaultConnectionTestClass + from dl_connector_snowflake.auth import SFAuthProvider from dl_connector_snowflake.core.exc import ( SnowflakeAccessTokenError, @@ -10,10 +15,6 @@ ) from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_connector_snowflake_tests.ext.core.base import BaseSnowFlakeTestClass # noqa -from dl_core.connection_executors.common_base import ConnExecutorQuery -from dl_core.services_registry.top_level import ServicesRegistry -from dl_core.us_connection_base import DataSourceTemplate -from dl_core_testing.testcases.connection import DefaultConnectionTestClass class TestSnowFlakeConnection( diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_data_source.py 
b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_data_source.py index c21ffd995..2d3ba3a47 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_data_source.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/core/test_data_source.py @@ -1,5 +1,11 @@ import pytest +from dl_constants.enums import ( + RawSQLLevel, + UserDataType, +) +from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass + from dl_connector_snowflake.core.constants import ( SOURCE_TYPE_SNOWFLAKE_SUBSELECT, SOURCE_TYPE_SNOWFLAKE_TABLE, @@ -15,11 +21,6 @@ from dl_connector_snowflake.core.us_connection import ConnectionSQLSnowFlake from dl_connector_snowflake_tests.ext.config import SAMPLE_TABLE_SIMPLIFIED_SCHEMA from dl_connector_snowflake_tests.ext.core.base import BaseSnowFlakeTestClass -from dl_constants.enums import ( - BIType, - RawSQLLevel, -) -from dl_core_testing.testcases.data_source import DefaultDataSourceTestClass class TestSnowFlakeTableDataSource( @@ -42,7 +43,7 @@ def initial_data_source_spec(self, sf_secrets) -> SnowFlakeTableDataSourceSpec: ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return SAMPLE_TABLE_SIMPLIFIED_SCHEMA @@ -66,5 +67,5 @@ def initial_data_source_spec(self, sf_secrets) -> SnowFlakeSubselectDataSourceSp ) return dsrc_spec - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: return SAMPLE_TABLE_SIMPLIFIED_SCHEMA diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/base.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/base.py index b8fcda93a..4447b0e0b 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/base.py +++ 
b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/base.py @@ -3,6 +3,8 @@ import pytest +from dl_formula_testing.testcases.base import FormulaConnectorTestBase + from dl_connector_snowflake.auth import SFAuthProvider from dl_connector_snowflake.core.adapters import construct_creator_func from dl_connector_snowflake.core.dto import SnowFlakeConnDTO @@ -10,7 +12,6 @@ from dl_connector_snowflake.db_testing.engine_wrapper import SnowFlakeDbEngineConfig from dl_connector_snowflake.formula.constants import SnowFlakeDialect as D import dl_connector_snowflake_tests.ext.config as test_config # noqa -from dl_formula_testing.testcases.base import FormulaConnectorTestBase class SnowFlakeTestBase(FormulaConnectorTestBase): diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_conditional_blocks.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_conditional_blocks.py index a7e76abd8..93aea4361 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_conditional_blocks.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_conditional_blocks.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.conditional_blocks import DefaultConditionalBlockFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestConditionalBlockSnowFlake(SnowFlakeTestBase, DefaultConditionalBlockFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_aggregation.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_aggregation.py index e8ee8ba54..b35046764 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_aggregation.py +++ 
b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_aggregation.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_aggregation import DefaultMainAggFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestMainAggFunctionSnowFlake(SnowFlakeTestBase, DefaultMainAggFunctionFormulaConnectorTestSuite): supports_countd_approx = True diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_datetime.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_datetime.py index 31c2c0111..207ed0fb1 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_datetime.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_datetime.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_datetime import DefaultDateTimeFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestDateTimeFunctionSnowFlake(SnowFlakeTestBase, DefaultDateTimeFunctionFormulaConnectorTestSuite): supports_addition_to_feb_29 = True diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_logical.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_logical.py index 0f1a94edd..0b533e1c8 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_logical.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_logical.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_logical 
import DefaultLogicalFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestLogicalFunctionSnowFlake(SnowFlakeTestBase, DefaultLogicalFunctionFormulaConnectorTestSuite): supports_nan_funcs = False diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_markup.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_markup.py index 0fd267d94..765d622cb 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_markup.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_markup.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_markup import DefaultMarkupFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestMarkupFunctionSnowFlake(SnowFlakeTestBase, DefaultMarkupFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_math.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_math.py index 7ad3c2a44..940fd1fc9 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_math.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_math.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_math import DefaultMathFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestMathFunctionSnowFlake(SnowFlakeTestBase, DefaultMathFunctionFormulaConnectorTestSuite): supports_float_div = True diff --git 
a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_string.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_string.py index c799de8de..59f6e1fc8 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_string.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_string.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_string import DefaultStringFunctionFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestStringFunctionSnowFlake(SnowFlakeTestBase, DefaultStringFunctionFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_type_conversion.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_type_conversion.py index 8a50068b8..6eac79e04 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_type_conversion.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_functions_type_conversion.py @@ -1,4 +1,3 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.functions_type_conversion import ( DefaultBoolTypeFunctionFormulaConnectorTestSuite, DefaultDateTypeFunctionFormulaConnectorTestSuite, @@ -8,6 +7,8 @@ DefaultStrTypeFunctionFormulaConnectorTestSuite, ) +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + # STR diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_literals.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_literals.py index ef0de58b6..31a004591 100644 --- 
a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_literals.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_literals.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.literals import DefaultLiteralFormulaConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestLiteralSnowFlake(SnowFlakeTestBase, DefaultLiteralFormulaConnectorTestSuite): supports_microseconds = True diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_misc_funcs.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_misc_funcs.py index ad2e564f5..89fc53b7c 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_misc_funcs.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_misc_funcs.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.misc_funcs import DefaultMiscFunctionalityConnectorTestSuite +from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestMiscFunctionalitySnowFlake(SnowFlakeTestBase, DefaultMiscFunctionalityConnectorTestSuite): pass diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_operators.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_operators.py index ec498ce3e..833e9c976 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_operators.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/ext/formula/test_operators.py @@ -1,6 +1,7 @@ -from dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa from dl_formula_testing.testcases.operators import DefaultOperatorFormulaConnectorTestSuite +from 
dl_connector_snowflake_tests.ext.formula.base import SnowFlakeTestBase # noqa + class TestOperatorSnowFlake(SnowFlakeTestBase, DefaultOperatorFormulaConnectorTestSuite): pass diff --git a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/unit/test_connection_form.py b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/unit/test_connection_form.py index 5c9a0b9cd..c2984748f 100644 --- a/lib/dl_connector_snowflake/dl_connector_snowflake_tests/unit/test_connection_form.py +++ b/lib/dl_connector_snowflake/dl_connector_snowflake_tests/unit/test_connection_form.py @@ -1,5 +1,6 @@ from dl_api_connector.i18n.localizer import CONFIGS as BI_API_CONNECTOR_CONFIGS from dl_api_lib_testing.connection_form_base import ConnectionFormTestBase + from dl_connector_snowflake.api.connection_form.form_config import SnowFlakeConnectionFormFactory from dl_connector_snowflake.api.i18n.localizer import CONFIGS as BI_CONNECTOR_SNOWFLAKE_CONFIGS diff --git a/lib/dl_constants/dl_constants/enums.py b/lib/dl_constants/dl_constants/enums.py index c6d9c96e9..9995b82f1 100644 --- a/lib/dl_constants/dl_constants/enums.py +++ b/lib/dl_constants/dl_constants/enums.py @@ -37,7 +37,7 @@ def normalize(cls, value: Union[_ENUM_TYPE, str, None]) -> Optional[_ENUM_TYPE]: @unique -class BIType(_Normalizable["BIType"], Enum): +class UserDataType(_Normalizable["UserDataType"], Enum): string = auto() integer = auto() float = auto() @@ -216,12 +216,12 @@ class WhereClauseOperation(Enum): BETWEEN = "between" -class CreateDSFrom(DynamicEnum): +class DataSourceType(DynamicEnum): @classmethod - def normalize(cls, value: CreateDSFrom | str | None) -> Optional[CreateDSFrom]: + def normalize(cls, value: DataSourceType | str | None) -> Optional[DataSourceType]: # FIXME: Remove this hack (used only in dsmaker) if isinstance(value, str): - value = CreateDSFrom(value) + value = DataSourceType(value) return value @@ -262,7 +262,7 @@ class RLSPatternType(Enum): userid = "userid" -class QueryType(Enum): +class 
ReportingQueryType(Enum): internal = "internal" external = "external" @@ -383,3 +383,9 @@ class NotificationType(DynamicEnum): class ConnectorAvailability(Enum): free = "free" whitelist = "whitelist" + + +class QueryProcessingMode(DynamicEnum): + basic = AutoEnumValue() + no_compeng = AutoEnumValue() + native_wf = AutoEnumValue() diff --git a/lib/dl_core/dl_core/aio/web_app_services/data_processing/data_processor.py b/lib/dl_core/dl_core/aio/web_app_services/data_processing/data_processor.py index 3b38a55d0..37475cd43 100644 --- a/lib/dl_core/dl_core/aio/web_app_services/data_processing/data_processor.py +++ b/lib/dl_core/dl_core/aio/web_app_services/data_processing/data_processor.py @@ -41,9 +41,9 @@ async def init_hook(self, target_app: web.Application) -> None: await self.initialize() async def tear_down_hook(self, target_app: web.Application) -> None: - LOGGER.info("Tear down {type(self).__name__} data processor service...") + LOGGER.info(f"Tear down {type(self).__name__} data processor service...") await self.tear_down(target_app) - LOGGER.info("Tear down {type(self).__name__} data processor service: done.") + LOGGER.info(f"Tear down {type(self).__name__} data processor service: done.") async def tear_down(self, target_app: web.Application) -> None: await self.finalize() diff --git a/lib/dl_core/dl_core/aio/web_app_services/s3.py b/lib/dl_core/dl_core/aio/web_app_services/s3.py index ff494622d..4a4c3a995 100644 --- a/lib/dl_core/dl_core/aio/web_app_services/s3.py +++ b/lib/dl_core/dl_core/aio/web_app_services/s3.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import logging from typing import ( + TYPE_CHECKING, Any, ClassVar, ) @@ -8,8 +11,12 @@ from aiobotocore.session import get_session from aiohttp import web import attr -import botocore.client -import botocore.session +import boto3 + + +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client as SyncS3Client + from types_aiobotocore_s3 import S3Client as AsyncS3Client LOGGER = 
logging.getLogger(__name__) @@ -26,9 +33,13 @@ class S3Service: tmp_bucket_name: str = attr.ib() persistent_bucket_name: str = attr.ib() - client: AioBaseClient = attr.ib(init=False, repr=False, hash=False, cmp=False) + _client: AioBaseClient = attr.ib(init=False, repr=False, hash=False, cmp=False) _client_init_params: dict[str, Any] = attr.ib(init=False, repr=False, hash=False, cmp=False) + @property + def client(self) -> AsyncS3Client: + return self._client # type: ignore + @classmethod def get_full_app_key(cls) -> str: return cls.APP_KEY @@ -50,12 +61,12 @@ async def initialize(self) -> None: ) session = get_session() - client = await session._create_client(**self._client_init_params) # noqa - self.client = await client.__aenter__() + client = session.create_client(**self._client_init_params) + self._client = await client.__aenter__() async def tear_down(self) -> None: LOGGER.info("Tear down S3 service") - await self.client.close() + await self._client.close() @classmethod def get_app_instance(cls, app: web.Application) -> "S3Service": @@ -65,10 +76,10 @@ def get_app_instance(cls, app: web.Application) -> "S3Service": return service - def get_client(self) -> AioBaseClient: + def get_client(self) -> AsyncS3Client: return self.client - def get_sync_client(self) -> botocore.client.BaseClient: - session = botocore.session.get_session() - client = session.create_client(**self._client_init_params) + def get_sync_client(self) -> SyncS3Client: + session = boto3.session.Session() + client = session.client(**self._client_init_params) return client diff --git a/lib/dl_core/dl_core/components/editor.py b/lib/dl_core/dl_core/components/editor.py index a303c51f0..964634481 100644 --- a/lib/dl_core/dl_core/components/editor.py +++ b/lib/dl_core/dl_core/components/editor.py @@ -13,9 +13,9 @@ import attr from dl_constants.enums import ( - CreateDSFrom, DataSourceCreatedVia, DataSourceRole, + DataSourceType, JoinType, ManagedBy, ) @@ -125,7 +125,7 @@ def add_data_source( *, 
source_id: str, role: DataSourceRole = DataSourceRole.origin, - created_from: CreateDSFrom, + created_from: DataSourceType, connection_id: Optional[str] = None, title: Optional[str] = None, raw_schema: Optional[list[SchemaColumn]] = None, @@ -170,7 +170,7 @@ def update_data_source( source_id: str, role: Optional[DataSourceRole] = None, connection_id: Optional[str] = None, - created_from: Optional[CreateDSFrom] = None, + created_from: Optional[DataSourceType] = None, raw_schema: Optional[list] = None, index_info_set: Optional[FrozenSet[IndexInfo]] = None, **parameters: Any, diff --git a/lib/dl_core/dl_core/connection_executors/async_base.py b/lib/dl_core/dl_core/connection_executors/async_base.py index cc4fac707..8aca3663f 100644 --- a/lib/dl_core/dl_core/connection_executors/async_base.py +++ b/lib/dl_core/dl_core/connection_executors/async_base.py @@ -28,7 +28,7 @@ if TYPE_CHECKING: - from dl_constants.enums import BIType + from dl_constants.enums import UserDataType from dl_constants.types import TBIDataTable from dl_core.connection_models.common_models import ( DBIdent, @@ -46,7 +46,7 @@ class AsyncExecutionResult: cursor_info: dict = attr.ib() result: AsyncIterable[TBIDataTable] = attr.ib() # iterable of tables (chunks) # for `autodetect_user_types` result - user_types: Optional[List[BIType]] = attr.ib(default=None) + user_types: Optional[List[UserDataType]] = attr.ib(default=None) # DB-specific result. Should be mutable, and get filled after `result` is consumed. 
result_footer: dict = attr.ib(factory=dict) diff --git a/lib/dl_core/dl_core/connection_executors/async_sa_executors.py b/lib/dl_core/dl_core/connection_executors/async_sa_executors.py index bfa9425d9..d833d5947 100644 --- a/lib/dl_core/dl_core/connection_executors/async_sa_executors.py +++ b/lib/dl_core/dl_core/connection_executors/async_sa_executors.py @@ -22,7 +22,7 @@ from typing_extensions import final from dl_api_commons.base_models import RequestContextInfo -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core import exc from dl_core.connection_executors.adapters.adapters_base import SyncDirectDBAdapter from dl_core.connection_executors.adapters.async_adapters_base import ( @@ -238,7 +238,7 @@ def executor_query_to_db_adapter_query(self, conn_exec_query: ConnExecutorQuery) async def _execute_query(self, query: DBAdapterQuery) -> AsyncRawExecutionResult: return await self._target_dba.execute(query) - def _autodetect_user_types(self, raw_cursor_info: dict) -> Optional[List[BIType]]: + def _autodetect_user_types(self, raw_cursor_info: dict) -> Optional[List[UserDataType]]: db_types = raw_cursor_info.get("db_types") if not db_types: return None @@ -246,7 +246,7 @@ def _autodetect_user_types(self, raw_cursor_info: dict) -> Optional[List[BIType] result = [] for native_type in db_types: - bi_type = BIType.unsupported + bi_type = UserDataType.unsupported if native_type: try: diff --git a/lib/dl_core/dl_core/connection_executors/common_base.py b/lib/dl_core/dl_core/connection_executors/common_base.py index 8dace54ed..59598b736 100644 --- a/lib/dl_core/dl_core/connection_executors/common_base.py +++ b/lib/dl_core/dl_core/connection_executors/common_base.py @@ -23,7 +23,7 @@ from sqlalchemy import sql as sasql from dl_api_commons.base_models import RequestContextInfo -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_constants.exc import DLBaseException from dl_constants.types import 
TBIDataValue from dl_core.connection_executors.models.common import RemoteQueryExecutorData @@ -62,7 +62,7 @@ class ExecutionMode(enum.Enum): @attr.s class ConnExecutorQuery: query: Union[sasql.Select, str] = attr.ib() - user_types: Optional[List[BIType]] = attr.ib(default=None) + user_types: Optional[List[UserDataType]] = attr.ib(default=None) debug_compiled_query: Optional[str] = attr.ib(default=None) chunk_size: Optional[int] = attr.ib(default=None) connector_specific_params: Optional[Dict[str, TJSONExt]] = attr.ib(default=None) @@ -122,7 +122,7 @@ def is_context_info_equals(self, another: RequestContextInfo) -> bool: LOGGER.exception("Exception during request context info comparision") return False - def cast_row_to_output(self, row: Sequence, user_types: Optional[Sequence[BIType]]) -> Sequence[TBIDataValue]: + def cast_row_to_output(self, row: Sequence, user_types: Optional[Sequence[UserDataType]]) -> Sequence[TBIDataValue]: if user_types is None: return row @@ -150,7 +150,7 @@ def create_schema_info_from_raw_schema_info( if require_all: raise LOGGER.warning("Unable to detect type of field: %s", raw_column_info) - user_type = BIType.unsupported + user_type = UserDataType.unsupported schema_col = SchemaColumn( name=raw_column_info.name, diff --git a/lib/dl_core/dl_core/connection_models/conn_options.py b/lib/dl_core/dl_core/connection_models/conn_options.py index 35708f3b9..d9c347332 100644 --- a/lib/dl_core/dl_core/connection_models/conn_options.py +++ b/lib/dl_core/dl_core/connection_models/conn_options.py @@ -17,7 +17,6 @@ class ConnectOptions: rqe_total_timeout: Optional[int] = attr.ib(default=None) rqe_sock_read_timeout: Optional[int] = attr.ib(default=None) - use_managed_network: bool = attr.ib(default=True) # TODO: temporary - remove in favor of MDBConnectOptionsMixin fetch_table_indexes: bool = attr.ib(default=False) pass_db_messages_to_user: bool = attr.ib(default=False) pass_db_query_to_user: bool = attr.ib(default=False) diff --git 
a/lib/dl_core/dl_core/connections_security/base.py b/lib/dl_core/dl_core/connections_security/base.py index b811bcef2..50ac2ba94 100644 --- a/lib/dl_core/dl_core/connections_security/base.py +++ b/lib/dl_core/dl_core/connections_security/base.py @@ -10,7 +10,10 @@ import attr -from dl_core.connection_models import ConnDTO +from dl_core.connection_models import ( + ConnDTO, + ConnectOptions, +) LOGGER = logging.getLogger(__name__) @@ -33,7 +36,7 @@ class ConnectionSecurityManager(metaclass=abc.ABCMeta): db_domain_manager: DBDomainManager = attr.ib(factory=DBDomainManager) @abc.abstractmethod - def is_safe_connection(self, conn_dto: ConnDTO) -> bool: + def is_safe_connection(self, conn_dto: ConnDTO, conn_options: ConnectOptions) -> bool: """ Must return False if connection is potentially unsafe and should be executed in isolated environment. """ @@ -56,7 +59,7 @@ def register_dto_types(cls, dto_classes: AbstractSet[Type[ConnDTO]]) -> None: cls._DTO_TYPES.update(dto_classes) @abc.abstractmethod - def is_safe_connection(self, conn_dto: ConnDTO) -> bool: + def is_safe_connection(self, conn_dto: ConnDTO, conn_options: ConnectOptions) -> bool: raise NotImplementedError @@ -66,7 +69,7 @@ class InsecureConnectionSafetyChecker(ConnectionSafetyChecker): _DTO_TYPES: ClassVar[set[Type[ConnDTO]]] = set() - def is_safe_connection(self, conn_dto: ConnDTO) -> bool: + def is_safe_connection(self, conn_dto: ConnDTO, conn_options: ConnectOptions) -> bool: return True @@ -76,7 +79,7 @@ class NonUserInputConnectionSafetyChecker(ConnectionSafetyChecker): _DTO_TYPES: ClassVar[set[Type[ConnDTO]]] = set() - def is_safe_connection(self, conn_dto: ConnDTO) -> bool: + def is_safe_connection(self, conn_dto: ConnDTO, conn_options: ConnectOptions) -> bool: if type(conn_dto) in self._DTO_TYPES: LOGGER.info("%r in safe DTO types", type(conn_dto)) return True @@ -87,8 +90,10 @@ def is_safe_connection(self, conn_dto: ConnDTO) -> bool: class GenericConnectionSecurityManager(ConnectionSecurityManager, 
metaclass=abc.ABCMeta): conn_sec_checkers: list[ConnectionSafetyChecker] = attr.ib() - def is_safe_connection(self, conn_dto: ConnDTO) -> bool: - return any(conn_sec_checker.is_safe_connection(conn_dto) for conn_sec_checker in self.conn_sec_checkers) + def is_safe_connection(self, conn_dto: ConnDTO, conn_options: ConnectOptions) -> bool: + return any( + conn_sec_checker.is_safe_connection(conn_dto, conn_options) for conn_sec_checker in self.conn_sec_checkers + ) @attr.s(kw_only=True) diff --git a/lib/dl_core/dl_core/connectors/base/connector.py b/lib/dl_core/dl_core/connectors/base/connector.py index 9abf50b20..4a3adc7b6 100644 --- a/lib/dl_core/dl_core/connectors/base/connector.py +++ b/lib/dl_core/dl_core/connectors/base/connector.py @@ -35,7 +35,7 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, ) from dl_core.connection_executors.adapters.common_base import CommonBaseDirectAdapter from dl_core.connection_executors.async_base import AsyncConnExecutorBase @@ -47,7 +47,7 @@ class CoreSourceDefinition(abc.ABC): - source_type: ClassVar[CreateDSFrom] + source_type: ClassVar[DataSourceType] source_cls: ClassVar[Type[DataSource]] = DataSource # type: ignore source_spec_cls: ClassVar[Type[DataSourceSpec]] = DataSourceSpec us_storage_schema_cls: ClassVar[Type[DataSourceSpecStorageSchema]] = DataSourceSpecStorageSchema diff --git a/lib/dl_core/dl_core/connectors/base/data_source_migration.py b/lib/dl_core/dl_core/connectors/base/data_source_migration.py index 66f8304b3..6d58faa3a 100644 --- a/lib/dl_core/dl_core/connectors/base/data_source_migration.py +++ b/lib/dl_core/dl_core/connectors/base/data_source_migration.py @@ -11,7 +11,7 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, ) from dl_core.base_models import ConnectionRef from dl_core.data_source_spec.base import DataSourceSpec @@ -88,7 +88,7 @@ class MigrationKeyMappingItem: @attr.s(frozen=True) class MigrationSpec: - source_type: 
CreateDSFrom = attr.ib(kw_only=True) + source_type: DataSourceType = attr.ib(kw_only=True) dto_cls: Type[DataSourceMigrationInterface] = attr.ib(kw_only=True) dsrc_spec_cls: Type[DataSourceSpec] = attr.ib(kw_only=True) migration_mapping_items: tuple[MigrationKeyMappingItem, ...] = attr.ib(kw_only=True) diff --git a/lib/dl_core/dl_core/connectors/sql_base/data_source_migration.py b/lib/dl_core/dl_core/connectors/sql_base/data_source_migration.py index 67718d204..72eb17e7b 100644 --- a/lib/dl_core/dl_core/connectors/sql_base/data_source_migration.py +++ b/lib/dl_core/dl_core/connectors/sql_base/data_source_migration.py @@ -6,7 +6,7 @@ import attr -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.connectors.base.data_source_migration import ( DataSourceMigrationInterface, MigrationKeyMappingItem, @@ -37,11 +37,11 @@ class SQLSubselectDSMI(DataSourceMigrationInterface): class DefaultSQLDataSourceMigrator(SpecBasedSourceMigrator): - table_source_type: ClassVar[Optional[CreateDSFrom]] = None + table_source_type: ClassVar[Optional[DataSourceType]] = None table_dsrc_spec_cls: ClassVar[Optional[Type[DataSourceSpec]]] = StandardSQLDataSourceSpec with_db_name: ClassVar[bool] = False - subselect_source_type: ClassVar[Optional[CreateDSFrom]] = None + subselect_source_type: ClassVar[Optional[DataSourceType]] = None subselect_dsrc_spec_cls: ClassVar[Optional[Type[DataSourceSpec]]] = SubselectDataSourceSpec default_schema_name: ClassVar[Optional[str]] = None diff --git a/lib/dl_core/dl_core/data_processing/cache/utils.py b/lib/dl_core/dl_core/data_processing/cache/utils.py index 15f6c4cfa..c3904af85 100644 --- a/lib/dl_core/dl_core/data_processing/cache/utils.py +++ b/lib/dl_core/dl_core/data_processing/cache/utils.py @@ -30,7 +30,7 @@ from sqlalchemy.engine.default import DefaultDialect from sqlalchemy.sql import Select - from dl_constants.enums import BIType + from dl_constants.enums import UserDataType from 
dl_constants.types import TJSONExt from dl_core.data_processing.prepared_components.primitives import PreparedMultiFromInfo from dl_core.data_processing.stream_base import DataStreamBase @@ -168,7 +168,7 @@ def get_cache_options( role: DataSourceRole, joint_dsrc_info: PreparedMultiFromInfo, query: Select, - user_types: list[BIType], + user_types: list[UserDataType], dataset: Dataset, ) -> BIQueryCacheOptions: """Returns cache key, TTL for new entries, refresh TTL flag""" @@ -211,7 +211,7 @@ def make_data_select_cache_key( self, joint_dsrc_info: PreparedMultiFromInfo, compiled_query: str, - user_types: list[BIType], + user_types: list[UserDataType], data_dump_id: str, is_bleeding_edge_user: bool, ) -> LocalKeyRepresentation: diff --git a/lib/dl_core/dl_core/data_processing/dashsql.py b/lib/dl_core/dl_core/data_processing/dashsql.py index 3b2e9f92c..ef10c65a5 100644 --- a/lib/dl_core/dl_core/data_processing/dashsql.py +++ b/lib/dl_core/dl_core/data_processing/dashsql.py @@ -28,6 +28,7 @@ ) from dl_constants.types import TJSONExt # not under `TYPE_CHECKING`, need to define new type aliases. 
from dl_core import exc +from dl_core.base_models import WorkbookEntryLocation from dl_core.connection_executors.common_base import ConnExecutorQuery from dl_core.connectors.base.dashsql import get_custom_dash_sql_key_names from dl_core.data_processing.cache.processing_helper import ( @@ -336,6 +337,10 @@ async def _generate_func() -> Optional[TJSONExtChunkStream]: conn_id = conn.uuid assert conn_id + workbook_id = ( + self.conn.entry_key.workbook_id if isinstance(self.conn.entry_key, WorkbookEntryLocation) else None + ) + service_registry = self._service_registry reporting_registry = service_registry.get_reporting_registry() @@ -348,6 +353,7 @@ async def _generate_func() -> Optional[TJSONExtChunkStream]: connection_type=conn.conn_type, conn_reporting_data=self.conn.get_conn_dto().conn_reporting_data(), query=str(ce_query.query), + workbook_id=workbook_id, ) ) diff --git a/lib/dl_core/dl_core/data_processing/prepared_components/primitives.py b/lib/dl_core/dl_core/data_processing/prepared_components/primitives.py index 1c145dcfa..172c59eec 100644 --- a/lib/dl_core/dl_core/data_processing/prepared_components/primitives.py +++ b/lib/dl_core/dl_core/data_processing/prepared_components/primitives.py @@ -11,8 +11,8 @@ import attr from dl_constants.enums import ( - BIType, JoinType, + UserDataType, ) from dl_core.connectors.base.query_compiler import QueryCompiler from dl_core.query.bi_query import SqlSourceType @@ -69,4 +69,4 @@ class PreparedSingleFromInfo(PreparedFromInfo): id: str = attr.ib(kw_only=True) alias: str = attr.ib(kw_only=True) col_names: Sequence[str] = attr.ib(kw_only=True) - user_types: Sequence[BIType] = attr.ib(kw_only=True) + user_types: Sequence[UserDataType] = attr.ib(kw_only=True) diff --git a/lib/dl_core/dl_core/data_processing/processing/db_base/exec_adapter_base.py b/lib/dl_core/dl_core/data_processing/processing/db_base/exec_adapter_base.py index a25186dde..ea106a568 100644 --- 
a/lib/dl_core/dl_core/data_processing/processing/db_base/exec_adapter_base.py +++ b/lib/dl_core/dl_core/data_processing/processing/db_base/exec_adapter_base.py @@ -20,7 +20,7 @@ if TYPE_CHECKING: - from dl_constants.enums import BIType + from dl_constants.enums import UserDataType from dl_core.data_processing.cache.primitives import LocalKeyRepresentation from dl_core.data_processing.prepared_components.primitives import PreparedMultiFromInfo @@ -39,7 +39,7 @@ async def _execute_and_fetch( self, *, query: Union[Select, str], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], chunk_size: int, joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, query_id: str, @@ -50,7 +50,7 @@ async def _execute_and_fetch( """ raise NotImplementedError - async def scalar(self, query: Union[str, Select], user_type: BIType) -> TBIDataValue: + async def scalar(self, query: Union[str, Select], user_type: UserDataType) -> TBIDataValue: """Execute a statement returning a scalar value.""" data_stream = await self._execute_and_fetch( query_id=make_id(), @@ -67,7 +67,7 @@ async def fetch_data_from_select( self, *, query: Union[str, sa.sql.selectable.Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], chunk_size: Optional[int] = None, joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, query_id: str, @@ -90,7 +90,7 @@ def get_data_key( *, query_id: str, query: Union[str, Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, ) -> Optional[LocalKeyRepresentation]: return None diff --git a/lib/dl_core/dl_core/data_processing/processing/source_db/selector_exec_adapter.py b/lib/dl_core/dl_core/data_processing/processing/source_db/selector_exec_adapter.py index a820dfe90..ceb460fbd 100644 --- a/lib/dl_core/dl_core/data_processing/processing/source_db/selector_exec_adapter.py +++ b/lib/dl_core/dl_core/data_processing/processing/source_db/selector_exec_adapter.py @@ 
-9,7 +9,7 @@ import attr -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.data_processing.prepared_components.default_manager import DefaultPreparedComponentManager from dl_core.data_processing.processing.db_base.exec_adapter_base import ProcessorDbExecAdapterBase from dl_core.data_processing.selectors.dataset_base import DatasetDataSelectorAsyncBase @@ -53,7 +53,7 @@ def get_prep_component_manager(self) -> PreparedComponentManagerBase: def _make_query_res_info( self, query: Union[str, Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], ) -> QueryAndResultInfo: query_res_info = QueryAndResultInfo( query=query, # type: ignore # TODO: fix @@ -69,7 +69,7 @@ async def _execute_and_fetch( self, *, query: Union[str, Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], chunk_size: int, joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, query_id: str, @@ -92,7 +92,7 @@ def get_data_key( *, query_id: str, query: Union[str, Select], - user_types: Sequence[BIType], + user_types: Sequence[UserDataType], joint_dsrc_info: Optional[PreparedMultiFromInfo] = None, ) -> Optional[LocalKeyRepresentation]: selector = self._selector diff --git a/lib/dl_core/dl_core/data_processing/selectors/base.py b/lib/dl_core/dl_core/data_processing/selectors/base.py index 8c4a5b37a..e58521676 100644 --- a/lib/dl_core/dl_core/data_processing/selectors/base.py +++ b/lib/dl_core/dl_core/data_processing/selectors/base.py @@ -15,8 +15,8 @@ from sqlalchemy.sql.selectable import Select from dl_constants.enums import ( - BIType, DataSourceRole, + UserDataType, ) from dl_core.data_processing.cache.primitives import BIQueryCacheOptions from dl_core.data_processing.prepared_components.primitives import PreparedMultiFromInfo @@ -35,7 +35,7 @@ class BIQueryExecutionContext: query: Select compiled_query: str # for logs only target_connection: ExecutorBasedMixin - requested_bi_types: List[BIType] + 
requested_bi_types: List[UserDataType] result_col_names: Sequence[str] target_db_name: Optional[str] = attr.ib(default=None) cache_options: Optional[BIQueryCacheOptions] = attr.ib(default=None) diff --git a/lib/dl_core/dl_core/data_processing/selectors/dataset_base.py b/lib/dl_core/dl_core/data_processing/selectors/dataset_base.py index b70cb054c..68ee917fc 100644 --- a/lib/dl_core/dl_core/data_processing/selectors/dataset_base.py +++ b/lib/dl_core/dl_core/data_processing/selectors/dataset_base.py @@ -18,6 +18,7 @@ ) from dl_constants.enums import DataSourceRole from dl_core import utils +from dl_core.base_models import WorkbookEntryLocation from dl_core.data_processing.selectors.base import ( BIQueryExecutionContext, DataSelectorAsyncBase, @@ -64,6 +65,9 @@ def _save_start_exec_reporting_record( query_execution_ctx: BIQueryExecutionContext, ) -> None: connection = query_execution_ctx.target_connection + workbook_id = ( + connection.entry_key.workbook_id if isinstance(connection.entry_key, WorkbookEntryLocation) else None + ) report = QueryExecutionStartReportingRecord( timestamp=time.time(), query_id=query_execution_ctx.query_id, @@ -75,6 +79,7 @@ def _save_start_exec_reporting_record( connection_type=connection.conn_type, conn_reporting_data=connection.get_conn_dto().conn_reporting_data(), query=query_execution_ctx.compiled_query, + workbook_id=workbook_id, ) self.reporting_registry.save_reporting_record(report=report) diff --git a/lib/dl_core/dl_core/data_processing/selectors/utils.py b/lib/dl_core/dl_core/data_processing/selectors/utils.py index 664c6691d..8480290b4 100644 --- a/lib/dl_core/dl_core/data_processing/selectors/utils.py +++ b/lib/dl_core/dl_core/data_processing/selectors/utils.py @@ -12,7 +12,7 @@ from dl_constants.enums import ( DataSourceRole, - QueryType, + ReportingQueryType, ) import dl_core.exc as exc from dl_core.query.bi_query import BIQuery @@ -61,12 +61,12 @@ def get_value_range_query(expression: ExpressionCtx, dimension_filters: Sequence 
) -def get_query_type(connection: ConnectionBase, conn_sec_mgr: ConnectionSecurityManager) -> QueryType: +def get_query_type(connection: ConnectionBase, conn_sec_mgr: ConnectionSecurityManager) -> ReportingQueryType: if connection.is_always_internal_source: - return QueryType.internal + return ReportingQueryType.internal if isinstance(connection, ClassicConnectionSQL): if conn_sec_mgr.is_internal_connection(connection.get_conn_dto()): - return QueryType.internal + return ReportingQueryType.internal - return QueryType.external + return ReportingQueryType.external diff --git a/lib/dl_core/dl_core/data_processing/stream_base.py b/lib/dl_core/dl_core/data_processing/stream_base.py index 029d2bc92..3d7709e50 100644 --- a/lib/dl_core/dl_core/data_processing/stream_base.py +++ b/lib/dl_core/dl_core/data_processing/stream_base.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: - from dl_constants.enums import BIType + from dl_constants.enums import UserDataType from dl_constants.types import TBIDataRow from dl_core.components.ids import AvatarId from dl_core.data_processing.cache.primitives import LocalKeyRepresentation @@ -28,7 +28,7 @@ class AbstractStream: id: str = attr.ib(kw_only=True) names: Sequence[str] = attr.ib(kw_only=True) - user_types: Sequence[BIType] = attr.ib(kw_only=True) + user_types: Sequence[UserDataType] = attr.ib(kw_only=True) @attr.s diff --git a/lib/dl_core/dl_core/data_source/base.py b/lib/dl_core/dl_core/data_source/base.py index bf7e436e8..ab80b6719 100644 --- a/lib/dl_core/dl_core/data_source/base.py +++ b/lib/dl_core/dl_core/data_source/base.py @@ -17,7 +17,7 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, JoinType, ) from dl_core.base_models import ( @@ -125,7 +125,7 @@ def initialize(self) -> None: pass @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: raise NotImplementedError def get_parameters(self) -> dict: 
@@ -240,5 +240,5 @@ def data_export_forbidden(self) -> bool: class IncompatibleDataSourceMixin(DataSource): @classmethod - def is_compatible_with_type(cls, source_type: CreateDSFrom) -> bool: + def is_compatible_with_type(cls, source_type: DataSourceType) -> bool: return False diff --git a/lib/dl_core/dl_core/data_source/collection.py b/lib/dl_core/dl_core/data_source/collection.py index 2f861eea2..0011a56d0 100644 --- a/lib/dl_core/dl_core/data_source/collection.py +++ b/lib/dl_core/dl_core/data_source/collection.py @@ -11,9 +11,9 @@ import attr from dl_constants.enums import ( - CreateDSFrom, DataSourceCollectionType, DataSourceRole, + DataSourceType, JoinType, ManagedBy, ) @@ -96,7 +96,7 @@ def effective_connection_id(self) -> Optional[str]: return self.get_connection_id() @property - def source_type(self) -> CreateDSFrom: + def source_type(self) -> DataSourceType: return self.get_strict().spec.source_type def supports_join_type( diff --git a/lib/dl_core/dl_core/data_source/type_mapping.py b/lib/dl_core/dl_core/data_source/type_mapping.py index 3ae536376..d0c64e772 100644 --- a/lib/dl_core/dl_core/data_source/type_mapping.py +++ b/lib/dl_core/dl_core/data_source/type_mapping.py @@ -8,22 +8,22 @@ if TYPE_CHECKING: - from dl_constants.enums import CreateDSFrom + from dl_constants.enums import DataSourceType from dl_core.data_source.base import DataSource -_DSRC_TYPES: dict[CreateDSFrom, Type[DataSource]] = {} +_DSRC_TYPES: dict[DataSourceType, Type[DataSource]] = {} -def list_registered_source_types() -> Collection[CreateDSFrom]: +def list_registered_source_types() -> Collection[DataSourceType]: return set(_DSRC_TYPES) -def get_data_source_class(ds_type: CreateDSFrom) -> Type[DataSource]: +def get_data_source_class(ds_type: DataSourceType) -> Type[DataSource]: """Return ``DataSource`` subclass to be used for given dataset type.""" return _DSRC_TYPES[ds_type] -def register_data_source_class(source_type: CreateDSFrom, source_cls: Type[DataSource]) -> None: +def 
register_data_source_class(source_type: DataSourceType, source_cls: Type[DataSource]) -> None: """Register ``DataSource`` subclass in the mapping.""" _DSRC_TYPES[source_type] = source_cls diff --git a/lib/dl_core/dl_core/data_source/utils.py b/lib/dl_core/dl_core/data_source/utils.py index a6b63005d..217bae49f 100644 --- a/lib/dl_core/dl_core/data_source/utils.py +++ b/lib/dl_core/dl_core/data_source/utils.py @@ -8,13 +8,13 @@ import xxhash -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType _IGNORE_IN_HASH = frozenset(("db_version",)) -def get_parameters_hash(source_type: CreateDSFrom, connection_id: Optional[str], **parameters: Any) -> str: +def get_parameters_hash(source_type: DataSourceType, connection_id: Optional[str], **parameters: Any) -> str: data = ( source_type, connection_id, diff --git a/lib/dl_core/dl_core/data_source_merge_tools.py b/lib/dl_core/dl_core/data_source_merge_tools.py index 2410f9e95..3d454d8bd 100644 --- a/lib/dl_core/dl_core/data_source_merge_tools.py +++ b/lib/dl_core/dl_core/data_source_merge_tools.py @@ -7,12 +7,12 @@ import attr -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.data_source_spec.base import DataSourceSpec from dl_core.data_source_spec.type_mapping import get_data_source_spec_class -def make_spec_from_dict(source_type: CreateDSFrom, data: Dict[str, Any]) -> DataSourceSpec: +def make_spec_from_dict(source_type: DataSourceType, data: Dict[str, Any]) -> DataSourceSpec: dsrc_spec_cls = get_data_source_spec_class(ds_type=source_type) field_names = { field.name.lstrip("_") @@ -26,7 +26,9 @@ def make_spec_from_dict(source_type: CreateDSFrom, data: Dict[str, Any]) -> Data return dsrc_spec_cls(**filtered_data) -def update_spec_from_dict(source_type: CreateDSFrom, data: Dict[str, Any], old_spec: DataSourceSpec) -> DataSourceSpec: +def update_spec_from_dict( + source_type: DataSourceType, data: Dict[str, Any], old_spec: DataSourceSpec 
+) -> DataSourceSpec: merged_data = { # collect old attributes field.name.lstrip("_"): getattr(old_spec, field.name) diff --git a/lib/dl_core/dl_core/data_source_spec/base.py b/lib/dl_core/dl_core/data_source_spec/base.py index 9a10df674..32ba46fe2 100644 --- a/lib/dl_core/dl_core/data_source_spec/base.py +++ b/lib/dl_core/dl_core/data_source_spec/base.py @@ -4,14 +4,14 @@ import attr -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.base_models import ConnectionRef from dl_core.db.elements import SchemaColumn @attr.s class DataSourceSpec: - source_type: CreateDSFrom = attr.ib(kw_only=True) + source_type: DataSourceType = attr.ib(kw_only=True) connection_ref: Optional[ConnectionRef] = attr.ib(kw_only=True, default=None) raw_schema: Optional[list[SchemaColumn]] = attr.ib(kw_only=True, default=None) data_dump_id: Optional[str] = attr.ib(kw_only=True, default=None) diff --git a/lib/dl_core/dl_core/data_source_spec/type_mapping.py b/lib/dl_core/dl_core/data_source_spec/type_mapping.py index 666dc9d61..24fc68f52 100644 --- a/lib/dl_core/dl_core/data_source_spec/type_mapping.py +++ b/lib/dl_core/dl_core/data_source_spec/type_mapping.py @@ -2,18 +2,18 @@ from typing import Type -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.data_source_spec.base import DataSourceSpec -_DSRC_SPEC_CLASSES: dict[CreateDSFrom, Type[DataSourceSpec]] = {} +_DSRC_SPEC_CLASSES: dict[DataSourceType, Type[DataSourceSpec]] = {} -def get_data_source_spec_class(ds_type: CreateDSFrom) -> Type[DataSourceSpec]: +def get_data_source_spec_class(ds_type: DataSourceType) -> Type[DataSourceSpec]: """Return ``DataSourceSpec`` subclass to be used for given dataset type.""" return _DSRC_SPEC_CLASSES[ds_type] -def register_data_source_spec_class(source_type: CreateDSFrom, spec_cls: Type[DataSourceSpec]) -> None: +def register_data_source_spec_class(source_type: DataSourceType, spec_cls: 
Type[DataSourceSpec]) -> None: """Register ``DataSourceSpec`` subclass in the mapping.""" _DSRC_SPEC_CLASSES[source_type] = spec_cls diff --git a/lib/dl_core/dl_core/dataset_capabilities.py b/lib/dl_core/dl_core/dataset_capabilities.py index 23a6a3995..d299bc221 100644 --- a/lib/dl_core/dl_core/dataset_capabilities.py +++ b/lib/dl_core/dl_core/dataset_capabilities.py @@ -17,8 +17,8 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, DataSourceRole, + DataSourceType, JoinType, SourceBackendType, ) @@ -42,7 +42,7 @@ @lru_cache(maxsize=200) -def get_compatible_source_types(source_type: CreateDSFrom) -> FrozenSet[CreateDSFrom]: +def get_compatible_source_types(source_type: DataSourceType) -> FrozenSet[DataSourceType]: """Return frozen set of data source types compatible with ``ds_type``""" raw_comp_types = frozenset(list_registered_source_types()) @@ -56,7 +56,7 @@ def get_compatible_source_types(source_type: CreateDSFrom) -> FrozenSet[CreateDS return frozenset(compat_types) -_SOURCE_CONNECTION_COMPATIBILITY: Dict[CreateDSFrom, FrozenSet[ConnectionType]] = {} +_SOURCE_CONNECTION_COMPATIBILITY: Dict[DataSourceType, FrozenSet[ConnectionType]] = {} def _populate_compatibility_map(): # type: ignore # TODO: fix @@ -68,7 +68,7 @@ def _populate_compatibility_map(): # type: ignore # TODO: fix @lru_cache(maxsize=100) -def get_conn_types_compatible_with_src_types(source_types: FrozenSet[CreateDSFrom]) -> FrozenSet[ConnectionType]: +def get_conn_types_compatible_with_src_types(source_types: FrozenSet[DataSourceType]) -> FrozenSet[ConnectionType]: if not _SOURCE_CONNECTION_COMPATIBILITY: _populate_compatibility_map() assert _SOURCE_CONNECTION_COMPATIBILITY @@ -129,7 +129,7 @@ def get_supported_join_types( def source_can_be_added( self, connection_id: Optional[str], - created_from: CreateDSFrom, + created_from: DataSourceType, ignore_source_ids: Optional[Collection[str]] = None, ) -> bool: """ @@ -156,7 +156,7 @@ def source_can_be_added( def 
get_compatible_source_types( self, ignore_source_ids: Optional[Collection[str]] = None, - ) -> FrozenSet[CreateDSFrom]: + ) -> FrozenSet[DataSourceType]: """Return a frozen set of source types compatible with dataset's current state""" ignore_source_ids = ignore_source_ids or () diff --git a/lib/dl_core/dl_core/db/conversion_base.py b/lib/dl_core/dl_core/db/conversion_base.py index 620adde4d..beb4a9c6f 100644 --- a/lib/dl_core/dl_core/db/conversion_base.py +++ b/lib/dl_core/dl_core/db/conversion_base.py @@ -23,8 +23,8 @@ import pytz from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core import ( converter_types_cast, @@ -219,33 +219,33 @@ class LowercaseTypeCaster(TypeCaster): class TypeTransformer: conn_type: ClassVar[ConnectionType] - native_to_user_map: ClassVar[dict[GenericNativeType, BIType]] = {} - user_to_native_map: ClassVar[dict[BIType, GenericNativeType]] = {} - casters: ClassVar[dict[BIType, TypeCaster]] = { - BIType.integer: IntegerTypeCaster(), - BIType.float: FloatTypeCaster(), - BIType.boolean: BooleanTypeCaster(), - BIType.string: StringTypeCaster(), - BIType.date: DateTypeCaster(), - BIType.datetime: DatetimeTypeCaster(), - BIType.datetimetz: DatetimeTZTypeCaster(), - BIType.genericdatetime: GenericDatetimeTypeCaster(), - BIType.geopoint: StringTypeCaster(), - BIType.geopolygon: StringTypeCaster(), - BIType.uuid: StringTypeCaster(), - BIType.markup: StringTypeCaster(), - BIType.array_int: ArrayIntTypeCaster(), - BIType.array_float: ArrayFloatTypeCaster(), - BIType.array_str: ArrayStrTypeCaster(), - BIType.tree_str: ArrayStrTypeCaster(), # Same as array - BIType.unsupported: UnsupportedCaster(), + native_to_user_map: ClassVar[dict[GenericNativeType, UserDataType]] = {} + user_to_native_map: ClassVar[dict[UserDataType, GenericNativeType]] = {} + casters: ClassVar[dict[UserDataType, TypeCaster]] = { + UserDataType.integer: IntegerTypeCaster(), + UserDataType.float: FloatTypeCaster(), + UserDataType.boolean: 
BooleanTypeCaster(), + UserDataType.string: StringTypeCaster(), + UserDataType.date: DateTypeCaster(), + UserDataType.datetime: DatetimeTypeCaster(), + UserDataType.datetimetz: DatetimeTZTypeCaster(), + UserDataType.genericdatetime: GenericDatetimeTypeCaster(), + UserDataType.geopoint: StringTypeCaster(), + UserDataType.geopolygon: StringTypeCaster(), + UserDataType.uuid: StringTypeCaster(), + UserDataType.markup: StringTypeCaster(), + UserDataType.array_int: ArrayIntTypeCaster(), + UserDataType.array_float: ArrayFloatTypeCaster(), + UserDataType.array_str: ArrayStrTypeCaster(), + UserDataType.tree_str: ArrayStrTypeCaster(), # Same as array + UserDataType.unsupported: UnsupportedCaster(), } def type_native_to_user( self, native_t: GenericNativeType, - user_t: Optional[BIType] = None, - ) -> BIType: + user_t: Optional[UserDataType] = None, + ) -> UserDataType: if user_t is not None: # original UT is given, try to validate against NT. # read as 'native type might have been made from the provided user type'. 
@@ -268,7 +268,9 @@ def make_foreign_native_type_conversion(self, native_t: GenericNativeType) -> Ge """ return native_t # no known conversion - def type_user_to_native(self, user_t: BIType, native_t: Optional[GenericNativeType] = None) -> GenericNativeType: + def type_user_to_native( + self, user_t: UserDataType, native_t: Optional[GenericNativeType] = None + ) -> GenericNativeType: if native_t is not None: # original NT is given, try to do a direct conversion @@ -296,12 +298,12 @@ def type_user_to_native(self, user_t: BIType, native_t: Optional[GenericNativeTy return result @classmethod - def cast_for_input(cls, value: Any, user_t: BIType) -> Any: + def cast_for_input(cls, value: Any, user_t: UserDataType) -> Any: """Prepare value for insertion into the database""" return cls.casters[user_t].cast_for_input(value=value) @classmethod - def cast_for_output(cls, value: Any, user_t: Optional[BIType] = None) -> Any: + def cast_for_output(cls, value: Any, user_t: Optional[UserDataType] = None) -> Any: """Convert value from DB to Python value conforming to given ``user_t``""" if user_t is None: return value diff --git a/lib/dl_core/dl_core/db/elements.py b/lib/dl_core/dl_core/db/elements.py index a02d37933..c88ea40ee 100644 --- a/lib/dl_core/dl_core/db/elements.py +++ b/lib/dl_core/dl_core/db/elements.py @@ -13,8 +13,8 @@ import attr from dl_constants.enums import ( - BIType, IndexKind, + UserDataType, ) from dl_core.db.native_type import GenericNativeType from dl_utils.utils import get_type_full_name @@ -41,7 +41,7 @@ def __new__( # type: ignore # TODO: fix cls, name: str, title: Optional[str] = None, - user_type: Optional[Union[BIType, str]] = None, + user_type: Optional[Union[UserDataType, str]] = None, nullable: Optional[bool] = True, native_type: Optional[GenericNativeType] = None, source_id: Any = None, @@ -51,7 +51,7 @@ def __new__( # type: ignore # TODO: fix ): title = title or name if isinstance(user_type, str): - user_type = BIType[user_type] + user_type = 
UserDataType[user_type] has_auto_aggregation = has_auto_aggregation if has_auto_aggregation is not None else False return super().__new__( cls, diff --git a/lib/dl_core/dl_core/fields.py b/lib/dl_core/dl_core/fields.py index 150a6c6d5..8e901ab8f 100644 --- a/lib/dl_core/dl_core/fields.py +++ b/lib/dl_core/dl_core/fields.py @@ -21,12 +21,12 @@ from dl_constants.enums import ( AggregationFunction, - BIType, CalcMode, FieldType, ManagedBy, ParameterValueConstraintType, TopLevelComponentId, + UserDataType, ) from dl_core.components.ids import FieldId from dl_core.exc import FieldNotFound @@ -113,7 +113,7 @@ class FormulaCalculationSpec(CalculationSpec): mode = CalcMode.formula - # The formula itself. Parsed and handled mostly by the bi_formula package. + # The formula itself. Parsed and handled mostly by the dl_formula package. # In this formula other fields are referred to exclusively by title, # Use empty string (`''`) for non-formula fields. formula: str = attr.ib(kw_only=True, default="") @@ -201,11 +201,11 @@ class BIField(NamedTuple): # TODO: Convert to attr.s # - direct: it corresponds to the user_type of the referenced raw_schema column; # - formula: automatically derived from the formula; # - parameter: defined by the user. - initial_data_type: Optional[BIType] + initial_data_type: Optional[UserDataType] # redefines the data type in `initial_data_type`, is set by the user. # For parameter calc_mode, it is the same as `initial_data_type`. - cast: Optional[BIType] + cast: Optional[UserDataType] # An explicitly set aggregation (via UI). # Value "none" corresponds to no aggregation. @@ -213,7 +213,7 @@ class BIField(NamedTuple): # TODO: Convert to attr.s aggregation: AggregationFunction # The data type automatically determined after the aggregation is applied - data_type: Optional[BIType] + data_type: Optional[UserDataType] # Flag indicates that the field is automatically aggregated # and an explicit aggregation (`aggregation` attribute) is not applicable. 
@@ -249,9 +249,9 @@ def make( type: Union[FieldType, str, None] = None, hidden: bool = False, description: str = "", - cast: Union[BIType, str, None] = None, - initial_data_type: Union[BIType, str, None] = None, - data_type: Union[BIType, str, None] = None, + cast: Union[UserDataType, str, None] = None, + initial_data_type: Union[UserDataType, str, None] = None, + data_type: Union[UserDataType, str, None] = None, valid: Optional[bool] = None, has_auto_aggregation: bool = False, lock_aggregation: bool = False, @@ -276,9 +276,9 @@ def make( type = FieldType.DIMENSION assert type is not None - cast = BIType.normalize(cast) - initial_data_type = BIType.normalize(initial_data_type) - data_type = BIType.normalize(data_type) + cast = UserDataType.normalize(cast) + initial_data_type = UserDataType.normalize(initial_data_type) + data_type = UserDataType.normalize(data_type) valid = valid if valid is not None else True managed_by = ManagedBy.normalize(managed_by) or ManagedBy.user diff --git a/lib/dl_core/dl_core/query/bi_query.py b/lib/dl_core/dl_core/query/bi_query.py index 8c4be3746..d40d9ed88 100644 --- a/lib/dl_core/dl_core/query/bi_query.py +++ b/lib/dl_core/dl_core/query/bi_query.py @@ -16,7 +16,7 @@ Select, ) -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.query.expression import ( ExpressionCtx, OrderByExpressionCtx, @@ -62,5 +62,5 @@ def get_required_avatar_ids(self) -> Set[str]: @attr.s class QueryAndResultInfo: query: Select = attr.ib(kw_only=True) - user_types: List[BIType] = attr.ib(kw_only=True) + user_types: List[UserDataType] = attr.ib(kw_only=True) col_names: List[str] = attr.ib(kw_only=True) diff --git a/lib/dl_core/dl_core/query/expression.py b/lib/dl_core/dl_core/query/expression.py index 75b2ab69a..0fb9da213 100644 --- a/lib/dl_core/dl_core/query/expression.py +++ b/lib/dl_core/dl_core/query/expression.py @@ -17,8 +17,8 @@ from sqlalchemy.sql.elements import ClauseElement from dl_constants.enums import ( 
- BIType, JoinType, + UserDataType, ) from dl_core.components.ids import AvatarId @@ -30,7 +30,7 @@ class ExpressionCtx: expression: ClauseElement avatar_ids: Optional[Sequence[str]] = None # TODO: make required - user_type: Optional[BIType] = None + user_type: Optional[UserDataType] = None alias: Optional[str] = None original_field_id: Optional[Any] = None diff --git a/lib/dl_core/dl_core/reporting/reports.py b/lib/dl_core/dl_core/reporting/reports.py index 56255c917..b20877480 100644 --- a/lib/dl_core/dl_core/reporting/reports.py +++ b/lib/dl_core/dl_core/reporting/reports.py @@ -13,7 +13,7 @@ from dl_constants.enums import ( ConnectionType, - QueryType, + ReportingQueryType, ) @@ -40,7 +40,7 @@ class DbQueryExecutionReport: cache_full_hit: bool # not in action fields: - query_type: QueryType + query_type: ReportingQueryType is_public: bool def convert_for_logging_extras(self, value) -> Union[str, int, bool, None]: # type: ignore # TODO: fix diff --git a/lib/dl_core/dl_core/services_registry/conn_executor_factory.py b/lib/dl_core/dl_core/services_registry/conn_executor_factory.py index dde904a78..f1d379c64 100644 --- a/lib/dl_core/dl_core/services_registry/conn_executor_factory.py +++ b/lib/dl_core/dl_core/services_registry/conn_executor_factory.py @@ -178,9 +178,10 @@ def _get_exec_mode_and_rqe_attrs( self, conn: ExecutorBasedMixin, executor_cls: Type[AsyncConnExecutorBase] ) -> Tuple[ExecutionMode, Optional[RemoteQueryExecutorData]]: conn_dto = conn.get_conn_dto() + conn_options = conn.get_conn_options() ce_cls = self.get_async_conn_executor_cls(conn) - if not self.conn_sec_mgr.is_safe_connection(conn_dto): + if not self.conn_sec_mgr.is_safe_connection(conn_dto, conn_options): # Only RQE mode with external RQE supported for unsafe connection if ExecutionMode.RQE not in executor_cls.supported_exec_mode: raise CEFactoryError( diff --git a/lib/dl_core/dl_core/services_registry/file_uploader_client_factory.py 
b/lib/dl_core/dl_core/services_registry/file_uploader_client_factory.py index 70f9e0633..c84ebab69 100644 --- a/lib/dl_core/dl_core/services_registry/file_uploader_client_factory.py +++ b/lib/dl_core/dl_core/services_registry/file_uploader_client_factory.py @@ -21,7 +21,7 @@ THeaders, ) from dl_constants.api_constants import DLHeadersCommon -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db.elements import SchemaColumn from dl_core.db.native_type_schema import OneOfNativeTypeSchema from dl_utils.aio import await_sync @@ -52,7 +52,7 @@ class GSheetsFileSourceDesc(FileSourceDesc): @attr.s(frozen=True) class SourceInternalParams: preview_id: str = attr.ib() - raw_schema: list[RawSchemaType] = attr.ib() + raw_schema: RawSchemaType = attr.ib() @attr.s(frozen=True) @@ -67,7 +67,7 @@ class RawSchemaColumnSchema(ma.Schema): native_type = ma.fields.Nested(OneOfNativeTypeSchema, allow_none=True) - user_type = ma.fields.Enum(BIType) + user_type = ma.fields.Enum(UserDataType) description = ma.fields.String(dump_default="", allow_none=True) has_auto_aggregation = ma.fields.Boolean(dump_default=False, allow_none=True) lock_aggregation = ma.fields.Boolean(dump_default=False, allow_none=True) diff --git a/lib/dl_core/dl_core/united_storage_client.py b/lib/dl_core/dl_core/united_storage_client.py index 93caa21e5..692ff9989 100644 --- a/lib/dl_core/dl_core/united_storage_client.py +++ b/lib/dl_core/dl_core/united_storage_client.py @@ -401,6 +401,7 @@ def _req_data_create_entry( # type: ignore # TODO: fix type_=None, hidden=None, links=None, + mode="publish", unversioned_data=None, **kwargs, ) -> RequestData: @@ -424,6 +425,7 @@ def _req_data_create_entry( # type: ignore # TODO: fix "recursion": True, "hidden": hidden, "links": links, + "mode": mode, **kwargs, }, ) @@ -723,7 +725,6 @@ def update_entry( # type: ignore # TODO: fix data: Optional[dict[str, Any]] = None, unversioned_data: Optional[dict[str, Any]] = None, meta: 
Optional[dict[str, str]] = None, - mode: str = "save", lock: Optional[str] = None, hidden: Optional[bool] = None, links: Optional[dict[str, Any]] = None, @@ -734,7 +735,6 @@ def update_entry( # type: ignore # TODO: fix data=data, unversioned_data=unversioned_data, meta=meta, - mode=mode, lock=lock, hidden=hidden, links=links, diff --git a/lib/dl_core/dl_core/united_storage_client_aio.py b/lib/dl_core/dl_core/united_storage_client_aio.py index 7c755ec7f..21609fd27 100644 --- a/lib/dl_core/dl_core/united_storage_client_aio.py +++ b/lib/dl_core/dl_core/united_storage_client_aio.py @@ -207,7 +207,7 @@ async def create_entry( # type: ignore # TODO: fix return await self._request(rq_data) async def update_entry( # type: ignore # TODO: fix - self, entry_id, data=None, unversioned_data=None, meta=None, mode="publish", lock=None, hidden=None, links=None + self, entry_id, data=None, unversioned_data=None, meta=None, lock=None, hidden=None, links=None ): return await self._request( self._req_data_update_entry( @@ -215,7 +215,6 @@ async def update_entry( # type: ignore # TODO: fix data=data, unversioned_data=unversioned_data, meta=meta, - mode=mode, lock=lock, hidden=hidden, links=links, diff --git a/lib/dl_core/dl_core/us_connection_base.py b/lib/dl_core/dl_core/us_connection_base.py index d5faefc27..b318a033d 100644 --- a/lib/dl_core/dl_core/us_connection_base.py +++ b/lib/dl_core/dl_core/us_connection_base.py @@ -24,8 +24,8 @@ from dl_constants.enums import ( ConnectionState, ConnectionType, - CreateDSFrom, DataSourceRole, + DataSourceType, RawSQLLevel, ) from dl_core import connection_models @@ -81,7 +81,7 @@ class DataSourceTemplate(NamedTuple): title: str group: list[str] # main properties - source_type: CreateDSFrom + source_type: DataSourceType connection_id: str # type-specific parameters: dict @@ -107,8 +107,8 @@ class ConnectionBase(USEntry, metaclass=abc.ABCMeta): scope: ClassVar[str] = "connection" # type: ignore # TODO: fix conn_type: ConnectionType - 
source_type: ClassVar[Optional[CreateDSFrom]] = None - allowed_source_types: ClassVar[Optional[frozenset[CreateDSFrom]]] = None + source_type: ClassVar[Optional[DataSourceType]] = None + allowed_source_types: ClassVar[Optional[frozenset[DataSourceType]]] = None allow_dashsql: ClassVar[bool] = False allow_cache: ClassVar[bool] = False is_always_internal_source: ClassVar[bool] = False @@ -177,7 +177,7 @@ def data_export_forbidden(self) -> bool: return self.data.data_export_forbidden if hasattr(self.data, "data_export_forbidden") else False @classmethod - def get_provided_source_types(cls) -> frozenset[CreateDSFrom]: + def get_provided_source_types(cls) -> frozenset[DataSourceType]: if cls.allowed_source_types is not None: return cls.allowed_source_types if cls.source_type is not None: @@ -440,7 +440,7 @@ def is_dashsql_allowed(self) -> bool: def _make_subselect_templates( self, - source_type: CreateDSFrom, + source_type: DataSourceType, localizer: Localizer, title: str = "SQL", field_doc_key: str = "ANY_SUBSELECT/subsql", diff --git a/lib/dl_core/dl_core/us_dataset.py b/lib/dl_core/dl_core/us_dataset.py index d4617e8e6..c29a66f5a 100644 --- a/lib/dl_core/dl_core/us_dataset.py +++ b/lib/dl_core/dl_core/us_dataset.py @@ -12,12 +12,12 @@ from dl_constants.enums import ( AggregationFunction, - BIType, - CreateDSFrom, DataSourceCreatedVia, DataSourceRole, + DataSourceType, FieldType, ManagedBy, + UserDataType, ) from dl_core import multisource from dl_core.base_models import ( @@ -113,7 +113,7 @@ def get_own_materialized_tables(self, source_id: Optional[str] = None) -> Genera def find_data_source_configuration( # type: ignore # TODO: fix self, connection_id: Optional[str], - created_from: Optional[CreateDSFrom] = None, + created_from: Optional[DataSourceType] = None, title: Optional[str] = None, parameters: Optional[dict] = None, ) -> Optional[str]: @@ -169,7 +169,7 @@ def create_result_schema_field( guid = field_id_generator.make_field_id(title=title) hidden = False - if 
column.user_type == BIType.unsupported: + if column.user_type == UserDataType.unsupported: # Auto-hide because it's unselectable. hidden = True diff --git a/lib/dl_core/dl_core/us_manager/storage_schemas/base_types.py b/lib/dl_core/dl_core/us_manager/storage_schemas/base_types.py index 8ad927334..873eebefd 100644 --- a/lib/dl_core/dl_core/us_manager/storage_schemas/base_types.py +++ b/lib/dl_core/dl_core/us_manager/storage_schemas/base_types.py @@ -9,8 +9,8 @@ ) from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core.db.elements import SchemaColumn from dl_core.db.native_type import GenericNativeType @@ -33,7 +33,7 @@ class SchemaColumnStorageSchema(Schema): name = fields.String(allow_none=False) title = fields.String(allow_none=True) - user_type = fields.Enum(BIType, by_value=False) + user_type = fields.Enum(UserDataType, by_value=False) nullable = fields.Boolean() native_type = fields.Nested(NativeTypeSchema, allow_none=True) source_id = fields.String(allow_none=True) diff --git a/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec.py b/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec.py index 570afabdc..d6c5f515f 100644 --- a/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec.py +++ b/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec.py @@ -7,7 +7,7 @@ from marshmallow_oneofschema import OneOfSchema -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.data_source_spec.base import DataSourceSpec from dl_core.us_manager.storage_schemas.data_source_spec_base import DataSourceSpecStorageSchema @@ -22,5 +22,5 @@ def get_obj_type(self, obj: Any) -> str: return obj.source_type.name -def register_data_source_schema(source_type: CreateDSFrom, schema_cls: Type[DataSourceSpecStorageSchema]) -> None: +def register_data_source_schema(source_type: DataSourceType, schema_cls: Type[DataSourceSpecStorageSchema]) -> None: 
GenericDataSourceSpecStorageSchema.type_schemas[source_type.name] = schema_cls diff --git a/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec_base.py b/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec_base.py index 43fce6ece..9d33ec1a7 100644 --- a/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec_base.py +++ b/lib/dl_core/dl_core/us_manager/storage_schemas/data_source_spec_base.py @@ -4,7 +4,7 @@ import marshmallow.fields as ma_fields -from dl_constants.enums import CreateDSFrom +from dl_constants.enums import DataSourceType from dl_core.base_models import InternalMaterializationConnectionRef from dl_core.data_source.type_mapping import get_data_source_class from dl_core.data_source_spec.base import DataSourceSpec @@ -38,7 +38,7 @@ class DataSourceSpecStorageSchema(BaseStorageSchema): # noqa load_default=list, allow_none=True, ) - created_from = DynamicEnumField(CreateDSFrom, attribute="source_type") + created_from = DynamicEnumField(DataSourceType, attribute="source_type") def pre_process_input_data(self, data: dict[str, Any]) -> dict: data = data.copy() @@ -63,7 +63,7 @@ def post_process_output_data(self, data: dict[str, Any]) -> dict[str, Any]: # n return data def push_ctx(self, data: dict): # type: ignore # TODO: fix - dsrc_cls = get_data_source_class(CreateDSFrom[data["created_from"]]) + dsrc_cls = get_data_source_class(DataSourceType[data["created_from"]]) self.context[CtxKey.ds_conn_type] = dsrc_cls.conn_type def pop_ctx(self, data: dict): # type: ignore # TODO: fix diff --git a/lib/dl_core/dl_core/us_manager/storage_schemas/dataset.py b/lib/dl_core/dl_core/us_manager/storage_schemas/dataset.py index 3defc75c3..b620ba51e 100644 --- a/lib/dl_core/dl_core/us_manager/storage_schemas/dataset.py +++ b/lib/dl_core/dl_core/us_manager/storage_schemas/dataset.py @@ -17,7 +17,6 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, CalcMode, ConditionPartCalcMode, FieldType, @@ -27,6 +26,7 @@ 
ParameterValueConstraintType, RLSPatternType, RLSSubjectType, + UserDataType, WhereClauseOperation, ) from dl_core import multisource @@ -175,7 +175,7 @@ def flatten_items(self, data, **_): # type: ignore # TODO: fix class BaseValueSchema(DefaultStorageSchema): - type = ma_fields.Enum(BIType) + type = ma_fields.Enum(UserDataType) value = ma_fields.Field() @@ -262,22 +262,22 @@ class TreeStrValueSchema(BaseValueSchema): class ValueSchema(OneOfSchema): type_field = "type" type_schemas = { - BIType.string.name: StringValueSchema, - BIType.integer.name: IntegerValueSchema, - BIType.float.name: FloatValueSchema, - BIType.date.name: DateValueSchema, - BIType.datetime.name: DateTimeValueSchema, - BIType.datetimetz.name: DateTimeTZValueSchema, - BIType.genericdatetime.name: GenericDateTimeValueSchema, - BIType.boolean.name: BooleanValueSchema, - BIType.geopoint.name: GeoPointValueSchema, - BIType.geopolygon.name: GeoPolygonValueSchema, - BIType.uuid.name: UuidValueSchema, - BIType.markup.name: MarkupValueSchema, - BIType.array_str.name: ArrayStrValueSchema, - BIType.array_int.name: ArrayIntValueSchema, - BIType.array_float.name: ArrayFloatValueSchema, - BIType.tree_str.name: TreeStrValueSchema, + UserDataType.string.name: StringValueSchema, + UserDataType.integer.name: IntegerValueSchema, + UserDataType.float.name: FloatValueSchema, + UserDataType.date.name: DateValueSchema, + UserDataType.datetime.name: DateTimeValueSchema, + UserDataType.datetimetz.name: DateTimeTZValueSchema, + UserDataType.genericdatetime.name: GenericDateTimeValueSchema, + UserDataType.boolean.name: BooleanValueSchema, + UserDataType.geopoint.name: GeoPointValueSchema, + UserDataType.geopolygon.name: GeoPolygonValueSchema, + UserDataType.uuid.name: UuidValueSchema, + UserDataType.markup.name: MarkupValueSchema, + UserDataType.array_str.name: ArrayStrValueSchema, + UserDataType.array_int.name: ArrayIntValueSchema, + UserDataType.array_float.name: ArrayFloatValueSchema, + UserDataType.tree_str.name: 
TreeStrValueSchema, } def get_obj_type(self, obj: BIValue) -> str: @@ -360,9 +360,9 @@ def get_obj_type(self, obj: CalculationSpec) -> str: type = ma_fields.Enum(FieldType) hidden = ma_fields.Boolean() description = ma_fields.String() - cast = ma_fields.Enum(BIType, allow_none=True) - initial_data_type = ma_fields.Enum(BIType, allow_none=True) - data_type = ma_fields.Enum(BIType, allow_none=True) + cast = ma_fields.Enum(UserDataType, allow_none=True) + initial_data_type = ma_fields.Enum(UserDataType, allow_none=True) + data_type = ma_fields.Enum(UserDataType, allow_none=True) valid = ma_fields.Boolean(allow_none=True) has_auto_aggregation = ma_fields.Boolean(allow_none=True) lock_aggregation = ma_fields.Boolean(allow_none=True) diff --git a/lib/dl_core/dl_core/us_manager/storage_schemas/raw_schema.py b/lib/dl_core/dl_core/us_manager/storage_schemas/raw_schema.py index dd29a9467..20c5c717f 100644 --- a/lib/dl_core/dl_core/us_manager/storage_schemas/raw_schema.py +++ b/lib/dl_core/dl_core/us_manager/storage_schemas/raw_schema.py @@ -8,7 +8,7 @@ from marshmallow import fields -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.db import SchemaColumn from dl_core.db.native_type_schema import OneOfNativeTypeSchema from dl_core.us_manager.storage_schemas.base import ( @@ -25,7 +25,7 @@ class DataSourceRawSchemaEntryStorageSchema(BaseStorageSchema[SchemaColumn]): title = fields.String(required=False, allow_none=True) description = fields.String(required=False, allow_none=True) - type = fields.Enum(BIType, attribute="user_type") + type = fields.Enum(UserDataType, attribute="user_type") nullable = fields.Boolean(required=False, allow_none=True) lock_aggregation = fields.Boolean(required=False, allow_none=True) diff --git a/lib/dl_core/dl_core/us_manager/us_manager_sync.py b/lib/dl_core/dl_core/us_manager/us_manager_sync.py index c73d06f9e..fe831ca02 100644 --- a/lib/dl_core/dl_core/us_manager/us_manager_sync.py +++ 
b/lib/dl_core/dl_core/us_manager/us_manager_sync.py @@ -161,9 +161,6 @@ def delete(self, entry: USEntry) -> None: except Exception: LOGGER.exception("Error during post-delete hook execution for entry %s", entry.uuid) - def publish(self, entry: USEntry) -> None: - self._us_client.update_entry(entry.uuid, data=entry.data, mode="publish") # type: ignore # TODO: fix - @overload def get_by_id(self, entry_id: str, expected_type: type(None) = None) -> USEntry: # type: ignore # TODO: fix pass diff --git a/lib/dl_core/dl_core/us_manager/us_manager_sync_mock.py b/lib/dl_core/dl_core/us_manager/us_manager_sync_mock.py index 588eaca67..c9abb00ee 100644 --- a/lib/dl_core/dl_core/us_manager/us_manager_sync_mock.py +++ b/lib/dl_core/dl_core/us_manager/us_manager_sync_mock.py @@ -105,7 +105,6 @@ def update_entry( data: Optional[Dict[str, Any]] = None, unversioned_data: Optional[Dict[str, Any]] = None, meta: Optional[Dict[str, str]] = None, - mode: str = "save", lock: Optional[str] = None, hidden: Optional[bool] = None, links: Optional[Dict[str, Any]] = None, diff --git a/lib/dl_core/dl_core/values.py b/lib/dl_core/dl_core/values.py index c02a7c90b..da16d9881 100644 --- a/lib/dl_core/dl_core/values.py +++ b/lib/dl_core/dl_core/values.py @@ -14,7 +14,7 @@ import attr -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType _INNER_TYPE = TypeVar("_INNER_TYPE") @@ -22,85 +22,85 @@ @attr.s(frozen=True) class BIValue(Generic[_INNER_TYPE]): - type: ClassVar[BIType] + type: ClassVar[UserDataType] value: _INNER_TYPE = attr.ib() @attr.s(frozen=True) class StringValue(BIValue[str]): - type: ClassVar[BIType] = BIType.string + type: ClassVar[UserDataType] = UserDataType.string @attr.s(frozen=True) class IntegerValue(BIValue[int]): - type: ClassVar[BIType] = BIType.integer + type: ClassVar[UserDataType] = UserDataType.integer @attr.s(frozen=True) class FloatValue(BIValue[float]): - type: ClassVar[BIType] = BIType.float + type: ClassVar[UserDataType] = 
UserDataType.float @attr.s(frozen=True) class DateValue(BIValue[date]): - type: ClassVar[BIType] = BIType.date + type: ClassVar[UserDataType] = UserDataType.date @attr.s(frozen=True) class DateTimeValue(BIValue[datetime]): - type: ClassVar[BIType] = BIType.datetime + type: ClassVar[UserDataType] = UserDataType.datetime @attr.s(frozen=True) class DateTimeTZValue(BIValue[datetime]): - type: ClassVar[BIType] = BIType.datetimetz + type: ClassVar[UserDataType] = UserDataType.datetimetz @attr.s(frozen=True) class GenericDateTimeValue(BIValue[datetime]): - type: ClassVar[BIType] = BIType.genericdatetime + type: ClassVar[UserDataType] = UserDataType.genericdatetime @attr.s(frozen=True) class BooleanValue(BIValue[bool]): - type: ClassVar[BIType] = BIType.boolean + type: ClassVar[UserDataType] = UserDataType.boolean @attr.s(frozen=True) class GeoPointValue(BIValue[List[Union[int, float]]]): - type: ClassVar[BIType] = BIType.geopoint + type: ClassVar[UserDataType] = UserDataType.geopoint @attr.s(frozen=True) class GeoPolygonValue(BIValue[List[List[List[Union[int, float]]]]]): - type: ClassVar[BIType] = BIType.geopolygon + type: ClassVar[UserDataType] = UserDataType.geopolygon @attr.s(frozen=True) class UuidValue(BIValue[str]): - type: ClassVar[BIType] = BIType.uuid + type: ClassVar[UserDataType] = UserDataType.uuid @attr.s(frozen=True) class MarkupValue(BIValue[str]): - type: ClassVar[BIType] = BIType.markup + type: ClassVar[UserDataType] = UserDataType.markup @attr.s(frozen=True) class ArrayStrValue(BIValue[List[str]]): - type: ClassVar[BIType] = BIType.array_str + type: ClassVar[UserDataType] = UserDataType.array_str @attr.s(frozen=True) class TreeStrValue(BIValue[List[str]]): - type: ClassVar[BIType] = BIType.tree_str + type: ClassVar[UserDataType] = UserDataType.tree_str @attr.s(frozen=True) class ArrayIntValue(BIValue[List[int]]): - type: ClassVar[BIType] = BIType.array_int + type: ClassVar[UserDataType] = UserDataType.array_int @attr.s(frozen=True) class 
ArrayFloatValue(BIValue[List[float]]): - type: ClassVar[BIType] = BIType.array_float + type: ClassVar[UserDataType] = UserDataType.array_float diff --git a/lib/dl_core/dl_core_tests/db/base.py b/lib/dl_core/dl_core_tests/db/base.py index 1cc5f28aa..2001176fd 100644 --- a/lib/dl_core/dl_core_tests/db/base.py +++ b/lib/dl_core/dl_core_tests/db/base.py @@ -3,11 +3,6 @@ import pytest -from dl_connector_clickhouse.core.clickhouse.constants import SOURCE_TYPE_CH_TABLE -from dl_connector_clickhouse.core.clickhouse.testing.connection import make_clickhouse_saved_connection -from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse -from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE -from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig from dl_core.services_registry.top_level import ServicesRegistry from dl_core.us_connection_base import ConnectionBase from dl_core.us_manager.us_manager_sync import SyncUSManager @@ -19,6 +14,12 @@ from dl_core_testing.testcases.dataset import BaseDatasetTestClass import dl_core_tests.db.config as test_config +from dl_connector_clickhouse.core.clickhouse.constants import SOURCE_TYPE_CH_TABLE +from dl_connector_clickhouse.core.clickhouse.testing.connection import make_clickhouse_saved_connection +from dl_connector_clickhouse.core.clickhouse.us_connection import ConnectionClickhouse +from dl_connector_clickhouse.core.clickhouse_base.constants import CONNECTION_TYPE_CLICKHOUSE +from dl_connector_clickhouse.db_testing.engine_wrapper import ClickhouseDbEngineConfig + class DefaultCoreTestClass(BaseDatasetTestClass[ConnectionClickhouse]): """Base class for generic, non-connectorized core tests""" diff --git a/lib/dl_core/dl_core_tests/unit/components/test_dependencies.py b/lib/dl_core/dl_core_tests/unit/components/test_dependencies.py index 07dd04155..daa718e76 100644 --- a/lib/dl_core/dl_core_tests/unit/components/test_dependencies.py +++ 
b/lib/dl_core/dl_core_tests/unit/components/test_dependencies.py @@ -11,9 +11,9 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, FieldType, ManagedBy, + UserDataType, ) from dl_core.components.dependencies.field_avatar import FieldAvatarDependencyManager from dl_core.components.dependencies.field_deep import FieldDeepInterDependencyManager @@ -67,9 +67,9 @@ def make_direct_field(field_id: FieldId, avatar_id: Optional[AvatarId]) -> BIFie title=shortuuid.uuid(), type=FieldType.DIMENSION, aggregation=AggregationFunction.none, - initial_data_type=BIType.integer, - cast=BIType.integer, - data_type=BIType.integer, + initial_data_type=UserDataType.integer, + cast=UserDataType.integer, + data_type=UserDataType.integer, has_auto_aggregation=False, lock_aggregation=False, valid=True, @@ -87,9 +87,9 @@ def make_formula_field(field_id: FieldId) -> BIField: title=shortuuid.uuid(), type=FieldType.DIMENSION, aggregation=AggregationFunction.none, - initial_data_type=BIType.integer, - cast=BIType.integer, - data_type=BIType.integer, + initial_data_type=UserDataType.integer, + cast=UserDataType.integer, + data_type=UserDataType.integer, has_auto_aggregation=False, lock_aggregation=False, valid=True, diff --git a/lib/dl_core/pyproject.toml b/lib/dl_core/pyproject.toml index 45efdc859..d967006da 100644 --- a/lib/dl_core/pyproject.toml +++ b/lib/dl_core/pyproject.toml @@ -92,7 +92,16 @@ check_untyped_defs = true strict_optional = true [[tool.mypy.overrides]] -module = ["aiodns.*", "aiogoogle.*", "anyascii.*", "lz4.*", "marshmallow_oneofschema.*", "raven.*"] +module = [ + "aiodns.*", + "aiogoogle.*", + "anyascii.*", + "lz4.*", + "marshmallow_oneofschema.*", + "raven.*", + "types_aiobotocore_s3.*", + "mypy_boto3_s3.*" +] ignore_missing_imports = true [datalens.i18n.domains] diff --git a/lib/dl_core_testing/dl_core_testing/connector.py b/lib/dl_core_testing/dl_core_testing/connector.py index d76cabaf7..b1295fd6f 100644 --- 
a/lib/dl_core_testing/dl_core_testing/connector.py +++ b/lib/dl_core_testing/dl_core_testing/connector.py @@ -1,6 +1,6 @@ from dl_constants.enums import ( ConnectionType, - CreateDSFrom, + DataSourceType, ) from dl_core.connectors.base.connector import ( CoreConnectionDefinition, @@ -13,7 +13,7 @@ CONNECTION_TYPE_TESTING = ConnectionType.declare("testing") -SOURCE_TYPE_TESTING = CreateDSFrom.declare("TESTING") +SOURCE_TYPE_TESTING = DataSourceType.declare("TESTING") class TestingConnection(ConnectionBase): diff --git a/lib/dl_core_testing/dl_core_testing/csv_table_dumper.py b/lib/dl_core_testing/dl_core_testing/csv_table_dumper.py index 3815479c8..2db25b269 100644 --- a/lib/dl_core_testing/dl_core_testing/csv_table_dumper.py +++ b/lib/dl_core_testing/dl_core_testing/csv_table_dumper.py @@ -13,7 +13,7 @@ import attr import shortuuid -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core_testing.database import ( C, Db, @@ -42,29 +42,30 @@ def _datetime_or_none(v: Optional[str]) -> Optional[datetime.datetime]: class CsvTableDumper: db: Db = attr.ib(kw_only=True) - _PY_CONVERTERS_BY_BITYPE: ClassVar[dict[BIType, Callable[[Optional[str]], Any]]] = { - BIType.string: lambda v: v, - BIType.integer: _int_or_none, - BIType.float: _float_or_none, - BIType.date: _date_or_none, - BIType.datetime: _datetime_or_none, + _PY_CONVERTERS_BY_BITYPE: ClassVar[dict[UserDataType, Callable[[Optional[str]], Any]]] = { + UserDataType.string: lambda v: v, + UserDataType.integer: _int_or_none, + UserDataType.float: _float_or_none, + UserDataType.date: _date_or_none, + UserDataType.datetime: _datetime_or_none, } - def _convert_value(self, value: Optional[str], user_type: BIType) -> Any: + def _convert_value(self, value: Optional[str], user_type: UserDataType) -> Any: return self._PY_CONVERTERS_BY_BITYPE[user_type](value) - def _convert_row(self, row: Sequence[Optional[str]], type_schema: Sequence[BIType]) -> list[Any]: + def _convert_row(self, row: 
Sequence[Optional[str]], type_schema: Sequence[UserDataType]) -> list[Any]: return [self._convert_value(v, t) for v, t in zip(row, type_schema)] - def _load_table_data(self, raw_csv_data: str, type_schema: Sequence[BIType]) -> list[list[Any]]: + def _load_table_data(self, raw_csv_data: str, type_schema: Sequence[UserDataType]) -> list[list[Any]]: reader = csv.reader(io.StringIO(raw_csv_data)) return [self._convert_row(row=row, type_schema=type_schema) for row in reader] def make_table_from_csv( self, raw_csv_data: str, - table_schema: Sequence[tuple[str, BIType]], + table_schema: Sequence[tuple[str, UserDataType]], table_name_prefix: Optional[str] = None, + nullable: bool = True, ) -> DbTable: table_name_prefix = table_name_prefix or "table_" if not table_name_prefix.endswith("_"): @@ -81,7 +82,7 @@ def _value_gen(rn: int, ts: datetime.datetime, rnd: random.Random) -> Any: return _value_gen columns = [ - C(name=name, user_type=user_type, vg=_value_gen_factory(_col_idx=col_idx)) # type: ignore + C(name=name, user_type=user_type, vg=_value_gen_factory(_col_idx=col_idx), nullable=nullable) # type: ignore for col_idx, (name, user_type) in enumerate(table_schema) ] diff --git a/lib/dl_core_testing/dl_core_testing/database.py b/lib/dl_core_testing/dl_core_testing/database.py index 48d340266..a510b8c66 100644 --- a/lib/dl_core_testing/dl_core_testing/database.py +++ b/lib/dl_core_testing/dl_core_testing/database.py @@ -19,8 +19,8 @@ from sqlalchemy.sql.type_api import TypeEngine from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core.db import ( get_type_transformer, @@ -59,7 +59,7 @@ def eval( self, expr: sa.sql.ClauseElement, from_: Optional[sa.sql.ClauseElement] = None, - user_t: Optional[BIType] = None, + user_t: Optional[UserDataType] = None, ) -> Any: value = self.base_eval(expr, from_=from_) if user_t is not None: @@ -118,23 +118,23 @@ class DbTable(DbTableBase): @attr.s() class C: name: str = attr.ib() - user_type: BIType = 
attr.ib() + user_type: UserDataType = attr.ib() nullable: Optional[bool] = attr.ib(default=None) _sa_type: TypeEngine = attr.ib(default=None) _vg: Callable[[int, datetime.datetime], Any] = attr.ib(default=None) DEFAULT_VALUE_GENERATORS = { - BIType.string: lambda rn, **kwargs: f"str_value_{rn}", - BIType.integer: lambda rn, **kwargs: rn, - BIType.float: lambda rn, **kwargs: rn + (rn / 10), - BIType.date: lambda rn, ts, **kwargs: ts.date() + datetime.timedelta(days=rn), - BIType.datetime: lambda rn, ts, **kwargs: ts + datetime.timedelta(days=rn / math.pi), - BIType.genericdatetime: lambda rn, ts, **kwargs: ts + datetime.timedelta(days=rn / math.pi), - BIType.boolean: lambda rn, **kwargs: bool(int(rn) % 2), - BIType.uuid: lambda rn, **kwargs: str(uuid.UUID(int=rn)), - BIType.array_int: lambda rn, **kwargs: [rn * idx for idx in range(5)], - BIType.array_str: lambda rn, **kwargs: [f"str_{str(rn * idx)}" for idx in range(5)], - BIType.array_float: lambda rn, **kwargs: [float(rn * idx) * 1.1 for idx in range(5)], + UserDataType.string: lambda rn, **kwargs: f"str_value_{rn}", + UserDataType.integer: lambda rn, **kwargs: rn, + UserDataType.float: lambda rn, **kwargs: rn + (rn / 10), + UserDataType.date: lambda rn, ts, **kwargs: ts.date() + datetime.timedelta(days=rn), + UserDataType.datetime: lambda rn, ts, **kwargs: ts + datetime.timedelta(days=rn / math.pi), + UserDataType.genericdatetime: lambda rn, ts, **kwargs: ts + datetime.timedelta(days=rn / math.pi), + UserDataType.boolean: lambda rn, **kwargs: bool(int(rn) % 2), + UserDataType.uuid: lambda rn, **kwargs: str(uuid.UUID(int=rn)), + UserDataType.array_int: lambda rn, **kwargs: [rn * idx for idx in range(5)], + UserDataType.array_str: lambda rn, **kwargs: [f"str_{str(rn * idx)}" for idx in range(5)], + UserDataType.array_float: lambda rn, **kwargs: [float(rn * idx) * 1.1 for idx in range(5)], } @attr.s(auto_attribs=True, frozen=True) @@ -165,34 +165,34 @@ def array_data_getter(cls, data_container) -> 
"C.ArrayDataGetter": # type: igno @classmethod def int_value(cls, name: str = "int_value"): # type: ignore # TODO: fix - return cls(name, BIType.integer) + return cls(name, UserDataType.integer) @classmethod def datetime_value(cls, name: str = "datetime_value"): # type: ignore # TODO: fix - return cls(name, BIType.datetime) + return cls(name, UserDataType.datetime) @classmethod def full_house(cls) -> list[C]: return [ - cls("string_value", BIType.string, nullable=False), - cls("n_string_value", BIType.string, nullable=True), - cls("int_value", BIType.integer, nullable=False), - cls("n_int_value", BIType.integer, nullable=True), - cls("float_value", BIType.float), - cls("datetime_value", BIType.genericdatetime, nullable=False), - cls("n_datetime_value", BIType.genericdatetime, nullable=True), - cls("date_value", BIType.date), - cls("boolean_value", BIType.boolean), - cls("uuid_value", BIType.uuid), + cls("string_value", UserDataType.string, nullable=False), + cls("n_string_value", UserDataType.string, nullable=True), + cls("int_value", UserDataType.integer, nullable=False), + cls("n_int_value", UserDataType.integer, nullable=True), + cls("float_value", UserDataType.float), + cls("datetime_value", UserDataType.genericdatetime, nullable=False), + cls("n_datetime_value", UserDataType.genericdatetime, nullable=True), + cls("date_value", UserDataType.date), + cls("boolean_value", UserDataType.boolean), + cls("uuid_value", UserDataType.uuid), # Not included: arrays, geopoint, geopolygon, markup (not db-types, generally). 
] @classmethod def array_columns(cls): # type: ignore # TODO: fix return [ - cls("array_int_value", BIType.array_int), - cls("array_str_value", BIType.array_str), - cls("array_float_value", BIType.array_float), + cls("array_int_value", UserDataType.array_int), + cls("array_str_value", UserDataType.array_str), + cls("array_float_value", UserDataType.array_float), ] diff --git a/lib/dl_core_testing/dl_core_testing/dataset.py b/lib/dl_core_testing/dl_core_testing/dataset.py index 365a53110..a9f466aa4 100644 --- a/lib/dl_core_testing/dl_core_testing/dataset.py +++ b/lib/dl_core_testing/dl_core_testing/dataset.py @@ -8,9 +8,9 @@ import uuid from dl_constants.enums import ( - CreateDSFrom, DataSourceCreatedVia, DataSourceRole, + DataSourceType, ) from dl_core.us_connection import get_connection_class from dl_core.us_connection_base import ExecutorBasedMixin @@ -38,7 +38,7 @@ def make_dataset( # type: ignore # TODO: fix db_table: Optional[DbTable] = None, schema_name: Optional[str] = None, table_name: Optional[str] = None, - created_from: Optional[CreateDSFrom] = None, + created_from: Optional[DataSourceType] = None, db_name: Optional[str] = None, yt_path: Optional[str] = None, yt_cluster: Optional[str] = None, @@ -104,7 +104,7 @@ def conn_executor_factory() -> SyncConnExecutorBase: return dataset -def get_created_from(db: Db) -> CreateDSFrom: +def get_created_from(db: Db) -> DataSourceType: conn_cls = get_connection_class(conn_type=db.conn_type) source_type = conn_cls.source_type assert source_type is not None diff --git a/lib/dl_core_testing/dl_core_testing/dataset_builder.py b/lib/dl_core_testing/dl_core_testing/dataset_builder.py index b83ea273c..d15ba6b9d 100644 --- a/lib/dl_core_testing/dl_core_testing/dataset_builder.py +++ b/lib/dl_core_testing/dl_core_testing/dataset_builder.py @@ -7,8 +7,8 @@ from dl_constants.enums import ( BinaryJoinOperator, - CreateDSFrom, DataSourceRole, + DataSourceType, ) from dl_core.base_models import DefaultConnectionRef from 
dl_core.data_source.base import DataSource @@ -33,7 +33,7 @@ @attr.s(frozen=True) class DataSourceCreationSpec: connection: ConnectionBase = attr.ib(kw_only=True) - source_type: CreateDSFrom = attr.ib(kw_only=True) + source_type: DataSourceType = attr.ib(kw_only=True) dsrc_params: dict = attr.ib(kw_only=True) diff --git a/lib/dl_core_testing/dl_core_testing/dataset_wrappers.py b/lib/dl_core_testing/dl_core_testing/dataset_wrappers.py index cb48823de..f6b7ca223 100644 --- a/lib/dl_core_testing/dl_core_testing/dataset_wrappers.py +++ b/lib/dl_core_testing/dl_core_testing/dataset_wrappers.py @@ -13,9 +13,9 @@ import attr from dl_constants.enums import ( - CreateDSFrom, DataSourceCreatedVia, DataSourceRole, + DataSourceType, JoinType, ManagedBy, ) @@ -250,7 +250,7 @@ def add_data_source( *, source_id: str, role: DataSourceRole = DataSourceRole.origin, - created_from: CreateDSFrom, + created_from: DataSourceType, connection_id: Optional[str] = None, title: Optional[str] = None, raw_schema: Optional[list[SchemaColumn]] = None, @@ -275,7 +275,7 @@ def update_data_source( source_id: str, role: Optional[DataSourceRole] = None, connection_id: Optional[str] = None, - created_from: Optional[CreateDSFrom] = None, + created_from: Optional[DataSourceType] = None, raw_schema: Optional[list] = None, index_info_set: FrozenSet[IndexInfo] = None, **parameters: Any, diff --git a/lib/dl_core_testing/dl_core_testing/fixtures/dispenser.py b/lib/dl_core_testing/dl_core_testing/fixtures/dispenser.py index e953a802b..45b22df82 100644 --- a/lib/dl_core_testing/dl_core_testing/fixtures/dispenser.py +++ b/lib/dl_core_testing/dl_core_testing/fixtures/dispenser.py @@ -23,7 +23,9 @@ def _get_raw_csv_data(self, path: str) -> str: def _make_new_csv_table(self, db: Db, spec: FixtureTableSpec) -> DbTable: dumper = CsvTableDumper(db=db) db_table = dumper.make_table_from_csv( - raw_csv_data=self._get_raw_csv_data(spec.csv_name), table_schema=spec.table_schema + 
raw_csv_data=self._get_raw_csv_data(spec.csv_name), + table_schema=spec.table_schema, + nullable=spec.nullable, ) if db.config not in self._tables: self._tables[db.config] = {} diff --git a/lib/dl_core_testing/dl_core_testing/fixtures/primitives.py b/lib/dl_core_testing/dl_core_testing/fixtures/primitives.py index 41b68d663..f111c31c2 100644 --- a/lib/dl_core_testing/dl_core_testing/fixtures/primitives.py +++ b/lib/dl_core_testing/dl_core_testing/fixtures/primitives.py @@ -1,9 +1,13 @@ import attr -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType @attr.s(frozen=True) class FixtureTableSpec: csv_name: str = attr.ib(kw_only=True) - table_schema: tuple[tuple[str, BIType], ...] = attr.ib(kw_only=True) + table_schema: tuple[tuple[str, UserDataType], ...] = attr.ib(kw_only=True) + nullable: bool = attr.ib(kw_only=True, default=True) + + def get_user_type_for_col(self, col_name: str) -> UserDataType: + return next(tbl_col[1] for tbl_col in self.table_schema if tbl_col[0] == col_name) diff --git a/lib/dl_core_testing/dl_core_testing/fixtures/sample_tables.py b/lib/dl_core_testing/dl_core_testing/fixtures/sample_tables.py index 12ee074c9..d553e4f82 100644 --- a/lib/dl_core_testing/dl_core_testing/fixtures/sample_tables.py +++ b/lib/dl_core_testing/dl_core_testing/fixtures/sample_tables.py @@ -1,30 +1,31 @@ -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core_testing.fixtures.primitives import FixtureTableSpec TABLE_SPEC_SAMPLE_SUPERSTORE = FixtureTableSpec( csv_name="sample_superstore.csv", table_schema=( - ("category", BIType.string), - ("city", BIType.string), - ("country", BIType.string), - ("customer_id", BIType.string), - ("customer_name", BIType.string), - ("discount", BIType.float), - ("order_date", BIType.date), - ("order_id", BIType.string), - ("postal_code", BIType.integer), - ("product_id", BIType.string), - ("product_name", BIType.string), - ("profit", BIType.float), - ("quantity", 
BIType.integer), - ("region", BIType.string), - ("row_id", BIType.integer), - ("sales", BIType.float), - ("segment", BIType.string), - ("ship_date", BIType.date), - ("ship_mode", BIType.string), - ("state", BIType.string), - ("sub_category", BIType.string), + ("category", UserDataType.string), + ("city", UserDataType.string), + ("country", UserDataType.string), + ("customer_id", UserDataType.string), + ("customer_name", UserDataType.string), + ("discount", UserDataType.float), + ("order_date", UserDataType.date), + ("order_id", UserDataType.string), + ("postal_code", UserDataType.integer), + ("product_id", UserDataType.string), + ("product_name", UserDataType.string), + ("profit", UserDataType.float), + ("quantity", UserDataType.integer), + ("region", UserDataType.string), + ("row_id", UserDataType.integer), + ("sales", UserDataType.float), + ("segment", UserDataType.string), + ("ship_date", UserDataType.date), + ("ship_mode", UserDataType.string), + ("state", UserDataType.string), + ("sub_category", UserDataType.string), ), + nullable=False, ) diff --git a/lib/dl_core_testing/dl_core_testing/testcases/connection.py b/lib/dl_core_testing/dl_core_testing/testcases/connection.py index 40b4021bf..a7cbfb868 100644 --- a/lib/dl_core_testing/dl_core_testing/testcases/connection.py +++ b/lib/dl_core_testing/dl_core_testing/testcases/connection.py @@ -17,11 +17,13 @@ ConnectionBase, DataSourceTemplate, ) +from dl_core.us_manager.us_manager_async import AsyncUSManager from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_core_testing.database import ( Db, DbTable, ) +from dl_core_testing.fixtures.primitives import FixtureTableSpec from dl_core_testing.fixtures.sample_tables import TABLE_SPEC_SAMPLE_SUPERSTORE from dl_core_testing.testcases.service_base import ( DbServiceFixtureTextClass, @@ -48,6 +50,10 @@ class BaseConnectionTestClass( def sync_us_manager(self, conn_default_sync_us_manager: SyncUSManager) -> SyncUSManager: return conn_default_sync_us_manager 
+ @pytest.fixture(scope="function") + def async_us_manager(self, conn_default_async_us_manager: AsyncUSManager) -> AsyncUSManager: + return conn_default_async_us_manager + @abc.abstractmethod @pytest.fixture(scope="function") def connection_creation_params(self) -> dict: @@ -83,8 +89,12 @@ def factory() -> AsyncConnExecutorBase: return factory @pytest.fixture(scope="class") - def sample_table(self, db: Db) -> DbTable: - return self.db_table_dispenser.get_csv_table(db=db, spec=TABLE_SPEC_SAMPLE_SUPERSTORE) + def sample_table_spec(self) -> FixtureTableSpec: + return TABLE_SPEC_SAMPLE_SUPERSTORE + + @pytest.fixture(scope="class") + def sample_table(self, sample_table_spec: FixtureTableSpec, db: Db) -> DbTable: + return self.db_table_dispenser.get_csv_table(db=db, spec=sample_table_spec) class DefaultConnectionTestClass(RegulatedTestCase, BaseConnectionTestClass[_CONN_TV], Generic[_CONN_TV]): diff --git a/lib/dl_core_testing/dl_core_testing/testcases/connection_executor.py b/lib/dl_core_testing/dl_core_testing/testcases/connection_executor.py index e8cf39397..9e6361749 100644 --- a/lib/dl_core_testing/dl_core_testing/testcases/connection_executor.py +++ b/lib/dl_core_testing/dl_core_testing/testcases/connection_executor.py @@ -18,7 +18,7 @@ import sqlalchemy as sa from sqlalchemy.types import TypeEngine -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.connection_executors.common_base import ConnExecutorQuery from dl_core.connection_models.common_models import ( DBIdent, @@ -132,14 +132,14 @@ def test_table_not_exists( ) -> None: assert not sync_connection_executor.is_table_exists(nonexistent_table_ident) - def get_schemas_for_type_recognition(self) -> dict[str, Sequence[tuple[TypeEngine, BIType]]]: + def get_schemas_for_type_recognition(self) -> dict[str, Sequence[tuple[TypeEngine, UserDataType]]]: return { "standard_types": [ - (sa.Integer(), BIType.integer), - (sa.Float(), BIType.float), - (sa.String(length=256), 
BIType.string), - (sa.Date(), BIType.date), - (sa.DateTime(), BIType.genericdatetime), + (sa.Integer(), UserDataType.integer), + (sa.Float(), UserDataType.float), + (sa.String(length=256), UserDataType.string), + (sa.Date(), UserDataType.date), + (sa.DateTime(), UserDataType.genericdatetime), ], } diff --git a/lib/dl_core_testing/dl_core_testing/testcases/data_source.py b/lib/dl_core_testing/dl_core_testing/testcases/data_source.py index 1babd0ee3..580e53d31 100644 --- a/lib/dl_core_testing/dl_core_testing/testcases/data_source.py +++ b/lib/dl_core_testing/dl_core_testing/testcases/data_source.py @@ -15,7 +15,7 @@ import pytest import shortuuid -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.connection_models import TableIdent from dl_core.connectors.base.data_source_migration import ( DataSourceMigrationInterface, @@ -89,7 +89,7 @@ def test_data_source_exists( assert result @abc.abstractmethod - def get_expected_simplified_schema(self) -> list[tuple[str, BIType]]: + def get_expected_simplified_schema(self) -> list[tuple[str, UserDataType]]: raise NotImplementedError def test_get_raw_schema( diff --git a/lib/dl_core_testing/dl_core_testing/testcases/dataset.py b/lib/dl_core_testing/dl_core_testing/testcases/dataset.py index 14bc4eaa0..5132168e6 100644 --- a/lib/dl_core_testing/dl_core_testing/testcases/dataset.py +++ b/lib/dl_core_testing/dl_core_testing/testcases/dataset.py @@ -11,8 +11,8 @@ import sqlalchemy as sa from dl_constants.enums import ( - CreateDSFrom, DataSourceRole, + DataSourceType, ) from dl_core.data_processing.stream_base import DataStream from dl_core.query.bi_query import BIQuery @@ -36,7 +36,7 @@ class BaseDatasetTestClass(BaseConnectionTestClass[_CONN_TV], Generic[_CONN_TV]): - source_type: ClassVar[CreateDSFrom] + source_type: ClassVar[DataSourceType] @pytest.fixture(scope="function") def dataset_table(self, sample_table: DbTable) -> DbTable: diff --git 
a/lib/dl_core_testing/dl_core_testing/testcases/service_base.py b/lib/dl_core_testing/dl_core_testing/testcases/service_base.py index 75a869b0f..229c2b5ba 100644 --- a/lib/dl_core_testing/dl_core_testing/testcases/service_base.py +++ b/lib/dl_core_testing/dl_core_testing/testcases/service_base.py @@ -2,6 +2,7 @@ import abc from typing import ( + Any, ClassVar, NamedTuple, Optional, @@ -24,6 +25,7 @@ ) from dl_core.united_storage_client import USAuthContextMaster from dl_core.us_manager.mutation_cache.usentry_mutation_cache_factory import DefaultUSEntryMutationCacheFactory +from dl_core.us_manager.us_manager_async import AsyncUSManager from dl_core.us_manager.us_manager_sync import SyncUSManager from dl_core.utils import FutureRef from dl_core_testing.configuration import CoreTestEnvironmentConfigurationBase @@ -71,6 +73,7 @@ def service_registry_factory( self, conn_exec_factory_async_env: bool, conn_bi_context: RequestContextInfo, + **kwargs: Any, ) -> ServicesRegistry: sr_future_ref: FutureRef[ServicesRegistry] = FutureRef() service_registry = DefaultServicesRegistry( @@ -93,6 +96,7 @@ def service_registry_factory( if self.inst_specific_sr_factory is not None else None ), + **kwargs, ) sr_future_ref.fulfill(service_registry) return service_registry @@ -144,6 +148,22 @@ def conn_default_sync_us_manager( ) return us_manager + @pytest.fixture(scope="class") + def conn_default_async_us_manager( + self, + conn_us_config: USConfig, + conn_bi_context: RequestContextInfo, + conn_async_service_registry: ServicesRegistry, + ) -> AsyncUSManager: + us_manager = AsyncUSManager( + bi_context=conn_bi_context, + services_registry=conn_async_service_registry, + us_base_url=conn_us_config.us_base_url, + us_auth_context=conn_us_config.us_auth_context, + crypto_keys_config=conn_us_config.us_crypto_keys_config, + ) + return us_manager + class DbServiceFixtureTextClass(metaclass=abc.ABCMeta): conn_type: ClassVar[ConnectionType] # FIXME: Remove after conn_type is removed from Db diff 
--git a/lib/dl_db_testing/dl_db_testing/database/dispenser.py b/lib/dl_db_testing/dl_db_testing/database/dispenser.py index dc7f7dc19..d423dea8c 100644 --- a/lib/dl_db_testing/dl_db_testing/database/dispenser.py +++ b/lib/dl_db_testing/dl_db_testing/database/dispenser.py @@ -23,19 +23,34 @@ _DB_TV = TypeVar("_DB_TV", bound=DbBase) +@attr.s class DbDispenserBase(abc.ABC, Generic[_DB_CONFIG_TV, _DB_TV]): + _wait_on_init: bool = attr.ib(kw_only=True, default=True) + _default_reconnect_timeout: int = attr.ib(kw_only=True, default=600) + @abc.abstractmethod def make_database(self, db_config: _DB_CONFIG_TV) -> _DB_TV: raise NotImplementedError + def wait_for_db(self, db: _DB_TV, reconnect_timeout: Optional[int] = None) -> None: + if reconnect_timeout is None: + reconnect_timeout = self._default_reconnect_timeout + wait_for(name=f"test_db_{db.config}", condition=db.test, timeout=reconnect_timeout) + + def initialize_db(self, db: _DB_TV) -> None: + if self._wait_on_init: + if not db.test(): + self.wait_for_db(db=db) + def get_database(self, db_config: _DB_CONFIG_TV) -> _DB_TV: - return self.make_database(db_config) + db = self.make_database(db_config) + self.initialize_db(db) + return db @attr.s class ReInitableDbDispenser(DbDispenserBase[_DB_CONFIG_TV, _DB_TV], Generic[_DB_CONFIG_TV, _DB_TV]): _max_reinit_count: int = attr.ib(kw_only=True, default=4) - _default_reconnect_timeout: int = attr.ib(kw_only=True, default=600) _db_cache: Dict[_DB_CONFIG_TV, _DB_TV] = attr.ib(init=False, factory=dict) _reinit_hooks: Dict[_DB_CONFIG_TV, Callable[[], None]] = attr.ib(init=False, factory=dict) _db_reinit_counts: Dict[_DB_CONFIG_TV, int] = attr.ib(init=False, factory=lambda: defaultdict(lambda: 0)) @@ -60,13 +75,10 @@ def _check_reinit_db(self, db_config: DbConfig, reconnect_timeout: Optional[int] return False if not db.test(): - if reconnect_timeout is None: - reconnect_timeout = self._default_reconnect_timeout - assert reconnect_timeout is not None # DB is unavailable, so 
re-initialize the DB is possible reinit_hook() # Wait until it comes up - wait_for(name=f"test_db_{db_config}", condition=db.test, timeout=reconnect_timeout) + self.wait_for_db(db=db, reconnect_timeout=reconnect_timeout) self._db_reinit_counts[db_config] += 1 return True diff --git a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/schemas/sources.py b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/schemas/sources.py index 5dad141bb..445b64c20 100644 --- a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/schemas/sources.py +++ b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/schemas/sources.py @@ -9,8 +9,8 @@ from marshmallow_oneofschema import OneOfSchema from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_core.db.elements import SchemaColumn from dl_core.db.native_type_schema import OneOfNativeTypeSchema @@ -46,7 +46,7 @@ class RawSchemaColumnSchema(ma.Schema): native_type = ma.fields.Nested(OneOfNativeTypeSchema, allow_none=True, load_default=None) - user_type = ma.fields.Enum(BIType) + user_type = ma.fields.Enum(UserDataType) description = ma.fields.String(dump_default="", allow_none=True, load_default="") has_auto_aggregation = ma.fields.Boolean(dump_default=False, allow_none=True, load_default=False) lock_aggregation = ma.fields.Boolean(dump_default=False, allow_none=True, load_default=False) @@ -87,7 +87,7 @@ class SourceInfoSchemaBase(ma.Schema): class RawSchemaColumnSchemaShorten(ma.Schema): name = ma.fields.String() title = ma.fields.String() - user_type = ma.fields.Enum(BIType) + user_type = ma.fields.Enum(UserDataType) source_id = ma.fields.String() title = ma.fields.String() @@ -124,7 +124,7 @@ class CSVSettingsOptionsSchema(ma.Schema): class OptionsSchema(ma.Schema): class ColumnsOptionsSchema(ma.Schema): name = ma.fields.String() - user_type = ma.fields.List(ma.fields.Enum(BIType)) + user_type = ma.fields.List(ma.fields.Enum(UserDataType)) data_settings = 
ma.fields.Nested(CSVSettingsOptionsSchema) columns = ma.fields.Nested(ColumnsOptionsSchema, many=True) diff --git a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/views/sources.py b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/views/sources.py index cf5f79051..4a9e3222a 100644 --- a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/views/sources.py +++ b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib/views/sources.py @@ -13,10 +13,9 @@ RequiredResource, RequiredResourceCommon, ) -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_core.db import get_type_transformer from dl_core.db.elements import SchemaColumn @@ -41,6 +40,8 @@ ) from dl_file_uploader_task_interface.tasks import ParseFileTask +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE + LOGGER = logging.getLogger(__name__) @@ -75,26 +76,26 @@ async def get(self) -> web.StreamResponse: ) -def get_compatible_user_type(user_type: Optional[BIType]) -> tuple[BIType, ...]: - compatible_types: dict[Optional[BIType], tuple[BIType, ...]] = { - BIType.string: (BIType.string,), - BIType.integer: ( - BIType.integer, - BIType.float, - BIType.string, +def get_compatible_user_type(user_type: Optional[UserDataType]) -> tuple[UserDataType, ...]: + compatible_types: dict[Optional[UserDataType], tuple[UserDataType, ...]] = { + UserDataType.string: (UserDataType.string,), + UserDataType.integer: ( + UserDataType.integer, + UserDataType.float, + UserDataType.string, ), - BIType.float: ( - BIType.float, - BIType.string, + UserDataType.float: ( + UserDataType.float, + UserDataType.string, ), - BIType.date: ( - BIType.date, - BIType.genericdatetime, - BIType.string, + UserDataType.date: ( + UserDataType.date, + UserDataType.genericdatetime, + UserDataType.string, ), - BIType.genericdatetime: ( - BIType.genericdatetime, - BIType.string, + 
UserDataType.genericdatetime: ( + UserDataType.genericdatetime, + UserDataType.string, ), } return compatible_types.get(user_type, tuple()) @@ -113,7 +114,7 @@ def cast_preview_data( def get_raw_schema_with_overrides(raw_schema: RawSchemaType, schema_overrides: RawSchemaType) -> RawSchemaType: - orig_column_types_by_name: dict[str, BIType] = {sch.name: sch.user_type for sch in raw_schema} + orig_column_types_by_name: dict[str, UserDataType] = {sch.name: sch.user_type for sch in raw_schema} column_type_overrides = dict() for col in schema_overrides: col_name = col.name diff --git a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/conftest.py b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/conftest.py index 01993469f..961e1a024 100644 --- a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/conftest.py +++ b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/conftest.py @@ -4,9 +4,11 @@ import logging import os import sys -from typing import Any +from typing import ( + TYPE_CHECKING, + Any, +) -import aiobotocore.client import aiohttp.pytest_plugin import aiohttp.test_utils import aiohttp.web @@ -34,7 +36,6 @@ RedisSettings, S3Settings, ) -from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings from dl_constants.api_constants import DLHeadersCommon from dl_core.loader import load_core_lib from dl_core.services_registry.top_level import DummyServiceRegistry @@ -68,9 +69,15 @@ ) from dl_testing.utils import wait_for_initdb +from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings + from .config import TestingUSConfig +if TYPE_CHECKING: + from types_aiobotocore_s3 import S3Client as AsyncS3Client + + LOGGER = logging.getLogger(__name__) @@ -222,7 +229,7 @@ def fu_client(bi_file_uploader_app) -> DLCommonAPIClient: @pytest.fixture(scope="function") -async def s3_client(s3_settings) -> aiobotocore.client.AioBaseClient: +async def s3_client(s3_settings) -> AsyncS3Client: 
async with create_s3_client(s3_settings) as client: yield client diff --git a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/ext/test_update_data.py b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/ext/test_update_data.py index f0189bc4e..7b1d799b9 100644 --- a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/ext/test_update_data.py +++ b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/ext/test_update_data.py @@ -6,11 +6,9 @@ import pytest -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_core.db import SchemaColumn from dl_core.us_manager.us_manager_async import AsyncUSManager @@ -19,6 +17,10 @@ from dl_file_uploader_lib import exc from dl_testing.s3_utils import s3_file_exists +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.chs3_gsheets.core.lifecycle import GSheetsFileS3ConnectionLifecycleManager +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection + LOGGER = logging.getLogger(__name__) @@ -27,9 +29,11 @@ async def saved_gsheets_v2_connection(loop, bi_context, default_async_usm_per_test, s3_persistent_bucket, s3_client): us_manager = default_async_usm_per_test conn_name = "gsheets_v2 test conn {}".format(uuid.uuid4()) - long_long_ago = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=31) + long_long_ago = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta( + seconds=GSheetsFileS3ConnectionLifecycleManager.STALE_THRESHOLD_SECONDS + 60, # just in case + ) - dummy_raw_schema = [SchemaColumn("dummy_column", user_type=BIType.string)] + dummy_raw_schema = [SchemaColumn("dummy_column", user_type=UserDataType.string)] data = 
GSheetsFileS3Connection.DataModel( sources=[ GSheetsFileS3Connection.FileDataSource( # this is a valid source diff --git a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/req_builder.py b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/req_builder.py index b971cd289..e7df337ba 100644 --- a/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/req_builder.py +++ b/lib/dl_file_uploader_api_lib/dl_file_uploader_api_lib_tests/req_builder.py @@ -7,12 +7,13 @@ import aiohttp from dl_api_commons.client.common import Req -from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection from dl_constants.api_constants import ( DLHeaders, DLHeadersCommon, ) +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection + class ReqBuilder: ORIGIN: ClassVar[str] = "https://foo.bar" diff --git a/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/json_each_row.py b/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/json_each_row.py index 5c546df60..f19bd9f00 100644 --- a/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/json_each_row.py +++ b/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/json_each_row.py @@ -2,15 +2,18 @@ import logging from typing import ( + TYPE_CHECKING, ClassVar, Optional, ) -from aiobotocore.client import AioBaseClient -import botocore.client + +if TYPE_CHECKING: + from types_aiobotocore_s3 import S3Client as AsyncS3Client + from mypy_boto3_s3.client import S3Client as SyncS3Client + import ujson as json -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE from dl_constants.enums import ConnectionType from dl_core.data_sink import ( DataSink, @@ -27,6 +30,8 @@ ) from dl_file_uploader_lib import exc +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE + LOGGER = logging.getLogger(__name__) @@ -44,7 +49,7 @@ class S3JsonEachRowFileDataSink(DataSink): def __init__( # type: ignore self, bi_schema: 
list[SchemaColumn], - s3: botocore.client.BaseClient, + s3: SyncS3Client, s3_key: str, bucket_name: str, ): @@ -73,6 +78,7 @@ def initialize(self) -> None: def finalize(self) -> None: LOGGER.info(f"Completing S3 multipart upload. {self._part_number - 1} parts were uploaded.") + assert self._upload_id is not None if self._multipart_upload_started: if self._part_tags: self._s3.complete_multipart_upload( @@ -87,6 +93,7 @@ def finalize(self) -> None: def cleanup(self) -> None: if self._multipart_upload_started: LOGGER.info("Aborting S3 multipart upload,") + assert self._upload_id is not None self._s3.abort_multipart_upload( Bucket=self._bucket_name, Key=self._s3_key, @@ -100,6 +107,7 @@ def _prepare_chunk_body(self, batch: list[bytes]) -> bytes: def _dump_data_batch(self, batch: list[bytes], progress: int) -> None: LOGGER.info(f"Dumping {len(batch)} data rows into s3 file {self._s3_key}.") + assert self._upload_id is not None part_resp = self._s3.upload_part( Bucket=self._bucket_name, Key=self._s3_key, @@ -150,7 +158,7 @@ def dump_data_stream(self, data_stream: DataStreamBase) -> None: class S3JsonEachRowUntypedFileDataSink(S3JsonEachRowFileDataSink): def __init__( # type: ignore self, - s3: botocore.client.BaseClient, + s3: SyncS3Client, s3_key: str, bucket_name: str, ): @@ -180,7 +188,7 @@ class S3JsonEachRowUntypedFileAsyncDataSink(DataSinkAsync[SimpleUntypedAsyncData _part_number: int = 1 _multipart_upload_started: bool = False - def __init__(self, s3: AioBaseClient, s3_key: str, bucket_name: str): + def __init__(self, s3: AsyncS3Client, s3_key: str, bucket_name: str): self._s3 = s3 self._s3_key = s3_key self._bucket_name = bucket_name @@ -205,6 +213,7 @@ async def initialize(self) -> None: async def finalize(self) -> None: if self._multipart_upload_started: LOGGER.info(f"Completing S3 multipart upload. 
{self._part_number - 1} parts were uploaded.") + assert self._upload_id is not None await self._s3.complete_multipart_upload( Bucket=self._bucket_name, Key=self._s3_key, @@ -219,6 +228,7 @@ async def finalize(self) -> None: async def cleanup(self) -> None: if self._multipart_upload_started: LOGGER.exception("Aborting S3 multipart upload,") + assert self._upload_id is not None await self._s3.abort_multipart_upload( Bucket=self._bucket_name, Key=self._s3_key, @@ -232,6 +242,7 @@ def _prepare_chunk_body(self, batch: list[bytes]) -> bytes: async def _dump_data_batch(self, batch: list[bytes], progress: int) -> None: LOGGER.info(f"Dumping {len(batch)} data rows into s3 file {self._s3_key}.") + assert self._upload_id is not None batch_to_write = self._prepare_chunk_body(batch) part_resp = await self._s3.upload_part( Bucket=self._bucket_name, diff --git a/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/raw_bytes.py b/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/raw_bytes.py index 80e6e7a01..885492795 100644 --- a/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/raw_bytes.py +++ b/lib/dl_file_uploader_lib/dl_file_uploader_lib/data_sink/raw_bytes.py @@ -2,19 +2,23 @@ import logging from typing import ( + TYPE_CHECKING, AsyncIterator, ClassVar, Optional, ) -from aiobotocore.client import AioBaseClient -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE +if TYPE_CHECKING: + from types_aiobotocore_s3 import S3Client + from dl_constants.enums import ConnectionType from dl_core.data_sink import DataSinkAsync from dl_core.raw_data_streaming.stream import AsyncDataStreamBase from dl_file_uploader_lib import exc +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE + LOGGER = logging.getLogger(__name__) @@ -52,7 +56,7 @@ class S3RawFileAsyncDataSink(DataSinkAsync[RawBytesAsyncDataStream]): _chunks_saved: int = 0 _bytes_saved: int = 0 - def __init__(self, s3: AioBaseClient, s3_key: str, bucket_name: str): + 
def __init__(self, s3: S3Client, s3_key: str, bucket_name: str): self._s3 = s3 self._s3_key = s3_key self._bucket_name = bucket_name @@ -75,6 +79,7 @@ async def initialize(self) -> None: async def finalize(self) -> None: if self._multipart_upload_started: LOGGER.info(f"Completing S3 multipart upload. {self._part_number - 1} parts were uploaded.") + assert self._upload_id is not None await self._s3.complete_multipart_upload( Bucket=self._bucket_name, Key=self._s3_key, @@ -89,6 +94,7 @@ async def finalize(self) -> None: async def cleanup(self) -> None: if self._multipart_upload_started: LOGGER.exception("Aborting S3 multipart upload,") + assert self._upload_id is not None await self._s3.abort_multipart_upload( Bucket=self._bucket_name, Key=self._s3_key, @@ -99,6 +105,7 @@ async def cleanup(self) -> None: async def _dump_data_batch(self, batch: bytes, progress: int) -> None: LOGGER.info(f"Dumping {len(batch)} data rows into s3 file {self._s3_key}.") + assert self._upload_id is not None part_resp = await self._s3.upload_part( Bucket=self._bucket_name, Key=self._s3_key, diff --git a/lib/dl_file_uploader_lib/dl_file_uploader_lib/gsheets_client.py b/lib/dl_file_uploader_lib/dl_file_uploader_lib/gsheets_client.py index 1022393ee..f4ec82fea 100644 --- a/lib/dl_file_uploader_lib/dl_file_uploader_lib/gsheets_client.py +++ b/lib/dl_file_uploader_lib/dl_file_uploader_lib/gsheets_client.py @@ -39,7 +39,7 @@ import attr from yarl import URL -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.aio.web_app_services.gsheets import ( Cell, GSheetsSettings, @@ -159,22 +159,22 @@ async def _request( return resp -def make_type(value: Any, user_type: BIType | str) -> Any: +def make_type(value: Any, user_type: UserDataType | str) -> Any: if value is None or value == "": return None - if user_type == BIType.integer: + if user_type == UserDataType.integer: if isinstance(value, str): # overflow return None return int(value) - if user_type == 
BIType.float: + if user_type == UserDataType.float: if isinstance(value, str): # overflow return None return float(value) - if user_type == BIType.boolean: + if user_type == UserDataType.boolean: return bool(value) - if user_type in (BIType.date, BIType.genericdatetime, BIType.datetime, BIType.datetimetz): + if user_type in (UserDataType.date, UserDataType.genericdatetime, UserDataType.datetime, UserDataType.datetimetz): actual_dt = GSHEETS_EPOCH + datetime.timedelta(days=value) - if user_type == BIType.date: + if user_type == UserDataType.date: dt_str = actual_dt.strftime("%Y-%m-%d") else: dt_str = actual_dt.strftime("%Y-%m-%d %H:%M:%S") @@ -185,7 +185,7 @@ def make_type(value: Any, user_type: BIType | str) -> Any: hours, remainder = divmod(time_value.total_seconds(), 3600) minutes, seconds = divmod(remainder, 60) return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" - if user_type == BIType.string: + if user_type == UserDataType.string: return str(value) raise ValueError(f"Type {user_type} is not supported here") @@ -227,8 +227,8 @@ async def __aexit__(self, *args: Any) -> None: await self._aiogoogle.__aexit__(*args) def _process_values( - self, raw_values: list[list[Any]], user_types: list[BIType | str] - ) -> Tuple[list[list[Any]], list[BIType | str]]: + self, raw_values: list[list[Any]], user_types: list[UserDataType | str] + ) -> Tuple[list[list[Any]], list[UserDataType | str]]: """ Tries to convert values to passed BITypes and falls back to string when fails to do so But the fallback happens only on return, i.e. 
it tries to convert all values to the original passed type @@ -246,7 +246,7 @@ def _process_values( try: raw_values[row_idx][col_idx] = make_type(value, user_type) except (ValueError, TypeError): - new_user_types[col_idx] = BIType.string + new_user_types[col_idx] = UserDataType.string raw_values[row_idx][col_idx] = str(value) except OverflowError: raw_values[row_idx][col_idx] = None @@ -571,8 +571,8 @@ async def get_single_range(self, spreadsheet_id: str, range: Range) -> Sheet: return sheet async def get_single_values_range( - self, spreadsheet_id: str, range: Range, user_types: list[BIType | str] - ) -> Tuple[list[list[Any]], list[BIType | str]]: + self, spreadsheet_id: str, range: Range, user_types: list[UserDataType | str] + ) -> Tuple[list[list[Any]], list[UserDataType | str]]: resp_json = await self._request_values(spreadsheet_id=spreadsheet_id, range=str(range)) raw_values = resp_json.get("values", []) diff --git a/lib/dl_file_uploader_lib/dl_file_uploader_lib/settings.py b/lib/dl_file_uploader_lib/dl_file_uploader_lib/settings.py index c4791da0a..6a609123f 100644 --- a/lib/dl_file_uploader_lib/dl_file_uploader_lib/settings.py +++ b/lib/dl_file_uploader_lib/dl_file_uploader_lib/settings.py @@ -6,6 +6,7 @@ import attr from dl_configs.crypto_keys import CryptoKeysConfig +from dl_configs.enums import RedisMode from dl_configs.environments import is_setting_applicable from dl_configs.settings_loaders.meta_definition import ( required, @@ -21,7 +22,7 @@ def _make_redis_persistent_settings(cfg: Any, db: int) -> Optional[RedisSettings # TODO: move this values to a separate key return ( RedisSettings( # type: ignore - MODE=cfg.REDIS_PERSISTENT_MODE, # type: ignore + MODE=RedisMode(cfg.REDIS_PERSISTENT_MODE), # type: ignore CLUSTER_NAME=cfg.REDIS_PERSISTENT_CLUSTER_NAME, # type: ignore HOSTS=cfg.REDIS_PERSISTENT_HOSTS, # type: ignore PORT=cfg.REDIS_PERSISTENT_PORT, # type: ignore diff --git a/lib/dl_file_uploader_lib/pyproject.toml 
b/lib/dl_file_uploader_lib/pyproject.toml index b03f645fb..32fc9c442 100644 --- a/lib/dl_file_uploader_lib/pyproject.toml +++ b/lib/dl_file_uploader_lib/pyproject.toml @@ -1,4 +1,3 @@ - [tool.poetry] name = "datalens-file-uploader-lib" version = "0.0.1" @@ -8,7 +7,6 @@ packages = [{include = "dl_file_uploader_lib"}] license = "Apache 2.0" readme = "README.md" - [tool.poetry.dependencies] aiobotocore = ">=2.4.2" aiogoogle = ">=5.2.0" @@ -44,8 +42,6 @@ minversion = "6.0" addopts = "-ra" testpaths = ["dl_file_uploader_lib_tests/db", "dl_file_uploader_lib_tests/ext"] - - [datalens.pytest.db] root_dir = "dl_file_uploader_lib_tests" target_path = "db" @@ -62,5 +58,5 @@ check_untyped_defs = true strict_optional = true [[tool.mypy.overrides]] -module = ["aiogoogle.*", "marshmallow_oneofschema.*"] +module = ["aiogoogle.*", "marshmallow_oneofschema.*", "types_aiobotocore_s3.*", "mypy_boto3_s3.*"] ignore_missing_imports = true diff --git a/lib/dl_file_uploader_task_interface/dl_file_uploader_task_interface/utils_service_registry.py b/lib/dl_file_uploader_task_interface/dl_file_uploader_task_interface/utils_service_registry.py index 5989616b0..60e037b5e 100644 --- a/lib/dl_file_uploader_task_interface/dl_file_uploader_task_interface/utils_service_registry.py +++ b/lib/dl_file_uploader_task_interface/dl_file_uploader_task_interface/utils_service_registry.py @@ -12,9 +12,6 @@ from dl_api_commons.base_models import RequestContextInfo from dl_configs.crypto_keys import CryptoKeysConfig from dl_configs.rqe import rqe_config_from_env -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE -from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions from dl_core.services_registry.env_manager_factory import InsecureEnvManagerFactory from dl_core.services_registry.sr_factories import DefaultSRFactory from dl_core.services_registry.top_level 
import ServicesRegistry @@ -22,6 +19,10 @@ from dl_core.us_manager.us_manager_async import AsyncUSManager from dl_file_uploader_worker_lib.settings import FileUploaderConnectorsSettings +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE +from dl_connector_clickhouse.core.clickhouse_base.conn_options import CHConnectOptions + if TYPE_CHECKING: from dl_core.connection_models import ConnectOptions diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/settings.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/settings.py index f6586c424..0ed457758 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/settings.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/settings.py @@ -6,9 +6,10 @@ from dl_configs.settings_loaders.meta_definition import s_attrib from dl_configs.settings_loaders.settings_obj_base import SettingsBase from dl_configs.settings_submodels import GoogleAppSettings +from dl_file_uploader_lib.settings import FileUploaderBaseSettings + from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings from dl_connector_bundle_chs3.file.core.settings import file_s3_settings_fallback -from dl_file_uploader_lib.settings import FileUploaderBaseSettings @attr.s(frozen=True) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/cleanup.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/cleanup.py index 3966f35d6..195fb7b23 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/cleanup.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/cleanup.py @@ -9,9 +9,6 @@ from botocore.exceptions import ClientError from redis.asyncio.lock import Lock as RedisLock -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection -from 
dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE from dl_constants.enums import DataSourceRole from dl_core.us_entry import USMigrationEntry from dl_file_uploader_lib.enums import RenameTenantStatus @@ -34,6 +31,10 @@ TaskResult, ) +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/download.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/download.py index f38beb1db..8750015e8 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/download.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/download.py @@ -13,10 +13,9 @@ import aiogoogle import attr -from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_core.aio.web_app_services.gsheets import ( Range, @@ -54,6 +53,8 @@ TaskResult, ) +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection + LOGGER = logging.getLogger(__name__) @@ -84,7 +85,7 @@ async def _values_data_iter( sheets_client: GSheetsClient, spreadsheet_id: str, sheet_sample: Sheet, - user_types: list[BIType | str], + user_types: list[UserDataType | str], raw_schema_body: list[SchemaColumn], ) -> AsyncIterator[list]: """ @@ -136,8 +137,8 @@ async def _values_data_iter( for idx, (col, new_user_type) in enumerate(zip(raw_schema_body, new_user_types)): # fall back to string if new_user_type == "time": - new_user_type = BIType.string - if new_user_type != col.user_type and new_user_type == 
BIType.string: + new_user_type = UserDataType.string + if new_user_type != col.user_type and new_user_type == UserDataType.string: raw_schema_body[idx] = raw_schema_body[idx].clone(user_type=new_user_type) for row in sheet_portion_values: @@ -261,9 +262,9 @@ async def run(self) -> TaskResult: has_header = src.file_source_settings.first_line_is_header assert has_header is not None - orig_user_types: list[BIType | str] = [] + orig_user_types: list[UserDataType | str] = [] for idx, col in enumerate(raw_schema_body): - if col.user_type == BIType.string and sheet_sample.col_is_time(idx, has_header): + if col.user_type == UserDataType.string and sheet_sample.col_is_time(idx, has_header): orig_user_types.append("time") else: orig_user_types.append(col.user_type) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/excel.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/excel.py index 4ea9d2c95..826751273 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/excel.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/excel.py @@ -155,5 +155,9 @@ def data_iter() -> Iterator[list]: if dfile is None: return Retry(attempts=3) else: + dfile.status = FileProcessingStatus.failed + exc_to_save = ex if isinstance(ex, exc.DLFileUploaderBaseError) else exc.ParseFailed() + dfile.error = FileProcessingError.from_exception(exc_to_save) + await dfile.save() return Fail() return Success() diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/save.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/save.py index 0e4f9cae0..d273133e8 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/save.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/tasks/save.py @@ -10,7 +10,6 @@ from redis.asyncio.lock import Lock as RedisLock import shortuuid -from dl_connector_bundle_chs3.chs3_base.core.us_connection import 
BaseFileS3Connection from dl_constants.enums import ( DataSourceRole, FileProcessingStatus, @@ -49,6 +48,8 @@ ) from dl_utils.aio import ContextVarExecutor +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/connection_error_tracker.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/connection_error_tracker.py index 315e6b642..468c4a924 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/connection_error_tracker.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/connection_error_tracker.py @@ -5,7 +5,6 @@ import attr import redis.asyncio -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection from dl_constants.enums import ( ComponentErrorLevel, ComponentType, @@ -22,6 +21,8 @@ ) from dl_task_processor.processor import TaskProcessor +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection + LOGGER = logging.getLogger(__name__) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/converter_parsing_utils.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/converter_parsing_utils.py index 8ae2eacc8..8cbce8eb2 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/converter_parsing_utils.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/converter_parsing_utils.py @@ -23,7 +23,7 @@ GenericProfiler, generic_profiler, ) -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core import converter_types_cast from dl_core.aio.web_app_services.gsheets import ( Cell, @@ -308,7 +308,7 @@ def _check_date_re(value): # type: ignore # TODO: fix @attr.s(frozen=True) class ParsingDataType: - bi_type: BIType = attr.ib() + bi_type: UserDataType = attr.ib() type: Any = 
attr.ib() order: int = attr.ib() group: int = attr.ib() @@ -318,18 +318,22 @@ class ParsingDataType: format_desc: str = attr.ib(default=None) -_NONE_PARSING_DATA_TYPE = ParsingDataType(bi_type=BIType.string, type=str, order=-2, group=_TYPE_GROUP_NONE) +_NONE_PARSING_DATA_TYPE = ParsingDataType(bi_type=UserDataType.string, type=str, order=-2, group=_TYPE_GROUP_NONE) _BOOLEAN_PARSING_DATA_TYPE = ParsingDataType( - bi_type=BIType.boolean, type=bool, order=-1, cast_func=converter_types_cast._to_boolean, group=_TYPE_GROUP_BOOLEAN + bi_type=UserDataType.boolean, + type=bool, + order=-1, + cast_func=converter_types_cast._to_boolean, + group=_TYPE_GROUP_BOOLEAN, ) _INTEGER_PARSING_DATA_TYPE = ParsingDataType( - bi_type=BIType.integer, type=int, order=0, cast_func=converter_types_cast._to_int, group=_TYPE_GROUP_NUMBER + bi_type=UserDataType.integer, type=int, order=0, cast_func=converter_types_cast._to_int, group=_TYPE_GROUP_NUMBER ) _FLOAT_PARSING_DATA_TYPE = ParsingDataType( - bi_type=BIType.float, type=float, order=1, cast_func=converter_types_cast._to_float, group=_TYPE_GROUP_NUMBER + bi_type=UserDataType.float, type=float, order=1, cast_func=converter_types_cast._to_float, group=_TYPE_GROUP_NUMBER ) _DATE_PARSING_DATA_TYPE = ParsingDataType( - bi_type=BIType.date, + bi_type=UserDataType.date, order=2, type=datetime.date, cast_func=converter_types_cast._to_date, @@ -337,14 +341,14 @@ class ParsingDataType: group=_TYPE_GROUP_DATETIME, ) _DATETIME_PARSING_DATA_TYPE = ParsingDataType( - bi_type=BIType.genericdatetime, + bi_type=UserDataType.genericdatetime, order=3, type=datetime.datetime, cast_func=converter_types_cast._to_datetime, check_func=_check_datetime_re, group=_TYPE_GROUP_DATETIME, ) -_STRING_PARSING_DATA_TYPE = ParsingDataType(bi_type=BIType.string, type=str, order=4, group=_TYPE_GROUP_STRING) +_STRING_PARSING_DATA_TYPE = ParsingDataType(bi_type=UserDataType.string, type=str, order=4, group=_TYPE_GROUP_STRING) ALLOWED_DATA_TYPES = ( _INTEGER_PARSING_DATA_TYPE, 
@@ -458,13 +462,13 @@ def merge_column_types(header_types: TColumnTypes, column_types: TColumnTypes, h def raw_schema_to_column_types(raw_schema: list[SchemaColumn]) -> TColumnTypes: bi_type_to_parsing_data_type_map = { - BIType.integer: _INTEGER_PARSING_DATA_TYPE, - BIType.float: _FLOAT_PARSING_DATA_TYPE, - BIType.date: _DATE_PARSING_DATA_TYPE, - BIType.genericdatetime: _DATETIME_PARSING_DATA_TYPE, - BIType.datetime: _DATETIME_PARSING_DATA_TYPE, - BIType.string: _STRING_PARSING_DATA_TYPE, - BIType.boolean: _BOOLEAN_PARSING_DATA_TYPE, + UserDataType.integer: _INTEGER_PARSING_DATA_TYPE, + UserDataType.float: _FLOAT_PARSING_DATA_TYPE, + UserDataType.date: _DATE_PARSING_DATA_TYPE, + UserDataType.genericdatetime: _DATETIME_PARSING_DATA_TYPE, + UserDataType.datetime: _DATETIME_PARSING_DATA_TYPE, + UserDataType.string: _STRING_PARSING_DATA_TYPE, + UserDataType.boolean: _BOOLEAN_PARSING_DATA_TYPE, } column_types = {} for col_index, col in enumerate(raw_schema): diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/parsing_utils.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/parsing_utils.py index 5dd8d3aa5..f085d6334 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/parsing_utils.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/parsing_utils.py @@ -16,11 +16,9 @@ import cchardet as chardet from dl_app_tools.profiling_base import generic_profiler -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE from dl_constants.enums import ( - BIType, ConnectionType, + UserDataType, ) from dl_core.aio.web_app_services.gsheets import Sheet from dl_core.components.ids import ( @@ -44,6 +42,9 @@ raw_schema_to_column_types, ) +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.file.core.constants 
import CONNECTION_TYPE_FILE + LOGGER = logging.getLogger(__name__) @@ -180,7 +181,7 @@ def result_column_types_to_raw_schema( raw_schema: list[SchemaColumn] = [] for col in column_types: - user_type = getattr(BIType, col["cast"]) # type: ignore # TODO: FIX + user_type = getattr(UserDataType, col["cast"]) # type: ignore # TODO: FIX title: str = col["title"] # type: ignore # TODO: FIX sch_col = SchemaColumn( name=field_id_gen.make_field_id(col), diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/s3_utils.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/s3_utils.py index 92a511024..a580f00b3 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/s3_utils.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib/utils/s3_utils.py @@ -1,17 +1,17 @@ +from __future__ import annotations + import io import json from typing import ( + TYPE_CHECKING, + BinaryIO, Iterator, NamedTuple, Optional, ) -import botocore.client from clickhouse_sqlalchemy.quoting import Quoter -from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection -from dl_connector_bundle_chs3.file.core.adapter import AsyncFileS3Adapter -from dl_connector_clickhouse.core.clickhouse_base.ch_commons import create_column_sql from dl_core.db import ( SchemaColumn, get_type_transformer, @@ -28,6 +28,14 @@ ) from dl_file_uploader_worker_lib.utils.parsing_utils import get_csv_raw_data_iterator +from dl_connector_bundle_chs3.chs3_base.core.us_connection import BaseFileS3Connection +from dl_connector_bundle_chs3.file.core.adapter import AsyncFileS3Adapter +from dl_connector_clickhouse.core.clickhouse_base.ch_commons import create_column_sql + + +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client as SyncS3Client + def make_s3_table_func_sql_source( conn: BaseFileS3Connection, @@ -63,7 +71,7 @@ class S3Object(NamedTuple): def copy_from_s3_to_s3( - s3_sync_cli: botocore.client.BaseClient, + s3_sync_cli: 
SyncS3Client, src_file: S3Object, dst_file: S3Object, file_type: FileType, @@ -72,7 +80,7 @@ def copy_from_s3_to_s3( raw_schema: list[SchemaColumn], ) -> None: s3_sync_resp = s3_sync_cli.get_object(Bucket=src_file.bucket, Key=src_file.key) - s3_data_stream = s3_sync_resp["Body"] + s3_data_stream: BinaryIO = s3_sync_resp["Body"] # type: ignore # TODO: fix def spreadsheet_data_iter() -> Iterator[dict]: fieldnames = tuple(sch.name for sch in raw_schema) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/conftest.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/conftest.py index 980150871..01d27e8ea 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/conftest.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/conftest.py @@ -4,10 +4,9 @@ import logging import os import sys +from typing import TYPE_CHECKING -import aiobotocore.client import attr -import botocore.client import pytest import redis.asyncio @@ -22,7 +21,6 @@ RedisSettings, S3Settings, ) -from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings from dl_core.loader import load_core_lib from dl_core.services_registry.top_level import DummyServiceRegistry from dl_core.united_storage_client import USAuthContextMaster @@ -64,9 +62,16 @@ ) from dl_testing.utils import wait_for_initdb +from dl_connector_bundle_chs3.chs3_base.core.settings import FileS3ConnectorSettings + from .config import TestingUSConfig +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client as SyncS3Client + from types_aiobotocore_s3 import S3Client as AsyncS3Client + + LOGGER = logging.getLogger(__name__) @@ -260,13 +265,13 @@ def task_processor_client(request, task_processor_arq_client, task_processor_loc @pytest.fixture(scope="function") -async def s3_client(s3_settings) -> aiobotocore.client.AioBaseClient: +async def s3_client(s3_settings) -> AsyncS3Client: async with create_s3_client(s3_settings) as 
client: yield client @pytest.fixture(scope="function") -def s3_client_sync(s3_settings) -> botocore.client.BaseClient: +def s3_client_sync(s3_settings) -> SyncS3Client: return create_sync_s3_client(s3_settings) diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/conftest.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/conftest.py index 521ae46d5..d51dc5063 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/conftest.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/conftest.py @@ -117,6 +117,36 @@ async def uploaded_excel_id(uploaded_excel) -> str: yield uploaded_excel.id +@pytest.fixture(scope="function") +async def uploaded_excel_with_one_row(s3_tmp_bucket, s3_persistent_bucket, s3_client, redis_model_manager) -> DataFile: + filename = "one_row_table.xlsx" + data_file_desc = DataFile( + manager=redis_model_manager, + filename=filename, + file_type=FileType.xlsx, + status=FileProcessingStatus.in_progress, + ) + + dirname = os.path.dirname(os.path.abspath(__file__)) + path = os.path.join(dirname, filename) + + with open(path, "rb") as fd: + await s3_client.put_object( + ACL="private", + Bucket=s3_tmp_bucket, + Key=data_file_desc.s3_key, + Body=fd.read(), + ) + + await data_file_desc.save() + yield data_file_desc + + +@pytest.fixture(scope="function") +async def uploaded_excel_with_one_row_id(uploaded_excel_with_one_row) -> str: + yield uploaded_excel_with_one_row.id + + @pytest.fixture(scope="function") def reader_app(loop, secure_reader): current_app = create_reader_app() diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/one_row_table.xlsx b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/one_row_table.xlsx new file mode 100644 index 000000000..ee2cdc24a Binary files /dev/null and b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/one_row_table.xlsx differ diff --git 
a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_excel.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_excel.py index 3f47ce8b3..77e90701c 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_excel.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_excel.py @@ -4,8 +4,8 @@ import pytest from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_file_uploader_lib.redis_model.models import ( DataFile, @@ -46,31 +46,31 @@ async def test_parse_excel_task( assert dsrc.status == FileProcessingStatus.ready assert dsrc.title == "data.xlsx – Orders" assert [sch.user_type for sch in dsrc.raw_schema] == [ - BIType.integer, - BIType.string, - BIType.genericdatetime, - BIType.genericdatetime, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.string, - BIType.float, - BIType.integer, - BIType.float, - BIType.float, - BIType.float, - BIType.string, - BIType.float, + UserDataType.integer, + UserDataType.string, + UserDataType.genericdatetime, + UserDataType.genericdatetime, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.string, + UserDataType.float, + UserDataType.integer, + UserDataType.float, + UserDataType.float, + UserDataType.float, + UserDataType.string, + UserDataType.float, ] assert [sch.name for sch in dsrc.raw_schema] == [ "row", @@ -156,3 +156,27 @@ async def test_parse_excel_task( "Medium", "39.24", ] + + +@pytest.mark.asyncio +async def test_parse_excel_with_one_row_task( + 
task_processor_client, + task_state, + s3_client, + redis_model_manager, + uploaded_excel_with_one_row_id, + reader_app, +): + uploaded_excel_id = uploaded_excel_with_one_row_id + rmm = redis_model_manager + df = await DataFile.get(manager=rmm, obj_id=uploaded_excel_id) + assert df.status == FileProcessingStatus.in_progress + + task = await task_processor_client.schedule(ProcessExcelTask(file_id=uploaded_excel_id)) + result = await wait_task(task, task_state) + await sleep(60) + + assert result[-1] == "failed" + + df = await DataFile.get(manager=rmm, obj_id=uploaded_excel_id) + assert df.status == FileProcessingStatus.failed diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_tasks.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_tasks.py index e048497ba..205cb4a07 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_tasks.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/test_tasks.py @@ -10,8 +10,8 @@ import pytest from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_file_uploader_lib import exc from dl_file_uploader_lib.enums import ( @@ -69,11 +69,11 @@ async def test_parse_file_task( assert dsrc.status == FileProcessingStatus.ready assert dsrc.title == "test_file.csv" assert [sch.user_type for sch in dsrc.raw_schema] == [ - BIType.string, - BIType.integer, - BIType.float, - BIType.date, - BIType.genericdatetime, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.date, + UserDataType.genericdatetime, ] assert [sch.name for sch in dsrc.raw_schema] == ["f1", "f2", "f3", "data", "data_i_vremya"] assert [sch.title for sch in dsrc.raw_schema] == ["f1", "f2", "f3", "Дата", "Дата и время"] @@ -153,7 +153,6 @@ async def test_parse_10mb_file_task( assert df.status == FileProcessingStatus.ready -# @pytest.mark.skip(reason='Some US problem in CI.') # TODO @pytest.mark.asyncio 
async def test_save_source_task( task_processor_client, @@ -190,7 +189,6 @@ async def test_save_source_task( assert conn1.get_file_source_by_id(source.id).status == FileProcessingStatus.ready -# @pytest.mark.skip(reason='Some US problem in CI.') # TODO @pytest.mark.asyncio async def test_save_source_task_on_replace( task_processor_client, @@ -391,7 +389,6 @@ async def put_lifecycle_config(lc_rules: list[dict[str, Any]]) -> None: assert new_n_lc_rules == 1 -# @pytest.mark.skip(reason='Some US problem in CI.') # TODO @pytest.mark.asyncio async def test_datetime64( task_processor_client, @@ -459,7 +456,6 @@ async def test_datetime64( assert conn.get_file_source_by_id(source.id).status == FileProcessingStatus.ready -# @pytest.mark.skip(reason='Some US problem in CI.') # TODO @pytest.mark.asyncio async def test_datetime_tz( task_processor_client, diff --git a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/utils.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/utils.py index 8c4949dcd..0264ef11a 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/utils.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/db/utils.py @@ -1,7 +1,5 @@ import uuid -from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE -from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection from dl_constants.enums import ( DataSourceRole, FileProcessingStatus, @@ -9,6 +7,9 @@ from dl_core.us_manager.us_manager_async import AsyncUSManager from dl_core_testing.connection import make_conn_key +from dl_connector_bundle_chs3.file.core.constants import CONNECTION_TYPE_FILE +from dl_connector_bundle_chs3.file.core.us_connection import FileS3Connection + async def create_file_connection(us_manager: AsyncUSManager, file_id, source_id, raw_schema, src_title="Source 1"): conn_name = "file test conn {}".format(uuid.uuid4()) diff --git 
a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/ext/test_gsheets.py b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/ext/test_gsheets.py index 05b95e8b0..2d5266791 100644 --- a/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/ext/test_gsheets.py +++ b/lib/dl_file_uploader_worker_lib/dl_file_uploader_worker_lib_tests/ext/test_gsheets.py @@ -4,11 +4,9 @@ import pytest -from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 -from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection from dl_constants.enums import ( - BIType, FileProcessingStatus, + UserDataType, ) from dl_core.us_manager.us_manager_async import AsyncUSManager from dl_core_testing.connection import make_conn_key @@ -33,17 +31,20 @@ from dl_file_uploader_worker_lib.utils.converter_parsing_utils import idx_to_alphabet_notation from dl_task_processor.state import wait_task +from dl_connector_bundle_chs3.chs3_gsheets.core.constants import CONNECTION_TYPE_GSHEETS_V2 +from dl_connector_bundle_chs3.chs3_gsheets.core.us_connection import GSheetsFileS3Connection + LOGGER = logging.getLogger(__name__) -# BIType aliases -STR = BIType.string -INT = BIType.integer -F = BIType.float -D = BIType.date -DT = BIType.genericdatetime -B = BIType.boolean +# UserDataType aliases +STR = UserDataType.string +INT = UserDataType.integer +F = UserDataType.float +D = UserDataType.date +DT = UserDataType.genericdatetime +B = UserDataType.boolean SPREADHEET_ID = "1rnUFa7AiSKD5O80IKCvMy2cSZvLU1kRw9dxbtZbDMWc" @@ -252,14 +253,14 @@ async def test_download_gsheet_task( elaborate_source = df.sources[TEST_SHEET_TITLES_INDICES["elaborate"]] actual_user_types = [col.user_type for col in elaborate_source.raw_schema] expected_user_types = [ - BIType.integer, - BIType.date, - BIType.float, - BIType.string, - BIType.boolean, - BIType.genericdatetime, - BIType.string, - BIType.string, + UserDataType.integer, + 
UserDataType.date, + UserDataType.float, + UserDataType.string, + UserDataType.boolean, + UserDataType.genericdatetime, + UserDataType.string, + UserDataType.string, ] assert actual_user_types == expected_user_types @@ -291,7 +292,7 @@ async def test_parse_gsheet( assert elaborate_source_no_types.status == FileProcessingStatus.ready actual_user_types = [col.user_type for col in elaborate_source_no_types.raw_schema] - expected_user_types = [BIType.string] * 10 + expected_user_types = [UserDataType.string] * 10 assert actual_user_types == expected_user_types preview = await DataSourcePreview.get(manager=redis_model_manager, obj_id=elaborate_source_no_types.preview_id) @@ -384,7 +385,7 @@ async def assert_parsing_results( assert file_source_settings.first_line_is_header == has_header_expected if not has_header_expected: # mixed header => everything is string - assert all(col.user_type == BIType.string for col in dsrc.raw_schema) + assert all(col.user_type == UserDataType.string for col in dsrc.raw_schema) assert len(preview.preview_data) == sheet_len else: # set header => correct type @@ -565,7 +566,6 @@ async def create_gsheets_v2_connection(us_manager: AsyncUSManager, dfile: DataFi return conn -@pytest.mark.skip(reason="Some US problem in CI.") # TODO @pytest.mark.asyncio async def test_save_source_task( task_processor_client, @@ -675,7 +675,6 @@ async def test_download_and_parse_big_gsheets( ) -@pytest.mark.skip(reason="Some US problem in CI.") # TODO @pytest.mark.asyncio @pytest.mark.parametrize( "spreadsheet_id, expected_has_header, expected_headers, expected_user_types", diff --git a/lib/dl_file_uploader_worker_lib/pyproject.toml b/lib/dl_file_uploader_worker_lib/pyproject.toml index 008b7b746..4cb1792aa 100644 --- a/lib/dl_file_uploader_worker_lib/pyproject.toml +++ b/lib/dl_file_uploader_worker_lib/pyproject.toml @@ -67,5 +67,5 @@ check_untyped_defs = true strict_optional = true [[tool.mypy.overrides]] -module = ["aiogoogle.*", "cchardet.*"] +module = 
["aiogoogle.*", "cchardet.*", "mypy_boto3_s3.*", "clickhouse_sqlalchemy.*"] ignore_missing_imports = true diff --git a/lib/dl_formula/dl_formula/definitions/functions_array.py b/lib/dl_formula/dl_formula/definitions/functions_array.py index bb21e551e..9ccf9c53a 100644 --- a/lib/dl_formula/dl_formula/definitions/functions_array.py +++ b/lib/dl_formula/dl_formula/definitions/functions_array.py @@ -21,6 +21,7 @@ from dl_formula.definitions.functions_string import ( FuncContains, FuncLen, + FuncNotContains, FuncStartswith, ) from dl_formula.definitions.literals import un_literal @@ -28,6 +29,7 @@ Fixed, FromArgs, ) +from dl_formula.shortcuts import n from dl_formula.translation.context import TranslationCtx @@ -239,6 +241,21 @@ class FuncArrayContains(FuncContains): ] +class FuncArrayNotContains(FuncNotContains): + arg_names = ["array", "value"] + argument_types = [ + ArgTypeSequence([DataType.ARRAY_INT, DataType.INTEGER]), + ArgTypeSequence([DataType.ARRAY_FLOAT, DataType.FLOAT]), + ArgTypeSequence([DataType.ARRAY_STR, DataType.STRING]), + ] + variants = [ + VW( + D.DUMMY, + lambda arr, val: n.not_(n.func.CONTAINS(arr, val)), + ), + ] + + class FuncArrayContainsAll(ArrayFunction): name = "contains_all" arg_names = ["array_1", "array_2"] @@ -581,6 +598,8 @@ class FuncArrayRemoveDefault(FuncArrayRemoveBase): FuncStringArrayFromStringArray, # contains FuncArrayContains, + # notcontains + FuncArrayNotContains, # contains_all FuncArrayContainsAll, # contains_any diff --git a/lib/dl_formula/dl_formula/definitions/functions_string.py b/lib/dl_formula/dl_formula/definitions/functions_string.py index a9e2f3eb3..2892e7a03 100644 --- a/lib/dl_formula/dl_formula/definitions/functions_string.py +++ b/lib/dl_formula/dl_formula/definitions/functions_string.py @@ -189,6 +189,35 @@ class FuncContainsNonString(FuncContains): ] +class FuncNotContains(StringFunction): + name = "notcontains" + arg_names = ["string", "substring"] + arg_cnt = 2 + return_type = Fixed(DataType.BOOLEAN) + 
return_flags = ContextFlag.IS_CONDITION + variants = [ + VW(D.DUMMY, lambda x, y: n.not_(n.func.CONTAINS(x, y))), + ] + + +class FuncNotContainsConst(FuncNotContains): + argument_types = [ + ArgTypeSequence([DataType.STRING, DataType.CONST_STRING]), + ] + + +class FuncNotContainsNonConst(FuncNotContains): + argument_types = [ + ArgTypeSequence([DataType.STRING, DataType.STRING]), + ] + + +class FuncNotContainsNonString(FuncNotContains): + argument_types = [ + ArgTypeSequence([NON_STR_CONTAINMENT_TYPES, DataType.STRING]), + ] + + class FuncIContains(StringFunction): name = "icontains" arg_names = ["string", "substring"] @@ -688,6 +717,10 @@ class FuncUtf8(StringFunction): FuncContainsConst, FuncContainsNonConst, FuncContainsNonString, + # notcontains + FuncNotContainsConst, + FuncNotContainsNonConst, + FuncNotContainsNonString, # endswith FuncEndswithConst, FuncEndswithNonConst, diff --git a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensLexer.py b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensLexer.py index 2428d56a8..85b6eb29a 100644 --- a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensLexer.py +++ b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensLexer.py @@ -1,13 +1,6 @@ import sys -from antlr4 import ( - DFA, - ATNDeserializer, - Lexer, - LexerATNSimulator, - PredictionContextCache, -) - +from antlr4 import DFA, ATNDeserializer, Lexer, LexerATNSimulator, PredictionContextCache if sys.version_info[1] > 5: from typing import TextIO @@ -17,5243 +10,229 @@ def serializedATN(): return [ - 4, - 0, - 49, - 594, - 6, - -1, - 2, - 0, - 7, - 0, - 2, - 1, - 7, - 1, - 2, - 2, - 7, - 2, - 2, - 3, - 7, - 3, - 2, - 4, - 7, - 4, - 2, - 5, - 7, - 5, - 2, - 6, - 7, - 6, - 2, - 7, - 7, - 7, - 2, - 8, - 7, - 8, - 2, - 9, - 7, - 9, - 2, - 10, - 7, - 10, - 2, - 11, - 7, - 11, - 2, - 12, - 7, - 12, - 2, - 13, - 7, - 13, - 2, - 14, - 7, - 14, - 2, - 15, - 7, - 15, - 2, - 16, - 7, - 16, - 2, - 17, - 7, - 17, - 2, - 18, - 7, - 18, - 2, - 19, - 7, - 19, - 2, - 20, - 7, - 
20, - 2, - 21, - 7, - 21, - 2, - 22, - 7, - 22, - 2, - 23, - 7, - 23, - 2, - 24, - 7, - 24, - 2, - 25, - 7, - 25, - 2, - 26, - 7, - 26, - 2, - 27, - 7, - 27, - 2, - 28, - 7, - 28, - 2, - 29, - 7, - 29, - 2, - 30, - 7, - 30, - 2, - 31, - 7, - 31, - 2, - 32, - 7, - 32, - 2, - 33, - 7, - 33, - 2, - 34, - 7, - 34, - 2, - 35, - 7, - 35, - 2, - 36, - 7, - 36, - 2, - 37, - 7, - 37, - 2, - 38, - 7, - 38, - 2, - 39, - 7, - 39, - 2, - 40, - 7, - 40, - 2, - 41, - 7, - 41, - 2, - 42, - 7, - 42, - 2, - 43, - 7, - 43, - 2, - 44, - 7, - 44, - 2, - 45, - 7, - 45, - 2, - 46, - 7, - 46, - 2, - 47, - 7, - 47, - 2, - 48, - 7, - 48, - 2, - 49, - 7, - 49, - 2, - 50, - 7, - 50, - 2, - 51, - 7, - 51, - 2, - 52, - 7, - 52, - 2, - 53, - 7, - 53, - 2, - 54, - 7, - 54, - 2, - 55, - 7, - 55, - 2, - 56, - 7, - 56, - 2, - 57, - 7, - 57, - 2, - 58, - 7, - 58, - 2, - 59, - 7, - 59, - 2, - 60, - 7, - 60, - 2, - 61, - 7, - 61, - 2, - 62, - 7, - 62, - 2, - 63, - 7, - 63, - 2, - 64, - 7, - 64, - 2, - 65, - 7, - 65, - 2, - 66, - 7, - 66, - 2, - 67, - 7, - 67, - 2, - 68, - 7, - 68, - 2, - 69, - 7, - 69, - 2, - 70, - 7, - 70, - 2, - 71, - 7, - 71, - 2, - 72, - 7, - 72, - 2, - 73, - 7, - 73, - 2, - 74, - 7, - 74, - 2, - 75, - 7, - 75, - 1, - 0, - 1, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 1, - 2, - 1, - 3, - 1, - 3, - 1, - 3, - 1, - 3, - 5, - 3, - 165, - 8, - 3, - 10, - 3, - 12, - 3, - 168, - 9, - 3, - 1, - 3, - 3, - 3, - 171, - 8, - 3, - 1, - 3, - 1, - 3, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 4, - 4, - 180, - 8, - 4, - 11, - 4, - 12, - 4, - 181, - 1, - 4, - 5, - 4, - 185, - 8, - 4, - 10, - 4, - 12, - 4, - 188, - 9, - 4, - 1, - 4, - 5, - 4, - 191, - 8, - 4, - 10, - 4, - 12, - 4, - 194, - 9, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 5, - 4, - 5, - 202, - 8, - 5, - 11, - 5, - 12, - 5, - 203, - 1, - 5, - 1, - 5, - 1, - 6, - 1, - 6, - 1, - 7, - 1, - 7, - 1, - 8, - 1, - 8, - 1, - 9, - 1, - 9, - 1, - 10, - 1, - 10, - 1, - 11, - 1, - 11, - 1, - 12, - 1, - 12, - 1, - 13, - 
1, - 13, - 1, - 14, - 1, - 14, - 1, - 15, - 1, - 15, - 1, - 16, - 1, - 16, - 1, - 17, - 1, - 17, - 1, - 18, - 1, - 18, - 1, - 19, - 1, - 19, - 1, - 20, - 1, - 20, - 1, - 21, - 1, - 21, - 1, - 22, - 1, - 22, - 1, - 23, - 1, - 23, - 1, - 24, - 1, - 24, - 1, - 25, - 1, - 25, - 1, - 26, - 1, - 26, - 1, - 27, - 1, - 27, - 1, - 27, - 1, - 27, - 1, - 27, - 1, - 27, - 1, - 28, - 1, - 28, - 1, - 28, - 1, - 28, - 1, - 29, - 1, - 29, - 1, - 29, - 1, - 29, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 4, - 30, - 271, - 8, - 30, - 11, - 30, - 12, - 30, - 272, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 30, - 4, - 30, - 282, - 8, - 30, - 11, - 30, - 12, - 30, - 283, - 1, - 30, - 1, - 30, - 1, - 30, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 31, - 1, - 32, - 1, - 32, - 1, - 32, - 1, - 32, - 1, - 32, - 1, - 33, - 1, - 33, - 1, - 33, - 1, - 33, - 1, - 33, - 1, - 34, - 1, - 34, - 1, - 34, - 1, - 34, - 1, - 34, - 1, - 35, - 1, - 35, - 1, - 35, - 1, - 35, - 1, - 35, - 1, - 35, - 1, - 35, - 1, - 36, - 1, - 36, - 1, - 36, - 1, - 36, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 37, - 1, - 38, - 1, - 38, - 1, - 38, - 1, - 38, - 1, - 38, - 1, - 38, - 1, - 39, - 1, - 39, - 1, - 39, - 1, - 39, - 1, - 39, - 1, - 39, - 1, - 40, - 1, - 40, - 1, - 40, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 4, - 41, - 353, - 8, - 41, - 11, - 41, - 12, - 41, - 354, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 41, - 1, - 42, - 1, - 42, - 1, - 42, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 43, - 1, - 44, - 1, - 44, - 1, - 44, - 1, - 45, - 1, - 45, - 1, - 45, - 1, - 45, - 1, - 45, - 1, - 46, - 1, - 46, - 1, - 46, - 1, - 46, - 1, - 47, - 1, - 47, - 1, - 47, - 1, - 47, - 1, - 47, - 1, - 48, - 1, - 48, - 1, - 48, - 1, - 49, - 1, - 
49, - 1, - 49, - 1, - 49, - 1, - 49, - 1, - 49, - 4, - 49, - 405, - 8, - 49, - 11, - 49, - 12, - 49, - 406, - 1, - 49, - 1, - 49, - 1, - 49, - 1, - 50, - 1, - 50, - 1, - 50, - 1, - 50, - 1, - 50, - 1, - 51, - 1, - 51, - 1, - 51, - 1, - 51, - 1, - 51, - 1, - 51, - 1, - 52, - 1, - 52, - 1, - 52, - 1, - 52, - 1, - 52, - 1, - 53, - 1, - 53, - 1, - 53, - 1, - 53, - 1, - 53, - 1, - 54, - 1, - 54, - 1, - 54, - 1, - 54, - 1, - 54, - 1, - 54, - 1, - 54, - 1, - 55, - 1, - 55, - 1, - 56, - 1, - 56, - 1, - 57, - 1, - 57, - 1, - 58, - 1, - 58, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 1, - 59, - 3, - 59, - 459, - 8, - 59, - 1, - 60, - 1, - 60, - 1, - 61, - 1, - 61, - 1, - 62, - 1, - 62, - 1, - 63, - 1, - 63, - 1, - 64, - 1, - 64, - 1, - 64, - 3, - 64, - 472, - 8, - 64, - 1, - 64, - 1, - 64, - 3, - 64, - 476, - 8, - 64, - 1, - 65, - 1, - 65, - 3, - 65, - 480, - 8, - 65, - 1, - 65, - 1, - 65, - 1, - 66, - 4, - 66, - 485, - 8, - 66, - 11, - 66, - 12, - 66, - 486, - 1, - 67, - 1, - 67, - 1, - 67, - 1, - 67, - 1, - 67, - 3, - 67, - 494, - 8, - 67, - 3, - 67, - 496, - 8, - 67, - 1, - 68, - 1, - 68, - 1, - 68, - 1, - 68, - 5, - 68, - 502, - 8, - 68, - 10, - 68, - 12, - 68, - 505, - 9, - 68, - 1, - 68, - 1, - 68, - 1, - 68, - 1, - 68, - 1, - 68, - 5, - 68, - 512, - 8, - 68, - 10, - 68, - 12, - 68, - 515, - 9, - 68, - 1, - 68, - 3, - 68, - 518, - 8, - 68, - 1, - 69, - 1, - 69, - 4, - 69, - 522, - 8, - 69, - 11, - 69, - 12, - 69, - 523, - 1, - 69, - 1, - 69, - 1, - 70, - 1, - 70, - 3, - 70, - 530, - 8, - 70, - 1, - 70, - 1, - 70, - 1, - 70, - 5, - 70, - 535, - 8, - 70, - 10, - 70, - 12, - 70, - 538, - 9, - 70, - 1, - 71, - 1, - 71, - 1, - 71, - 1, - 72, - 1, - 72, - 1, - 72, - 1, - 72, - 1, - 72, - 1, - 73, - 1, - 73, - 1, - 73, - 1, - 73, - 1, - 73, - 1, - 73, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 
74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 1, - 74, - 3, - 74, - 591, - 8, - 74, - 1, - 75, - 1, - 75, - 0, - 0, - 76, - 1, - 1, - 3, - 2, - 5, - 3, - 7, - 4, - 9, - 5, - 11, - 6, - 13, - 0, - 15, - 0, - 17, - 0, - 19, - 0, - 21, - 0, - 23, - 0, - 25, - 0, - 27, - 0, - 29, - 0, - 31, - 0, - 33, - 0, - 35, - 0, - 37, - 0, - 39, - 0, - 41, - 0, - 43, - 0, - 45, - 0, - 47, - 0, - 49, - 0, - 51, - 0, - 53, - 0, - 55, - 7, - 57, - 8, - 59, - 9, - 61, - 10, - 63, - 11, - 65, - 12, - 67, - 13, - 69, - 14, - 71, - 15, - 73, - 16, - 75, - 17, - 77, - 18, - 79, - 19, - 81, - 20, - 83, - 21, - 85, - 22, - 87, - 23, - 89, - 24, - 91, - 25, - 93, - 26, - 95, - 27, - 97, - 28, - 99, - 29, - 101, - 30, - 103, - 31, - 105, - 32, - 107, - 33, - 109, - 34, - 111, - 35, - 113, - 36, - 115, - 37, - 117, - 38, - 119, - 39, - 121, - 40, - 123, - 41, - 125, - 0, - 127, - 0, - 129, - 0, - 131, - 0, - 133, - 42, - 135, - 43, - 137, - 44, - 139, - 45, - 141, - 46, - 143, - 0, - 145, - 0, - 147, - 47, - 149, - 48, - 151, - 49, - 1, - 0, - 34, - 1, - 0, - 10, - 10, - 1, - 1, - 10, - 10, - 1, - 0, - 42, - 42, - 2, - 0, - 42, - 42, - 47, - 47, - 4, - 0, - 9, - 10, - 12, - 13, - 32, - 32, - 160, - 160, - 2, - 0, - 65, - 65, - 97, - 97, - 2, - 0, - 66, - 66, - 98, - 98, - 2, - 0, - 67, - 67, - 99, - 99, - 2, - 0, - 68, - 68, - 100, - 100, - 2, - 0, - 69, - 69, - 101, - 101, - 2, - 0, - 70, - 70, - 102, - 102, - 2, - 0, - 71, - 71, - 103, - 103, - 2, - 0, - 72, - 72, - 104, - 104, - 2, - 0, - 73, - 73, - 105, - 105, - 2, - 0, - 75, - 75, - 107, - 107, - 2, - 0, - 76, - 76, - 108, - 108, - 2, - 0, - 77, - 77, - 109, - 109, - 2, - 0, - 78, - 78, - 110, - 110, - 2, - 0, - 79, - 79, - 111, - 111, - 2, - 0, - 82, - 82, - 114, - 114, - 2, - 0, - 83, - 83, - 115, - 115, - 2, - 0, - 84, - 84, - 116, - 116, - 2, - 
0, - 85, - 85, - 117, - 117, - 2, - 0, - 87, - 87, - 119, - 119, - 2, - 0, - 88, - 88, - 120, - 120, - 2, - 0, - 89, - 89, - 121, - 121, - 3, - 0, - 37, - 37, - 42, - 42, - 47, - 47, - 1, - 0, - 48, - 57, - 2, - 0, - 65, - 90, - 97, - 122, - 2, - 0, - 43, - 43, - 45, - 45, - 1, - 0, - 39, - 39, - 1, - 0, - 34, - 34, - 4, - 0, - 91, - 91, - 93, - 93, - 123, - 123, - 125, - 125, - 3, - 0, - 32, - 32, - 84, - 84, - 116, - 116, - 601, - 0, - 1, - 1, - 0, - 0, - 0, - 0, - 3, - 1, - 0, - 0, - 0, - 0, - 5, - 1, - 0, - 0, - 0, - 0, - 7, - 1, - 0, - 0, - 0, - 0, - 9, - 1, - 0, - 0, - 0, - 0, - 11, - 1, - 0, - 0, - 0, - 0, - 55, - 1, - 0, - 0, - 0, - 0, - 57, - 1, - 0, - 0, - 0, - 0, - 59, - 1, - 0, - 0, - 0, - 0, - 61, - 1, - 0, - 0, - 0, - 0, - 63, - 1, - 0, - 0, - 0, - 0, - 65, - 1, - 0, - 0, - 0, - 0, - 67, - 1, - 0, - 0, - 0, - 0, - 69, - 1, - 0, - 0, - 0, - 0, - 71, - 1, - 0, - 0, - 0, - 0, - 73, - 1, - 0, - 0, - 0, - 0, - 75, - 1, - 0, - 0, - 0, - 0, - 77, - 1, - 0, - 0, - 0, - 0, - 79, - 1, - 0, - 0, - 0, - 0, - 81, - 1, - 0, - 0, - 0, - 0, - 83, - 1, - 0, - 0, - 0, - 0, - 85, - 1, - 0, - 0, - 0, - 0, - 87, - 1, - 0, - 0, - 0, - 0, - 89, - 1, - 0, - 0, - 0, - 0, - 91, - 1, - 0, - 0, - 0, - 0, - 93, - 1, - 0, - 0, - 0, - 0, - 95, - 1, - 0, - 0, - 0, - 0, - 97, - 1, - 0, - 0, - 0, - 0, - 99, - 1, - 0, - 0, - 0, - 0, - 101, - 1, - 0, - 0, - 0, - 0, - 103, - 1, - 0, - 0, - 0, - 0, - 105, - 1, - 0, - 0, - 0, - 0, - 107, - 1, - 0, - 0, - 0, - 0, - 109, - 1, - 0, - 0, - 0, - 0, - 111, - 1, - 0, - 0, - 0, - 0, - 113, - 1, - 0, - 0, - 0, - 0, - 115, - 1, - 0, - 0, - 0, - 0, - 117, - 1, - 0, - 0, - 0, - 0, - 119, - 1, - 0, - 0, - 0, - 0, - 121, - 1, - 0, - 0, - 0, - 0, - 123, - 1, - 0, - 0, - 0, - 0, - 133, - 1, - 0, - 0, - 0, - 0, - 135, - 1, - 0, - 0, - 0, - 0, - 137, - 1, - 0, - 0, - 0, - 0, - 139, - 1, - 0, - 0, - 0, - 0, - 141, - 1, - 0, - 0, - 0, - 0, - 147, - 1, - 0, - 0, - 0, - 0, - 149, - 1, - 0, - 0, - 0, - 0, - 151, - 1, - 0, - 0, - 0, - 1, - 153, - 1, - 0, - 0, - 
0, - 3, - 155, - 1, - 0, - 0, - 0, - 5, - 158, - 1, - 0, - 0, - 0, - 7, - 160, - 1, - 0, - 0, - 0, - 9, - 174, - 1, - 0, - 0, - 0, - 11, - 201, - 1, - 0, - 0, - 0, - 13, - 207, - 1, - 0, - 0, - 0, - 15, - 209, - 1, - 0, - 0, - 0, - 17, - 211, - 1, - 0, - 0, - 0, - 19, - 213, - 1, - 0, - 0, - 0, - 21, - 215, - 1, - 0, - 0, - 0, - 23, - 217, - 1, - 0, - 0, - 0, - 25, - 219, - 1, - 0, - 0, - 0, - 27, - 221, - 1, - 0, - 0, - 0, - 29, - 223, - 1, - 0, - 0, - 0, - 31, - 225, - 1, - 0, - 0, - 0, - 33, - 227, - 1, - 0, - 0, - 0, - 35, - 229, - 1, - 0, - 0, - 0, - 37, - 231, - 1, - 0, - 0, - 0, - 39, - 233, - 1, - 0, - 0, - 0, - 41, - 235, - 1, - 0, - 0, - 0, - 43, - 237, - 1, - 0, - 0, - 0, - 45, - 239, - 1, - 0, - 0, - 0, - 47, - 241, - 1, - 0, - 0, - 0, - 49, - 243, - 1, - 0, - 0, - 0, - 51, - 245, - 1, - 0, - 0, - 0, - 53, - 247, - 1, - 0, - 0, - 0, - 55, - 249, - 1, - 0, - 0, - 0, - 57, - 255, - 1, - 0, - 0, - 0, - 59, - 259, - 1, - 0, - 0, - 0, - 61, - 263, - 1, - 0, - 0, - 0, - 63, - 288, - 1, - 0, - 0, - 0, - 65, - 296, - 1, - 0, - 0, - 0, - 67, - 301, - 1, - 0, - 0, - 0, - 69, - 306, - 1, - 0, - 0, - 0, - 71, - 311, - 1, - 0, - 0, - 0, - 73, - 318, - 1, - 0, - 0, - 0, - 75, - 322, - 1, - 0, - 0, - 0, - 77, - 330, - 1, - 0, - 0, - 0, - 79, - 336, - 1, - 0, - 0, - 0, - 81, - 342, - 1, - 0, - 0, - 0, - 83, - 345, - 1, - 0, - 0, - 0, - 85, - 367, - 1, - 0, - 0, - 0, - 87, - 370, - 1, - 0, - 0, - 0, - 89, - 378, - 1, - 0, - 0, - 0, - 91, - 381, - 1, - 0, - 0, - 0, - 93, - 386, - 1, - 0, - 0, - 0, - 95, - 390, - 1, - 0, - 0, - 0, - 97, - 395, - 1, - 0, - 0, - 0, - 99, - 398, - 1, - 0, - 0, - 0, - 101, - 411, - 1, - 0, - 0, - 0, - 103, - 416, - 1, - 0, - 0, - 0, - 105, - 422, - 1, - 0, - 0, - 0, - 107, - 427, - 1, - 0, - 0, - 0, - 109, - 432, - 1, - 0, - 0, - 0, - 111, - 439, - 1, - 0, - 0, - 0, - 113, - 441, - 1, - 0, - 0, - 0, - 115, - 443, - 1, - 0, - 0, - 0, - 117, - 445, - 1, - 0, - 0, - 0, - 119, - 458, - 1, - 0, - 0, - 0, - 121, - 460, - 1, - 0, - 0, - 0, - 123, - 
462, - 1, - 0, - 0, - 0, - 125, - 464, - 1, - 0, - 0, - 0, - 127, - 466, - 1, - 0, - 0, - 0, - 129, - 475, - 1, - 0, - 0, - 0, - 131, - 477, - 1, - 0, - 0, - 0, - 133, - 484, - 1, - 0, - 0, - 0, - 135, - 495, - 1, - 0, - 0, - 0, - 137, - 517, - 1, - 0, - 0, - 0, - 139, - 519, - 1, - 0, - 0, - 0, - 141, - 529, - 1, - 0, - 0, - 0, - 143, - 539, - 1, - 0, - 0, - 0, - 145, - 542, - 1, - 0, - 0, - 0, - 147, - 547, - 1, - 0, - 0, - 0, - 149, - 590, - 1, - 0, - 0, - 0, - 151, - 592, - 1, - 0, - 0, - 0, - 153, - 154, - 5, - 35, - 0, - 0, - 154, - 2, - 1, - 0, - 0, - 0, - 155, - 156, - 5, - 35, - 0, - 0, - 156, - 157, - 5, - 35, - 0, - 0, - 157, - 4, - 1, - 0, - 0, - 0, - 158, - 159, - 5, - 44, - 0, - 0, - 159, - 6, - 1, - 0, - 0, - 0, - 160, - 161, - 5, - 45, - 0, - 0, - 161, - 162, - 5, - 45, - 0, - 0, - 162, - 166, - 1, - 0, - 0, - 0, - 163, - 165, - 8, - 0, - 0, - 0, - 164, - 163, - 1, - 0, - 0, - 0, - 165, - 168, - 1, - 0, - 0, - 0, - 166, - 164, - 1, - 0, - 0, - 0, - 166, - 167, - 1, - 0, - 0, - 0, - 167, - 170, - 1, - 0, - 0, - 0, - 168, - 166, - 1, - 0, - 0, - 0, - 169, - 171, - 7, - 1, - 0, - 0, - 170, - 169, - 1, - 0, - 0, - 0, - 171, - 172, - 1, - 0, - 0, - 0, - 172, - 173, - 6, - 3, - 0, - 0, - 173, - 8, - 1, - 0, - 0, - 0, - 174, - 175, - 5, - 47, - 0, - 0, - 175, - 176, - 5, - 42, - 0, - 0, - 176, - 186, - 1, - 0, - 0, - 0, - 177, - 185, - 8, - 2, - 0, - 0, - 178, - 180, - 7, - 2, - 0, - 0, - 179, - 178, - 1, - 0, - 0, - 0, - 180, - 181, - 1, - 0, - 0, - 0, - 181, - 179, - 1, - 0, - 0, - 0, - 181, - 182, - 1, - 0, - 0, - 0, - 182, - 183, - 1, - 0, - 0, - 0, - 183, - 185, - 8, - 3, - 0, - 0, - 184, - 177, - 1, - 0, - 0, - 0, - 184, - 179, - 1, - 0, - 0, - 0, - 185, - 188, - 1, - 0, - 0, - 0, - 186, - 184, - 1, - 0, - 0, - 0, - 186, - 187, - 1, - 0, - 0, - 0, - 187, - 192, - 1, - 0, - 0, - 0, - 188, - 186, - 1, - 0, - 0, - 0, - 189, - 191, - 7, - 2, - 0, - 0, - 190, - 189, - 1, - 0, - 0, - 0, - 191, - 194, - 1, - 0, - 0, - 0, - 192, - 190, - 1, - 0, - 0, - 0, - 
192, - 193, - 1, - 0, - 0, - 0, - 193, - 195, - 1, - 0, - 0, - 0, - 194, - 192, - 1, - 0, - 0, - 0, - 195, - 196, - 5, - 42, - 0, - 0, - 196, - 197, - 5, - 47, - 0, - 0, - 197, - 198, - 1, - 0, - 0, - 0, - 198, - 199, - 6, - 4, - 0, - 0, - 199, - 10, - 1, - 0, - 0, - 0, - 200, - 202, - 7, - 4, - 0, - 0, - 201, - 200, - 1, - 0, - 0, - 0, - 202, - 203, - 1, - 0, - 0, - 0, - 203, - 201, - 1, - 0, - 0, - 0, - 203, - 204, - 1, - 0, - 0, - 0, - 204, - 205, - 1, - 0, - 0, - 0, - 205, - 206, - 6, - 5, - 0, - 0, - 206, - 12, - 1, - 0, - 0, - 0, - 207, - 208, - 7, - 5, - 0, - 0, - 208, - 14, - 1, - 0, - 0, - 0, - 209, - 210, - 7, - 6, - 0, - 0, - 210, - 16, - 1, - 0, - 0, - 0, - 211, - 212, - 7, - 7, - 0, - 0, - 212, - 18, - 1, - 0, - 0, - 0, - 213, - 214, - 7, - 8, - 0, - 0, - 214, - 20, - 1, - 0, - 0, - 0, - 215, - 216, - 7, - 9, - 0, - 0, - 216, - 22, - 1, - 0, - 0, - 0, - 217, - 218, - 7, - 10, - 0, - 0, - 218, - 24, - 1, - 0, - 0, - 0, - 219, - 220, - 7, - 11, - 0, - 0, - 220, - 26, - 1, - 0, - 0, - 0, - 221, - 222, - 7, - 12, - 0, - 0, - 222, - 28, - 1, - 0, - 0, - 0, - 223, - 224, - 7, - 13, - 0, - 0, - 224, - 30, - 1, - 0, - 0, - 0, - 225, - 226, - 7, - 14, - 0, - 0, - 226, - 32, - 1, - 0, - 0, - 0, - 227, - 228, - 7, - 15, - 0, - 0, - 228, - 34, - 1, - 0, - 0, - 0, - 229, - 230, - 7, - 16, - 0, - 0, - 230, - 36, - 1, - 0, - 0, - 0, - 231, - 232, - 7, - 17, - 0, - 0, - 232, - 38, - 1, - 0, - 0, - 0, - 233, - 234, - 7, - 18, - 0, - 0, - 234, - 40, - 1, - 0, - 0, - 0, - 235, - 236, - 7, - 19, - 0, - 0, - 236, - 42, - 1, - 0, - 0, - 0, - 237, - 238, - 7, - 20, - 0, - 0, - 238, - 44, - 1, - 0, - 0, - 0, - 239, - 240, - 7, - 21, - 0, - 0, - 240, - 46, - 1, - 0, - 0, - 0, - 241, - 242, - 7, - 22, - 0, - 0, - 242, - 48, - 1, - 0, - 0, - 0, - 243, - 244, - 7, - 23, - 0, - 0, - 244, - 50, - 1, - 0, - 0, - 0, - 245, - 246, - 7, - 24, - 0, - 0, - 246, - 52, - 1, - 0, - 0, - 0, - 247, - 248, - 7, - 25, - 0, - 0, - 248, - 54, - 1, - 0, - 0, - 0, - 249, - 250, - 3, - 13, - 6, - 0, 
- 250, - 251, - 3, - 35, - 17, - 0, - 251, - 252, - 3, - 39, - 19, - 0, - 252, - 253, - 3, - 37, - 18, - 0, - 253, - 254, - 3, - 25, - 12, - 0, - 254, - 56, - 1, - 0, - 0, - 0, - 255, - 256, - 3, - 13, - 6, - 0, - 256, - 257, - 3, - 37, - 18, - 0, - 257, - 258, - 3, - 19, - 9, - 0, - 258, - 58, - 1, - 0, - 0, - 0, - 259, - 260, - 3, - 13, - 6, - 0, - 260, - 261, - 3, - 43, - 21, - 0, - 261, - 262, - 3, - 17, - 8, - 0, - 262, - 60, - 1, - 0, - 0, - 0, - 263, - 264, - 3, - 15, - 7, - 0, - 264, - 265, - 3, - 21, - 10, - 0, - 265, - 266, - 3, - 23, - 11, - 0, - 266, - 267, - 3, - 39, - 19, - 0, - 267, - 268, - 3, - 41, - 20, - 0, - 268, - 270, - 3, - 21, - 10, - 0, - 269, - 271, - 5, - 32, - 0, - 0, - 270, - 269, - 1, - 0, - 0, - 0, - 271, - 272, - 1, - 0, - 0, - 0, - 272, - 270, - 1, - 0, - 0, - 0, - 272, - 273, - 1, - 0, - 0, - 0, - 273, - 274, - 1, - 0, - 0, - 0, - 274, - 275, - 3, - 23, - 11, - 0, - 275, - 276, - 3, - 29, - 14, - 0, - 276, - 277, - 3, - 33, - 16, - 0, - 277, - 278, - 3, - 45, - 22, - 0, - 278, - 279, - 3, - 21, - 10, - 0, - 279, - 281, - 3, - 41, - 20, - 0, - 280, - 282, - 5, - 32, - 0, - 0, - 281, - 280, - 1, - 0, - 0, - 0, - 282, - 283, - 1, - 0, - 0, - 0, - 283, - 281, - 1, - 0, - 0, - 0, - 283, - 284, - 1, - 0, - 0, - 0, - 284, - 285, - 1, - 0, - 0, - 0, - 285, - 286, - 3, - 15, - 7, - 0, - 286, - 287, - 3, - 53, - 26, - 0, - 287, - 62, - 1, - 0, - 0, - 0, - 288, - 289, - 3, - 15, - 7, - 0, - 289, - 290, - 3, - 21, - 10, - 0, - 290, - 291, - 3, - 45, - 22, - 0, - 291, - 292, - 3, - 49, - 24, - 0, - 292, - 293, - 3, - 21, - 10, - 0, - 293, - 294, - 3, - 21, - 10, - 0, - 294, - 295, - 3, - 37, - 18, - 0, - 295, - 64, - 1, - 0, - 0, - 0, - 296, - 297, - 3, - 17, - 8, - 0, - 297, - 298, - 3, - 13, - 6, - 0, - 298, - 299, - 3, - 43, - 21, - 0, - 299, - 300, - 3, - 21, - 10, - 0, - 300, - 66, - 1, - 0, - 0, - 0, - 301, - 302, - 3, - 19, - 9, - 0, - 302, - 303, - 3, - 21, - 10, - 0, - 303, - 304, - 3, - 43, - 21, - 0, - 304, - 305, - 3, - 17, - 8, - 
0, - 305, - 68, - 1, - 0, - 0, - 0, - 306, - 307, - 3, - 21, - 10, - 0, - 307, - 308, - 3, - 33, - 16, - 0, - 308, - 309, - 3, - 43, - 21, - 0, - 309, - 310, - 3, - 21, - 10, - 0, - 310, - 70, - 1, - 0, - 0, - 0, - 311, - 312, - 3, - 21, - 10, - 0, - 312, - 313, - 3, - 33, - 16, - 0, - 313, - 314, - 3, - 43, - 21, - 0, - 314, - 315, - 3, - 21, - 10, - 0, - 315, - 316, - 3, - 29, - 14, - 0, - 316, - 317, - 3, - 23, - 11, - 0, - 317, - 72, - 1, - 0, - 0, - 0, - 318, - 319, - 3, - 21, - 10, - 0, - 319, - 320, - 3, - 37, - 18, - 0, - 320, - 321, - 3, - 19, - 9, - 0, - 321, - 74, - 1, - 0, - 0, - 0, - 322, - 323, - 3, - 21, - 10, - 0, - 323, - 324, - 3, - 51, - 25, - 0, - 324, - 325, - 3, - 17, - 8, - 0, - 325, - 326, - 3, - 33, - 16, - 0, - 326, - 327, - 3, - 47, - 23, - 0, - 327, - 328, - 3, - 19, - 9, - 0, - 328, - 329, - 3, - 21, - 10, - 0, - 329, - 76, - 1, - 0, - 0, - 0, - 330, - 331, - 3, - 23, - 11, - 0, - 331, - 332, - 3, - 13, - 6, - 0, - 332, - 333, - 3, - 33, - 16, - 0, - 333, - 334, - 3, - 43, - 21, - 0, - 334, - 335, - 3, - 21, - 10, - 0, - 335, - 78, - 1, - 0, - 0, - 0, - 336, - 337, - 3, - 23, - 11, - 0, - 337, - 338, - 3, - 29, - 14, - 0, - 338, - 339, - 3, - 51, - 25, - 0, - 339, - 340, - 3, - 21, - 10, - 0, - 340, - 341, - 3, - 19, - 9, - 0, - 341, - 80, - 1, - 0, - 0, - 0, - 342, - 343, - 3, - 29, - 14, - 0, - 343, - 344, - 3, - 23, - 11, - 0, - 344, - 82, - 1, - 0, - 0, - 0, - 345, - 346, - 3, - 29, - 14, - 0, - 346, - 347, - 3, - 25, - 12, - 0, - 347, - 348, - 3, - 37, - 18, - 0, - 348, - 349, - 3, - 39, - 19, - 0, - 349, - 350, - 3, - 41, - 20, - 0, - 350, - 352, - 3, - 21, - 10, - 0, - 351, - 353, - 5, - 32, - 0, - 0, - 352, - 351, - 1, - 0, - 0, - 0, - 353, - 354, - 1, - 0, - 0, - 0, - 354, - 352, - 1, - 0, - 0, - 0, - 354, - 355, - 1, - 0, - 0, - 0, - 355, - 356, - 1, - 0, - 0, - 0, - 356, - 357, - 3, - 19, - 9, - 0, - 357, - 358, - 3, - 29, - 14, - 0, - 358, - 359, - 3, - 35, - 17, - 0, - 359, - 360, - 3, - 21, - 10, - 0, - 360, - 361, - 3, - 
37, - 18, - 0, - 361, - 362, - 3, - 43, - 21, - 0, - 362, - 363, - 3, - 29, - 14, - 0, - 363, - 364, - 3, - 39, - 19, - 0, - 364, - 365, - 3, - 37, - 18, - 0, - 365, - 366, - 3, - 43, - 21, - 0, - 366, - 84, - 1, - 0, - 0, - 0, - 367, - 368, - 3, - 29, - 14, - 0, - 368, - 369, - 3, - 37, - 18, - 0, - 369, - 86, - 1, - 0, - 0, - 0, - 370, - 371, - 3, - 29, - 14, - 0, - 371, - 372, - 3, - 37, - 18, - 0, - 372, - 373, - 3, - 17, - 8, - 0, - 373, - 374, - 3, - 33, - 16, - 0, - 374, - 375, - 3, - 47, - 23, - 0, - 375, - 376, - 3, - 19, - 9, - 0, - 376, - 377, - 3, - 21, - 10, - 0, - 377, - 88, - 1, - 0, - 0, - 0, - 378, - 379, - 3, - 29, - 14, - 0, - 379, - 380, - 3, - 43, - 21, - 0, - 380, - 90, - 1, - 0, - 0, - 0, - 381, - 382, - 3, - 33, - 16, - 0, - 382, - 383, - 3, - 29, - 14, - 0, - 383, - 384, - 3, - 31, - 15, - 0, - 384, - 385, - 3, - 21, - 10, - 0, - 385, - 92, - 1, - 0, - 0, - 0, - 386, - 387, - 3, - 37, - 18, - 0, - 387, - 388, - 3, - 39, - 19, - 0, - 388, - 389, - 3, - 45, - 22, - 0, - 389, - 94, - 1, - 0, - 0, - 0, - 390, - 391, - 3, - 37, - 18, - 0, - 391, - 392, - 3, - 47, - 23, - 0, - 392, - 393, - 3, - 33, - 16, - 0, - 393, - 394, - 3, - 33, - 16, - 0, - 394, - 96, - 1, - 0, - 0, - 0, - 395, - 396, - 3, - 39, - 19, - 0, - 396, - 397, - 3, - 41, - 20, - 0, - 397, - 98, - 1, - 0, - 0, - 0, - 398, - 399, - 3, - 39, - 19, - 0, - 399, - 400, - 3, - 41, - 20, - 0, - 400, - 401, - 3, - 19, - 9, - 0, - 401, - 402, - 3, - 21, - 10, - 0, - 402, - 404, - 3, - 41, - 20, - 0, - 403, - 405, - 5, - 32, - 0, - 0, - 404, - 403, - 1, - 0, - 0, - 0, - 405, - 406, - 1, - 0, - 0, - 0, - 406, - 404, - 1, - 0, - 0, - 0, - 406, - 407, - 1, - 0, - 0, - 0, - 407, - 408, - 1, - 0, - 0, - 0, - 408, - 409, - 3, - 15, - 7, - 0, - 409, - 410, - 3, - 53, - 26, - 0, - 410, - 100, - 1, - 0, - 0, - 0, - 411, - 412, - 3, - 45, - 22, - 0, - 412, - 413, - 3, - 27, - 13, - 0, - 413, - 414, - 3, - 21, - 10, - 0, - 414, - 415, - 3, - 37, - 18, - 0, - 415, - 102, - 1, - 0, - 0, - 0, - 416, - 
417, - 3, - 45, - 22, - 0, - 417, - 418, - 3, - 39, - 19, - 0, - 418, - 419, - 3, - 45, - 22, - 0, - 419, - 420, - 3, - 13, - 6, - 0, - 420, - 421, - 3, - 33, - 16, - 0, - 421, - 104, - 1, - 0, - 0, - 0, - 422, - 423, - 3, - 45, - 22, - 0, - 423, - 424, - 3, - 41, - 20, - 0, - 424, - 425, - 3, - 47, - 23, - 0, - 425, - 426, - 3, - 21, - 10, - 0, - 426, - 106, - 1, - 0, - 0, - 0, - 427, - 428, - 3, - 49, - 24, - 0, - 428, - 429, - 3, - 27, - 13, - 0, - 429, - 430, - 3, - 21, - 10, - 0, - 430, - 431, - 3, - 37, - 18, - 0, - 431, - 108, - 1, - 0, - 0, - 0, - 432, - 433, - 3, - 49, - 24, - 0, - 433, - 434, - 3, - 29, - 14, - 0, - 434, - 435, - 3, - 45, - 22, - 0, - 435, - 436, - 3, - 27, - 13, - 0, - 436, - 437, - 3, - 29, - 14, - 0, - 437, - 438, - 3, - 37, - 18, - 0, - 438, - 110, - 1, - 0, - 0, - 0, - 439, - 440, - 5, - 43, - 0, - 0, - 440, - 112, - 1, - 0, - 0, - 0, - 441, - 442, - 5, - 45, - 0, - 0, - 442, - 114, - 1, - 0, - 0, - 0, - 443, - 444, - 5, - 94, - 0, - 0, - 444, - 116, - 1, - 0, - 0, - 0, - 445, - 446, - 7, - 26, - 0, - 0, - 446, - 118, - 1, - 0, - 0, - 0, - 447, - 459, - 5, - 61, - 0, - 0, - 448, - 449, - 5, - 33, - 0, - 0, - 449, - 459, - 5, - 61, - 0, - 0, - 450, - 451, - 5, - 60, - 0, - 0, - 451, - 459, - 5, - 62, - 0, - 0, - 452, - 459, - 5, - 62, - 0, - 0, - 453, - 454, - 5, - 62, - 0, - 0, - 454, - 459, - 5, - 61, - 0, - 0, - 455, - 459, - 5, - 60, - 0, - 0, - 456, - 457, - 5, - 60, - 0, - 0, - 457, - 459, - 5, - 61, - 0, - 0, - 458, - 447, - 1, - 0, - 0, - 0, - 458, - 448, - 1, - 0, - 0, - 0, - 458, - 450, - 1, - 0, - 0, - 0, - 458, - 452, - 1, - 0, - 0, - 0, - 458, - 453, - 1, - 0, - 0, - 0, - 458, - 455, - 1, - 0, - 0, - 0, - 458, - 456, - 1, - 0, - 0, - 0, - 459, - 120, - 1, - 0, - 0, - 0, - 460, - 461, - 5, - 40, - 0, - 0, - 461, - 122, - 1, - 0, - 0, - 0, - 462, - 463, - 5, - 41, - 0, - 0, - 463, - 124, - 1, - 0, - 0, - 0, - 464, - 465, - 7, - 27, - 0, - 0, - 465, - 126, - 1, - 0, - 0, - 0, - 466, - 467, - 7, - 28, - 0, - 0, - 467, - 128, 
- 1, - 0, - 0, - 0, - 468, - 469, - 3, - 133, - 66, - 0, - 469, - 471, - 5, - 46, - 0, - 0, - 470, - 472, - 3, - 133, - 66, - 0, - 471, - 470, - 1, - 0, - 0, - 0, - 471, - 472, - 1, - 0, - 0, - 0, - 472, - 476, - 1, - 0, - 0, - 0, - 473, - 474, - 5, - 46, - 0, - 0, - 474, - 476, - 3, - 133, - 66, - 0, - 475, - 468, - 1, - 0, - 0, - 0, - 475, - 473, - 1, - 0, - 0, - 0, - 476, - 130, - 1, - 0, - 0, - 0, - 477, - 479, - 7, - 9, - 0, - 0, - 478, - 480, - 7, - 29, - 0, - 0, - 479, - 478, - 1, - 0, - 0, - 0, - 479, - 480, - 1, - 0, - 0, - 0, - 480, - 481, - 1, - 0, - 0, - 0, - 481, - 482, - 3, - 133, - 66, - 0, - 482, - 132, - 1, - 0, - 0, - 0, - 483, - 485, - 3, - 125, - 62, - 0, - 484, - 483, - 1, - 0, - 0, - 0, - 485, - 486, - 1, - 0, - 0, - 0, - 486, - 484, - 1, - 0, - 0, - 0, - 486, - 487, - 1, - 0, - 0, - 0, - 487, - 134, - 1, - 0, - 0, - 0, - 488, - 489, - 3, - 133, - 66, - 0, - 489, - 490, - 3, - 131, - 65, - 0, - 490, - 496, - 1, - 0, - 0, - 0, - 491, - 493, - 3, - 129, - 64, - 0, - 492, - 494, - 3, - 131, - 65, - 0, - 493, - 492, - 1, - 0, - 0, - 0, - 493, - 494, - 1, - 0, - 0, - 0, - 494, - 496, - 1, - 0, - 0, - 0, - 495, - 488, - 1, - 0, - 0, - 0, - 495, - 491, - 1, - 0, - 0, - 0, - 496, - 136, - 1, - 0, - 0, - 0, - 497, - 503, - 5, - 39, - 0, - 0, - 498, - 499, - 5, - 92, - 0, - 0, - 499, - 502, - 5, - 39, - 0, - 0, - 500, - 502, - 8, - 30, - 0, - 0, - 501, - 498, - 1, - 0, - 0, - 0, - 501, - 500, - 1, - 0, - 0, - 0, - 502, - 505, - 1, - 0, - 0, - 0, - 503, - 501, - 1, - 0, - 0, - 0, - 503, - 504, - 1, - 0, - 0, - 0, - 504, - 506, - 1, - 0, - 0, - 0, - 505, - 503, - 1, - 0, - 0, - 0, - 506, - 518, - 5, - 39, - 0, - 0, - 507, - 513, - 5, - 34, - 0, - 0, - 508, - 509, - 5, - 92, - 0, - 0, - 509, - 512, - 5, - 34, - 0, - 0, - 510, - 512, - 8, - 31, - 0, - 0, - 511, - 508, - 1, - 0, - 0, - 0, - 511, - 510, - 1, - 0, - 0, - 0, - 512, - 515, - 1, - 0, - 0, - 0, - 513, - 511, - 1, - 0, - 0, - 0, - 513, - 514, - 1, - 0, - 0, - 0, - 514, - 516, - 1, - 0, - 0, - 0, - 
515, - 513, - 1, - 0, - 0, - 0, - 516, - 518, - 5, - 34, - 0, - 0, - 517, - 497, - 1, - 0, - 0, - 0, - 517, - 507, - 1, - 0, - 0, - 0, - 518, - 138, - 1, - 0, - 0, - 0, - 519, - 521, - 5, - 91, - 0, - 0, - 520, - 522, - 8, - 32, - 0, - 0, - 521, - 520, - 1, - 0, - 0, - 0, - 522, - 523, - 1, - 0, - 0, - 0, - 523, - 521, - 1, - 0, - 0, - 0, - 523, - 524, - 1, - 0, - 0, - 0, - 524, - 525, - 1, - 0, - 0, - 0, - 525, - 526, - 5, - 93, - 0, - 0, - 526, - 140, - 1, - 0, - 0, - 0, - 527, - 530, - 3, - 127, - 63, - 0, - 528, - 530, - 5, - 95, - 0, - 0, - 529, - 527, - 1, - 0, - 0, - 0, - 529, - 528, - 1, - 0, - 0, - 0, - 530, - 536, - 1, - 0, - 0, - 0, - 531, - 535, - 3, - 127, - 63, - 0, - 532, - 535, - 3, - 125, - 62, - 0, - 533, - 535, - 5, - 95, - 0, - 0, - 534, - 531, - 1, - 0, - 0, - 0, - 534, - 532, - 1, - 0, - 0, - 0, - 534, - 533, - 1, - 0, - 0, - 0, - 535, - 538, - 1, - 0, - 0, - 0, - 536, - 534, - 1, - 0, - 0, - 0, - 536, - 537, - 1, - 0, - 0, - 0, - 537, - 142, - 1, - 0, - 0, - 0, - 538, - 536, - 1, - 0, - 0, - 0, - 539, - 540, - 7, - 27, - 0, - 0, - 540, - 541, - 7, - 27, - 0, - 0, - 541, - 144, - 1, - 0, - 0, - 0, - 542, - 543, - 7, - 27, - 0, - 0, - 543, - 544, - 7, - 27, - 0, - 0, - 544, - 545, - 7, - 27, - 0, - 0, - 545, - 546, - 7, - 27, - 0, - 0, - 546, - 146, - 1, - 0, - 0, - 0, - 547, - 548, - 3, - 145, - 72, - 0, - 548, - 549, - 5, - 45, - 0, - 0, - 549, - 550, - 3, - 143, - 71, - 0, - 550, - 551, - 5, - 45, - 0, - 0, - 551, - 552, - 3, - 143, - 71, - 0, - 552, - 148, - 1, - 0, - 0, - 0, - 553, - 554, - 3, - 145, - 72, - 0, - 554, - 555, - 5, - 45, - 0, - 0, - 555, - 556, - 3, - 143, - 71, - 0, - 556, - 557, - 5, - 45, - 0, - 0, - 557, - 558, - 3, - 143, - 71, - 0, - 558, - 559, - 7, - 33, - 0, - 0, - 559, - 560, - 3, - 143, - 71, - 0, - 560, - 561, - 5, - 58, - 0, - 0, - 561, - 562, - 3, - 143, - 71, - 0, - 562, - 563, - 5, - 58, - 0, - 0, - 563, - 564, - 3, - 143, - 71, - 0, - 564, - 591, - 1, - 0, - 0, - 0, - 565, - 566, - 3, - 145, - 72, - 0, - 
566, - 567, - 5, - 45, - 0, - 0, - 567, - 568, - 3, - 143, - 71, - 0, - 568, - 569, - 5, - 45, - 0, - 0, - 569, - 570, - 3, - 143, - 71, - 0, - 570, - 571, - 7, - 33, - 0, - 0, - 571, - 572, - 3, - 143, - 71, - 0, - 572, - 573, - 5, - 58, - 0, - 0, - 573, - 574, - 3, - 143, - 71, - 0, - 574, - 591, - 1, - 0, - 0, - 0, - 575, - 576, - 3, - 145, - 72, - 0, - 576, - 577, - 5, - 45, - 0, - 0, - 577, - 578, - 3, - 143, - 71, - 0, - 578, - 579, - 5, - 45, - 0, - 0, - 579, - 580, - 3, - 143, - 71, - 0, - 580, - 581, - 7, - 33, - 0, - 0, - 581, - 582, - 3, - 143, - 71, - 0, - 582, - 591, - 1, - 0, - 0, - 0, - 583, - 584, - 3, - 145, - 72, - 0, - 584, - 585, - 5, - 45, - 0, - 0, - 585, - 586, - 3, - 143, - 71, - 0, - 586, - 587, - 5, - 45, - 0, - 0, - 587, - 588, - 3, - 143, - 71, - 0, - 588, - 589, - 7, - 21, - 0, - 0, - 589, - 591, - 1, - 0, - 0, - 0, - 590, - 553, - 1, - 0, - 0, - 0, - 590, - 565, - 1, - 0, - 0, - 0, - 590, - 575, - 1, - 0, - 0, - 0, - 590, - 583, - 1, - 0, - 0, - 0, - 591, - 150, - 1, - 0, - 0, - 0, - 592, - 593, - 9, - 0, - 0, - 0, - 593, - 152, - 1, - 0, - 0, - 0, - 29, - 0, - 166, - 170, - 181, - 184, - 186, - 192, - 203, - 272, - 283, - 354, - 406, - 458, - 471, - 475, - 479, - 486, - 493, - 495, - 501, - 503, - 511, - 513, - 517, - 523, - 529, - 534, - 536, - 590, - 1, - 6, - 0, - 0, + 4, 0, 49, 594, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, + 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, + 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, + 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, + 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, + 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, + 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, + 45, 2, 46, 7, 46, 
2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, + 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, + 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, + 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, + 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, + 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 165, 8, 3, 10, 3, 12, 3, 168, 9, 3, 1, 3, 3, 3, 171, + 8, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 4, 4, 180, 8, 4, 11, 4, 12, 4, 181, 1, 4, 5, + 4, 185, 8, 4, 10, 4, 12, 4, 188, 9, 4, 1, 4, 5, 4, 191, 8, 4, 10, 4, 12, 4, 194, 9, 4, 1, + 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 4, 5, 202, 8, 5, 11, 5, 12, 5, 203, 1, 5, 1, 5, 1, 6, 1, 6, + 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, + 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, + 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, + 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, + 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 4, 30, 271, 8, 30, 11, 30, 12, 30, + 272, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 4, 30, 282, 8, 30, 11, 30, 12, 30, + 283, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, + 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, + 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, + 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, + 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, + 1, 41, 1, 41, 1, 41, 4, 41, 353, 8, 41, 11, 41, 12, 41, 354, 1, 41, 1, 41, 1, 41, 1, 41, + 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 
1, 42, 1, 43, 1, 43, 1, 43, + 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, + 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, + 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 4, 49, 405, 8, 49, 11, 49, 12, 49, 406, 1, 49, 1, 49, + 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, + 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, + 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, + 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 459, 8, 59, 1, 60, + 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 3, 64, 472, 8, 64, + 1, 64, 1, 64, 3, 64, 476, 8, 64, 1, 65, 1, 65, 3, 65, 480, 8, 65, 1, 65, 1, 65, 1, 66, 4, + 66, 485, 8, 66, 11, 66, 12, 66, 486, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 3, 67, 494, 8, + 67, 3, 67, 496, 8, 67, 1, 68, 1, 68, 1, 68, 1, 68, 5, 68, 502, 8, 68, 10, 68, 12, 68, 505, + 9, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 5, 68, 512, 8, 68, 10, 68, 12, 68, 515, 9, 68, + 1, 68, 3, 68, 518, 8, 68, 1, 69, 1, 69, 4, 69, 522, 8, 69, 11, 69, 12, 69, 523, 1, 69, + 1, 69, 1, 70, 1, 70, 3, 70, 530, 8, 70, 1, 70, 1, 70, 1, 70, 5, 70, 535, 8, 70, 10, 70, + 12, 70, 538, 9, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, + 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, + 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, + 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, + 1, 74, 1, 74, 3, 74, 591, 8, 74, 1, 75, 1, 75, 0, 0, 76, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, + 6, 13, 0, 15, 0, 17, 0, 19, 0, 21, 0, 23, 0, 25, 0, 27, 0, 29, 0, 31, 0, 33, 0, 35, 0, 37, + 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 7, 57, 8, 59, 9, 61, 10, 63, + 11, 65, 12, 67, 13, 69, 14, 71, 15, 
73, 16, 75, 17, 77, 18, 79, 19, 81, 20, 83, 21, 85, + 22, 87, 23, 89, 24, 91, 25, 93, 26, 95, 27, 97, 28, 99, 29, 101, 30, 103, 31, 105, 32, + 107, 33, 109, 34, 111, 35, 113, 36, 115, 37, 117, 38, 119, 39, 121, 40, 123, 41, 125, + 0, 127, 0, 129, 0, 131, 0, 133, 42, 135, 43, 137, 44, 139, 45, 141, 46, 143, 0, 145, + 0, 147, 47, 149, 48, 151, 49, 1, 0, 34, 1, 0, 10, 10, 1, 1, 10, 10, 1, 0, 42, 42, 2, 0, + 42, 42, 47, 47, 4, 0, 9, 10, 12, 13, 32, 32, 160, 160, 2, 0, 65, 65, 97, 97, 2, 0, 66, + 66, 98, 98, 2, 0, 67, 67, 99, 99, 2, 0, 68, 68, 100, 100, 2, 0, 69, 69, 101, 101, 2, 0, + 70, 70, 102, 102, 2, 0, 71, 71, 103, 103, 2, 0, 72, 72, 104, 104, 2, 0, 73, 73, 105, + 105, 2, 0, 75, 75, 107, 107, 2, 0, 76, 76, 108, 108, 2, 0, 77, 77, 109, 109, 2, 0, 78, + 78, 110, 110, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 83, 83, 115, 115, + 2, 0, 84, 84, 116, 116, 2, 0, 85, 85, 117, 117, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, + 120, 120, 2, 0, 89, 89, 121, 121, 3, 0, 37, 37, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, + 90, 97, 122, 2, 0, 43, 43, 45, 45, 1, 0, 39, 39, 1, 0, 34, 34, 4, 0, 91, 91, 93, 93, 123, + 123, 125, 125, 3, 0, 32, 32, 84, 84, 116, 116, 601, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, + 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, + 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, + 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, + 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, + 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95, 1, 0, 0, 0, 0, + 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, 1, 0, 0, + 0, 0, 107, 1, 0, 0, 0, 0, 109, 1, 0, 0, 0, 0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, 0, 115, + 1, 0, 0, 0, 0, 117, 1, 0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 123, 1, 0, 0, 0, + 0, 133, 1, 0, 0, 0, 0, 
135, 1, 0, 0, 0, 0, 137, 1, 0, 0, 0, 0, 139, 1, 0, 0, 0, 0, 141, 1, + 0, 0, 0, 0, 147, 1, 0, 0, 0, 0, 149, 1, 0, 0, 0, 0, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 3, + 155, 1, 0, 0, 0, 5, 158, 1, 0, 0, 0, 7, 160, 1, 0, 0, 0, 9, 174, 1, 0, 0, 0, 11, 201, 1, 0, + 0, 0, 13, 207, 1, 0, 0, 0, 15, 209, 1, 0, 0, 0, 17, 211, 1, 0, 0, 0, 19, 213, 1, 0, 0, 0, + 21, 215, 1, 0, 0, 0, 23, 217, 1, 0, 0, 0, 25, 219, 1, 0, 0, 0, 27, 221, 1, 0, 0, 0, 29, 223, + 1, 0, 0, 0, 31, 225, 1, 0, 0, 0, 33, 227, 1, 0, 0, 0, 35, 229, 1, 0, 0, 0, 37, 231, 1, 0, + 0, 0, 39, 233, 1, 0, 0, 0, 41, 235, 1, 0, 0, 0, 43, 237, 1, 0, 0, 0, 45, 239, 1, 0, 0, 0, + 47, 241, 1, 0, 0, 0, 49, 243, 1, 0, 0, 0, 51, 245, 1, 0, 0, 0, 53, 247, 1, 0, 0, 0, 55, 249, + 1, 0, 0, 0, 57, 255, 1, 0, 0, 0, 59, 259, 1, 0, 0, 0, 61, 263, 1, 0, 0, 0, 63, 288, 1, 0, + 0, 0, 65, 296, 1, 0, 0, 0, 67, 301, 1, 0, 0, 0, 69, 306, 1, 0, 0, 0, 71, 311, 1, 0, 0, 0, + 73, 318, 1, 0, 0, 0, 75, 322, 1, 0, 0, 0, 77, 330, 1, 0, 0, 0, 79, 336, 1, 0, 0, 0, 81, 342, + 1, 0, 0, 0, 83, 345, 1, 0, 0, 0, 85, 367, 1, 0, 0, 0, 87, 370, 1, 0, 0, 0, 89, 378, 1, 0, + 0, 0, 91, 381, 1, 0, 0, 0, 93, 386, 1, 0, 0, 0, 95, 390, 1, 0, 0, 0, 97, 395, 1, 0, 0, 0, + 99, 398, 1, 0, 0, 0, 101, 411, 1, 0, 0, 0, 103, 416, 1, 0, 0, 0, 105, 422, 1, 0, 0, 0, 107, + 427, 1, 0, 0, 0, 109, 432, 1, 0, 0, 0, 111, 439, 1, 0, 0, 0, 113, 441, 1, 0, 0, 0, 115, + 443, 1, 0, 0, 0, 117, 445, 1, 0, 0, 0, 119, 458, 1, 0, 0, 0, 121, 460, 1, 0, 0, 0, 123, + 462, 1, 0, 0, 0, 125, 464, 1, 0, 0, 0, 127, 466, 1, 0, 0, 0, 129, 475, 1, 0, 0, 0, 131, + 477, 1, 0, 0, 0, 133, 484, 1, 0, 0, 0, 135, 495, 1, 0, 0, 0, 137, 517, 1, 0, 0, 0, 139, + 519, 1, 0, 0, 0, 141, 529, 1, 0, 0, 0, 143, 539, 1, 0, 0, 0, 145, 542, 1, 0, 0, 0, 147, + 547, 1, 0, 0, 0, 149, 590, 1, 0, 0, 0, 151, 592, 1, 0, 0, 0, 153, 154, 5, 35, 0, 0, 154, + 2, 1, 0, 0, 0, 155, 156, 5, 35, 0, 0, 156, 157, 5, 35, 0, 0, 157, 4, 1, 0, 0, 0, 158, 159, + 5, 44, 0, 0, 159, 6, 1, 0, 0, 0, 160, 161, 5, 45, 0, 0, 161, 162, 5, 45, 0, 0, 162, 
166, + 1, 0, 0, 0, 163, 165, 8, 0, 0, 0, 164, 163, 1, 0, 0, 0, 165, 168, 1, 0, 0, 0, 166, 164, + 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 171, + 7, 1, 0, 0, 170, 169, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 6, 3, 0, 0, 173, 8, 1, + 0, 0, 0, 174, 175, 5, 47, 0, 0, 175, 176, 5, 42, 0, 0, 176, 186, 1, 0, 0, 0, 177, 185, + 8, 2, 0, 0, 178, 180, 7, 2, 0, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179, + 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 185, 8, 3, 0, 0, 184, 177, + 1, 0, 0, 0, 184, 179, 1, 0, 0, 0, 185, 188, 1, 0, 0, 0, 186, 184, 1, 0, 0, 0, 186, 187, + 1, 0, 0, 0, 187, 192, 1, 0, 0, 0, 188, 186, 1, 0, 0, 0, 189, 191, 7, 2, 0, 0, 190, 189, + 1, 0, 0, 0, 191, 194, 1, 0, 0, 0, 192, 190, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 195, + 1, 0, 0, 0, 194, 192, 1, 0, 0, 0, 195, 196, 5, 42, 0, 0, 196, 197, 5, 47, 0, 0, 197, 198, + 1, 0, 0, 0, 198, 199, 6, 4, 0, 0, 199, 10, 1, 0, 0, 0, 200, 202, 7, 4, 0, 0, 201, 200, 1, + 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 205, 1, + 0, 0, 0, 205, 206, 6, 5, 0, 0, 206, 12, 1, 0, 0, 0, 207, 208, 7, 5, 0, 0, 208, 14, 1, 0, + 0, 0, 209, 210, 7, 6, 0, 0, 210, 16, 1, 0, 0, 0, 211, 212, 7, 7, 0, 0, 212, 18, 1, 0, 0, + 0, 213, 214, 7, 8, 0, 0, 214, 20, 1, 0, 0, 0, 215, 216, 7, 9, 0, 0, 216, 22, 1, 0, 0, 0, + 217, 218, 7, 10, 0, 0, 218, 24, 1, 0, 0, 0, 219, 220, 7, 11, 0, 0, 220, 26, 1, 0, 0, 0, + 221, 222, 7, 12, 0, 0, 222, 28, 1, 0, 0, 0, 223, 224, 7, 13, 0, 0, 224, 30, 1, 0, 0, 0, + 225, 226, 7, 14, 0, 0, 226, 32, 1, 0, 0, 0, 227, 228, 7, 15, 0, 0, 228, 34, 1, 0, 0, 0, + 229, 230, 7, 16, 0, 0, 230, 36, 1, 0, 0, 0, 231, 232, 7, 17, 0, 0, 232, 38, 1, 0, 0, 0, + 233, 234, 7, 18, 0, 0, 234, 40, 1, 0, 0, 0, 235, 236, 7, 19, 0, 0, 236, 42, 1, 0, 0, 0, + 237, 238, 7, 20, 0, 0, 238, 44, 1, 0, 0, 0, 239, 240, 7, 21, 0, 0, 240, 46, 1, 0, 0, 0, + 241, 242, 7, 22, 0, 0, 242, 48, 1, 0, 0, 0, 243, 244, 7, 23, 0, 0, 244, 50, 1, 0, 0, 0, + 245, 
246, 7, 24, 0, 0, 246, 52, 1, 0, 0, 0, 247, 248, 7, 25, 0, 0, 248, 54, 1, 0, 0, 0, + 249, 250, 3, 13, 6, 0, 250, 251, 3, 35, 17, 0, 251, 252, 3, 39, 19, 0, 252, 253, 3, 37, + 18, 0, 253, 254, 3, 25, 12, 0, 254, 56, 1, 0, 0, 0, 255, 256, 3, 13, 6, 0, 256, 257, 3, + 37, 18, 0, 257, 258, 3, 19, 9, 0, 258, 58, 1, 0, 0, 0, 259, 260, 3, 13, 6, 0, 260, 261, + 3, 43, 21, 0, 261, 262, 3, 17, 8, 0, 262, 60, 1, 0, 0, 0, 263, 264, 3, 15, 7, 0, 264, 265, + 3, 21, 10, 0, 265, 266, 3, 23, 11, 0, 266, 267, 3, 39, 19, 0, 267, 268, 3, 41, 20, 0, + 268, 270, 3, 21, 10, 0, 269, 271, 5, 32, 0, 0, 270, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, + 0, 272, 270, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 274, 1, 0, 0, 0, 274, 275, 3, 23, 11, + 0, 275, 276, 3, 29, 14, 0, 276, 277, 3, 33, 16, 0, 277, 278, 3, 45, 22, 0, 278, 279, + 3, 21, 10, 0, 279, 281, 3, 41, 20, 0, 280, 282, 5, 32, 0, 0, 281, 280, 1, 0, 0, 0, 282, + 283, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, + 286, 3, 15, 7, 0, 286, 287, 3, 53, 26, 0, 287, 62, 1, 0, 0, 0, 288, 289, 3, 15, 7, 0, 289, + 290, 3, 21, 10, 0, 290, 291, 3, 45, 22, 0, 291, 292, 3, 49, 24, 0, 292, 293, 3, 21, 10, + 0, 293, 294, 3, 21, 10, 0, 294, 295, 3, 37, 18, 0, 295, 64, 1, 0, 0, 0, 296, 297, 3, 17, + 8, 0, 297, 298, 3, 13, 6, 0, 298, 299, 3, 43, 21, 0, 299, 300, 3, 21, 10, 0, 300, 66, + 1, 0, 0, 0, 301, 302, 3, 19, 9, 0, 302, 303, 3, 21, 10, 0, 303, 304, 3, 43, 21, 0, 304, + 305, 3, 17, 8, 0, 305, 68, 1, 0, 0, 0, 306, 307, 3, 21, 10, 0, 307, 308, 3, 33, 16, 0, + 308, 309, 3, 43, 21, 0, 309, 310, 3, 21, 10, 0, 310, 70, 1, 0, 0, 0, 311, 312, 3, 21, + 10, 0, 312, 313, 3, 33, 16, 0, 313, 314, 3, 43, 21, 0, 314, 315, 3, 21, 10, 0, 315, 316, + 3, 29, 14, 0, 316, 317, 3, 23, 11, 0, 317, 72, 1, 0, 0, 0, 318, 319, 3, 21, 10, 0, 319, + 320, 3, 37, 18, 0, 320, 321, 3, 19, 9, 0, 321, 74, 1, 0, 0, 0, 322, 323, 3, 21, 10, 0, + 323, 324, 3, 51, 25, 0, 324, 325, 3, 17, 8, 0, 325, 326, 3, 33, 16, 0, 326, 327, 3, 47, + 23, 0, 327, 328, 3, 19, 
9, 0, 328, 329, 3, 21, 10, 0, 329, 76, 1, 0, 0, 0, 330, 331, 3, + 23, 11, 0, 331, 332, 3, 13, 6, 0, 332, 333, 3, 33, 16, 0, 333, 334, 3, 43, 21, 0, 334, + 335, 3, 21, 10, 0, 335, 78, 1, 0, 0, 0, 336, 337, 3, 23, 11, 0, 337, 338, 3, 29, 14, 0, + 338, 339, 3, 51, 25, 0, 339, 340, 3, 21, 10, 0, 340, 341, 3, 19, 9, 0, 341, 80, 1, 0, + 0, 0, 342, 343, 3, 29, 14, 0, 343, 344, 3, 23, 11, 0, 344, 82, 1, 0, 0, 0, 345, 346, 3, + 29, 14, 0, 346, 347, 3, 25, 12, 0, 347, 348, 3, 37, 18, 0, 348, 349, 3, 39, 19, 0, 349, + 350, 3, 41, 20, 0, 350, 352, 3, 21, 10, 0, 351, 353, 5, 32, 0, 0, 352, 351, 1, 0, 0, 0, + 353, 354, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, + 356, 357, 3, 19, 9, 0, 357, 358, 3, 29, 14, 0, 358, 359, 3, 35, 17, 0, 359, 360, 3, 21, + 10, 0, 360, 361, 3, 37, 18, 0, 361, 362, 3, 43, 21, 0, 362, 363, 3, 29, 14, 0, 363, 364, + 3, 39, 19, 0, 364, 365, 3, 37, 18, 0, 365, 366, 3, 43, 21, 0, 366, 84, 1, 0, 0, 0, 367, + 368, 3, 29, 14, 0, 368, 369, 3, 37, 18, 0, 369, 86, 1, 0, 0, 0, 370, 371, 3, 29, 14, 0, + 371, 372, 3, 37, 18, 0, 372, 373, 3, 17, 8, 0, 373, 374, 3, 33, 16, 0, 374, 375, 3, 47, + 23, 0, 375, 376, 3, 19, 9, 0, 376, 377, 3, 21, 10, 0, 377, 88, 1, 0, 0, 0, 378, 379, 3, + 29, 14, 0, 379, 380, 3, 43, 21, 0, 380, 90, 1, 0, 0, 0, 381, 382, 3, 33, 16, 0, 382, 383, + 3, 29, 14, 0, 383, 384, 3, 31, 15, 0, 384, 385, 3, 21, 10, 0, 385, 92, 1, 0, 0, 0, 386, + 387, 3, 37, 18, 0, 387, 388, 3, 39, 19, 0, 388, 389, 3, 45, 22, 0, 389, 94, 1, 0, 0, 0, + 390, 391, 3, 37, 18, 0, 391, 392, 3, 47, 23, 0, 392, 393, 3, 33, 16, 0, 393, 394, 3, + 33, 16, 0, 394, 96, 1, 0, 0, 0, 395, 396, 3, 39, 19, 0, 396, 397, 3, 41, 20, 0, 397, 98, + 1, 0, 0, 0, 398, 399, 3, 39, 19, 0, 399, 400, 3, 41, 20, 0, 400, 401, 3, 19, 9, 0, 401, + 402, 3, 21, 10, 0, 402, 404, 3, 41, 20, 0, 403, 405, 5, 32, 0, 0, 404, 403, 1, 0, 0, 0, + 405, 406, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, + 408, 409, 3, 15, 7, 0, 409, 410, 3, 53, 26, 
0, 410, 100, 1, 0, 0, 0, 411, 412, 3, 45, + 22, 0, 412, 413, 3, 27, 13, 0, 413, 414, 3, 21, 10, 0, 414, 415, 3, 37, 18, 0, 415, 102, + 1, 0, 0, 0, 416, 417, 3, 45, 22, 0, 417, 418, 3, 39, 19, 0, 418, 419, 3, 45, 22, 0, 419, + 420, 3, 13, 6, 0, 420, 421, 3, 33, 16, 0, 421, 104, 1, 0, 0, 0, 422, 423, 3, 45, 22, 0, + 423, 424, 3, 41, 20, 0, 424, 425, 3, 47, 23, 0, 425, 426, 3, 21, 10, 0, 426, 106, 1, + 0, 0, 0, 427, 428, 3, 49, 24, 0, 428, 429, 3, 27, 13, 0, 429, 430, 3, 21, 10, 0, 430, + 431, 3, 37, 18, 0, 431, 108, 1, 0, 0, 0, 432, 433, 3, 49, 24, 0, 433, 434, 3, 29, 14, + 0, 434, 435, 3, 45, 22, 0, 435, 436, 3, 27, 13, 0, 436, 437, 3, 29, 14, 0, 437, 438, + 3, 37, 18, 0, 438, 110, 1, 0, 0, 0, 439, 440, 5, 43, 0, 0, 440, 112, 1, 0, 0, 0, 441, 442, + 5, 45, 0, 0, 442, 114, 1, 0, 0, 0, 443, 444, 5, 94, 0, 0, 444, 116, 1, 0, 0, 0, 445, 446, + 7, 26, 0, 0, 446, 118, 1, 0, 0, 0, 447, 459, 5, 61, 0, 0, 448, 449, 5, 33, 0, 0, 449, 459, + 5, 61, 0, 0, 450, 451, 5, 60, 0, 0, 451, 459, 5, 62, 0, 0, 452, 459, 5, 62, 0, 0, 453, + 454, 5, 62, 0, 0, 454, 459, 5, 61, 0, 0, 455, 459, 5, 60, 0, 0, 456, 457, 5, 60, 0, 0, + 457, 459, 5, 61, 0, 0, 458, 447, 1, 0, 0, 0, 458, 448, 1, 0, 0, 0, 458, 450, 1, 0, 0, 0, + 458, 452, 1, 0, 0, 0, 458, 453, 1, 0, 0, 0, 458, 455, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, + 459, 120, 1, 0, 0, 0, 460, 461, 5, 40, 0, 0, 461, 122, 1, 0, 0, 0, 462, 463, 5, 41, 0, + 0, 463, 124, 1, 0, 0, 0, 464, 465, 7, 27, 0, 0, 465, 126, 1, 0, 0, 0, 466, 467, 7, 28, + 0, 0, 467, 128, 1, 0, 0, 0, 468, 469, 3, 133, 66, 0, 469, 471, 5, 46, 0, 0, 470, 472, + 3, 133, 66, 0, 471, 470, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 476, 1, 0, 0, 0, 473, 474, + 5, 46, 0, 0, 474, 476, 3, 133, 66, 0, 475, 468, 1, 0, 0, 0, 475, 473, 1, 0, 0, 0, 476, + 130, 1, 0, 0, 0, 477, 479, 7, 9, 0, 0, 478, 480, 7, 29, 0, 0, 479, 478, 1, 0, 0, 0, 479, + 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 3, 133, 66, 0, 482, 132, 1, 0, 0, 0, 483, + 485, 3, 125, 62, 0, 484, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 
486, 484, 1, 0, 0, 0, 486, + 487, 1, 0, 0, 0, 487, 134, 1, 0, 0, 0, 488, 489, 3, 133, 66, 0, 489, 490, 3, 131, 65, + 0, 490, 496, 1, 0, 0, 0, 491, 493, 3, 129, 64, 0, 492, 494, 3, 131, 65, 0, 493, 492, + 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 488, 1, 0, 0, 0, 495, 491, + 1, 0, 0, 0, 496, 136, 1, 0, 0, 0, 497, 503, 5, 39, 0, 0, 498, 499, 5, 92, 0, 0, 499, 502, + 5, 39, 0, 0, 500, 502, 8, 30, 0, 0, 501, 498, 1, 0, 0, 0, 501, 500, 1, 0, 0, 0, 502, 505, + 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 506, 1, 0, 0, 0, 505, 503, + 1, 0, 0, 0, 506, 518, 5, 39, 0, 0, 507, 513, 5, 34, 0, 0, 508, 509, 5, 92, 0, 0, 509, 512, + 5, 34, 0, 0, 510, 512, 8, 31, 0, 0, 511, 508, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, + 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, + 1, 0, 0, 0, 516, 518, 5, 34, 0, 0, 517, 497, 1, 0, 0, 0, 517, 507, 1, 0, 0, 0, 518, 138, + 1, 0, 0, 0, 519, 521, 5, 91, 0, 0, 520, 522, 8, 32, 0, 0, 521, 520, 1, 0, 0, 0, 522, 523, + 1, 0, 0, 0, 523, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, + 5, 93, 0, 0, 526, 140, 1, 0, 0, 0, 527, 530, 3, 127, 63, 0, 528, 530, 5, 95, 0, 0, 529, + 527, 1, 0, 0, 0, 529, 528, 1, 0, 0, 0, 530, 536, 1, 0, 0, 0, 531, 535, 3, 127, 63, 0, 532, + 535, 3, 125, 62, 0, 533, 535, 5, 95, 0, 0, 534, 531, 1, 0, 0, 0, 534, 532, 1, 0, 0, 0, + 534, 533, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, + 537, 142, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 540, 7, 27, 0, 0, 540, 541, 7, 27, 0, + 0, 541, 144, 1, 0, 0, 0, 542, 543, 7, 27, 0, 0, 543, 544, 7, 27, 0, 0, 544, 545, 7, 27, + 0, 0, 545, 546, 7, 27, 0, 0, 546, 146, 1, 0, 0, 0, 547, 548, 3, 145, 72, 0, 548, 549, + 5, 45, 0, 0, 549, 550, 3, 143, 71, 0, 550, 551, 5, 45, 0, 0, 551, 552, 3, 143, 71, 0, + 552, 148, 1, 0, 0, 0, 553, 554, 3, 145, 72, 0, 554, 555, 5, 45, 0, 0, 555, 556, 3, 143, + 71, 0, 556, 557, 5, 45, 0, 0, 557, 558, 3, 143, 71, 0, 558, 559, 7, 33, 0, 
0, 559, 560, + 3, 143, 71, 0, 560, 561, 5, 58, 0, 0, 561, 562, 3, 143, 71, 0, 562, 563, 5, 58, 0, 0, + 563, 564, 3, 143, 71, 0, 564, 591, 1, 0, 0, 0, 565, 566, 3, 145, 72, 0, 566, 567, 5, + 45, 0, 0, 567, 568, 3, 143, 71, 0, 568, 569, 5, 45, 0, 0, 569, 570, 3, 143, 71, 0, 570, + 571, 7, 33, 0, 0, 571, 572, 3, 143, 71, 0, 572, 573, 5, 58, 0, 0, 573, 574, 3, 143, 71, + 0, 574, 591, 1, 0, 0, 0, 575, 576, 3, 145, 72, 0, 576, 577, 5, 45, 0, 0, 577, 578, 3, + 143, 71, 0, 578, 579, 5, 45, 0, 0, 579, 580, 3, 143, 71, 0, 580, 581, 7, 33, 0, 0, 581, + 582, 3, 143, 71, 0, 582, 591, 1, 0, 0, 0, 583, 584, 3, 145, 72, 0, 584, 585, 5, 45, 0, + 0, 585, 586, 3, 143, 71, 0, 586, 587, 5, 45, 0, 0, 587, 588, 3, 143, 71, 0, 588, 589, + 7, 21, 0, 0, 589, 591, 1, 0, 0, 0, 590, 553, 1, 0, 0, 0, 590, 565, 1, 0, 0, 0, 590, 575, + 1, 0, 0, 0, 590, 583, 1, 0, 0, 0, 591, 150, 1, 0, 0, 0, 592, 593, 9, 0, 0, 0, 593, 152, + 1, 0, 0, 0, 29, 0, 166, 170, 181, 184, 186, 192, 203, 272, 283, 354, 406, 458, 471, + 475, 479, 486, 493, 495, 501, 503, 511, 513, 517, 523, 529, 534, 536, 590, 1, 6, + 0, 0 ] class DataLensLexer(Lexer): + atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] @@ -5308,140 +287,35 @@ class DataLensLexer(Lexer): DATETIME_INNER = 48 UNEXPECTED_CHARACTER = 49 - channelNames = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"] + channelNames = [u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"] modeNames = ["DEFAULT_MODE"] - literalNames = ["", "'#'", "'##'", "','", "'+'", "'-'", "'^'", "'('", "')'"] + literalNames = ["", + "'#'", "'##'", "','", "'+'", "'-'", "'^'", "'('", "')'"] - symbolicNames = [ - "", - "SINGLE_LINE_COMMENT", - "MULTI_LINE_COMMENT", - "WS", - "AMONG", - "AND", - "ASC", - "BEFORE_FILTER_BY", - "BETWEEN", - "CASE", - "DESC", - "ELSE", - "ELSEIF", - "END", - "EXCLUDE", - "FALSE", - "FIXED", - "IF", - "IGNORE_DIMENSIONS", - "IN", - "INCLUDE", - "IS", - "LIKE", - "NOT", - "NULL", - "OR", - "ORDER_BY", - "THEN", - "TOTAL", 
- "TRUE", - "WHEN", - "WITHIN", - "PLUS", - "MINUS", - "POWER", - "MULDIV", - "COMPARISON", - "OPENING_PAR", - "CLOSING_PAR", - "INT", - "FLOAT", - "ESCAPED_STRING", - "FIELD_NAME", - "FUNC_NAME", - "DATE_INNER", - "DATETIME_INNER", - "UNEXPECTED_CHARACTER", - ] + symbolicNames = ["", + "SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT", "WS", "AMONG", + "AND", "ASC", "BEFORE_FILTER_BY", "BETWEEN", "CASE", "DESC", + "ELSE", "ELSEIF", "END", "EXCLUDE", "FALSE", "FIXED", "IF", + "IGNORE_DIMENSIONS", "IN", "INCLUDE", "IS", "LIKE", "NOT", "NULL", + "OR", "ORDER_BY", "THEN", "TOTAL", "TRUE", "WHEN", "WITHIN", + "PLUS", "MINUS", "POWER", "MULDIV", "COMPARISON", "OPENING_PAR", + "CLOSING_PAR", "INT", "FLOAT", "ESCAPED_STRING", "FIELD_NAME", + "FUNC_NAME", "DATE_INNER", "DATETIME_INNER", "UNEXPECTED_CHARACTER"] - ruleNames = [ - "T__0", - "T__1", - "T__2", - "SINGLE_LINE_COMMENT", - "MULTI_LINE_COMMENT", - "WS", - "A", - "B", - "C", - "D", - "E", - "F", - "G", - "H", - "I", - "K", - "L", - "M", - "N", - "O", - "R", - "S", - "T", - "U", - "W", - "X", - "Y", - "AMONG", - "AND", - "ASC", - "BEFORE_FILTER_BY", - "BETWEEN", - "CASE", - "DESC", - "ELSE", - "ELSEIF", - "END", - "EXCLUDE", - "FALSE", - "FIXED", - "IF", - "IGNORE_DIMENSIONS", - "IN", - "INCLUDE", - "IS", - "LIKE", - "NOT", - "NULL", - "OR", - "ORDER_BY", - "THEN", - "TOTAL", - "TRUE", - "WHEN", - "WITHIN", - "PLUS", - "MINUS", - "POWER", - "MULDIV", - "COMPARISON", - "OPENING_PAR", - "CLOSING_PAR", - "DIGIT", - "LETTER", - "DECIMAL", - "EXP", - "INT", - "FLOAT", - "ESCAPED_STRING", - "FIELD_NAME", - "FUNC_NAME", - "DD", - "DDDD", - "DATE_INNER", - "DATETIME_INNER", - "UNEXPECTED_CHARACTER", - ] + ruleNames = ["T__0", "T__1", "T__2", "SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT", + "WS", "A", "B", "C", "D", "E", "F", "G", "H", "I", "K", + "L", "M", "N", "O", "R", "S", "T", "U", "W", "X", "Y", + "AMONG", "AND", "ASC", "BEFORE_FILTER_BY", "BETWEEN", + "CASE", "DESC", "ELSE", "ELSEIF", "END", "EXCLUDE", "FALSE", + "FIXED", 
"IF", "IGNORE_DIMENSIONS", "IN", "INCLUDE", "IS", + "LIKE", "NOT", "NULL", "OR", "ORDER_BY", "THEN", "TOTAL", + "TRUE", "WHEN", "WITHIN", "PLUS", "MINUS", "POWER", "MULDIV", + "COMPARISON", "OPENING_PAR", "CLOSING_PAR", "DIGIT", "LETTER", + "DECIMAL", "EXP", "INT", "FLOAT", "ESCAPED_STRING", "FIELD_NAME", + "FUNC_NAME", "DD", "DDDD", "DATE_INNER", "DATETIME_INNER", + "UNEXPECTED_CHARACTER"] grammarFileName = "DataLens.g4" diff --git a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensParser.py b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensParser.py index e3d159f6f..b280d25a5 100644 --- a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensParser.py +++ b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensParser.py @@ -1,22 +1,8 @@ # encoding: utf-8 import sys -from antlr4 import ( - ATN, - DFA, - ATNDeserializer, - NoViableAltException, - Parser, - ParserATNSimulator, - ParserRuleContext, - ParseTreeVisitor, - PredictionContextCache, - RecognitionException, - RuleContext, - Token, - TokenStream, -) - +from antlr4 import (ATN, DFA, ATNDeserializer, NoViableAltException, Parser, ParserATNSimulator, ParserRuleContext, ParseTreeVisitor, PredictionContextCache, RecognitionException, RuleContext, Token, + TokenStream) if sys.version_info[1] > 5: from typing import TextIO @@ -26,3304 +12,147 @@ def serializedATN(): return [ - 4, - 1, - 49, - 365, - 2, - 0, - 7, - 0, - 2, - 1, - 7, - 1, - 2, - 2, - 7, - 2, - 2, - 3, - 7, - 3, - 2, - 4, - 7, - 4, - 2, - 5, - 7, - 5, - 2, - 6, - 7, - 6, - 2, - 7, - 7, - 7, - 2, - 8, - 7, - 8, - 2, - 9, - 7, - 9, - 2, - 10, - 7, - 10, - 2, - 11, - 7, - 11, - 2, - 12, - 7, - 12, - 2, - 13, - 7, - 13, - 2, - 14, - 7, - 14, - 2, - 15, - 7, - 15, - 2, - 16, - 7, - 16, - 2, - 17, - 7, - 17, - 2, - 18, - 7, - 18, - 2, - 19, - 7, - 19, - 2, - 20, - 7, - 20, - 2, - 21, - 7, - 21, - 2, - 22, - 7, - 22, - 2, - 23, - 7, - 23, - 2, - 24, - 7, - 24, - 2, - 25, - 7, - 25, - 2, - 26, - 7, - 26, - 2, - 27, - 7, - 27, - 2, - 28, - 7, - 28, - 1, - 0, - 
1, - 0, - 1, - 1, - 1, - 1, - 1, - 2, - 1, - 2, - 1, - 3, - 1, - 3, - 1, - 3, - 1, - 3, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 4, - 1, - 5, - 1, - 5, - 1, - 5, - 1, - 5, - 1, - 6, - 1, - 6, - 1, - 6, - 1, - 6, - 1, - 7, - 1, - 7, - 1, - 8, - 1, - 8, - 1, - 9, - 1, - 9, - 1, - 10, - 1, - 10, - 1, - 10, - 1, - 10, - 1, - 10, - 1, - 10, - 1, - 10, - 3, - 10, - 94, - 8, - 10, - 1, - 11, - 1, - 11, - 1, - 11, - 1, - 11, - 5, - 11, - 100, - 8, - 11, - 10, - 11, - 12, - 11, - 103, - 9, - 11, - 1, - 12, - 1, - 12, - 1, - 12, - 1, - 12, - 5, - 12, - 109, - 8, - 12, - 10, - 12, - 12, - 12, - 112, - 9, - 12, - 3, - 12, - 114, - 8, - 12, - 1, - 12, - 1, - 12, - 1, - 12, - 1, - 12, - 5, - 12, - 120, - 8, - 12, - 10, - 12, - 12, - 12, - 123, - 9, - 12, - 3, - 12, - 125, - 8, - 12, - 1, - 12, - 1, - 12, - 1, - 12, - 1, - 12, - 5, - 12, - 131, - 8, - 12, - 10, - 12, - 12, - 12, - 134, - 9, - 12, - 3, - 12, - 136, - 8, - 12, - 3, - 12, - 138, - 8, - 12, - 1, - 13, - 1, - 13, - 1, - 13, - 1, - 13, - 1, - 13, - 5, - 13, - 145, - 8, - 13, - 10, - 13, - 12, - 13, - 148, - 9, - 13, - 3, - 13, - 150, - 8, - 13, - 1, - 13, - 1, - 13, - 1, - 13, - 1, - 13, - 5, - 13, - 156, - 8, - 13, - 10, - 13, - 12, - 13, - 159, - 9, - 13, - 3, - 13, - 161, - 8, - 13, - 3, - 13, - 163, - 8, - 13, - 1, - 14, - 1, - 14, - 1, - 14, - 1, - 14, - 5, - 14, - 169, - 8, - 14, - 10, - 14, - 12, - 14, - 172, - 9, - 14, - 3, - 14, - 174, - 8, - 14, - 1, - 15, - 1, - 15, - 1, - 15, - 1, - 15, - 5, - 15, - 180, - 8, - 15, - 10, - 15, - 12, - 15, - 183, - 9, - 15, - 3, - 15, - 185, - 8, - 15, - 1, - 16, - 1, - 16, - 1, - 16, - 1, - 16, - 1, - 16, - 5, - 16, - 192, - 8, - 16, - 10, - 16, - 12, - 16, - 195, - 9, - 16, - 3, - 16, - 197, - 8, - 16, - 1, - 16, - 3, - 16, - 200, - 8, - 16, - 1, - 16, - 3, - 16, - 203, - 8, - 16, - 1, - 16, - 3, - 16, - 206, - 8, - 16, - 1, - 16, - 3, - 16, - 209, - 8, - 16, - 1, - 16, - 3, - 16, - 212, - 8, - 16, - 1, - 16, - 1, - 16, - 1, - 17, - 1, - 17, - 1, - 17, - 1, - 17, - 1, - 17, - 
1, - 18, - 1, - 18, - 1, - 18, - 1, - 19, - 1, - 19, - 1, - 19, - 1, - 19, - 1, - 19, - 1, - 20, - 1, - 20, - 5, - 20, - 231, - 8, - 20, - 10, - 20, - 12, - 20, - 234, - 9, - 20, - 1, - 20, - 3, - 20, - 237, - 8, - 20, - 1, - 20, - 1, - 20, - 1, - 21, - 1, - 21, - 1, - 21, - 1, - 21, - 1, - 21, - 1, - 22, - 1, - 22, - 1, - 22, - 4, - 22, - 249, - 8, - 22, - 11, - 22, - 12, - 22, - 250, - 1, - 22, - 3, - 22, - 254, - 8, - 22, - 1, - 22, - 1, - 22, - 1, - 23, - 1, - 23, - 1, - 23, - 1, - 23, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 1, - 24, - 3, - 24, - 276, - 8, - 24, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 3, - 25, - 284, - 8, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 3, - 25, - 297, - 8, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 3, - 25, - 308, - 8, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 3, - 25, - 317, - 8, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 3, - 25, - 322, - 8, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 1, - 25, - 5, - 25, - 329, - 8, - 25, - 10, - 25, - 12, - 25, - 332, - 9, - 25, - 1, - 25, - 3, - 25, - 335, - 8, - 25, - 1, - 25, - 5, - 25, - 338, - 8, - 25, - 10, - 25, - 12, - 25, - 341, - 9, - 25, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 1, - 26, - 5, - 26, - 352, - 8, - 26, - 10, - 26, - 12, - 26, - 355, - 9, - 26, - 1, - 27, - 1, - 27, - 1, - 28, - 1, - 28, - 1, - 28, - 1, - 28, - 3, - 28, - 363, - 8, - 28, - 1, - 28, - 0, - 2, - 50, - 52, - 29, - 0, - 2, - 4, - 6, - 8, - 10, - 12, - 14, - 16, - 18, - 20, - 22, - 24, - 26, - 28, - 30, - 32, - 34, - 36, - 38, - 40, - 42, - 44, - 46, - 48, - 50, - 52, - 54, - 56, - 0, - 4, - 2, - 0, - 18, - 18, - 32, - 32, - 6, - 0, - 8, 
- 8, - 11, - 12, - 20, - 20, - 25, - 26, - 28, - 28, - 46, - 46, - 1, - 0, - 35, - 36, - 3, - 0, - 18, - 18, - 27, - 27, - 32, - 32, - 399, - 0, - 58, - 1, - 0, - 0, - 0, - 2, - 60, - 1, - 0, - 0, - 0, - 4, - 62, - 1, - 0, - 0, - 0, - 6, - 64, - 1, - 0, - 0, - 0, - 8, - 68, - 1, - 0, - 0, - 0, - 10, - 72, - 1, - 0, - 0, - 0, - 12, - 76, - 1, - 0, - 0, - 0, - 14, - 80, - 1, - 0, - 0, - 0, - 16, - 82, - 1, - 0, - 0, - 0, - 18, - 84, - 1, - 0, - 0, - 0, - 20, - 93, - 1, - 0, - 0, - 0, - 22, - 95, - 1, - 0, - 0, - 0, - 24, - 137, - 1, - 0, - 0, - 0, - 26, - 162, - 1, - 0, - 0, - 0, - 28, - 164, - 1, - 0, - 0, - 0, - 30, - 175, - 1, - 0, - 0, - 0, - 32, - 186, - 1, - 0, - 0, - 0, - 34, - 215, - 1, - 0, - 0, - 0, - 36, - 220, - 1, - 0, - 0, - 0, - 38, - 223, - 1, - 0, - 0, - 0, - 40, - 228, - 1, - 0, - 0, - 0, - 42, - 240, - 1, - 0, - 0, - 0, - 44, - 245, - 1, - 0, - 0, - 0, - 46, - 257, - 1, - 0, - 0, - 0, - 48, - 275, - 1, - 0, - 0, - 0, - 50, - 283, - 1, - 0, - 0, - 0, - 52, - 342, - 1, - 0, - 0, - 0, - 54, - 356, - 1, - 0, - 0, - 0, - 56, - 362, - 1, - 0, - 0, - 0, - 58, - 59, - 5, - 42, - 0, - 0, - 59, - 1, - 1, - 0, - 0, - 0, - 60, - 61, - 5, - 43, - 0, - 0, - 61, - 3, - 1, - 0, - 0, - 0, - 62, - 63, - 5, - 44, - 0, - 0, - 63, - 5, - 1, - 0, - 0, - 0, - 64, - 65, - 5, - 1, - 0, - 0, - 65, - 66, - 5, - 47, - 0, - 0, - 66, - 67, - 5, - 1, - 0, - 0, - 67, - 7, - 1, - 0, - 0, - 0, - 68, - 69, - 5, - 1, - 0, - 0, - 69, - 70, - 5, - 48, - 0, - 0, - 70, - 71, - 5, - 1, - 0, - 0, - 71, - 9, - 1, - 0, - 0, - 0, - 72, - 73, - 5, - 2, - 0, - 0, - 73, - 74, - 5, - 47, - 0, - 0, - 74, - 75, - 5, - 2, - 0, - 0, - 75, - 11, - 1, - 0, - 0, - 0, - 76, - 77, - 5, - 2, - 0, - 0, - 77, - 78, - 5, - 48, - 0, - 0, - 78, - 79, - 5, - 2, - 0, - 0, - 79, - 13, - 1, - 0, - 0, - 0, - 80, - 81, - 7, - 0, - 0, - 0, - 81, - 15, - 1, - 0, - 0, - 0, - 82, - 83, - 5, - 27, - 0, - 0, - 83, - 17, - 1, - 0, - 0, - 0, - 84, - 85, - 5, - 45, - 0, - 0, - 85, - 19, - 1, - 0, - 0, - 0, - 86, - 94, - 3, - 
54, - 27, - 0, - 87, - 88, - 3, - 54, - 27, - 0, - 88, - 89, - 5, - 9, - 0, - 0, - 89, - 94, - 1, - 0, - 0, - 0, - 90, - 91, - 3, - 54, - 27, - 0, - 91, - 92, - 5, - 13, - 0, - 0, - 92, - 94, - 1, - 0, - 0, - 0, - 93, - 86, - 1, - 0, - 0, - 0, - 93, - 87, - 1, - 0, - 0, - 0, - 93, - 90, - 1, - 0, - 0, - 0, - 94, - 21, - 1, - 0, - 0, - 0, - 95, - 96, - 5, - 29, - 0, - 0, - 96, - 101, - 3, - 20, - 10, - 0, - 97, - 98, - 5, - 3, - 0, - 0, - 98, - 100, - 3, - 20, - 10, - 0, - 99, - 97, - 1, - 0, - 0, - 0, - 100, - 103, - 1, - 0, - 0, - 0, - 101, - 99, - 1, - 0, - 0, - 0, - 101, - 102, - 1, - 0, - 0, - 0, - 102, - 23, - 1, - 0, - 0, - 0, - 103, - 101, - 1, - 0, - 0, - 0, - 104, - 113, - 5, - 19, - 0, - 0, - 105, - 110, - 3, - 54, - 27, - 0, - 106, - 107, - 5, - 3, - 0, - 0, - 107, - 109, - 3, - 54, - 27, - 0, - 108, - 106, - 1, - 0, - 0, - 0, - 109, - 112, - 1, - 0, - 0, - 0, - 110, - 108, - 1, - 0, - 0, - 0, - 110, - 111, - 1, - 0, - 0, - 0, - 111, - 114, - 1, - 0, - 0, - 0, - 112, - 110, - 1, - 0, - 0, - 0, - 113, - 105, - 1, - 0, - 0, - 0, - 113, - 114, - 1, - 0, - 0, - 0, - 114, - 138, - 1, - 0, - 0, - 0, - 115, - 124, - 5, - 23, - 0, - 0, - 116, - 121, - 3, - 54, - 27, - 0, - 117, - 118, - 5, - 3, - 0, - 0, - 118, - 120, - 3, - 54, - 27, - 0, - 119, - 117, - 1, - 0, - 0, - 0, - 120, - 123, - 1, - 0, - 0, - 0, - 121, - 119, - 1, - 0, - 0, - 0, - 121, - 122, - 1, - 0, - 0, - 0, - 122, - 125, - 1, - 0, - 0, - 0, - 123, - 121, - 1, - 0, - 0, - 0, - 124, - 116, - 1, - 0, - 0, - 0, - 124, - 125, - 1, - 0, - 0, - 0, - 125, - 138, - 1, - 0, - 0, - 0, - 126, - 135, - 5, - 17, - 0, - 0, - 127, - 132, - 3, - 54, - 27, - 0, - 128, - 129, - 5, - 3, - 0, - 0, - 129, - 131, - 3, - 54, - 27, - 0, - 130, - 128, - 1, - 0, - 0, - 0, - 131, - 134, - 1, - 0, - 0, - 0, - 132, - 130, - 1, - 0, - 0, - 0, - 132, - 133, - 1, - 0, - 0, - 0, - 133, - 136, - 1, - 0, - 0, - 0, - 134, - 132, - 1, - 0, - 0, - 0, - 135, - 127, - 1, - 0, - 0, - 0, - 135, - 136, - 1, - 0, - 0, - 0, - 136, - 138, - 
1, - 0, - 0, - 0, - 137, - 104, - 1, - 0, - 0, - 0, - 137, - 115, - 1, - 0, - 0, - 0, - 137, - 126, - 1, - 0, - 0, - 0, - 138, - 25, - 1, - 0, - 0, - 0, - 139, - 163, - 5, - 31, - 0, - 0, - 140, - 149, - 5, - 7, - 0, - 0, - 141, - 146, - 3, - 54, - 27, - 0, - 142, - 143, - 5, - 3, - 0, - 0, - 143, - 145, - 3, - 54, - 27, - 0, - 144, - 142, - 1, - 0, - 0, - 0, - 145, - 148, - 1, - 0, - 0, - 0, - 146, - 144, - 1, - 0, - 0, - 0, - 146, - 147, - 1, - 0, - 0, - 0, - 147, - 150, - 1, - 0, - 0, - 0, - 148, - 146, - 1, - 0, - 0, - 0, - 149, - 141, - 1, - 0, - 0, - 0, - 149, - 150, - 1, - 0, - 0, - 0, - 150, - 163, - 1, - 0, - 0, - 0, - 151, - 160, - 5, - 34, - 0, - 0, - 152, - 157, - 3, - 54, - 27, - 0, - 153, - 154, - 5, - 3, - 0, - 0, - 154, - 156, - 3, - 54, - 27, - 0, - 155, - 153, - 1, - 0, - 0, - 0, - 156, - 159, - 1, - 0, - 0, - 0, - 157, - 155, - 1, - 0, - 0, - 0, - 157, - 158, - 1, - 0, - 0, - 0, - 158, - 161, - 1, - 0, - 0, - 0, - 159, - 157, - 1, - 0, - 0, - 0, - 160, - 152, - 1, - 0, - 0, - 0, - 160, - 161, - 1, - 0, - 0, - 0, - 161, - 163, - 1, - 0, - 0, - 0, - 162, - 139, - 1, - 0, - 0, - 0, - 162, - 140, - 1, - 0, - 0, - 0, - 162, - 151, - 1, - 0, - 0, - 0, - 163, - 27, - 1, - 0, - 0, - 0, - 164, - 173, - 5, - 10, - 0, - 0, - 165, - 170, - 3, - 18, - 9, - 0, - 166, - 167, - 5, - 3, - 0, - 0, - 167, - 169, - 3, - 18, - 9, - 0, - 168, - 166, - 1, - 0, - 0, - 0, - 169, - 172, - 1, - 0, - 0, - 0, - 170, - 168, - 1, - 0, - 0, - 0, - 170, - 171, - 1, - 0, - 0, - 0, - 171, - 174, - 1, - 0, - 0, - 0, - 172, - 170, - 1, - 0, - 0, - 0, - 173, - 165, - 1, - 0, - 0, - 0, - 173, - 174, - 1, - 0, - 0, - 0, - 174, - 29, - 1, - 0, - 0, - 0, - 175, - 184, - 5, - 21, - 0, - 0, - 176, - 181, - 3, - 18, - 9, - 0, - 177, - 178, - 5, - 3, - 0, - 0, - 178, - 180, - 3, - 18, - 9, - 0, - 179, - 177, - 1, - 0, - 0, - 0, - 180, - 183, - 1, - 0, - 0, - 0, - 181, - 179, - 1, - 0, - 0, - 0, - 181, - 182, - 1, - 0, - 0, - 0, - 182, - 185, - 1, - 0, - 0, - 0, - 183, - 181, - 1, - 0, - 0, - 
0, - 184, - 176, - 1, - 0, - 0, - 0, - 184, - 185, - 1, - 0, - 0, - 0, - 185, - 31, - 1, - 0, - 0, - 0, - 186, - 187, - 7, - 1, - 0, - 0, - 187, - 196, - 5, - 40, - 0, - 0, - 188, - 193, - 3, - 54, - 27, - 0, - 189, - 190, - 5, - 3, - 0, - 0, - 190, - 192, - 3, - 54, - 27, - 0, - 191, - 189, - 1, - 0, - 0, - 0, - 192, - 195, - 1, - 0, - 0, - 0, - 193, - 191, - 1, - 0, - 0, - 0, - 193, - 194, - 1, - 0, - 0, - 0, - 194, - 197, - 1, - 0, - 0, - 0, - 195, - 193, - 1, - 0, - 0, - 0, - 196, - 188, - 1, - 0, - 0, - 0, - 196, - 197, - 1, - 0, - 0, - 0, - 197, - 199, - 1, - 0, - 0, - 0, - 198, - 200, - 3, - 26, - 13, - 0, - 199, - 198, - 1, - 0, - 0, - 0, - 199, - 200, - 1, - 0, - 0, - 0, - 200, - 202, - 1, - 0, - 0, - 0, - 201, - 203, - 3, - 22, - 11, - 0, - 202, - 201, - 1, - 0, - 0, - 0, - 202, - 203, - 1, - 0, - 0, - 0, - 203, - 205, - 1, - 0, - 0, - 0, - 204, - 206, - 3, - 24, - 12, - 0, - 205, - 204, - 1, - 0, - 0, - 0, - 205, - 206, - 1, - 0, - 0, - 0, - 206, - 208, - 1, - 0, - 0, - 0, - 207, - 209, - 3, - 28, - 14, - 0, - 208, - 207, - 1, - 0, - 0, - 0, - 208, - 209, - 1, - 0, - 0, - 0, - 209, - 211, - 1, - 0, - 0, - 0, - 210, - 212, - 3, - 30, - 15, - 0, - 211, - 210, - 1, - 0, - 0, - 0, - 211, - 212, - 1, - 0, - 0, - 0, - 212, - 213, - 1, - 0, - 0, - 0, - 213, - 214, - 5, - 41, - 0, - 0, - 214, - 33, - 1, - 0, - 0, - 0, - 215, - 216, - 5, - 15, - 0, - 0, - 216, - 217, - 3, - 54, - 27, - 0, - 217, - 218, - 5, - 30, - 0, - 0, - 218, - 219, - 3, - 54, - 27, - 0, - 219, - 35, - 1, - 0, - 0, - 0, - 220, - 221, - 5, - 14, - 0, - 0, - 221, - 222, - 3, - 54, - 27, - 0, - 222, - 37, - 1, - 0, - 0, - 0, - 223, - 224, - 5, - 20, - 0, - 0, - 224, - 225, - 3, - 54, - 27, - 0, - 225, - 226, - 5, - 30, - 0, - 0, - 226, - 227, - 3, - 54, - 27, - 0, - 227, - 39, - 1, - 0, - 0, - 0, - 228, - 232, - 3, - 38, - 19, - 0, - 229, - 231, - 3, - 34, - 17, - 0, - 230, - 229, - 1, - 0, - 0, - 0, - 231, - 234, - 1, - 0, - 0, - 0, - 232, - 230, - 1, - 0, - 0, - 0, - 232, - 233, - 1, - 0, - 0, 
- 0, - 233, - 236, - 1, - 0, - 0, - 0, - 234, - 232, - 1, - 0, - 0, - 0, - 235, - 237, - 3, - 36, - 18, - 0, - 236, - 235, - 1, - 0, - 0, - 0, - 236, - 237, - 1, - 0, - 0, - 0, - 237, - 238, - 1, - 0, - 0, - 0, - 238, - 239, - 5, - 16, - 0, - 0, - 239, - 41, - 1, - 0, - 0, - 0, - 240, - 241, - 5, - 33, - 0, - 0, - 241, - 242, - 3, - 54, - 27, - 0, - 242, - 243, - 5, - 30, - 0, - 0, - 243, - 244, - 3, - 54, - 27, - 0, - 244, - 43, - 1, - 0, - 0, - 0, - 245, - 246, - 5, - 12, - 0, - 0, - 246, - 248, - 3, - 54, - 27, - 0, - 247, - 249, - 3, - 42, - 21, - 0, - 248, - 247, - 1, - 0, - 0, - 0, - 249, - 250, - 1, - 0, - 0, - 0, - 250, - 248, - 1, - 0, - 0, - 0, - 250, - 251, - 1, - 0, - 0, - 0, - 251, - 253, - 1, - 0, - 0, - 0, - 252, - 254, - 3, - 36, - 18, - 0, - 253, - 252, - 1, - 0, - 0, - 0, - 253, - 254, - 1, - 0, - 0, - 0, - 254, - 255, - 1, - 0, - 0, - 0, - 255, - 256, - 5, - 16, - 0, - 0, - 256, - 45, - 1, - 0, - 0, - 0, - 257, - 258, - 5, - 40, - 0, - 0, - 258, - 259, - 3, - 54, - 27, - 0, - 259, - 260, - 5, - 41, - 0, - 0, - 260, - 47, - 1, - 0, - 0, - 0, - 261, - 276, - 3, - 0, - 0, - 0, - 262, - 276, - 3, - 2, - 1, - 0, - 263, - 276, - 3, - 14, - 7, - 0, - 264, - 276, - 3, - 16, - 8, - 0, - 265, - 276, - 3, - 40, - 20, - 0, - 266, - 276, - 3, - 44, - 22, - 0, - 267, - 276, - 3, - 4, - 2, - 0, - 268, - 276, - 3, - 18, - 9, - 0, - 269, - 276, - 3, - 6, - 3, - 0, - 270, - 276, - 3, - 8, - 4, - 0, - 271, - 276, - 3, - 10, - 5, - 0, - 272, - 276, - 3, - 12, - 6, - 0, - 273, - 276, - 3, - 32, - 16, - 0, - 274, - 276, - 3, - 46, - 23, - 0, - 275, - 261, - 1, - 0, - 0, - 0, - 275, - 262, - 1, - 0, - 0, - 0, - 275, - 263, - 1, - 0, - 0, - 0, - 275, - 264, - 1, - 0, - 0, - 0, - 275, - 265, - 1, - 0, - 0, - 0, - 275, - 266, - 1, - 0, - 0, - 0, - 275, - 267, - 1, - 0, - 0, - 0, - 275, - 268, - 1, - 0, - 0, - 0, - 275, - 269, - 1, - 0, - 0, - 0, - 275, - 270, - 1, - 0, - 0, - 0, - 275, - 271, - 1, - 0, - 0, - 0, - 275, - 272, - 1, - 0, - 0, - 0, - 275, - 273, - 1, - 0, - 
0, - 0, - 275, - 274, - 1, - 0, - 0, - 0, - 276, - 49, - 1, - 0, - 0, - 0, - 277, - 278, - 6, - 25, - -1, - 0, - 278, - 279, - 5, - 36, - 0, - 0, - 279, - 284, - 3, - 50, - 25, - 10, - 280, - 281, - 5, - 26, - 0, - 0, - 281, - 284, - 3, - 50, - 25, - 2, - 282, - 284, - 3, - 48, - 24, - 0, - 283, - 277, - 1, - 0, - 0, - 0, - 283, - 280, - 1, - 0, - 0, - 0, - 283, - 282, - 1, - 0, - 0, - 0, - 284, - 339, - 1, - 0, - 0, - 0, - 285, - 286, - 10, - 11, - 0, - 0, - 286, - 287, - 5, - 37, - 0, - 0, - 287, - 338, - 3, - 50, - 25, - 12, - 288, - 289, - 10, - 9, - 0, - 0, - 289, - 290, - 5, - 38, - 0, - 0, - 290, - 338, - 3, - 50, - 25, - 10, - 291, - 292, - 10, - 8, - 0, - 0, - 292, - 293, - 7, - 2, - 0, - 0, - 293, - 338, - 3, - 50, - 25, - 9, - 294, - 296, - 10, - 6, - 0, - 0, - 295, - 297, - 5, - 26, - 0, - 0, - 296, - 295, - 1, - 0, - 0, - 0, - 296, - 297, - 1, - 0, - 0, - 0, - 297, - 298, - 1, - 0, - 0, - 0, - 298, - 299, - 5, - 25, - 0, - 0, - 299, - 338, - 3, - 50, - 25, - 7, - 300, - 301, - 10, - 5, - 0, - 0, - 301, - 302, - 5, - 39, - 0, - 0, - 302, - 338, - 3, - 50, - 25, - 6, - 303, - 307, - 10, - 4, - 0, - 0, - 304, - 308, - 5, - 11, - 0, - 0, - 305, - 306, - 5, - 26, - 0, - 0, - 306, - 308, - 5, - 11, - 0, - 0, - 307, - 304, - 1, - 0, - 0, - 0, - 307, - 305, - 1, - 0, - 0, - 0, - 308, - 309, - 1, - 0, - 0, - 0, - 309, - 310, - 3, - 50, - 25, - 0, - 310, - 311, - 5, - 8, - 0, - 0, - 311, - 312, - 3, - 50, - 25, - 5, - 312, - 338, - 1, - 0, - 0, - 0, - 313, - 314, - 10, - 7, - 0, - 0, - 314, - 316, - 5, - 24, - 0, - 0, - 315, - 317, - 5, - 26, - 0, - 0, - 316, - 315, - 1, - 0, - 0, - 0, - 316, - 317, - 1, - 0, - 0, - 0, - 317, - 318, - 1, - 0, - 0, - 0, - 318, - 338, - 7, - 3, - 0, - 0, - 319, - 321, - 10, - 3, - 0, - 0, - 320, - 322, - 5, - 26, - 0, - 0, - 321, - 320, - 1, - 0, - 0, - 0, - 321, - 322, - 1, - 0, - 0, - 0, - 322, - 323, - 1, - 0, - 0, - 0, - 323, - 324, - 5, - 22, - 0, - 0, - 324, - 334, - 5, - 40, - 0, - 0, - 325, - 326, - 3, - 54, - 27, - 0, - 
326, - 327, - 5, - 3, - 0, - 0, - 327, - 329, - 1, - 0, - 0, - 0, - 328, - 325, - 1, - 0, - 0, - 0, - 329, - 332, - 1, - 0, - 0, - 0, - 330, - 328, - 1, - 0, - 0, - 0, - 330, - 331, - 1, - 0, - 0, - 0, - 331, - 333, - 1, - 0, - 0, - 0, - 332, - 330, - 1, - 0, - 0, - 0, - 333, - 335, - 3, - 54, - 27, - 0, - 334, - 330, - 1, - 0, - 0, - 0, - 334, - 335, - 1, - 0, - 0, - 0, - 335, - 336, - 1, - 0, - 0, - 0, - 336, - 338, - 5, - 41, - 0, - 0, - 337, - 285, - 1, - 0, - 0, - 0, - 337, - 288, - 1, - 0, - 0, - 0, - 337, - 291, - 1, - 0, - 0, - 0, - 337, - 294, - 1, - 0, - 0, - 0, - 337, - 300, - 1, - 0, - 0, - 0, - 337, - 303, - 1, - 0, - 0, - 0, - 337, - 313, - 1, - 0, - 0, - 0, - 337, - 319, - 1, - 0, - 0, - 0, - 338, - 341, - 1, - 0, - 0, - 0, - 339, - 337, - 1, - 0, - 0, - 0, - 339, - 340, - 1, - 0, - 0, - 0, - 340, - 51, - 1, - 0, - 0, - 0, - 341, - 339, - 1, - 0, - 0, - 0, - 342, - 343, - 6, - 26, - -1, - 0, - 343, - 344, - 3, - 50, - 25, - 0, - 344, - 353, - 1, - 0, - 0, - 0, - 345, - 346, - 10, - 3, - 0, - 0, - 346, - 347, - 5, - 8, - 0, - 0, - 347, - 352, - 3, - 52, - 26, - 4, - 348, - 349, - 10, - 2, - 0, - 0, - 349, - 350, - 5, - 28, - 0, - 0, - 350, - 352, - 3, - 52, - 26, - 3, - 351, - 345, - 1, - 0, - 0, - 0, - 351, - 348, - 1, - 0, - 0, - 0, - 352, - 355, - 1, - 0, - 0, - 0, - 353, - 351, - 1, - 0, - 0, - 0, - 353, - 354, - 1, - 0, - 0, - 0, - 354, - 53, - 1, - 0, - 0, - 0, - 355, - 353, - 1, - 0, - 0, - 0, - 356, - 357, - 3, - 52, - 26, - 0, - 357, - 55, - 1, - 0, - 0, - 0, - 358, - 359, - 3, - 54, - 27, - 0, - 359, - 360, - 5, - 0, - 0, - 1, - 360, - 363, - 1, - 0, - 0, - 0, - 361, - 363, - 5, - 0, - 0, - 1, - 362, - 358, - 1, - 0, - 0, - 0, - 362, - 361, - 1, - 0, - 0, - 0, - 363, - 57, - 1, - 0, - 0, - 0, - 42, - 93, - 101, - 110, - 113, - 121, - 124, - 132, - 135, - 137, - 146, - 149, - 157, - 160, - 162, - 170, - 173, - 181, - 184, - 193, - 196, - 199, - 202, - 205, - 208, - 211, - 232, - 236, - 250, - 253, - 275, - 283, - 296, - 307, - 316, - 321, - 
330, - 334, - 337, - 339, - 351, - 353, - 362, + 4, 1, 49, 365, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, + 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, + 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, + 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, + 2, 27, 7, 27, 2, 28, 7, 28, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, + 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, + 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 94, 8, 10, 1, 11, 1, 11, 1, + 11, 1, 11, 5, 11, 100, 8, 11, 10, 11, 12, 11, 103, 9, 11, 1, 12, 1, 12, 1, 12, 1, 12, 5, + 12, 109, 8, 12, 10, 12, 12, 12, 112, 9, 12, 3, 12, 114, 8, 12, 1, 12, 1, 12, 1, 12, 1, + 12, 5, 12, 120, 8, 12, 10, 12, 12, 12, 123, 9, 12, 3, 12, 125, 8, 12, 1, 12, 1, 12, 1, + 12, 1, 12, 5, 12, 131, 8, 12, 10, 12, 12, 12, 134, 9, 12, 3, 12, 136, 8, 12, 3, 12, 138, + 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 145, 8, 13, 10, 13, 12, 13, 148, 9, 13, + 3, 13, 150, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 156, 8, 13, 10, 13, 12, 13, 159, + 9, 13, 3, 13, 161, 8, 13, 3, 13, 163, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 169, 8, + 14, 10, 14, 12, 14, 172, 9, 14, 3, 14, 174, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 180, + 8, 15, 10, 15, 12, 15, 183, 9, 15, 3, 15, 185, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, + 5, 16, 192, 8, 16, 10, 16, 12, 16, 195, 9, 16, 3, 16, 197, 8, 16, 1, 16, 3, 16, 200, 8, + 16, 1, 16, 3, 16, 203, 8, 16, 1, 16, 3, 16, 206, 8, 16, 1, 16, 3, 16, 209, 8, 16, 1, 16, + 3, 16, 212, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, + 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 5, 20, 231, 8, 20, 10, 20, 12, 20, 234, + 9, 20, 1, 20, 3, 20, 237, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, + 1, 22, 1, 22, 
4, 22, 249, 8, 22, 11, 22, 12, 22, 250, 1, 22, 3, 22, 254, 8, 22, 1, 22, + 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, + 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 276, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, + 1, 25, 1, 25, 3, 25, 284, 8, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, + 1, 25, 1, 25, 1, 25, 3, 25, 297, 8, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, + 1, 25, 1, 25, 3, 25, 308, 8, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, + 317, 8, 25, 1, 25, 1, 25, 1, 25, 3, 25, 322, 8, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, + 25, 329, 8, 25, 10, 25, 12, 25, 332, 9, 25, 1, 25, 3, 25, 335, 8, 25, 1, 25, 5, 25, 338, + 8, 25, 10, 25, 12, 25, 341, 9, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, + 1, 26, 5, 26, 352, 8, 26, 10, 26, 12, 26, 355, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, + 1, 28, 3, 28, 363, 8, 28, 1, 28, 0, 2, 50, 52, 29, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 0, 4, 2, 0, 18, + 18, 32, 32, 6, 0, 8, 8, 11, 12, 20, 20, 25, 26, 28, 28, 46, 46, 1, 0, 35, 36, 3, 0, 18, + 18, 27, 27, 32, 32, 399, 0, 58, 1, 0, 0, 0, 2, 60, 1, 0, 0, 0, 4, 62, 1, 0, 0, 0, 6, 64, 1, + 0, 0, 0, 8, 68, 1, 0, 0, 0, 10, 72, 1, 0, 0, 0, 12, 76, 1, 0, 0, 0, 14, 80, 1, 0, 0, 0, 16, + 82, 1, 0, 0, 0, 18, 84, 1, 0, 0, 0, 20, 93, 1, 0, 0, 0, 22, 95, 1, 0, 0, 0, 24, 137, 1, 0, + 0, 0, 26, 162, 1, 0, 0, 0, 28, 164, 1, 0, 0, 0, 30, 175, 1, 0, 0, 0, 32, 186, 1, 0, 0, 0, + 34, 215, 1, 0, 0, 0, 36, 220, 1, 0, 0, 0, 38, 223, 1, 0, 0, 0, 40, 228, 1, 0, 0, 0, 42, 240, + 1, 0, 0, 0, 44, 245, 1, 0, 0, 0, 46, 257, 1, 0, 0, 0, 48, 275, 1, 0, 0, 0, 50, 283, 1, 0, + 0, 0, 52, 342, 1, 0, 0, 0, 54, 356, 1, 0, 0, 0, 56, 362, 1, 0, 0, 0, 58, 59, 5, 42, 0, 0, + 59, 1, 1, 0, 0, 0, 60, 61, 5, 43, 0, 0, 61, 3, 1, 0, 0, 0, 62, 63, 5, 44, 0, 0, 63, 5, 1, 0, + 0, 0, 64, 65, 5, 1, 0, 0, 65, 66, 5, 47, 0, 0, 66, 67, 5, 1, 0, 0, 67, 7, 1, 0, 
0, 0, 68, 69, + 5, 1, 0, 0, 69, 70, 5, 48, 0, 0, 70, 71, 5, 1, 0, 0, 71, 9, 1, 0, 0, 0, 72, 73, 5, 2, 0, 0, + 73, 74, 5, 47, 0, 0, 74, 75, 5, 2, 0, 0, 75, 11, 1, 0, 0, 0, 76, 77, 5, 2, 0, 0, 77, 78, 5, + 48, 0, 0, 78, 79, 5, 2, 0, 0, 79, 13, 1, 0, 0, 0, 80, 81, 7, 0, 0, 0, 81, 15, 1, 0, 0, 0, 82, + 83, 5, 27, 0, 0, 83, 17, 1, 0, 0, 0, 84, 85, 5, 45, 0, 0, 85, 19, 1, 0, 0, 0, 86, 94, 3, 54, + 27, 0, 87, 88, 3, 54, 27, 0, 88, 89, 5, 9, 0, 0, 89, 94, 1, 0, 0, 0, 90, 91, 3, 54, 27, 0, + 91, 92, 5, 13, 0, 0, 92, 94, 1, 0, 0, 0, 93, 86, 1, 0, 0, 0, 93, 87, 1, 0, 0, 0, 93, 90, 1, + 0, 0, 0, 94, 21, 1, 0, 0, 0, 95, 96, 5, 29, 0, 0, 96, 101, 3, 20, 10, 0, 97, 98, 5, 3, 0, + 0, 98, 100, 3, 20, 10, 0, 99, 97, 1, 0, 0, 0, 100, 103, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, + 101, 102, 1, 0, 0, 0, 102, 23, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 104, 113, 5, 19, 0, 0, + 105, 110, 3, 54, 27, 0, 106, 107, 5, 3, 0, 0, 107, 109, 3, 54, 27, 0, 108, 106, 1, 0, + 0, 0, 109, 112, 1, 0, 0, 0, 110, 108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 114, 1, 0, + 0, 0, 112, 110, 1, 0, 0, 0, 113, 105, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 138, 1, 0, + 0, 0, 115, 124, 5, 23, 0, 0, 116, 121, 3, 54, 27, 0, 117, 118, 5, 3, 0, 0, 118, 120, 3, + 54, 27, 0, 119, 117, 1, 0, 0, 0, 120, 123, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 122, + 1, 0, 0, 0, 122, 125, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 124, 116, 1, 0, 0, 0, 124, 125, + 1, 0, 0, 0, 125, 138, 1, 0, 0, 0, 126, 135, 5, 17, 0, 0, 127, 132, 3, 54, 27, 0, 128, 129, + 5, 3, 0, 0, 129, 131, 3, 54, 27, 0, 130, 128, 1, 0, 0, 0, 131, 134, 1, 0, 0, 0, 132, 130, + 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 136, 1, 0, 0, 0, 134, 132, 1, 0, 0, 0, 135, 127, + 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 138, 1, 0, 0, 0, 137, 104, 1, 0, 0, 0, 137, 115, + 1, 0, 0, 0, 137, 126, 1, 0, 0, 0, 138, 25, 1, 0, 0, 0, 139, 163, 5, 31, 0, 0, 140, 149, + 5, 7, 0, 0, 141, 146, 3, 54, 27, 0, 142, 143, 5, 3, 0, 0, 143, 145, 3, 54, 27, 0, 144, + 142, 1, 0, 0, 0, 145, 148, 1, 0, 0, 0, 146, 144, 1, 0, 0, 0, 
146, 147, 1, 0, 0, 0, 147, + 150, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 149, 141, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, + 163, 1, 0, 0, 0, 151, 160, 5, 34, 0, 0, 152, 157, 3, 54, 27, 0, 153, 154, 5, 3, 0, 0, 154, + 156, 3, 54, 27, 0, 155, 153, 1, 0, 0, 0, 156, 159, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 157, + 158, 1, 0, 0, 0, 158, 161, 1, 0, 0, 0, 159, 157, 1, 0, 0, 0, 160, 152, 1, 0, 0, 0, 160, + 161, 1, 0, 0, 0, 161, 163, 1, 0, 0, 0, 162, 139, 1, 0, 0, 0, 162, 140, 1, 0, 0, 0, 162, + 151, 1, 0, 0, 0, 163, 27, 1, 0, 0, 0, 164, 173, 5, 10, 0, 0, 165, 170, 3, 18, 9, 0, 166, + 167, 5, 3, 0, 0, 167, 169, 3, 18, 9, 0, 168, 166, 1, 0, 0, 0, 169, 172, 1, 0, 0, 0, 170, + 168, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, + 165, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 29, 1, 0, 0, 0, 175, 184, 5, 21, 0, 0, 176, + 181, 3, 18, 9, 0, 177, 178, 5, 3, 0, 0, 178, 180, 3, 18, 9, 0, 179, 177, 1, 0, 0, 0, 180, + 183, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, + 181, 1, 0, 0, 0, 184, 176, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 31, 1, 0, 0, 0, 186, 187, + 7, 1, 0, 0, 187, 196, 5, 40, 0, 0, 188, 193, 3, 54, 27, 0, 189, 190, 5, 3, 0, 0, 190, 192, + 3, 54, 27, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, + 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 188, 1, 0, 0, 0, 196, 197, + 1, 0, 0, 0, 197, 199, 1, 0, 0, 0, 198, 200, 3, 26, 13, 0, 199, 198, 1, 0, 0, 0, 199, 200, + 1, 0, 0, 0, 200, 202, 1, 0, 0, 0, 201, 203, 3, 22, 11, 0, 202, 201, 1, 0, 0, 0, 202, 203, + 1, 0, 0, 0, 203, 205, 1, 0, 0, 0, 204, 206, 3, 24, 12, 0, 205, 204, 1, 0, 0, 0, 205, 206, + 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 28, 14, 0, 208, 207, 1, 0, 0, 0, 208, 209, + 1, 0, 0, 0, 209, 211, 1, 0, 0, 0, 210, 212, 3, 30, 15, 0, 211, 210, 1, 0, 0, 0, 211, 212, + 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 5, 41, 0, 0, 214, 33, 1, 0, 0, 0, 215, 216, + 5, 15, 0, 0, 216, 217, 3, 54, 27, 0, 217, 218, 5, 
30, 0, 0, 218, 219, 3, 54, 27, 0, 219, + 35, 1, 0, 0, 0, 220, 221, 5, 14, 0, 0, 221, 222, 3, 54, 27, 0, 222, 37, 1, 0, 0, 0, 223, + 224, 5, 20, 0, 0, 224, 225, 3, 54, 27, 0, 225, 226, 5, 30, 0, 0, 226, 227, 3, 54, 27, + 0, 227, 39, 1, 0, 0, 0, 228, 232, 3, 38, 19, 0, 229, 231, 3, 34, 17, 0, 230, 229, 1, 0, + 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 236, 1, 0, + 0, 0, 234, 232, 1, 0, 0, 0, 235, 237, 3, 36, 18, 0, 236, 235, 1, 0, 0, 0, 236, 237, 1, + 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 5, 16, 0, 0, 239, 41, 1, 0, 0, 0, 240, 241, 5, + 33, 0, 0, 241, 242, 3, 54, 27, 0, 242, 243, 5, 30, 0, 0, 243, 244, 3, 54, 27, 0, 244, + 43, 1, 0, 0, 0, 245, 246, 5, 12, 0, 0, 246, 248, 3, 54, 27, 0, 247, 249, 3, 42, 21, 0, + 248, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, + 251, 253, 1, 0, 0, 0, 252, 254, 3, 36, 18, 0, 253, 252, 1, 0, 0, 0, 253, 254, 1, 0, 0, + 0, 254, 255, 1, 0, 0, 0, 255, 256, 5, 16, 0, 0, 256, 45, 1, 0, 0, 0, 257, 258, 5, 40, 0, + 0, 258, 259, 3, 54, 27, 0, 259, 260, 5, 41, 0, 0, 260, 47, 1, 0, 0, 0, 261, 276, 3, 0, + 0, 0, 262, 276, 3, 2, 1, 0, 263, 276, 3, 14, 7, 0, 264, 276, 3, 16, 8, 0, 265, 276, 3, + 40, 20, 0, 266, 276, 3, 44, 22, 0, 267, 276, 3, 4, 2, 0, 268, 276, 3, 18, 9, 0, 269, 276, + 3, 6, 3, 0, 270, 276, 3, 8, 4, 0, 271, 276, 3, 10, 5, 0, 272, 276, 3, 12, 6, 0, 273, 276, + 3, 32, 16, 0, 274, 276, 3, 46, 23, 0, 275, 261, 1, 0, 0, 0, 275, 262, 1, 0, 0, 0, 275, + 263, 1, 0, 0, 0, 275, 264, 1, 0, 0, 0, 275, 265, 1, 0, 0, 0, 275, 266, 1, 0, 0, 0, 275, + 267, 1, 0, 0, 0, 275, 268, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, + 271, 1, 0, 0, 0, 275, 272, 1, 0, 0, 0, 275, 273, 1, 0, 0, 0, 275, 274, 1, 0, 0, 0, 276, + 49, 1, 0, 0, 0, 277, 278, 6, 25, -1, 0, 278, 279, 5, 36, 0, 0, 279, 284, 3, 50, 25, 10, + 280, 281, 5, 26, 0, 0, 281, 284, 3, 50, 25, 2, 282, 284, 3, 48, 24, 0, 283, 277, 1, 0, + 0, 0, 283, 280, 1, 0, 0, 0, 283, 282, 1, 0, 0, 0, 284, 339, 1, 0, 0, 0, 
285, 286, 10, 11, + 0, 0, 286, 287, 5, 37, 0, 0, 287, 338, 3, 50, 25, 12, 288, 289, 10, 9, 0, 0, 289, 290, + 5, 38, 0, 0, 290, 338, 3, 50, 25, 10, 291, 292, 10, 8, 0, 0, 292, 293, 7, 2, 0, 0, 293, + 338, 3, 50, 25, 9, 294, 296, 10, 6, 0, 0, 295, 297, 5, 26, 0, 0, 296, 295, 1, 0, 0, 0, + 296, 297, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 299, 5, 25, 0, 0, 299, 338, 3, 50, 25, + 7, 300, 301, 10, 5, 0, 0, 301, 302, 5, 39, 0, 0, 302, 338, 3, 50, 25, 6, 303, 307, 10, + 4, 0, 0, 304, 308, 5, 11, 0, 0, 305, 306, 5, 26, 0, 0, 306, 308, 5, 11, 0, 0, 307, 304, + 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 50, 25, 0, 310, 311, + 5, 8, 0, 0, 311, 312, 3, 50, 25, 5, 312, 338, 1, 0, 0, 0, 313, 314, 10, 7, 0, 0, 314, 316, + 5, 24, 0, 0, 315, 317, 5, 26, 0, 0, 316, 315, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, + 1, 0, 0, 0, 318, 338, 7, 3, 0, 0, 319, 321, 10, 3, 0, 0, 320, 322, 5, 26, 0, 0, 321, 320, + 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 324, 5, 22, 0, 0, 324, 334, + 5, 40, 0, 0, 325, 326, 3, 54, 27, 0, 326, 327, 5, 3, 0, 0, 327, 329, 1, 0, 0, 0, 328, 325, + 1, 0, 0, 0, 329, 332, 1, 0, 0, 0, 330, 328, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 333, + 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 333, 335, 3, 54, 27, 0, 334, 330, 1, 0, 0, 0, 334, 335, + 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 338, 5, 41, 0, 0, 337, 285, 1, 0, 0, 0, 337, 288, + 1, 0, 0, 0, 337, 291, 1, 0, 0, 0, 337, 294, 1, 0, 0, 0, 337, 300, 1, 0, 0, 0, 337, 303, + 1, 0, 0, 0, 337, 313, 1, 0, 0, 0, 337, 319, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, + 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 51, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 342, 343, 6, + 26, -1, 0, 343, 344, 3, 50, 25, 0, 344, 353, 1, 0, 0, 0, 345, 346, 10, 3, 0, 0, 346, 347, + 5, 8, 0, 0, 347, 352, 3, 52, 26, 4, 348, 349, 10, 2, 0, 0, 349, 350, 5, 28, 0, 0, 350, + 352, 3, 52, 26, 3, 351, 345, 1, 0, 0, 0, 351, 348, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, + 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 53, 1, 0, 0, 0, 355, 353, 
1, 0, 0, 0, 356, 357, + 3, 52, 26, 0, 357, 55, 1, 0, 0, 0, 358, 359, 3, 54, 27, 0, 359, 360, 5, 0, 0, 1, 360, 363, + 1, 0, 0, 0, 361, 363, 5, 0, 0, 1, 362, 358, 1, 0, 0, 0, 362, 361, 1, 0, 0, 0, 363, 57, 1, + 0, 0, 0, 42, 93, 101, 110, 113, 121, 124, 132, 135, 137, 146, 149, 157, 160, 162, + 170, 173, 181, 184, 193, 196, 199, 202, 205, 208, 211, 232, 236, 250, 253, 275, + 283, 296, 307, 316, 321, 330, 334, 337, 339, 351, 353, 362 ] -class DataLensParser(Parser): +class DataLensParser (Parser): + grammarFileName = "DataLens.g4" atn = ATNDeserializer().deserialize(serializedATN()) @@ -3332,103 +161,27 @@ class DataLensParser(Parser): sharedContextCache = PredictionContextCache() - literalNames = [ - "", - "'#'", - "'##'", - "','", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "'+'", - "'-'", - "'^'", - "", - "", - "'('", - "')'", - ] - - symbolicNames = [ - "", - "", - "", - "", - "SINGLE_LINE_COMMENT", - "MULTI_LINE_COMMENT", - "WS", - "AMONG", - "AND", - "ASC", - "BEFORE_FILTER_BY", - "BETWEEN", - "CASE", - "DESC", - "ELSE", - "ELSEIF", - "END", - "EXCLUDE", - "FALSE", - "FIXED", - "IF", - "IGNORE_DIMENSIONS", - "IN", - "INCLUDE", - "IS", - "LIKE", - "NOT", - "NULL", - "OR", - "ORDER_BY", - "THEN", - "TOTAL", - "TRUE", - "WHEN", - "WITHIN", - "PLUS", - "MINUS", - "POWER", - "MULDIV", - "COMPARISON", - "OPENING_PAR", - "CLOSING_PAR", - "INT", - "FLOAT", - "ESCAPED_STRING", - "FIELD_NAME", - "FUNC_NAME", - "DATE_INNER", - "DATETIME_INNER", - "UNEXPECTED_CHARACTER", - ] + literalNames = ["", "'#'", "'##'", "','", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "'+'", "'-'", "'^'", "", "", + "'('", "')'"] + + symbolicNames = ["", "", "", "", + "SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT", "WS", + "AMONG", "AND", "ASC", "BEFORE_FILTER_BY", 
"BETWEEN", + "CASE", "DESC", "ELSE", "ELSEIF", "END", "EXCLUDE", + "FALSE", "FIXED", "IF", "IGNORE_DIMENSIONS", "IN", + "INCLUDE", "IS", "LIKE", "NOT", "NULL", "OR", "ORDER_BY", + "THEN", "TOTAL", "TRUE", "WHEN", "WITHIN", "PLUS", + "MINUS", "POWER", "MULDIV", "COMPARISON", "OPENING_PAR", + "CLOSING_PAR", "INT", "FLOAT", "ESCAPED_STRING", "FIELD_NAME", + "FUNC_NAME", "DATE_INNER", "DATETIME_INNER", "UNEXPECTED_CHARACTER"] RULE_integerLiteral = 0 RULE_floatLiteral = 1 @@ -3460,37 +213,14 @@ class DataLensParser(Parser): RULE_expression = 27 RULE_parse = 28 - ruleNames = [ - "integerLiteral", - "floatLiteral", - "stringLiteral", - "dateLiteral", - "datetimeLiteral", - "genericDateLiteral", - "genericDatetimeLiteral", - "boolLiteral", - "nullLiteral", - "fieldName", - "orderingItem", - "ordering", - "lodSpecifier", - "winGrouping", - "beforeFilterBy", - "ignoreDimensions", - "function", - "elseifPart", - "elsePart", - "ifPart", - "ifBlock", - "whenPart", - "caseBlock", - "parenthesizedExpr", - "exprBasic", - "exprMain", - "exprSecondary", - "expression", - "parse", - ] + ruleNames = ["integerLiteral", "floatLiteral", "stringLiteral", "dateLiteral", + "datetimeLiteral", "genericDateLiteral", "genericDatetimeLiteral", + "boolLiteral", "nullLiteral", "fieldName", "orderingItem", + "ordering", "lodSpecifier", "winGrouping", "beforeFilterBy", + "ignoreDimensions", "function", "elseifPart", "elsePart", + "ifPart", "ifBlock", "whenPart", "caseBlock", "parenthesizedExpr", + "exprBasic", "exprMain", "exprSecondary", "expression", + "parse"] EOF = Token.EOF T__0 = 1 @@ -3550,7 +280,7 @@ def __init__(self, input: TokenStream, output: TextIO = sys.stdout): self._predicates = None class IntegerLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3569,6 +299,7 @@ def accept(self, visitor: ParseTreeVisitor): return 
visitor.visitChildren(self) def integerLiteral(self): + localctx = DataLensParser.IntegerLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_integerLiteral) try: @@ -3584,7 +315,7 @@ def integerLiteral(self): return localctx class FloatLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3603,6 +334,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def floatLiteral(self): + localctx = DataLensParser.FloatLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_floatLiteral) try: @@ -3618,7 +350,7 @@ def floatLiteral(self): return localctx class StringLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3637,6 +369,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def stringLiteral(self): + localctx = DataLensParser.StringLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 4, self.RULE_stringLiteral) try: @@ -3652,7 +385,7 @@ def stringLiteral(self): return localctx class DateLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3671,6 +404,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def dateLiteral(self): + localctx = DataLensParser.DateLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_dateLiteral) try: @@ -3690,7 +424,7 @@ def dateLiteral(self): return localctx class DatetimeLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, 
parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3709,6 +443,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def datetimeLiteral(self): + localctx = DataLensParser.DatetimeLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_datetimeLiteral) try: @@ -3728,7 +463,7 @@ def datetimeLiteral(self): return localctx class GenericDateLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3747,6 +482,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def genericDateLiteral(self): + localctx = DataLensParser.GenericDateLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_genericDateLiteral) try: @@ -3766,7 +502,7 @@ def genericDateLiteral(self): return localctx class GenericDatetimeLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3785,6 +521,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def genericDatetimeLiteral(self): + localctx = DataLensParser.GenericDatetimeLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_genericDatetimeLiteral) try: @@ -3804,7 +541,7 @@ def genericDatetimeLiteral(self): return localctx class BoolLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3826,6 +563,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def boolLiteral(self): + localctx = DataLensParser.BoolLiteralContext(self, self._ctx, 
self.state) self.enterRule(localctx, 14, self.RULE_boolLiteral) self._la = 0 # Token type @@ -3847,7 +585,7 @@ def boolLiteral(self): return localctx class NullLiteralContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3866,6 +604,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def nullLiteral(self): + localctx = DataLensParser.NullLiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_nullLiteral) try: @@ -3881,7 +620,7 @@ def nullLiteral(self): return localctx class FieldNameContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3900,6 +639,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def fieldName(self): + localctx = DataLensParser.FieldNameContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_fieldName) try: @@ -3915,7 +655,7 @@ def fieldName(self): return localctx class OrderingItemContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3940,6 +680,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def orderingItem(self): + localctx = DataLensParser.OrderingItemContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_orderingItem) try: @@ -3974,7 +715,7 @@ def orderingItem(self): return localctx class OrderingContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -3999,6 +740,7 @@ def 
accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def ordering(self): + localctx = DataLensParser.OrderingContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_ordering) self._la = 0 # Token type @@ -4029,7 +771,7 @@ def ordering(self): return localctx class LodSpecifierContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4060,6 +802,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def lodSpecifier(self): + localctx = DataLensParser.LodSpecifierContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_lodSpecifier) self._la = 0 # Token type @@ -4074,7 +817,7 @@ def lodSpecifier(self): self.state = 113 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 105 self.expression() self.state = 110 @@ -4096,7 +839,7 @@ def lodSpecifier(self): self.state = 124 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 116 self.expression() self.state = 121 @@ -4118,7 +861,7 @@ def lodSpecifier(self): self.state = 135 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 127 self.expression() self.state = 132 @@ -4145,7 +888,7 @@ def lodSpecifier(self): return localctx class WinGroupingContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4176,6 +919,7 
@@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def winGrouping(self): + localctx = DataLensParser.WinGroupingContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_winGrouping) self._la = 0 # Token type @@ -4194,7 +938,7 @@ def winGrouping(self): self.state = 149 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 141 self.expression() self.state = 146 @@ -4216,7 +960,7 @@ def winGrouping(self): self.state = 160 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 152 self.expression() self.state = 157 @@ -4243,7 +987,7 @@ def winGrouping(self): return localctx class BeforeFilterByContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4268,6 +1012,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def beforeFilterBy(self): + localctx = DataLensParser.BeforeFilterByContext(self, self._ctx, self.state) self.enterRule(localctx, 28, self.RULE_beforeFilterBy) self._la = 0 # Token type @@ -4302,7 +1047,7 @@ def beforeFilterBy(self): return localctx class IgnoreDimensionsContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4327,6 +1072,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def ignoreDimensions(self): + localctx = DataLensParser.IgnoreDimensionsContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_ignoreDimensions) self._la = 0 # 
Token type @@ -4361,7 +1107,7 @@ def ignoreDimensions(self): return localctx class FunctionContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4428,6 +1174,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def function(self): + localctx = DataLensParser.FunctionContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_function) self._la = 0 # Token type @@ -4435,7 +1182,7 @@ def function(self): self.enterOuterAlt(localctx, 1) self.state = 186 _la = self._input.LA(1) - if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 70369114331392) != 0): + if not (((_la) & ~0x3f) == 0 and ((1 << _la) & 70369114331392) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4445,7 +1192,7 @@ def function(self): self.state = 196 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 188 self.expression() self.state = 193 @@ -4463,7 +1210,7 @@ def function(self): self.state = 199 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 19327352960) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 19327352960) != 0: self.state = 198 self.winGrouping() @@ -4477,7 +1224,7 @@ def function(self): self.state = 205 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 9043968) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 9043968) != 0: self.state = 204 self.lodSpecifier() @@ -4506,7 +1253,7 @@ def function(self): return localctx class ElseifPartContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, 
invokingState) @@ -4534,6 +1281,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def elseifPart(self): + localctx = DataLensParser.ElseifPartContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_elseifPart) try: @@ -4555,7 +1303,7 @@ def elseifPart(self): return localctx class ElsePartContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4577,6 +1325,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def elsePart(self): + localctx = DataLensParser.ElsePartContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_elsePart) try: @@ -4594,7 +1343,7 @@ def elsePart(self): return localctx class IfPartContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4622,6 +1371,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def ifPart(self): + localctx = DataLensParser.IfPartContext(self, self._ctx, self.state) self.enterRule(localctx, 38, self.RULE_ifPart) try: @@ -4643,7 +1393,7 @@ def ifPart(self): return localctx class IfBlockContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4674,6 +1424,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def ifBlock(self): + localctx = DataLensParser.IfBlockContext(self, self._ctx, self.state) self.enterRule(localctx, 40, self.RULE_ifBlock) self._la = 0 # Token type @@ -4709,7 +1460,7 @@ def ifBlock(self): return localctx class WhenPartContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def 
__init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4737,6 +1488,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def whenPart(self): + localctx = DataLensParser.WhenPartContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_whenPart) try: @@ -4758,7 +1510,7 @@ def whenPart(self): return localctx class CaseBlockContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4792,6 +1544,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def caseBlock(self): + localctx = DataLensParser.CaseBlockContext(self, self._ctx, self.state) self.enterRule(localctx, 44, self.RULE_caseBlock) self._la = 0 # Token type @@ -4831,7 +1584,7 @@ def caseBlock(self): return localctx class ParenthesizedExprContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4856,6 +1609,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def parenthesizedExpr(self): + localctx = DataLensParser.ParenthesizedExprContext(self, self._ctx, self.state) self.enterRule(localctx, 46, self.RULE_parenthesizedExpr) try: @@ -4875,7 +1629,7 @@ def parenthesizedExpr(self): return localctx class ExprBasicContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -4933,6 +1687,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def exprBasic(self): + localctx = DataLensParser.ExprBasicContext(self, self._ctx, self.state) self.enterRule(localctx, 48, 
self.RULE_exprBasic) try: @@ -5018,7 +1773,7 @@ def exprBasic(self): return localctx class ExprMainContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -5031,6 +1786,7 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ExprBasicAltContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5045,6 +1801,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class UnaryPrefixContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5065,6 +1822,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class InExprContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5097,6 +1855,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class BinaryExprContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5132,6 +1891,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class ComparisonChainContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5152,6 +1912,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class UnaryPostfixContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) 
self.copyFrom(ctx) @@ -5181,6 +1942,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class BetweenExprContext(ExprMainContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprMainContext super().__init__(parser) self.copyFrom(ctx) @@ -5257,14 +2019,11 @@ def exprMain(self, _p: int = 0): self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 37, self._ctx) if la_ == 1: - localctx = DataLensParser.BinaryExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 285 if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") self.state = 286 self.match(DataLensParser.POWER) @@ -5272,14 +2031,11 @@ def exprMain(self, _p: int = 0): self.exprMain(12) elif la_ == 2: - localctx = DataLensParser.BinaryExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 288 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") self.state = 289 self.match(DataLensParser.MULDIV) @@ -5287,14 +2043,11 @@ def exprMain(self, _p: int = 0): self.exprMain(10) elif la_ == 3: - localctx = DataLensParser.BinaryExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) 
self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 291 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") self.state = 292 _la = self._input.LA(1) @@ -5307,14 +2060,11 @@ def exprMain(self, _p: int = 0): self.exprMain(9) elif la_ == 4: - localctx = DataLensParser.BinaryExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 294 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") self.state = 296 self._errHandler.sync(self) @@ -5329,14 +2079,11 @@ def exprMain(self, _p: int = 0): self.exprMain(7) elif la_ == 5: - localctx = DataLensParser.ComparisonChainContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.ComparisonChainContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 300 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") self.state = 301 self.match(DataLensParser.COMPARISON) @@ -5344,14 +2091,11 @@ def exprMain(self, _p: int = 0): self.exprMain(6) elif la_ == 6: - localctx = DataLensParser.BetweenExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BetweenExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 303 if not 
self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") self.state = 307 self._errHandler.sync(self) @@ -5375,14 +2119,11 @@ def exprMain(self, _p: int = 0): self.exprMain(5) elif la_ == 7: - localctx = DataLensParser.UnaryPostfixContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.UnaryPostfixContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 313 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") self.state = 314 self.match(DataLensParser.IS) @@ -5395,21 +2136,18 @@ def exprMain(self, _p: int = 0): self.state = 318 _la = self._input.LA(1) - if not (((_la) & ~0x3F) == 0 and ((1 << _la) & 4429447168) != 0): + if not (((_la) & ~0x3f) == 0 and ((1 << _la) & 4429447168) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() elif la_ == 8: - localctx = DataLensParser.InExprContext( - self, DataLensParser.ExprMainContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.InExprContext(self, DataLensParser.ExprMainContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprMain) self.state = 319 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") self.state = 321 self._errHandler.sync(self) @@ -5425,7 +2163,7 @@ def exprMain(self, _p: int = 0): self.state = 334 self._errHandler.sync(self) _la = self._input.LA(1) - if ((_la) & ~0x3F) == 0 and ((1 << _la) & 137512472549638) != 0: + if ((_la) & ~0x3f) == 0 and ((1 << _la) & 137512472549638) != 0: self.state = 330 
self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input, 35, self._ctx) @@ -5458,7 +2196,7 @@ def exprMain(self, _p: int = 0): return localctx class ExprSecondaryContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -5471,6 +2209,7 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class BinaryExprSecContext(ExprSecondaryContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprSecondaryContext super().__init__(parser) self.copyFrom(ctx) @@ -5494,6 +2233,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) class ExprMainAltContext(ExprSecondaryContext): + def __init__(self, parser, ctx: ParserRuleContext): # actually a DataLensParser.ExprSecondaryContext super().__init__(parser) self.copyFrom(ctx) @@ -5534,14 +2274,11 @@ def exprSecondary(self, _p: int = 0): self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 39, self._ctx) if la_ == 1: - localctx = DataLensParser.BinaryExprSecContext( - self, DataLensParser.ExprSecondaryContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprSecContext(self, DataLensParser.ExprSecondaryContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprSecondary) self.state = 345 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") self.state = 346 self.match(DataLensParser.AND) @@ -5549,14 +2286,11 @@ def exprSecondary(self, _p: int = 0): self.exprSecondary(4) elif la_ == 2: - localctx = DataLensParser.BinaryExprSecContext( - self, DataLensParser.ExprSecondaryContext(self, _parentctx, _parentState) - ) + localctx = DataLensParser.BinaryExprSecContext(self, 
DataLensParser.ExprSecondaryContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_exprSecondary) self.state = 348 if not self.precpred(self._ctx, 2): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") self.state = 349 self.match(DataLensParser.OR) @@ -5576,7 +2310,7 @@ def exprSecondary(self, _p: int = 0): return localctx class ExpressionContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -5595,6 +2329,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def expression(self): + localctx = DataLensParser.ExpressionContext(self, self._ctx, self.state) self.enterRule(localctx, 54, self.RULE_expression) try: @@ -5610,7 +2345,7 @@ def expression(self): return localctx class ParseContext(ParserRuleContext): - __slots__ = "parser" + __slots__ = 'parser' def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) @@ -5632,6 +2367,7 @@ def accept(self, visitor: ParseTreeVisitor): return visitor.visitChildren(self) def parse(self): + localctx = DataLensParser.ParseContext(self, self._ctx, self.state) self.enterRule(localctx, 56, self.RULE_parse) try: diff --git a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensVisitor.py b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensVisitor.py index 0c05b264f..52d468bd5 100644 --- a/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensVisitor.py +++ b/lib/dl_formula/dl_formula/parser/antlr/gen/DataLensVisitor.py @@ -1,6 +1,5 @@ from antlr4 import ParseTreeVisitor - if __name__ is not None and "." 
in __name__: from .DataLensParser import DataLensParser else: @@ -10,6 +9,7 @@ class DataLensVisitor(ParseTreeVisitor): + # Visit a parse tree produced by DataLensParser#integerLiteral. def visitIntegerLiteral(self, ctx: DataLensParser.IntegerLiteralContext): return self.visitChildren(ctx) diff --git a/lib/dl_formula/pyproject.toml b/lib/dl_formula/pyproject.toml index e9bc2670f..8fc412a3a 100644 --- a/lib/dl_formula/pyproject.toml +++ b/lib/dl_formula/pyproject.toml @@ -62,7 +62,7 @@ module = "pytz.tzinfo.*" ignore_missing_imports = true [[tool.mypy.overrides]] -module = "sqlalchemy_metrika_api.*" +module = "dl_sqlalchemy_metrica_api.*" ignore_missing_imports = true [[tool.mypy.overrides]] diff --git a/lib/dl_formula_ref/dl_formula_ref_tests/db/conftest.py b/lib/dl_formula_ref/dl_formula_ref_tests/db/conftest.py index 858a008bb..1596e92f5 100644 --- a/lib/dl_formula_ref/dl_formula_ref_tests/db/conftest.py +++ b/lib/dl_formula_ref/dl_formula_ref_tests/db/conftest.py @@ -5,7 +5,6 @@ import pytest -from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_formula.core.dialect import DialectCombo from dl_formula.definitions.literals import literal from dl_formula_testing.database import ( @@ -16,6 +15,8 @@ from dl_formula_testing.evaluator import DbEvaluator from dl_testing.containers import get_test_container_hostport +from dl_connector_clickhouse.formula.constants import ClickHouseDialect + ALL_DB_CONFIGURATIONS = { ClickHouseDialect.CLICKHOUSE_22_10: ( diff --git a/lib/dl_formula_ref/dl_formula_ref_tests/db/examples/test_preparation.py b/lib/dl_formula_ref/dl_formula_ref_tests/db/examples/test_preparation.py index 9b8301e5d..d552579d3 100644 --- a/lib/dl_formula_ref/dl_formula_ref_tests/db/examples/test_preparation.py +++ b/lib/dl_formula_ref/dl_formula_ref_tests/db/examples/test_preparation.py @@ -3,13 +3,14 @@ import pytest -from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_formula_ref.config import 
DOC_GEN_CONFIG_DEFAULT from dl_formula_ref.generator import ( ConfigVersion, ReferenceDocGenerator, ) +from dl_connector_clickhouse.formula.constants import ClickHouseDialect + @pytest.fixture(scope="function") def example_db_conf_patch(monkeypatch, all_db_configurations, dbe): diff --git a/lib/dl_formula_ref/dl_formula_ref_tests/unit/examples/test_query_gen.py b/lib/dl_formula_ref/dl_formula_ref_tests/unit/examples/test_query_gen.py index 6f80c8ac9..ede7527bb 100644 --- a/lib/dl_formula_ref/dl_formula_ref_tests/unit/examples/test_query_gen.py +++ b/lib/dl_formula_ref/dl_formula_ref_tests/unit/examples/test_query_gen.py @@ -2,7 +2,6 @@ from typing import TYPE_CHECKING -from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_formula.core.datatype import DataType from dl_formula.parser.factory import get_parser from dl_formula.shortcuts import n @@ -17,6 +16,8 @@ ) from dl_formula_ref.examples.query_gen import QueryGenerator +from dl_connector_clickhouse.formula.constants import ClickHouseDialect + if TYPE_CHECKING: from dl_formula.core import nodes diff --git a/lib/dl_formula_ref/dl_formula_ref_tests/unit/scripts/test_formula_doc.py b/lib/dl_formula_ref/dl_formula_ref_tests/unit/scripts/test_formula_doc.py index b52b010ef..23889bf57 100644 --- a/lib/dl_formula_ref/dl_formula_ref_tests/unit/scripts/test_formula_doc.py +++ b/lib/dl_formula_ref/dl_formula_ref_tests/unit/scripts/test_formula_doc.py @@ -7,7 +7,6 @@ import pytest -from dl_connector_clickhouse.formula.constants import ClickHouseDialect import dl_formula_ref from dl_formula_ref.config import ( _CONFIGS_BY_VERSION, @@ -19,6 +18,8 @@ ) from dl_formula_testing.tool_runner import ToolRunner +from dl_connector_clickhouse.formula.constants import ClickHouseDialect + @pytest.fixture(scope="module") def tool(): diff --git a/lib/dl_formula_ref/dl_formula_ref_tests/unit/test_signature_gen.py b/lib/dl_formula_ref/dl_formula_ref_tests/unit/test_signature_gen.py index d14fabf1c..52d929574 100644 
--- a/lib/dl_formula_ref/dl_formula_ref_tests/unit/test_signature_gen.py +++ b/lib/dl_formula_ref/dl_formula_ref_tests/unit/test_signature_gen.py @@ -1,4 +1,3 @@ -from dl_connector_clickhouse.formula.constants import ClickHouseDialect from dl_formula.definitions.scope import Scope from dl_formula_ref.categories.aggregation import CATEGORY_AGGREGATION from dl_formula_ref.categories.logical import CATEGORY_LOGICAL @@ -12,6 +11,8 @@ ) from dl_formula_ref.registry.tools import populate_registry_from_definitions +from dl_connector_clickhouse.formula.constants import ClickHouseDialect + def check_function(func_name: str, exp_signature: list, category_name: str) -> None: func_key = RefFunctionKey.normalized(name=func_name, category_name=category_name) diff --git a/lib/dl_formula_testing/dl_formula_testing/testcases/functions_array.py b/lib/dl_formula_testing/dl_formula_testing/testcases/functions_array.py index 5c5c17459..258df8f5b 100644 --- a/lib/dl_formula_testing/dl_formula_testing/testcases/functions_array.py +++ b/lib/dl_formula_testing/dl_formula_testing/testcases/functions_array.py @@ -165,6 +165,24 @@ def test_array_contains_any_string_array(self, dbe: DbEvaluator, data_table: sa. 
assert dbe.eval('CONTAINS_ANY([arr_str_value], ARRAY("123", NULL))', from_=data_table) assert dbe.eval('CONTAINS_ANY(ARRAY("cde"), [arr_str_value])', from_=data_table) + def test_array_not_contains(self, dbe: DbEvaluator, data_table: sa.Table) -> None: + assert not dbe.eval("NOTCONTAINS(ARRAY(1, 2, 3), 1)", from_=data_table) + assert not dbe.eval("NOTCONTAINS(ARRAY(1.1, 2.2, 3.3), 3.3)", from_=data_table) + assert not dbe.eval('NOTCONTAINS(ARRAY("a", "b", "c"), "a")', from_=data_table) + assert dbe.eval('NOTCONTAINS(ARRAY("a", "b", "c"), "d")', from_=data_table) + assert not dbe.eval('NOTCONTAINS(ARRAY("a", NULL, "c"), NULL)', from_=data_table) + assert dbe.eval("NOTCONTAINS(ARRAY(1.1, 2.2, 3.3), NULL)", from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_int_value], 23)", from_=data_table) + assert dbe.eval("NOTCONTAINS([arr_int_value], 24)", from_=data_table) + assert not dbe.eval('NOTCONTAINS([arr_str_value], "cde")', from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_str_value], NULL)", from_=data_table) + + assert not dbe.eval("NOTCONTAINS([arr_str_value], GET_ITEM([arr_str_value], 1))", from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_str_value], GET_ITEM([arr_str_value], 2))", from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_str_value], GET_ITEM([arr_str_value], 3))", from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_str_value], GET_ITEM([arr_str_value], 4))", from_=data_table) + assert not dbe.eval("NOTCONTAINS([arr_str_value], GET_ITEM([arr_str_value], 4))", from_=data_table) + def test_array_slice(self, dbe: DbEvaluator, data_table: sa.Table) -> None: assert dbe.eval("SLICE([arr_int_value], 2, 2)", from_=data_table) == dbe.eval( "ARRAY(23, 456)", from_=data_table diff --git a/lib/dl_formula_testing/dl_formula_testing/testcases/functions_string.py b/lib/dl_formula_testing/dl_formula_testing/testcases/functions_string.py index 0331d2481..4cc26d560 100644 --- 
a/lib/dl_formula_testing/dl_formula_testing/testcases/functions_string.py +++ b/lib/dl_formula_testing/dl_formula_testing/testcases/functions_string.py @@ -196,6 +196,22 @@ def test_contains_simple(self, dbe: DbEvaluator, forced_literal_use: Any) -> Non assert dbe.eval('CONTAINS(#2019-03-04#, "019")') assert dbe.eval('CONTAINS(#2019-03-04T12:34:56#, "019")') + def test_notcontains_simple(self, dbe: DbEvaluator, forced_literal_use: Any) -> None: + assert not dbe.eval('NOTCONTAINS("Lorem ipsum", "ips")') + assert dbe.eval('NOTCONTAINS("Lorem ipsum", "abc")') + assert not dbe.eval('NOTCONTAINS(__LIT__("Lorem ipsum"), __LIT__("ips"))') + assert dbe.eval('NOTCONTAINS(__LIT__("Lorem ipsum"), __LIT__("abc"))') + assert not dbe.eval('NOTCONTAINS("Lorem %ipsum", "em %ip")') + assert not dbe.eval('NOTCONTAINS(__LIT__("Lorem %ipsum"), __LIT__("em %ip"))') + assert not dbe.eval('NOTCONTAINS("Карл у Клары украл кораллы", "Клары")') + assert not dbe.eval('NOTCONTAINS(__LIT__("Карл у Клары украл кораллы"), __LIT__("Клары"))') + # Non-string + assert not dbe.eval('NOTCONTAINS(123456, "234")') + assert dbe.eval('NOTCONTAINS(123456, "432")') + assert not dbe.eval('NOTCONTAINS(TRUE, "ru")') + assert not dbe.eval('NOTCONTAINS(#2019-03-04#, "019")') + assert not dbe.eval('NOTCONTAINS(#2019-03-04T12:34:56#, "019")') + @pytest.mark.parametrize("value_fl,pattern_fl,expected", CONTAINS_TESTS) def test_contains_extended( self, @@ -218,6 +234,29 @@ def test_contains_extended( statement = "CONTAINS(__LIT__({}), __LIT__({}))".format(value_fl, pattern_fl) assert dbe.eval(statement) is expected, (statement, expected) + @pytest.mark.parametrize("value_fl,pattern_fl,expected", CONTAINS_TESTS) + def test_notcontains_extended( + self, + dbe: DbEvaluator, + data_table: sa.Table, + forced_literal_use: Any, + value_fl: str, + pattern_fl: str, + expected: bool, + ) -> None: + if self.empty_str_is_null and pattern_fl == '""': + # hopeless? 
+ return + + # const: + statement = "NOTCONTAINS(__LIT__({}), {})".format(value_fl, pattern_fl) + a = dbe.eval(statement) + assert dbe.eval(statement) is not expected, (statement, expected) + + # var: + statement = "NOTCONTAINS(__LIT__({}), __LIT__({}))".format(value_fl, pattern_fl) + assert dbe.eval(statement) is not expected, (statement, expected) + def test_icontains_simple(self, dbe: DbEvaluator, forced_literal_use: Any) -> None: assert dbe.eval('ICONTAINS("Lorem ipsum", "IPS")') assert not dbe.eval('ICONTAINS("Lorem ipsum", "ABC")') diff --git a/lib/dl_query_processing/dl_query_processing/compilation/filter_compiler.py b/lib/dl_query_processing/dl_query_processing/compilation/filter_compiler.py index ab5688659..18174a573 100644 --- a/lib/dl_query_processing/dl_query_processing/compilation/filter_compiler.py +++ b/lib/dl_query_processing/dl_query_processing/compilation/filter_compiler.py @@ -39,8 +39,6 @@ LOGGER = logging.getLogger(__name__) -USE_DATE_TO_DATETIME_CONV = os.environ.get("USE_DATE_TO_DATETIME_CONV", "1") == "1" - _FILTER_PARAMS_TV = TypeVar("_FILTER_PARAMS_TV", bound="FilterParams") @@ -111,9 +109,7 @@ class FilterFormulaCompiler: WhereClauseOperation.IENDSWITH: FilterDefinition(arg_cnt=1, callable=n.func.IENDSWITH), WhereClauseOperation.CONTAINS: FilterDefinition(arg_cnt=1, callable=n.func.CONTAINS), WhereClauseOperation.ICONTAINS: FilterDefinition(arg_cnt=1, callable=n.func.ICONTAINS), - WhereClauseOperation.NOTCONTAINS: FilterDefinition( - arg_cnt=1, callable=lambda f, val: n.not_(n.func.CONTAINS(f, val)) - ), + WhereClauseOperation.NOTCONTAINS: FilterDefinition(arg_cnt=1, callable=n.func.NOTCONTAINS), WhereClauseOperation.NOTICONTAINS: FilterDefinition( arg_cnt=1, callable=lambda f, val: n.not_(n.func.ICONTAINS(f, val)) ), @@ -404,6 +400,5 @@ def _mangle_array_filter(self, filter_params: FilterParams) -> FilterParams: def _custom_filter_cast(self, filter_params: FilterParams) -> FilterParams: filter_params = 
self._mangle_containment_filter(filter_params) filter_params = self._mangle_array_filter(filter_params) - if USE_DATE_TO_DATETIME_CONV: - filter_params = self._mangle_date_filter(filter_params) + filter_params = self._mangle_date_filter(filter_params) return filter_params diff --git a/lib/dl_query_processing/dl_query_processing/compilation/formula_compiler.py b/lib/dl_query_processing/dl_query_processing/compilation/formula_compiler.py index cfda049fe..b240de645 100644 --- a/lib/dl_query_processing/dl_query_processing/compilation/formula_compiler.py +++ b/lib/dl_query_processing/dl_query_processing/compilation/formula_compiler.py @@ -28,12 +28,12 @@ from dl_constants.enums import ( AggregationFunction, BinaryJoinOperator, - BIType, CalcMode, ConditionPartCalcMode, FieldType, ManagedBy, OrderDirection, + UserDataType, ) from dl_core.components.ids import ( AvatarId, @@ -203,7 +203,7 @@ def get_result(self, field: BIField, stage: ProcessingStage) -> Optional[formula def get_data_type(self, field: BIField, stage: ProcessingStage) -> DataType: return self._data_types[field.guid][stage] - def get_user_type(self, field: BIField, stage: ProcessingStage) -> BIType: + def get_user_type(self, field: BIField, stage: ProcessingStage) -> UserDataType: return FORMULA_TO_BI_TYPES[self.get_data_type(field=field, stage=stage)] def raise_if_any(self, field: BIField, stage: ProcessingStage) -> None: @@ -302,28 +302,28 @@ def _unsupported_cast(typename): # type: ignore # TODO: fix _SUPPORTED_CASTS_FUNCTIONS = { - BIType.boolean: "bool", - BIType.date: "date", - BIType.datetime: "datetime", - BIType.genericdatetime: "genericdatetime", - BIType.float: "float", - BIType.integer: "int", - BIType.geopoint: "geopoint", - BIType.geopolygon: "geopolygon", - BIType.string: "str", - BIType.markup: "markup", + UserDataType.boolean: "bool", + UserDataType.date: "date", + UserDataType.datetime: "datetime", + UserDataType.genericdatetime: "genericdatetime", + UserDataType.float: "float", + 
UserDataType.integer: "int", + UserDataType.geopoint: "geopoint", + UserDataType.geopolygon: "geopolygon", + UserDataType.string: "str", + UserDataType.markup: "markup", } _CAST_FUNCTIONS = { - bi_type: _SUPPORTED_CASTS_FUNCTIONS.get(bi_type) or _unsupported_cast(bi_type.name) for bi_type in BIType + bi_type: _SUPPORTED_CASTS_FUNCTIONS.get(bi_type) or _unsupported_cast(bi_type.name) for bi_type in UserDataType } _ALLOWED_PARAMETER_TYPES = { - BIType.string, - BIType.integer, - BIType.float, - BIType.boolean, - BIType.date, - BIType.datetime, - BIType.genericdatetime, + UserDataType.string, + UserDataType.integer, + UserDataType.float, + UserDataType.boolean, + UserDataType.date, + UserDataType.datetime, + UserDataType.genericdatetime, } @@ -824,7 +824,7 @@ def _apply_cast( self, formula_obj: formula_nodes.Formula, current_dtype: DataType, - cast: Optional[BIType], + cast: Optional[UserDataType], ) -> formula_nodes.Formula: """Apply a type cast to given expression""" @@ -838,7 +838,7 @@ def apply_cast_to_formula( self, formula_obj: formula_nodes.Formula, current_dtype: DataType, - cast: Optional[BIType], + cast: Optional[UserDataType], ) -> formula_nodes.Formula: return self._apply_cast(formula_obj=formula_obj, current_dtype=current_dtype, cast=cast) @@ -963,14 +963,14 @@ def get_field_validity(self, field: BIField) -> bool: """Return boolean flag indicating whether the field is valid.""" return not self.get_field_errors(field) - def get_field_initial_data_type(self, field: BIField) -> Optional[BIType]: + def get_field_initial_data_type(self, field: BIField) -> Optional[UserDataType]: """Return automatically determined data type of given field before cast and aggregation""" self._require_field_formula_preparation(field) return self._stage_manager.get_user_type(field=field, stage=ProcessingStage.substitution) - def get_field_final_data_type(self, field: BIField) -> Optional[BIType]: + def get_field_final_data_type(self, field: BIField) -> Optional[UserDataType]: """ 
- Return automatically determined user data type (``BIType``) + Return automatically determined user data type (``UserDataType``) of given field after cast and aggregation """ self._require_field_formula_preparation(field) @@ -984,7 +984,7 @@ def get_field_final_formula_data_type(self, field: BIField) -> Optional[DataType self._require_field_formula_preparation(field) return self._stage_manager.get_data_type(field=field, stage=ProcessingStage.aggregation) - def get_field_type(self, field: BIField) -> Optional[BIType]: + def get_field_type(self, field: BIField) -> Optional[UserDataType]: """Return automatically determined field type""" self._require_field_formula_preparation(field) return self._field_types.get(field.guid, FieldType.DIMENSION) # type: ignore # TODO: fix diff --git a/lib/dl_query_processing/dl_query_processing/compilation/type_mapping.py b/lib/dl_query_processing/dl_query_processing/compilation/type_mapping.py index 623836dfa..65a80eaf2 100644 --- a/lib/dl_query_processing/dl_query_processing/compilation/type_mapping.py +++ b/lib/dl_query_processing/dl_query_processing/compilation/type_mapping.py @@ -1,31 +1,31 @@ from __future__ import annotations -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_formula.core.datatype import DataType BI_TO_FORMULA_TYPES = { - BIType.integer: DataType.INTEGER, - BIType.float: DataType.FLOAT, - BIType.boolean: DataType.BOOLEAN, - BIType.string: DataType.STRING, - BIType.date: DataType.DATE, - BIType.datetime: DataType.DATETIME, - BIType.datetimetz: DataType.DATETIMETZ, - BIType.genericdatetime: DataType.GENERICDATETIME, - BIType.geopoint: DataType.GEOPOINT, - BIType.geopolygon: DataType.GEOPOLYGON, - BIType.uuid: DataType.UUID, - BIType.markup: DataType.MARKUP, - BIType.unsupported: DataType.UNSUPPORTED, - BIType.array_float: DataType.ARRAY_FLOAT, - BIType.array_int: DataType.ARRAY_INT, - BIType.array_str: DataType.ARRAY_STR, - BIType.tree_str: DataType.TREE_STR, + 
UserDataType.integer: DataType.INTEGER, + UserDataType.float: DataType.FLOAT, + UserDataType.boolean: DataType.BOOLEAN, + UserDataType.string: DataType.STRING, + UserDataType.date: DataType.DATE, + UserDataType.datetime: DataType.DATETIME, + UserDataType.datetimetz: DataType.DATETIMETZ, + UserDataType.genericdatetime: DataType.GENERICDATETIME, + UserDataType.geopoint: DataType.GEOPOINT, + UserDataType.geopolygon: DataType.GEOPOLYGON, + UserDataType.uuid: DataType.UUID, + UserDataType.markup: DataType.MARKUP, + UserDataType.unsupported: DataType.UNSUPPORTED, + UserDataType.array_float: DataType.ARRAY_FLOAT, + UserDataType.array_int: DataType.ARRAY_INT, + UserDataType.array_str: DataType.ARRAY_STR, + UserDataType.tree_str: DataType.TREE_STR, } FORMULA_TO_BI_TYPES = { **{ft: bit for bit, ft in BI_TO_FORMULA_TYPES.items()}, **{ft.const_type: bit for bit, ft in BI_TO_FORMULA_TYPES.items()}, - DataType.NULL: BIType.string, # NULL can in theory be any type, but we need to choose one + DataType.NULL: UserDataType.string, # NULL can in theory be any type, but we need to choose one } DEFAULT_DATA_TYPE = DataType.STRING diff --git a/lib/dl_query_processing/dl_query_processing/exc.py b/lib/dl_query_processing/dl_query_processing/exc.py index 2a62f7883..b661d44dc 100644 --- a/lib/dl_query_processing/dl_query_processing/exc.py +++ b/lib/dl_query_processing/dl_query_processing/exc.py @@ -22,10 +22,6 @@ class LogicError(DLBaseException): pass -class ObligatoryFilterMissing(DLBaseException): - err_code = DLBaseException.err_code + ["OBLIG_FILTER_MISSING"] - - class InvalidGroupByConfiguration(DLBaseException): err_code = DLBaseException.err_code + ["INVALID_GROUP_BY_CONFIGURATION"] default_message = "Invalid GROUP BY configuration." 
diff --git a/lib/dl_query_processing/dl_query_processing/legend/field_legend.py b/lib/dl_query_processing/dl_query_processing/legend/field_legend.py index ae2872ea4..b51c46925 100644 --- a/lib/dl_query_processing/dl_query_processing/legend/field_legend.py +++ b/lib/dl_query_processing/dl_query_processing/legend/field_legend.py @@ -14,13 +14,13 @@ import attr from dl_constants.enums import ( - BIType, FieldRole, FieldType, FieldVisibility, LegendItemType, OrderDirection, RangeType, + UserDataType, WhereClauseOperation, ) from dl_constants.internal_constants import ( @@ -115,7 +115,7 @@ class LegendItem: legend_item_id: int = attr.ib(kw_only=True) obj: ObjSpec = attr.ib(kw_only=True) role_spec: RoleSpec = attr.ib(kw_only=True, factory=RoleSpec) - data_type: BIType = attr.ib(kw_only=True) + data_type: UserDataType = attr.ib(kw_only=True) field_type: FieldType = attr.ib(kw_only=True) block_id: Optional[int] = attr.ib(kw_only=True, default=None) diff --git a/lib/dl_query_processing/dl_query_processing/multi_query/factory.py b/lib/dl_query_processing/dl_query_processing/multi_query/factory.py index c7703513e..ee005dc68 100644 --- a/lib/dl_query_processing/dl_query_processing/multi_query/factory.py +++ b/lib/dl_query_processing/dl_query_processing/multi_query/factory.py @@ -34,7 +34,7 @@ def get_mutators(self) -> list[MultiQueryMutatorBase]: @attr.s -class DefaultNativeWFMultiQueryMutatorFactory(MultiQueryMutatorFactoryBase): +class NoCompengMultiQueryMutatorFactory(MultiQueryMutatorFactoryBase): def get_mutators(self) -> list[MultiQueryMutatorBase]: return [ SplitterMultiQueryMutator( diff --git a/lib/dl_query_processing/dl_query_processing/postprocessing/postprocessors/all.py b/lib/dl_query_processing/dl_query_processing/postprocessing/postprocessors/all.py index 0532f298a..dd9238dc6 100644 --- a/lib/dl_query_processing/dl_query_processing/postprocessing/postprocessors/all.py +++ b/lib/dl_query_processing/dl_query_processing/postprocessing/postprocessors/all.py @@ -10,7 
+10,7 @@ Sequence, ) -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_formula.core.datatype import DataType from dl_query_processing.postprocessing.postprocessors.datetime import ( make_postprocess_datetimetz, @@ -42,16 +42,16 @@ def postprocess_array(value: Optional[Iterable[Any]]) -> Optional[Iterable[Optio TYPE_PROCESSORS = { - BIType.datetime: postprocess_datetime, - # parametrized: BIType.datetimetz - BIType.genericdatetime: postprocess_genericdatetime, - BIType.geopoint: postprocess_geopoint, - BIType.geopolygon: postprocess_geopolygon, - BIType.markup: postprocess_markup, - BIType.array_int: postprocess_array, - BIType.array_float: postprocess_array, - BIType.array_str: postprocess_array, - BIType.tree_str: postprocess_array, + UserDataType.datetime: postprocess_datetime, + # parametrized: UserDataType.datetimetz + UserDataType.genericdatetime: postprocess_genericdatetime, + UserDataType.geopoint: postprocess_geopoint, + UserDataType.geopolygon: postprocess_geopolygon, + UserDataType.markup: postprocess_markup, + UserDataType.array_int: postprocess_array, + UserDataType.array_float: postprocess_array, + UserDataType.array_str: postprocess_array, + UserDataType.tree_str: postprocess_array, } @@ -65,7 +65,7 @@ def get_type_processor(field_type_info: Optional[DetailedType]) -> Callable[[Any return result # type: ignore # TODO: fix # Parmetrized - if field_type_info.data_type == BIType.datetimetz: + if field_type_info.data_type == UserDataType.datetimetz: assert field_type_info.formula_data_type == DataType.DATETIMETZ assert field_type_info.formula_data_type_params assert field_type_info.formula_data_type_params.timezone diff --git a/lib/dl_query_processing/dl_query_processing/translation/primitives.py b/lib/dl_query_processing/dl_query_processing/translation/primitives.py index 712f4187c..e203734ba 100644 --- a/lib/dl_query_processing/dl_query_processing/translation/primitives.py +++ 
b/lib/dl_query_processing/dl_query_processing/translation/primitives.py @@ -15,7 +15,7 @@ import attr -from dl_constants.enums import BIType +from dl_constants.enums import UserDataType from dl_core.components.ids import ( AvatarId, FieldId, @@ -38,7 +38,7 @@ class DetailedType(NamedTuple): field_id: str - data_type: BIType + data_type: UserDataType # TODO: native_type: Optional[GenericNativeType] = None formula_data_type: Optional[DataType] = None formula_data_type_params: Optional[DataTypeParams] = None diff --git a/lib/dl_query_processing/dl_query_processing/utils/datetime.py b/lib/dl_query_processing/dl_query_processing/utils/datetime.py index 66fabd7b0..80312d93a 100644 --- a/lib/dl_query_processing/dl_query_processing/utils/datetime.py +++ b/lib/dl_query_processing/dl_query_processing/utils/datetime.py @@ -9,7 +9,7 @@ def parse_datetime(value: str) -> datetime.datetime: """ Parse an ISO8601 datetime value, e.g. from API parameters. - See also: `bi_formula.utils.datetime.parse_dt_string` + See also: `dl_formula.utils.datetime.parse_dt_string` >>> parse_datetime('2020-01-02T03:04:05') datetime.datetime(2020, 1, 2, 3, 4, 5) diff --git a/lib/dl_sqlalchemy_bitrix/LICENSE b/lib/dl_sqlalchemy_bitrix/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_sqlalchemy_bitrix/README.md b/lib/dl_sqlalchemy_bitrix/README.md new file mode 100644 index 000000000..816070f03 --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/README.md @@ -0,0 +1 @@ +# dl_sqlalchemy_bitrix diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix/__init__.py b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix/base.py b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix/base.py new file mode 100644 index 000000000..f5f496a8b --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix/base.py @@ -0,0 +1,201 @@ +import datetime +from typing import Any + +import sqlalchemy as sa +from sqlalchemy.engine import default + + +class BitrixCompiler(sa.sql.compiler.SQLCompiler): + def visit_label( + self, + label, + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + **kw, + ): + # Labels are not supported at all + return label.element._compiler_dispatch( + self, + within_columns_clause=True, + within_label_clause=False, + **kw, + ) + + def render_literal_value(self, value, type_): + if isinstance(value, (datetime.datetime, datetime.date)): + return value.isoformat() + return super().render_literal_value(value, type_) + + +class BitrixIdentifierPreparer(sa.sql.compiler.IdentifierPreparer): + def 
__init__(self, *args, **kwargs): + quote = "`" + kwargs = { + **kwargs, + "initial_quote": quote, + "escape_quote": quote, + } + super().__init__(*args, **kwargs) + + +class BitrixDialect(default.DefaultDialect): + name = "bitrix" + poolclass = sa.pool.NullPool + + statement_compiler = BitrixCompiler + ddl_compiler = sa.sql.compiler.DDLCompiler + type_compiler = sa.sql.compiler.GenericTypeCompiler + preparer = BitrixIdentifierPreparer + + supports_alter = False + supports_comments = False + inline_comments = False + + supports_views = False + supports_sequences = False + sequences_optional = False + preexecute_autoincrement_sequences = False + postfetch_lastrowid = True + implicit_returning = False + + supports_right_nested_joins = False + cte_follows_insert = False + + supports_native_enum = False + supports_native_boolean = False # Uncertain + non_native_boolean_check_constraint = False + + supports_simple_order_by_label = False + + tuple_in_values = False + + supports_native_decimal = False + + supports_unicode_statements = True + supports_unicode_binds = True + returns_unicode_strings = True + description_encoding = None + + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_default_values = False + supports_empty_insert = False + supports_multivalues_insert = False + + supports_is_distinct_from = True # Uncertain + + supports_server_side_cursors = False + + supports_for_update_of = False + + # Clear out: + ischema_names: dict[str, Any] = {} + + @classmethod + def dbapi(cls): + return None # Not Applicable... if possible. 
+ + def _check_unicode_returns(self, connection, additional_tests=None): + return True + + def _check_unicode_description(self, connection): + return True + + def do_rollback(self, dbapi_connection): + pass + + def get_columns(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + return [] + + def get_indexes(self, connection, table_name, schema=None, **kw): + return [] + + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + return [] + + def _get_default_schema_name(self, *args, **kwargs): + raise Exception("Not Implemented") + + def _get_server_version_info(self, *args, **kwargs): + raise Exception("Not Implemented") + + def denormalize_name(self, *args, **kwargs): + raise Exception("Not Implemented") + + def do_begin_twophase(self, *args, **kwargs): + raise Exception("Not Implemented") + + def do_commit_twophase(self, *args, **kwargs): + raise Exception("Not Implemented") + + def do_prepare_twophase(self, *args, **kwargs): + raise Exception("Not Implemented") + + def do_recover_twophase(self, *args, **kwargs): + raise Exception("Not Implemented") + + def do_rollback_twophase(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_check_constraints(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_isolation_level(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_primary_keys(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_table_comment(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_table_names(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_temp_table_names(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_temp_view_names(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_unique_constraints(self, *args, **kwargs): + raise Exception("Not Implemented") + + def 
get_view_definition(self, *args, **kwargs): + raise Exception("Not Implemented") + + def get_view_names(self, *args, **kwargs): + raise Exception("Not Implemented") + + def has_sequence(self, *args, **kwargs): + raise Exception("Not Implemented") + + def has_table(self, *args, **kwargs): + raise Exception("Not Implemented") + + def normalize_name(self, *args, **kwargs): + raise Exception("Not Implemented") + + def set_isolation_level(self, *args, **kwargs): + raise Exception("Not Implemented") + + +dialect = BitrixDialect + + +def register_dialect( + name="bi_bitrix", + module="dl_sqlalchemy_bitrix.base", + cls="BitrixDialect", +): + """ + Make sure the dialect is registered + (normally should happen automagically because of the `entry_point`) + """ + return sa.dialects.registry.register(name, module, cls) diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/__init__.py b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/conftest.py b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/conftest.py new file mode 100644 index 000000000..7227430a0 --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/conftest.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import pytest +import sqlalchemy as sa + +from dl_sqlalchemy_bitrix.base import register_dialect + + +def pytest_configure(config): # noqa + register_dialect() + + +@pytest.fixture(scope="session") +def engine_url(): + return "bi_bitrix://" + + +@pytest.fixture(scope="session") +def sa_engine(engine_url): + return sa.create_engine(engine_url) diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/unit/__init__.py b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/unit/test_dialect.py 
b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/unit/test_dialect.py new file mode 100644 index 000000000..5060015c9 --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/dl_sqlalchemy_bitrix_tests/unit/test_dialect.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +import sqlalchemy as sa + +from dl_sqlalchemy_bitrix.base import BitrixCompiler + + +def test_engine(sa_engine): + assert sa_engine + assert sa_engine.dialect + + +def test_select_compiler(sa_engine): + query = sa.select([sa.column("COLUMN_ONE"), sa.column("COLUMN_TWO")]).where( + sa.column("DATE_CREATE") == "2000-01-01", + ) + query_compiled = str(BitrixCompiler(sa_engine.dialect, query)) + + assert not query_compiled.startswith("SELECT *") diff --git a/lib/dl_sqlalchemy_bitrix/pyproject.toml b/lib/dl_sqlalchemy_bitrix/pyproject.toml new file mode 100644 index 000000000..a049db184 --- /dev/null +++ b/lib/dl_sqlalchemy_bitrix/pyproject.toml @@ -0,0 +1,38 @@ +[tool.poetry] +name = "datalens-sqlalchemy-bitrix" +version = "0.0.1" +description = "DataLens Bitrix SQLAlchemy Dialect" +authors = ["DataLens Team "] +packages = [{include = "dl_sqlalchemy_bitrix"}] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.10, <3.12" +sqlalchemy = ">=1.4.46, <2.0" + +[tool.poetry.plugins."sqlalchemy.dialects"] +bi_bitrix = "dl_sqlalchemy_bitrix.base:BitrixDialect" + +[tool.poetry.group.tests.dependencies] +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = ["dl_sqlalchemy_bitrix_tests/unit"] + +[datalens.pytest.unit] +root_dir = "dl_sqlalchemy_bitrix_tests/" +target_path = "unit" +skip_compose = "true" + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true diff --git a/lib/dl_sqlalchemy_metrica_api/LICENSE b/lib/dl_sqlalchemy_metrica_api/LICENSE new file mode 100644 index 
000000000..74ba5f6c7 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/dl_sqlalchemy_metrica_api/README.md b/lib/dl_sqlalchemy_metrica_api/README.md new file mode 100644 index 000000000..360e05563 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/README.md @@ -0,0 +1,5 @@ +Yandex Metrika API dialect for SQLAlchemy. 
+ +https://metrika.yandex.ru/ + +https://tech.yandex.ru/metrika/doc/api2/api_v1/intro-docpage/ diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/__init__.py new file mode 100644 index 000000000..007cc95c5 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/__init__.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from dl_sqlalchemy_metrica_api import base + + +base.dialect = dialect = base.MetrikaApiDialect + + +__all__ = ("dialect",) diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_client.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_client.py new file mode 100644 index 000000000..9d9a6e19a --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_client.py @@ -0,0 +1,237 @@ +from __future__ import annotations + +import datetime +from json.decoder import JSONDecodeError +import logging +from typing import ( + List, + Optional, +) + +import requests +from requests.exceptions import RequestException + +import dl_sqlalchemy_metrica_api as package +from dl_sqlalchemy_metrica_api.exceptions import ( # noqa + ConnectionClosedException, + CursorClosedException, + DatabaseError, + DataError, + Error, + IntegrityError, + InterfaceError, + InternalError, + MetrikaApiAccessDeniedException, + MetrikaApiException, + MetrikaApiObjectNotFoundException, + MetrikaHttpApiException, + NotSupportedError, + OperationalError, + ProgrammingError, + Warning, +) + + +LOGGER = logging.getLogger(__name__) + + +METRIKA_API_HOST = "https://api-metrika.yandex.net" +APPMETRICA_API_HOST = "https://api.appmetrica.yandex.ru" + + +def _get_retriable_requests_session(): + session = requests.Session() + for schema in ["http://", "https://"]: + session.mount( + schema, + # noinspection PyUnresolvedReferences + requests.adapters.HTTPAdapter( + max_retries=requests.packages.urllib3.util.Retry( + total=5, + 
backoff_factor=0.5, + status_forcelist=[500, 501, 502, 504, 521], + redirect=10, + method_whitelist=frozenset(["HEAD", "TRACE", "GET", "PUT", "OPTIONS", "DELETE", "POST"]), + ), + ), + ) + # TODO: allow to customize UA + ua = "{}, {}".format(requests.utils.default_user_agent(), package.__name__) + session.headers.update({"User-Agent": ua}) + return session + + +def _parse_metrika_error(response): + msg = "Unknown error" + try: + resp_data = response.json() + msg = resp_data["message"] + except Exception: + LOGGER.exception("Unable to fetch error message.") + return msg + + +class MetrikaApiClient(object): + """ + Simple HTTP client for Metrika API + https://tech.yandex.ru/metrika/doc/api2/api_v1/intro-docpage/ + """ + + host = METRIKA_API_HOST + default_timeout = 60 + + def __init__(self, oauth_token: str, host: Optional[str] = None, default_timeout=-1, **kwargs): + if host is not None: + self.host = host + if default_timeout != -1: + self.default_timeout = default_timeout + self.oauth_token = oauth_token + self._session = _get_retriable_requests_session() + self._session.headers.update({"Authorization": "OAuth {}".format(oauth_token)}) + + @property + def _is_appmetrica(self): + return self.host == APPMETRICA_API_HOST + + def _request(self, method: str, uri: str, timeout: int = -1, _raw_resp: bool = False, **kwargs): + if timeout == -1: + timeout = self.default_timeout + full_url = "/".join(map(lambda s: s.strip("/"), (self.host, uri))) + + LOGGER.info( + "Requesting Metrika API: method: %s, url: %s, params:(%s), json:(%s)", + method, + full_url, + kwargs.get("params", {}), + kwargs.get("json", {}), + ) + + response = None + try: + response = self._session.request( + method, + full_url, + timeout=timeout, + allow_redirects=False, + **kwargs, + ) + LOGGER.info( + "Got %s from Metrika API (%s %s), content length: %s", + response.status_code, + method, + uri, + response.headers.get("Content-Length"), + ) + if response.status_code >= 400: + LOGGER.error( + "Metrika 
API error on %s %s (%s): %s", + method, + uri, + kwargs.get("json", {}), + response.text, + ) + # TODO: wrap 429 and maybe retry + if _raw_resp: + return response + + response.raise_for_status() + except RequestException as ex: + msg = _parse_metrika_error(response) + if response.status_code == 403: + raise MetrikaApiAccessDeniedException(msg, orig_exc=ex) from ex + elif response.status_code == 404: + raise MetrikaApiObjectNotFoundException(msg, orig_exc=ex) from ex + else: + raise MetrikaHttpApiException(msg, orig_exc=ex) from ex + + try: + parsed_resp = response.json() + except JSONDecodeError as ex: + raise MetrikaHttpApiException("Unable to parse response.", orig_exc=ex) + return parsed_resp + + def get(self, uri, **kwargs): + return self._request("GET", uri, **kwargs) + + def post(self, uri, **kwargs): + return self._request("POST", uri, **kwargs) + + def _parse_data_resp(self, resp, result_columns=None, req_metrics=None): + """ + https://tech.yandex.ru/metrika/doc/api2/api_v1/data-docpage/ + """ + LOGGER.info( + "Received data response: total_rows: %s, sample_share: %s", resp["total_rows"], resp["sample_share"] + ) + + rows = [] + try: + q_metrics = resp["query"]["metrics"] + q_dims = resp["query"]["dimensions"] + if req_metrics is not None and len(req_metrics) == 1: + req_metrics = req_metrics[0].split(",") + if len(q_metrics) != len(req_metrics): + raise MetrikaApiException("Unexpected response metrics count.") + if q_metrics != req_metrics: + LOGGER.info( + "Response query metrics not matching requested metrics: %s, %s.", + q_metrics, + req_metrics, + ) + q_metrics = req_metrics + + rc_dict = {col["name"]: col for col in result_columns} + dims_src_keys = [rc_dict.get(dim, {}).get("src_key") or "name" for dim in q_dims] + + if not result_columns: + result_columns = [dict(name=col_name, label=None) for col_name in (*q_dims, *q_metrics)] + + for slice in resp["data"]: + # TODO: date dimensions values better retrieve from 'id' key instead of 'name'? 
+ row_map = {q_dims[i]: dim_item[dims_src_keys[i]] for i, dim_item in enumerate(slice["dimensions"])} + row_map.update({q_metrics[i]: metr_val for i, metr_val in enumerate(slice["metrics"])}) + rows.append( + tuple( + col["cast_processor"](row_map[col["name"]]) if "cast_processor" in col else row_map[col["name"]] + for col in result_columns + ) + ) + except (KeyError, ValueError) as ex: + raise MetrikaApiException(orig_exc=ex) + + return dict( + fields=result_columns, + data=rows, + ) + + def get_table_data(self, params, result_columns=None, **kwargs): + resp = self.get("/stat/v1/data", params=params, **kwargs) + return self._parse_data_resp( + resp, + result_columns=result_columns, + req_metrics=params.get("metrics"), + ) + + def get_available_counters(self, **kwargs) -> List[dict]: + obj_name = "applications" if self._is_appmetrica else "counters" + uri = "/management/v1/{}".format(obj_name) + resp = self.get(uri, **kwargs) + return [dict(id=c_info["id"], name=c_info["name"]) for c_info in resp[obj_name]] + + def get_counter_info(self, counter_id): + """ + https://tech.yandex.ru/metrika/doc/api2/management/counters/counter-docpage/ + """ + obj_name = "application" if self._is_appmetrica else "counter" + uri = "/management/v1/{obj_name}/{counter_id}".format(obj_name=obj_name, counter_id=counter_id) + resp = self.get(uri) + return resp[obj_name] + + def get_counter_creation_date(self, counter_id) -> datetime.date: + counter_info = self.get_counter_info(counter_id) + try: + date_str = counter_info.get("create_time", counter_info.get("create_date")).split("T")[0] + creation_date = datetime.datetime.strptime(date_str, "%Y-%m-%d").date() + except (ValueError, KeyError) as ex: + raise MetrikaApiException(orig_exc=ex) + return creation_date diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/__init__.py new file mode 100644 index 000000000..8326d3acf --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/__init__.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +from enum import Enum + +from dl_sqlalchemy_metrica_api.api_info.appmetrica.audience import audience_fields +from dl_sqlalchemy_metrica_api.api_info.appmetrica.audience_socdem import audience_socdem_fields +from dl_sqlalchemy_metrica_api.api_info.appmetrica.client_events import client_events_fields +from dl_sqlalchemy_metrica_api.api_info.appmetrica.crash_events import crash_events_fields +from dl_sqlalchemy_metrica_api.api_info.appmetrica.installs import installs_fields +from dl_sqlalchemy_metrica_api.api_info.appmetrica.push_events import push_events_fields + + +class AppMetricaFieldsNamespaces(Enum): + installs = "installs" + push_events = "push_events" + client_events = "client_events" + audience = "audience" + audience_socdem = "audience_socdem" + crash_events = "crash_events" + + +metrica_fields_namespaces = AppMetricaFieldsNamespaces + + +fields_by_namespace = { + AppMetricaFieldsNamespaces.installs: installs_fields, + AppMetricaFieldsNamespaces.push_events: push_events_fields, + AppMetricaFieldsNamespaces.client_events: client_events_fields, + AppMetricaFieldsNamespaces.audience: audience_fields, + AppMetricaFieldsNamespaces.audience_socdem: audience_socdem_fields, + AppMetricaFieldsNamespaces.crash_events: crash_events_fields, +} + + +fields_by_name = {f["name"]: f for f in sum(fields_by_namespace.values(), [])} + + +metrics_by_namespace = { + cs: list(filter(lambda f: not f["is_dim"], fields)) for cs, fields in fields_by_namespace.items() +} + + +namespace_by_prefix = { + "ym:ts": AppMetricaFieldsNamespaces.installs, + "ym:pc": AppMetricaFieldsNamespaces.push_events, + 
"ym:ce": AppMetricaFieldsNamespaces.client_events, + "ym:u": AppMetricaFieldsNamespaces.audience, + "ym:cr2": AppMetricaFieldsNamespaces.crash_events, +} + + +def get_namespace_by_name(name): + name_parts = name.split(":", 2) + if len(name_parts) != 3: + raise ValueError('Unexpected name format: "%s".' % name) + prefix = ":".join(name_parts[:2]) + if prefix not in namespace_by_prefix: + raise ValueError('Unknown field prefix: "%s"' % prefix) + return namespace_by_prefix[prefix] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience.py new file mode 100644 index 000000000..0bc5ba27f --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience.py @@ -0,0 +1,324 @@ +audience_fields = [ + { + "description": "", + "is_dim": False, + "name": "ym:u:sessions", + "title": "Количество сессий", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:u:newUsers", + "title": "Новые пользователи", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:u:activeUsers", + "title": "Пользователи", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:isRooted", + "title": "Root-статус", + "type": "string", + }, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:u:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:UUID", + "title": "UUID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:operatingSystemVersionInfo", + "title": "Версия платформы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": 
"ym:u:appVersion", + "title": "Версия приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:year", + "title": "Год", + "type": "integer", + }, + { + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:u:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:date", + "title": "Дата", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:u:startOfYear", + "title": "Дата (год)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:u:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:startOfQuarter", + "title": "Дата (квартал)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:u:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:u:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:u:startOfHour", + "title": "Дата (час)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:dekaminute", + "title": "Декаминута", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:dayOfMonth", + "title": "День месяца", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:dayOfWeekName", + "title": "День недели", + "type": "string", + }, 
+ { + "description": None, + "is_dim": True, + "name": "ym:u:appID", + "title": "Идентификатор приложения (в магазине приложений)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:googleAID", + "title": "Идентификатор рекламы Google", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:iosIFA", + "title": "Идентификатор рекламы на iOS", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:deviceID", + "title": "Идентификатор, полученный средствами системного API или выдаваемый Яндексом", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:clientKitVersionDetails", + "title": "Информация о версии SDK", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:month", + "title": "Месяц", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:minute", + "title": "Минута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mobileDeviceModel", + "title": "Модель", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:buildNumber", + "title": "Номер сборки приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:operatingSystemInfoName", + "title": 
"Операционная система", + "type": "string", + }, + { + "description": 'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + "name": "ym:u:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mobileDeviceBranding", + "title": "Производитель", + "type": "string", + }, + { + "description": 'Подробная информация о разрешении экрана. Например, "2560x1440 px (xxxhdpi)".', + "is_dim": True, + "name": "ym:u:screenResolutionDetailed", + "title": "Разрешение (подробно)", + "type": "string", + }, + { + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:u:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:connectionType", + "title": "Тип подключения: Cellular или Wi-Fi", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:networkType", + "title": "Тип сети: 3G, EDGE и т.д.", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:deviceTypeName", + "title": "Тип устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:device", + "title": "Устройство", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:hour", + "title": "Час", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:hourMinute", + "title": "Час и минута", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:locale", + "title": "Язык интерфейса", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience_socdem.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience_socdem.py new file mode 100644 index 000000000..47c592364 --- /dev/null +++ 
b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/audience_socdem.py @@ -0,0 +1,189 @@ +audience_socdem_fields = [ + {"description": "", "is_dim": False, "name": "ym:u:sessions", "title": "Количество сессий", "type": "integer"}, + {"description": "", "is_dim": False, "name": "ym:u:newUsers", "title": "Новые пользователи", "type": "integer"}, + {"description": "", "is_dim": False, "name": "ym:u:activeUsers", "title": "Пользователи", "type": "integer"}, + { + "description": 'Пол посетителя. Возможные значения: "мужской" или "женский".', + "is_dim": True, + "name": "ym:u:genderName", + "title": "Пол", + "type": "string", + }, + { + "description": "Возраст посетителя, интервал", + "is_dim": True, + "name": "ym:u:ageIntervalName", + "title": "Возраст", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:isRooted", "title": "Root-статус", "type": "string"}, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:u:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:operatingSystemVersionInfo", + "title": "Версия платформы", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:appVersion", "title": "Версия приложения", "type": "string"}, + {"description": None, "is_dim": True, "name": "ym:u:year", "title": "Год", "type": "integer"}, + { + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:u:regionCityName", + "title": "Город", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:date", "title": "Дата", "type": "date"}, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:u:startOfYear", + "title": "Дата 
(год)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:u:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + }, + {"description": None, "is_dim": True, "name": "ym:u:startOfQuarter", "title": "Дата (квартал)", "type": "date"}, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:u:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:u:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:u:startOfHour", + "title": "Дата (час)", + "type": "datetime", + }, + {"description": None, "is_dim": True, "name": "ym:u:dekaminute", "title": "Декаминута", "type": "string"}, + {"description": None, "is_dim": True, "name": "ym:u:dayOfMonth", "title": "День месяца", "type": "integer"}, + {"description": None, "is_dim": True, "name": "ym:u:dayOfWeekName", "title": "День недели", "type": "string"}, + { + "description": None, + "is_dim": True, + "name": "ym:u:appID", + "title": "Идентификатор приложения (в магазине приложений)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:clientKitVersionDetails", + "title": "Информация о версии SDK", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:month", "title": "Месяц", "type": "integer"}, + {"description": None, "is_dim": True, "name": "ym:u:minute", "title": "Минута", "type": "integer"}, + {"description": None, "is_dim": True, 
"name": "ym:u:mobileDeviceModel", "title": "Модель", "type": "string"}, + { + "description": None, + "is_dim": True, + "name": "ym:u:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:buildNumber", + "title": "Номер сборки приложения", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:regionAreaName", "title": "Область", "type": "string"}, + { + "description": None, + "is_dim": True, + "name": "ym:u:operatingSystemInfoName", + "title": "Операционная система", + "type": "string", + }, + { + "description": 'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + "name": "ym:u:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:mobileDeviceBranding", + "title": "Производитель", + "type": "string", + }, + { + "description": 'Подробная информация о разрешении экрана. 
Например, "2560x1440 px (xxxhdpi)".', + "is_dim": True, + "name": "ym:u:screenResolutionDetailed", + "title": "Разрешение (подробно)", + "type": "string", + }, + { + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:u:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:connectionType", + "title": "Тип подключения: Cellular или Wi-Fi", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:u:networkType", + "title": "Тип сети: 3G, EDGE и т.д.", + "type": "string", + }, + {"description": None, "is_dim": True, "name": "ym:u:deviceTypeName", "title": "Тип устройства", "type": "string"}, + {"description": None, "is_dim": True, "name": "ym:u:hour", "title": "Час", "type": "string"}, + {"description": None, "is_dim": True, "name": "ym:u:hourMinute", "title": "Час и минута", "type": "string"}, + {"description": None, "is_dim": True, "name": "ym:u:locale", "title": "Язык интерфейса", "type": "string"}, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/client_events.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/client_events.py new file mode 100644 index 000000000..a1a322d89 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/client_events.py @@ -0,0 +1,380 @@ +client_events_fields = [ + { + "description": "", + "is_dim": False, + "name": "ym:ce:users", + "title": "Количество пользователей", + "type": "integer", + }, + { + "description": "Количество клиентских событий", + "is_dim": False, + "name": "ym:ce:clientEvents", + "title": "Количество событий", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:ce:devices", + "title": "Количество устройств", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:isRooted", + "title": 
"Root-статус", + "type": "string", + }, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:ce:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:UUID", + "title": "UUID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:operatingSystemVersionInfo", + "title": "Версия платформы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:appVersion", + "title": "Версия приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:year", + "title": "Год", + "type": "integer", + }, + { + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ce:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:date", + "title": "Дата", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:ce:startOfYear", + "title": "Дата (год)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:ce:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:startOfQuarter", + "title": "Дата (квартал)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:ce:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + 
"name": "ym:ce:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:ce:startOfHour", + "title": "Дата (час)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:dekaminute", + "title": "Декаминута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:dayOfMonth", + "title": "День месяца", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:dayOfWeekName", + "title": "День недели", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:appID", + "title": "Идентификатор приложения (в магазине приложений)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:googleAID", + "title": "Идентификатор рекламы Google", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:iosIFA", + "title": "Идентификатор рекламы на iOS", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:deviceID", + "title": "Идентификатор, полученный средствами системного API или выдаваемый Яндексом", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:clientKitVersionDetails", + "title": "Информация о версии SDK", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:month", + "title": "Месяц", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:minute", + "title": "Минута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:mobileDeviceModel", + "title": "Модель", + "type": "string", + }, + { + "description": None, + "is_dim": True, + 
"name": "ym:ce:eventLabel", + "title": "Название события", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:buildNumber", + "title": "Номер сборки приложения", + "type": "string", + }, + { + "description": "Области, к которым принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ce:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:operatingSystemInfoName", + "title": "Операционная система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsValueNormalized", + "title": "Параметры событий", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsLevel1", + "title": "Параметры событий: уровень 1", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsLevel2", + "title": "Параметры событий: уровень 2", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsLevel3", + "title": "Параметры событий: уровень 3", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsLevel4", + "title": "Параметры событий: уровень 4", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:paramsLevel5", + "title": "Параметры событий: уровень 5", + "type": "string", + }, + { + "description": 'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + "name": "ym:ce:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + }, + { + "description": None, + 
"is_dim": True, + "name": "ym:ce:mobileDeviceBranding", + "title": "Производитель", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:profileOrigin", + "title": "Профиль", + "type": "string", + }, + { + "description": 'Подробная информация о разрешении экрана. Например, "2560x1440 px (xxxhdpi)".', + "is_dim": True, + "name": "ym:ce:screenResolutionDetailed", + "title": "Разрешение (подробно)", + "type": "string", + }, + { + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ce:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:connectionType", + "title": "Тип подключения: Cellular или Wi-Fi", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:networkType", + "title": "Тип сети: 3G, EDGE и т.д.", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:deviceTypeName", + "title": "Тип устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:device", + "title": "Устройство", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:hour", + "title": "Час", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:hourMinute", + "title": "Час и минута", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ce:locale", + "title": "Язык интерфейса", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/crash_events.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/crash_events.py new file mode 100644 index 000000000..d272dcafc --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/crash_events.py @@ -0,0 +1,359 @@ +crash_events_fields = [ + { + "name": "ym:cr2:crashes", + 
"title": "Количество крэшей", + "type": "integer", + "description": None, + "is_dim": False, + }, + { + "name": "ym:cr2:crashDevices", + "title": "Количество устройств, на которых возникли аварийные остановки приложения", + "type": "integer", + "description": None, + "is_dim": False, + }, + { + "name": "ym:cr2:crashesFreeDevicesPercentage", + "title": "Процент устройств без крэшей", + "type": "float", + "description": None, + "is_dim": False, + }, + { + "name": "ym:cr2:crashesDevicesPercentage", + "title": "Процент устройств с крэшами", + "type": "float", + "description": None, + "is_dim": False, + }, + { + "name": "ym:cr2:isRooted", + "title": "Root-статус", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:cr2:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "name": "ym:cr2:UUID", + "title": "UUID приложения", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:appVersion", + "title": "Версия приложения", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:year", + "title": "Год", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:regionCityName", + "title": "Город", + "type": "string", + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + }, + { + "name": "ym:cr2:date", + "title": "Дата", + "type": "date", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:startOfYear", + "title": "Дата (год)", + "type": "date", + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + }, + { + "name": "ym:cr2:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + "description": "Дата и 
время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + }, + { + "name": "ym:cr2:startOfQuarter", + "title": "Дата (квартал)", + "type": "date", + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала квартала.", + "is_dim": True, + }, + { + "name": "ym:cr2:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + }, + { + "name": "ym:cr2:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + }, + { + "name": "ym:cr2:startOfHour", + "title": "Дата (час)", + "type": "datetime", + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + }, + { + "name": "ym:cr2:dekaminute", + "title": "Декаминута", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:dayOfMonth", + "title": "День месяца", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:dayOfWeekName", + "title": "День недели", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:crashGroupName", + "title": "Идентификатор группы крэшей", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:appID", + "title": "Идентификатор приложения (в магазине приложений)", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:googleAID", + "title": "Идентификатор рекламы Google", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:iosIFA", + "title": "Идентификатор рекламы на iOS", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:deviceID", + "title": "Идентификатор, полученный средствами системного API или выдаваемый 
Яндексом", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:crashMethodName", + "title": "Имя метода исходного кода, в котором произошёл крэш", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:crashBinaryName", + "title": "Имя образа, в котором произошёл крэш", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:crashFileName", + "title": "Имя файла исходного кода, в котором произошёл крэш", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:clientKitVersionDetails", + "title": "Информация о версии SDK", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:month", + "title": "Месяц", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:minute", + "title": "Минута", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:mobileDeviceModel", + "title": "Модель", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:buildNumber", + "title": "Номер сборки приложения", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:crashSourceLine", + "title": "Номер строки исходного кода, в котором произошёл крэш", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:regionAreaName", + "title": "Область", + "type": "string", + 
"description": "Области, в которых находятся посетители сайта.", + "is_dim": True, + }, + { + "name": "ym:cr2:operatingSystemInfoName", + "title": "Операционная система", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + "description": 'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + }, + { + "name": "ym:cr2:mobileDeviceBranding", + "title": "Производитель", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:profileOrigin", + "title": "Профиль", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:screenResolutionDetailed", + "title": "Разрешение (подробно)", + "type": "string", + "description": 'Подробная информация о разрешении экрана. Например, "2560x1440 px (xxxhdpi)".', + "is_dim": True, + }, + { + "name": "ym:cr2:regionCountryName", + "title": "Страна", + "type": "string", + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + }, + { + "name": "ym:cr2:connectionType", + "title": "Тип подключения: Cellular или Wi-Fi", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:networkType", + "title": "Тип сети: 3G, EDGE и т.д.", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:deviceTypeName", + "title": "Тип устройства", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:device", + "title": "Устройство", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:hourMinute", + "title": "Час и минута", + "type": "string", + "description": None, + "is_dim": True, + }, + { + "name": "ym:cr2:locale", + "title": "Язык интерфейса", + "type": "string", + "description": None, + "is_dim": True, + }, +] diff --git 
a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/installs.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/installs.py new file mode 100644 index 000000000..eab7e298a --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/installs.py @@ -0,0 +1,303 @@ +installs_fields = [ + { + "description": None, + "is_dim": False, + "name": "ym:ts:openDevices", + "title": "Deeplinks", + "type": "integer", + }, + { + "description": "Новые установки + реатрибутированные установки", + "is_dim": False, + "name": "ym:ts:advInstallDevices", + "title": "Все установки", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ts:userClicks", + "title": "Клики", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ts:userConversion", + "title": "Конверсия из кликов в установки", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ts:advNewInstallDevices", + "title": "Новые установки", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ts:advReattributedDevices", + "title": "Реатрибутированные установки", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:campaignID", + "title": "Tracking ID кампании", + "type": "string", + }, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:ts:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:UUID", + "title": "UUID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:operatingSystemVersionInfo", + "title": "Версия платформы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": 
"ym:ts:appVersion", + "title": "Версия приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:year", + "title": "Год", + "type": "integer", + }, + { + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ts:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:date", + "title": "Дата", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:ts:startOfYear", + "title": "Дата (год)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:ts:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:startOfQuarter", + "title": "Дата (квартал)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:ts:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:ts:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:ts:startOfHour", + "title": "Дата (час)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:dekaminute", + "title": "Декаминута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:dayOfMonth", + "title": "День месяца", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:dayOfWeekName", + "title": "День недели", + "type": 
"string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:urlParamValue", + "title": "Значения параметров трекинговой ссылки", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:googleAID", + "title": "Идентификатор рекламы Google", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:iosIFA", + "title": "Идентификатор рекламы на iOS", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:deviceID", + "title": "Идентификатор, полученный средствами системного API или выдаваемый Яндексом", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:urlParamKey", + "title": "Имена параметров трекинговой ссылки", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:campaignName", + "title": "Кампания", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:month", + "title": "Месяц", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:minute", + "title": "Минута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + }, + { + "description": "Области, к которым принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ts:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:operatingSystemInfoName", + "title": "Операционная система", + "type": "string", + }, + { + "description": 
'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + "name": "ym:ts:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:publisherName", + "title": "Рекламная сеть", + "type": "string", + }, + { + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:ts:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:device", + "title": "Устройство", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:hour", + "title": "Час", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ts:hourMinute", + "title": "Час и минута", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/push_events.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/push_events.py new file mode 100644 index 000000000..c7dd69a6c --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/appmetrica/push_events.py @@ -0,0 +1,366 @@ +push_events_fields = [ + { + "description": "", + "is_dim": False, + "name": "ym:pc:devices", + "title": "Количество устройств", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:pc:receivedDevices", + "title": "Количество устройств, которые получили пуш", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:pc:sentDevices", + "title": "Количество устройств, на которые был отправлен пуш", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:pc:openedDevices", + "title": "Количество устройств, открывших пуш", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:isRooted", + "title": "Root-статус", + "type": "string", + }, + { + 
"description": None, + "is_dim": True, + "name": "ym:pc:tag", + "title": "Tag push-api кампании", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:transfer", + "title": "Transfer_id push-api кампании", + "type": "string", + }, + { + "description": "Идентификатор приложения", + "is_dim": True, + "name": "ym:pc:apiKey", + "title": "ID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:UUID", + "title": "UUID приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:clientKitVersion", + "title": "Версия SDK клиента", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:operatingSystemVersionInfo", + "title": "Версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:appVersion", + "title": "Версия приложения", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:hypothesisName", + "title": "Гипотеза push-кампании", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:year", + "title": "Год", + "type": "integer", + }, + { + "description": "Название города, к которому принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:pc:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:date", + "title": "Дата", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:pc:startOfYear", + "title": "Дата (год)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:pc:startOfDekaminute", + "title": "Дата (декаминута)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:startOfQuarter", + 
"title": "Дата (квартал)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:pc:startOfMonth", + "title": "Дата (месяц)", + "type": "date", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:pc:startOfMinute", + "title": "Дата (минута)", + "type": "datetime", + }, + { + "description": "Дата и время в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:pc:startOfHour", + "title": "Дата (час)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:dekaminute", + "title": "Декаминута", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:dayOfMonth", + "title": "День месяца", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:dayOfWeekName", + "title": "День недели", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:appID", + "title": "Идентификатор приложения (в магазине приложений)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:googleAID", + "title": "Идентификатор рекламы Google", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:iosIFA", + "title": "Идентификатор рекламы на iOS", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:deviceID", + "title": "Идентификатор, полученный средствами системного API или выдаваемый Яндексом", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:groupName", + "title": "Имя группы push-api кампании", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:clientKitVersionDetails", + "title": "Информация о версии SDK", + "type": "string", + }, + { + "description": None, + "is_dim": True, + 
"name": "ym:pc:campaignName", + "title": "Кампания", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:osMajorVersionInfoName", + "title": "Мажорная версия ОС", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:month", + "title": "Месяц", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:minute", + "title": "Минута", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:mobileDeviceModel", + "title": "Модель", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:mpcName", + "title": "Название сотового оператора на основе кода сотового оператора (MPC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:mccName", + "title": "Название страны на основе мобильного кода страны оператора (MCC)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:buildNumber", + "title": "Номер сборки приложения", + "type": "string", + }, + { + "description": "Области, к которым принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:pc:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:operatingSystemInfoName", + "title": "Операционная система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:isFromCampaign", + "title": "Признак для разделения push-api кампаний и интерфейсных кампаний", + "type": "string", + }, + { + "description": 'Возможные значения: "yes", "no", "undefined".', + "is_dim": True, + "name": "ym:pc:limitAdTracking", + "title": "Признак ограничения рекламного трекинга", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:mobileDeviceBranding", + "title": "Производитель", + "type": "string", + }, + { + "description": 'Подробная информация о разрешении экрана. 
Например, "2560x1440 px (xxxhdpi)".', + "is_dim": True, + "name": "ym:pc:screenResolutionDetailed", + "title": "Разрешение (подробно)", + "type": "string", + }, + { + "description": "Название страны, к которой принадлежат посетители сайта.", + "is_dim": True, + "name": "ym:pc:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": "Возможные значения: Отправлено, Получено, Проигнорировано, Открыто, Контрольная выборка", + "is_dim": True, + "name": "ym:pc:actionTypeName", + "title": "Тип события Push", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:deviceTypeName", + "title": "Тип устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:device", + "title": "Устройство", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:hour", + "title": "Час", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:hourMinute", + "title": "Час и минута", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pc:locale", + "title": "Язык интерфейса", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/__init__.py new file mode 100644 index 000000000..cb95b1a02 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/__init__.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from enum import Enum + +from dl_sqlalchemy_metrica_api.api_info.metrika.advertising import advertising_fields +from dl_sqlalchemy_metrica_api.api_info.metrika.hits import hits_fields +from dl_sqlalchemy_metrica_api.api_info.metrika.user_param import user_param_fields +from dl_sqlalchemy_metrica_api.api_info.metrika.visits import visits_fields + + +class MetrikaApiCounterSource(Enum): + hits = "hits" + visits = 
"visits" + advertising = "advertising" + user_param = "user_param" + + +metrica_fields_namespaces = MetrikaApiCounterSource + + +fields_by_namespace = { + MetrikaApiCounterSource.hits: hits_fields, + MetrikaApiCounterSource.visits: visits_fields, + MetrikaApiCounterSource.advertising: advertising_fields, + MetrikaApiCounterSource.user_param: user_param_fields, +} + + +fields_by_name = {f["name"]: f for f in sum(fields_by_namespace.values(), [])} + + +metrics_by_namespace = { + cs: list(filter(lambda f: not f["is_dim"], fields)) for cs, fields in fields_by_namespace.items() +} + + +namespace_by_prefix = { + "ym:pv": MetrikaApiCounterSource.hits, + "ym:s": MetrikaApiCounterSource.visits, + "ym:ad": MetrikaApiCounterSource.advertising, + "ym:up": MetrikaApiCounterSource.user_param, +} + + +def get_namespace_by_name(name): + name_parts = name.split(":", 2) + if len(name_parts) != 3: + raise ValueError('Unexpected name format: "%s".' % name) + prefix = ":".join(name_parts[:2]) + if prefix not in namespace_by_prefix: + raise ValueError('Unknown field prefix: "%s"' % prefix) + return namespace_by_prefix[prefix] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/advertising.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/advertising.py new file mode 100644 index 000000000..36c676dc4 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/advertising.py @@ -0,0 +1,771 @@ +advertising_fields = [ + { + "description": "Количество посетителей, купивших хотя бы один товар, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productPurchasedUniq", + "title": "Количество посетителей, купивших товар", + "type": "integer", + }, + { + "description": "Среднее время от предпоследнего визита в днях.", + "is_dim": False, + "name": "ym:ad:userRecencyDays", + "title": "Дней от предыдущего визита", + "type": "float", + }, + { + "description": "Количество новых посетителей.", + 
"is_dim": False, + "name": "ym:ad:newUsers", + "title": "Новые посетители", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых менее 18 лет.", + "is_dim": False, + "name": "ym:ad:under18AgePercentage", + "title": "Менее 18 лет", + "type": "float", + }, + { + "description": "Доля визитов и хитов, совершенных с мобильных устройств.", + "is_dim": False, + "name": "ym:ad:mobilePercentage", + "title": "Мобильность", + "type": "float", + }, + { + "description": "Средняя продолжительность визита в минутах и секундах.", + "is_dim": False, + "name": "ym:ad:avgVisitDurationSeconds", + "title": "Время на сайте", + "type": "float", + }, + { + "description": "Сумма значений параметров визитов. Вычисляется, если в качестве значений параметров визитов передаётся число.", + "is_dim": False, + "name": "ym:ad:sumParams", + "title": "Сумма параметров визитов", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через более чем 365 дней.", + "is_dim": False, + "name": "ym:ad:overYearUserRecencyPercentage", + "title": "Вернувшиеся: более чем 365 дней", + "type": "float", + }, + { + "description": "Суммарное количество кликов по рекламе.", + "is_dim": False, + "name": "ym:ad:clicks", + "title": "Клики", + "type": "integer", + }, + { + "description": "Доля визитов и хитов с поддержкой Silverlight.", + "is_dim": False, + "name": "ym:ad:silverlightEnabledPercentage", + "title": "Поддержка Silverlight", + "type": "float", + }, + { + "description": "Количество уникальных посетителей.", + "is_dim": False, + "name": "ym:ad:users", + "title": "Посетители", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:ad:ecommercePurchases", + "title": "Количество покупок", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, возраст которых от 25 до 34 лет.", + "is_dim": False, + "name": "ym:ad:upTo34AgePercentage", + "title": "25‑34 лет", + "type": "float", + }, + { + 
"description": "Доля визитов и хитов с поддержкой JavaScript.", + "is_dim": False, + "name": "ym:ad:jsEnabledPercentage", + "title": "Поддержка JavaScript", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ad:visitsPerDay", + "title": "Визитов в день", + "type": "float", + }, + { + "description": "Количество посетителей, добавивших в корзину хотя бы один товар суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productBasketsUniq", + "title": "Количество посетителей, добавивших товар в корзину", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, известных меньше одного дня.", + "is_dim": False, + "name": "ym:ad:upToDaySinceFirstVisitPercentage", + "title": "Новизна менее одного дня", + "type": "float", + }, + { + "description": "Число просмотров страниц на сайте за отчетный период.", + "is_dim": False, + "name": "ym:ad:pageviews", + "title": "Просмотры", + "type": "integer", + }, + { + "description": "Доля визитов и хитов с поддержкой Flash.", + "is_dim": False, + "name": "ym:ad:flashEnabledPercentage", + "title": "Поддержка Flash", + "type": "float", + }, + { + "description": 'Доля визитов с меткой "GCLID".', + "is_dim": False, + "name": "ym:ad:GCLIDPercentage", + "title": "Доля GCLID", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных более 365 дней.", + "is_dim": False, + "name": "ym:ad:overYearSinceFirstVisitPercentage", + "title": "Новизна более 365 дней", + "type": "float", + }, + { + "description": "Доля посетителей, купивших товар на сайте, по отношению к общему числу посетителей.", + "is_dim": False, + "name": "ym:ad:usersPurchasePercentage", + "title": "Доля посетителей, купивших товар", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых от 45 до 55 лет.", + "is_dim": False, + "name": "ym:ad:upTo54AgePercentage", + "title": "45‑55 лет", + "type": "float", + }, + { + "description": "Количество купленных единиц товара 
суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productPurchasedQuantity", + "title": "Товаров куплено", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:ad:womanPercentage", + "title": "Доля женщин", + "type": "float", + }, + { + "description": "Доля визитов новых посетителей.", + "is_dim": False, + "name": "ym:ad:newUserVisitsPercentage", + "title": "Доля визитов новых посетителей", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 2‑7 дней.", + "is_dim": False, + "name": "ym:ad:upToWeekUserRecencyPercentage", + "title": "Вернувшиеся: 2‑7 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей с частотой визитов один раз в 1 день. Учитываются посетители, совершившие больше двух визитов, которые произошли в разные дни.", + "is_dim": False, + "name": "ym:ad:oneDayBetweenVisitsPercentage", + "title": "Периодичность 1 день", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 91 до 365 дней.", + "is_dim": False, + "name": "ym:ad:upToYearSinceFirstVisitPercentage", + "title": "Новизна 91‑365 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 4‑7 визитами.", + "is_dim": False, + "name": "ym:ad:upTo7VisitsPerUserPercentage", + "title": "4‑7 визитов посетителя", + "type": "float", + }, + { + "description": "Процент уникальных посетителей, посетивших сайт в отчетном периоде, активность которых включала их самый первый за всю историю накопления данных визит на сайт.", + "is_dim": False, + "name": "ym:ad:percentNewVisitors", + "title": "Доля новых посетителей", + "type": "float", + }, + { + "description": "Доля посетителей, предположительно являющихся роботами.", + "is_dim": False, + "name": "ym:ad:robotPercentage", + "title": "Роботность", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 32‑90 дней.", + "is_dim": False, + "name": 
"ym:ad:upToQuarterUserRecencyPercentage", + "title": "Вернувшиеся: 32‑90 дней", + "type": "float", + }, + { + "description": "Среднее время от первого визита в днях.", + "is_dim": False, + "name": "ym:ad:avgDaysSinceFirstVisit", + "title": "Дней от первого визита", + "type": "float", + }, + { + "description": "Среднее значение параметра визита. Вычисляется, если в качестве значения параметра визита передаётся число.", + "is_dim": False, + "name": "ym:ad:avgParams", + "title": "Среднее параметров визитов", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 1 до 7 дней.", + "is_dim": False, + "name": "ym:ad:upToWeekSinceFirstVisitPercentage", + "title": "Новизна 1‑7 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых от 35 до 45 лет.", + "is_dim": False, + "name": "ym:ad:upTo44AgePercentage", + "title": "35‑45 лет", + "type": "float", + }, + { + "description": "Количество единиц товара, помещённых в корзину, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productBasketsRemoveQuantity", + "title": "Товаров удалено из корзины", + "type": "integer", + }, + { + "description": "Доля визитов посетителей с одним визитом (первый визит посетителя).", + "is_dim": False, + "name": "ym:ad:oneVisitPerUserPercentage", + "title": "1 визит посетителя", + "type": "float", + }, + { + "description": ( + "Отношение доли посетителей с заданным интересом на этом сайте к " + "среднестатистической доле посетителей с этим же интересом на " + "всех сайтах интернета. Разрешена только при использовании " + 'группировки "Категория интересов".' + ), + "is_dim": False, + "name": "ym:ad:affinityIndexInterests", + "title": "Аффинити‑индекс (старая версия)", + "type": "float", + }, + { + "description": "Доля визитов посетителей с частотой визитов один раз в более чем 31 день. 
Учитываются посетители, совершившие больше двух визитов, которые произошли в разные дни.", + "is_dim": False, + "name": "ym:ad:overMonthBetweenVisitsPercentage", + "title": "Периодичность более чем 31 день", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 8‑31 день.", + "is_dim": False, + "name": "ym:ad:upToMonthUserRecencyPercentage", + "title": "Вернувшиеся: 8‑31 день", + "type": "float", + }, + { + "description": "Доля визитов посетителей, совершивших больше 32 визитов.", + "is_dim": False, + "name": "ym:ad:over32VisitsPerUserPercentage", + "title": "Более чем 32 визита посетителя", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых более 55 лет.", + "is_dim": False, + "name": "ym:ad:over54AgePercentage", + "title": "Более 55 лет", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ad:visitsPerMinute", + "title": "Визитов в минуту", + "type": "float", + }, + { + "description": "Суммарное количество визитов.", + "is_dim": False, + "name": "ym:ad:visits", + "title": "Визиты", + "type": "integer", + }, + { + "description": "", + "is_dim": False, + "name": "ym:ad:manPercentage", + "title": "Доля мужчин", + "type": "float", + }, + { + "description": "Количество просмотров страницы с информацией о товарах суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productImpressions", + "title": "Просмотры товаров", + "type": "integer", + }, + { + "description": "Количество единиц товара, добавленных в корзину, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:ad:productBasketsQuantity", + "title": "Товаров добавлено в корзину", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, известных от 8 до 31 день.", + "is_dim": False, + "name": "ym:ad:upToMonthSinceFirstVisitPercentage", + "title": "Новизна 8‑31 день", + "type": "float", + }, + { + "description": "Среднее количество дней между визитами посетителей.", + 
"is_dim": False, + "name": "ym:ad:avgDaysBetweenVisits", + "title": "Дней между визитами", + "type": "float", + }, + { + "description": "Доля визитов с блокировщиками рекламы.", + "is_dim": False, + "name": "ym:ad:blockedPercentage", + "title": "Блокировка рекламы", + "type": "float", + }, + { + "description": "Количество посетителей, просмотревших информацию хотя бы об одном товаре.", + "is_dim": False, + "name": "ym:ad:productImpressionsUniq", + "title": "Посетители, просмотревшие товар", + "type": "integer", + }, + { + "description": "Количество страниц, просмотренных посетителем во время визита.", + "is_dim": False, + "name": "ym:ad:pageDepth", + "title": "Глубина просмотра", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых от 18 до 24 лет.", + "is_dim": False, + "name": "ym:ad:upTo24AgePercentage", + "title": "18‑24 лет", + "type": "float", + }, + { + "description": "Доля визитов и хитов с включенными cookies.", + "is_dim": False, + "name": "ym:ad:cookieEnabledPercentage", + "title": "Поддержка сookies", + "type": "float", + }, + { + "description": "Доля визитов и хитов с поддержкой Java.", + "is_dim": False, + "name": "ym:ad:javaEnabledPercentage", + "title": "Поддержка Java", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 8‑31 визитами.", + "is_dim": False, + "name": "ym:ad:upTo31VisitsPerUserPercentage", + "title": "8‑31 визит посетителя", + "type": "float", + }, + { + "description": "Количество раз, когда в визите встретился параметр визита.", + "is_dim": False, + "name": "ym:ad:paramsNumber", + "title": "Количество параметров визитов", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:ad:visitsPerHour", + "title": "Визитов в час", + "type": "float", + }, + { + "description": "Доля визитов посетителей с частотой визитов один раз в 8‑31 день. Учитываются посетители, совершившие больше двух визитов. 
При этом число дней визитов больше 1 дня.", + "is_dim": False, + "name": "ym:ad:upToMonthBetweenVisitsPercentage", + "title": "Периодичность 8‑31 день", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 1 день.", + "is_dim": False, + "name": "ym:ad:upToDayUserRecencyPercentage", + "title": "Вернувшиеся: 1 день", + "type": "float", + }, + { + "description": "Доля визитов, в рамках которых состоялся лишь один просмотр страницы, продолжавшийся менее 15 секунд.", + "is_dim": False, + "name": "ym:ad:bounceRate", + "title": "Отказы", + "type": "float", + }, + { + "description": "Конверсия по любой цели.", + "is_dim": False, + "name": "ym:ad:anyGoalConversionRate", + "title": "Конверсия по любой цели", + "type": "float", + }, + { + "description": "Количество достижений любой цели.", + "is_dim": False, + "name": "ym:ad:sumGoalReachesAny", + "title": "Достижения любой цели", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 91‑365 дней.", + "is_dim": False, + "name": "ym:ad:upToYearUserRecencyPercentage", + "title": "Вернувшиеся: 91‑365 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 32 до 90 дней.", + "is_dim": False, + "name": "ym:ad:upToQuarterSinceFirstVisitPercentage", + "title": "Новизна 32‑90 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 2‑3 визитами.", + "is_dim": False, + "name": "ym:ad:upTo3VisitsPerUserPercentage", + "title": "2‑3 визита посетителя", + "type": "float", + }, + { + "description": "Доля визитов посетителей с частотой визитов один раз в 2‑7 дней. Учитываются посетители, совершившие больше двух визитов. 
При этом число дней визитов больше 1 дня.", + "is_dim": False, + "name": "ym:ad:upToWeekBetweenVisitsPercentage", + "title": "Периодичность 2‑7 дней", + "type": "float", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:counterID", + "title": "Счетчик (id)", + "type": "string", + "src_key": "id", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:counterIDName", + "title": "Счетчик", + "type": "string", + }, + { + "description": "Поисковая фраза последнего перехода по объявлению Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:lastSignDirectSearchPhrase", + "title": "Поисковая фраза (Директ) (last sign)", + "type": "string", + }, + { + "description": "Рекламная кампания Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:lastSignDirectOrder", + "title": "Кампания Яндекс.Директа (last sign)", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:ad:firstCurrencyIDName", + "title": "Валюта (first)", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:ad:lastDirectConditionType", + "title": "Тип условия показа объявления (last)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:lastDirectBanner", + "title": "Объявление Яндекс.Директа (last)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:directBanner", + "title": "Объявление Яндекс.Директа", + "type": "string", + }, + { + "description": "Округа, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:ad:regionDistrict", + "title": "Округ", + "type": "string", + }, + { + "description": "Континенты, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:ad:regionContinent", + "title": "Континент", + "type": "string", + }, + { + "description": "Категория коммерческих интересов посетителей.", + "is_dim": True, + "name": 
"ym:ad:interest", + "title": "Категория интересов", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:directBannerText", + "title": "Текст объявления", + "type": "string", + }, + { + "description": "Условие показа объявления. Условием могут быть либо ключевые слова, либо условие ретаргетинга.", + "is_dim": True, + "name": "ym:ad:firstDirectPhraseOrCond", + "title": "Условие показа объявления (first)", + "type": "string", + }, + { + "description": "Рекламная площадка Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:directPlatform", + "title": "Площадка", + "type": "string", + }, + { + "description": "Возраст посетителя, интервал", + "is_dim": True, + "name": "ym:ad:ageInterval", + "title": "Возраст", + "type": "string", + }, + { + "description": "Условие показа объявления. Условием могут быть либо ключевые слова, либо условие ретаргетинга.", + "is_dim": True, + "name": "ym:ad:lastSignDirectPhraseOrCond", + "title": "Условие показа объявления (last sign)", + "type": "string", + }, + { + "description": "Рекламная кампания Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:directOrder", + "title": "Кампания Яндекс.Директа", + "type": "string", + }, + { + "description": "Рекламная кампания Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:lastDirectOrder", + "title": "Кампания Яндекс.Директа (last)", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:ad:lastCurrencyIDName", + "title": "Валюта (last)", + "type": "string", + }, + { + "description": "Условие показа объявления. 
Условием могут быть либо ключевые слова, либо условие ретаргетинга.", + "is_dim": True, + "name": "ym:ad:directPhraseOrCond", + "title": "Условие показа объявления", + "type": "string", + }, + { + "description": "Рекламная кампания Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:firstDirectOrder", + "title": "Кампания Яндекс.Директа (first)", + "type": "string", + }, + { + "description": "Города, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:ad:regionCity", + "title": "Город", + "type": "string", + }, + { + "description": "Условие показа объявления. Условием могут быть либо ключевые слова, либо условие ретаргетинга.", + "is_dim": True, + "name": "ym:ad:lastDirectPhraseOrCond", + "title": "Условие показа объявления (last)", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:ad:lastSignCurrencyIDName", + "title": "Валюта (last sign)", + "type": "string", + }, + { + "description": 'Пол посетителя. 
Возможные значения: "мужской" или "женский".', + "is_dim": True, + "name": "ym:ad:gender", + "title": "Пол", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:ad:currency", + "title": "Валюта", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:firstDirectBanner", + "title": "Объявление Яндекс.Директа (first)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:lastSignDirectBanner", + "title": "Объявление Яндекс.Директа (last sign)", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:ad:firstDirectConditionType", + "title": "Тип условия показа объявления (first)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:ad:displayCampaign", + "title": "Номер заказа Яндекс.Дисплея", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:ad:lastSignDirectConditionType", + "title": "Тип условия показа объявления (last sign)", + "type": "string", + }, + { + "description": "Группа объявлений Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:directBannerGroup", + "title": "Группа объявлений", + "type": "string", + }, + { + "description": "Поисковая фраза последнего перехода по объявлению Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:lastDirectSearchPhrase", + "title": "Поисковая фраза (Директ) (last)", + "type": "string", + }, + { + "description": "Поисковая фраза последнего перехода по объявлению Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:firstDirectSearchPhrase", + "title": "Поисковая фраза (Директ) (first)", + "type": "string", + }, + { + "description": "Размер города по населению.", + "is_dim": True, + "name": "ym:ad:regionCitySize", + "title": "Размер города", + "type": "string", + }, + { + "description": "Поисковая фраза последнего 
перехода по объявлению Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:directSearchPhrase", + "title": "Поисковая фраза (Директ)", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:ad:directConditionType", + "title": "Тип условия показа объявления", + "type": "string", + }, + { + "description": "Тип рекламной площадки Яндекс.Директа.", + "is_dim": True, + "name": "ym:ad:directPlatformType", + "title": "Тип площадки", + "type": "string", + }, + { + "description": "Области, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:ad:regionArea", + "title": "Область", + "type": "string", + }, + { + "description": "Страны, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:ad:regionCountry", + "title": "Страна", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/hits.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/hits.py new file mode 100644 index 000000000..9e98e3dfe --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/hits.py @@ -0,0 +1,876 @@ +hits_fields = [ + { + "description": "Среднее количество хитов в минуту.", + "is_dim": False, + "name": "ym:pv:pageviewsPerMinute", + "title": "Просмотров в минуту", + "type": "float", + }, + { + "description": "Доля визитов и хитов с поддержкой JavaScript.", + "is_dim": False, + "name": "ym:pv:jsEnabledPercentage", + "title": "Поддержка JavaScript", + "type": "float", # percents + }, + { + "description": "Число просмотров страниц на сайте за отчетный период.", + "is_dim": False, + "name": "ym:pv:pageviews", + "title": "Просмотры", + "type": "integer", + }, + { + "description": "Средняя глубина просмотра в день.", + "is_dim": False, + "name": "ym:pv:pageviewsPerDay", + "title": "Просмотров в день", + "type": "float", + }, + { + "description": "Количество уникальных посетителей.", + "is_dim": 
False, + "name": "ym:pv:users", + "title": "Посетители", + "type": "integer", + }, + { + "description": "Среднее количество хитов в час.", + "is_dim": False, + "name": "ym:pv:pageviewsPerHour", + "title": "Просмотров в час", + "type": "float", + }, + { + "description": "Доля визитов с блокировщиками рекламы.", + "is_dim": False, + "name": "ym:pv:blockedPercentage", + "title": "Блокировка рекламы", + "type": "float", # percents + }, + { + "description": "Доля визитов и хитов с включенными cookies.", + "is_dim": False, + "name": "ym:pv:cookieEnabledPercentage", + "title": "Поддержка сookies", + "type": "float", # percents + }, + { + "description": "Доля визитов и хитов, совершенных с мобильных устройств.", + "is_dim": False, + "name": "ym:pv:mobilePercentage", + "title": "Мобильность", + "type": "float", # percents + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:URLDomain", + "title": "Домен страницы", + "type": "string", + }, + { + "description": ( + 'Часть URL страницы, следующая сразу после домена до "?" (секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. Например, для страницы ' + '"http://news.yandex.ru/quotes/1.html" путем является "/quotes/1.html", ' + 'для "http://news.yandex.ru/" - "/", а для "http://news.yandex.ru" - пустая строка' + ), + "is_dim": True, + "name": "ym:pv:URLPath", + "title": "Путь страницы", + "type": "string", + }, + { + "description": 'IPv4/IPv6-адрес посетителя с обнуленными 1/8 последними байтами соответственно. Например, "77.88.21.0"/"2001:db8:85a3::".', + "is_dim": True, + "name": "ym:pv:ipAddress", + "title": "IP-адрес", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserHourMinute", + "title": "Час и минута на компьютере посетителя", + "type": "string", + }, + { + "description": 'Ориентация экрана. 
Возможные значения: "landscape", "portrait".', + "is_dim": True, + "name": "ym:pv:screenOrientationName", + "title": "Ориентация экрана", + "type": "string", + }, + { + "description": "Дата и время просмотра в формате YYYY-MM-DD HH:mm:ss.", + "is_dim": True, + "name": "ym:pv:dateTime", + "title": "Дата и время просмотра", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:clientTimeZone", + "title": "Часовой пояс на компьютере посетителя", + "type": "string", + }, + { + "description": "Дата просмотра в формате YYYY-MM-DD, округленное до начала квартала.", + "is_dim": True, + "name": "ym:pv:startOfQuarter", + "title": "Квартал просмотра", + "type": "date", + }, + { + "description": "Источник перехода", + "is_dim": True, + "name": "ym:pv:UTMSource", + "title": "UTM Source", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала квартала.", + "is_dim": True, + "name": "ym:pv:browserStartOfQuarter", + "title": "Квартал на компьютере посетителя", + "type": "datetime", + }, + { + "description": 'Первый уровень URL страницы. Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:pv:URLPathLevel1", + "title": "Адрес, ур. 1", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:URL", + "title": "Адрес страницы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:month", + "title": "Месяц просмотра", + "type": "string", + }, + { + "description": 'Глубина цвета экрана в битах. Например, "24".', + "is_dim": True, + "name": "ym:pv:screenColorsName", + "title": "Глубина цвета", + "type": "string", + }, + { + "description": 'Пятый уровень URL страницы входа. 
Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:pv:URLPathLevel5", + "title": "Адрес, ур. 5", + "type": "string", + }, + { + "description": 'Четвертый уровень URL страницы. Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:pv:URLPathLevel4", + "title": "Адрес, ур. 4", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:pv:browserStartOfDekaminute", + "title": "Начало декаминуты на компьютере посетителя", + "type": "datetime", + }, + { + "description": 'Третий уровень URL страницы. Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:pv:URLPathLevel3", + "title": "Адрес, ур. 3", + "type": "string", + }, + { + "description": 'Второй уровень URL страницы. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:pv:URLPathLevel2", + "title": "Адрес, ур. 2", + "type": "string", + }, + { + "description": 'Тип устройства, с которого было посещение. Возможные значения: "desktop", "mobile", "tablet", "tv".', + "is_dim": True, + "name": "ym:pv:deviceCategoryName", + "title": "Тип устройства", + "type": "string", + }, + { + "description": "Дата на компьютере посетителя в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:pv:browserDate", + "title": "Дата на компьютере посетителя", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:hourName", + "title": "Час просмотра в формате HH:00", + "type": "string", + }, + { + "description": 'Разрешение экрана. 
Например, "1920х1080".', + "is_dim": True, + "name": "ym:pv:screenResolution", + "title": "Разрешение", + "type": "string", + }, + { + "description": "Континенты, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:pv:regionContinentName", + "title": "Континент", + "type": "string", + }, + # { + # 'description': 'Дата просмотра в формате YYYY-MM-DD, округленное до начала месяца.', + # 'is_dim': True, + # 'name': 'ym:pv:startOfMonth', + # 'title': 'Месяц просмотра', + # 'type': 'date', + # }, + { + "description": "Объявление", + "is_dim": True, + "name": "ym:pv:UTMContent", + "title": "UTM Content", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:mobilePhoneName", + "title": "Производитель устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:hasAdBlockerName", + "title": "Наличие блокировщиков рекламы", + "type": "string", + }, + { + "description": "Возраст посетителя, интервал", + "is_dim": True, + "name": "ym:pv:ageIntervalName", + "title": "Возраст", + "type": "string", + }, + { + "description": 'Движок браузера посетителя. Например, "WebKit".', + "is_dim": True, + "name": "ym:pv:browserEngine", + "title": "Движок браузера", + "type": "string", + }, + { + "description": "Рекламная кампания", + "is_dim": True, + "name": "ym:pv:openstatCampaign", + "title": "Openstat Campaign", + "type": "string", + }, + { + "description": 'Третий уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:pv:refererPathLevel3", + "title": "Реферер, ур. 3", + "type": "string", + }, + { + "description": 'Второй уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:pv:refererPathLevel2", + "title": "Реферер, ур. 2", + "type": "string", + }, + { + "description": 'Первый уровень URL реферера. 
Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:pv:refererPathLevel1", + "title": "Реферер, ур. 1", + "type": "string", + }, + { + "description": "Рекламный сервис", + "is_dim": True, + "name": "ym:pv:openstatService", + "title": "Openstat Service", + "type": "string", + }, + { + "description": "Средство маркетинга", + "is_dim": True, + "name": "ym:pv:UTMMedium", + "title": "UTM Medium", + "type": "string", + }, + { + "description": 'Реальное разрешение экрана исходя из данных "device pixel ratio". Например, "1920х1080".', + "is_dim": True, + "name": "ym:pv:physicalScreenResolution", + "title": "Реальное разрешение", + "type": "string", + }, + { + "description": 'Пятый уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:pv:refererPathLevel5", + "title": "Реферер, ур. 5", + "type": "string", + }, + { + "description": 'Четвертый уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:pv:refererPathLevel4", + "title": "Реферер, ур. 
4", + "type": "string", + }, + { + "description": "Название браузера и его версия.", + "is_dim": True, + "name": "ym:pv:browserAndVersionMajorName", + "title": "Версия браузера", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserHourName", + "title": "Час на компьютере посетителя в формате HH:00", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:operatingSystemName", + "title": "Операционная система (детально)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:dayOfWeekName", + "title": "День недели просмотра", + "type": "string", + }, + { + "description": "Размер города по населению.", + "is_dim": True, + "name": "ym:pv:regionCitySizeName", + "title": "Размер города", + "type": "string", + }, + { + "description": "Области, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:pv:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:mobilePhoneModel", + "title": "Модель устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:refererProto", + "title": "Протокол реферера", + "type": "string", + }, + { + "description": "URL реферера", + "is_dim": True, + "name": "ym:pv:referer", + "title": "Реферер", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:URLParamNameAndValue", + "title": "Значение параметра URL", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:from", + "title": "Метка from", + "type": "string", + }, + # { + # 'description': None, + # 'is_dim': True, + # 'name': 'ym:pv:browserDatePeriod', + # 'title': 'Дата на компьютере посетителя', + # 'type': 'string', + # }, + { + "description": "Дата просмотра в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:pv:date", + "title": "Дата просмотра", + "type": "date", + }, + { + 
"description": None, + "is_dim": True, + "name": "ym:pv:URLProto", + "title": "Протокол страницы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:operatingSystemRootName", + "title": "Группа операционных систем", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserMonth", + "title": "Месяц на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:counterID", + "title": "Счетчик (id)", + "type": "string", + "src_key": "id", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:counterIDName", + "title": "Счетчик", + "type": "string", + }, + { + "description": "Города, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:pv:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": 'Наличие метки "GCLID".', + "is_dim": True, + "name": "ym:pv:hasGCLIDName", + "title": "Наличие GCLID", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:cookieEnabledName", + "title": "Наличие Cookie", + "type": "string", + }, + { + "description": "Часть URL реферера, следующая сразу после домена. В отличие от простого пути также содержит секции параметров и фрагмента.", + "is_dim": True, + "name": "ym:pv:refererPathFull", + "title": "Путь (полный) реферера", + "type": "string", + }, + # { + # 'description': 'Дата просмотра в формате YYYY-MM-DD, округленное до начала года.', + # 'is_dim': True, + # 'name': 'ym:pv:startOfYear', + # 'title': 'Год просмотра', + # 'type': 'string', + # }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:dayOfMonth", + "title": "День месяца просмотра", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:isTurboPageName", + "title": "Турбо-страница", + "type": "string", + }, + { + "description": "Часть URL страницы, следующая сразу после домена. 
В отличие от простого пути также содержит секции параметров и фрагмента", + "is_dim": True, + "name": "ym:pv:URLPathFull", + "title": "Путь (полный) страницы", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:title", + "title": "Заголовок страницы", + "type": "string", + }, + # { + # 'description': 'Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.', + # 'is_dim': True, + # 'name': 'ym:pv:browserStartOfYear', + # 'title': 'Год на компьютере посетителя', + # 'type': 'string', + # }, + { + "description": "Ключевые слова", + "is_dim": True, + "name": "ym:pv:UTMTerm", + "title": "UTM Term", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:refererDomain", + "title": "Домен реферера", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss.", + "is_dim": True, + "name": "ym:pv:browserDateTime", + "title": "Дата и время на компьютере посетителя", + "type": "datetime", + }, + { + "description": "Страны, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:pv:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": "Рекламное объявление", + "is_dim": True, + "name": "ym:pv:openstatAd", + "title": "Openstat Ad", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:year", + "title": "Год просмотра", + "type": "string", + }, + { + "description": "Название проводимой рекламной кампании", + "is_dim": True, + "name": "ym:pv:UTMCampaign", + "title": "UTM Campaign", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:URLParamName", + "title": "Параметр URL", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserYear", + "title": "Год на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": 
"ym:pv:browserDekaminute", + "title": "Декаминута на компьютере посетителя", + "type": "string", + }, + { + "description": ( + 'Часть URL реферера, следующая сразу после домена до "?" (секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. Например, для страницы ' + '"http://news.yandex.ru/quotes/1.html" путем является "/quotes/1.html", ' + 'для "http://news.yandex.ru/" — "/", ' + 'а для "http://news.yandex.ru" — пустая строка.' + ), + "is_dim": True, + "name": "ym:pv:refererPath", + "title": "Путь реферера", + "type": "string", + }, + { + "description": 'Отношение ширины к высоте экрана. Например, "16:9".', + "is_dim": True, + "name": "ym:pv:screenFormat", + "title": "Соотношение сторон", + "type": "string", + }, + { + "description": "Место размещения", + "is_dim": True, + "name": "ym:pv:openstatSource", + "title": "Openstat Source", + "type": "string", + }, + { + "description": 'Пол посетителя. Возможные значения: "мужской" или "женский".', + "is_dim": True, + "name": "ym:pv:genderName", + "title": "Пол", + "type": "string", + }, + { + "description": "Логическая ширина экрана.", + "is_dim": True, + "name": "ym:pv:screenWidth", + "title": "Логическая ширина", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserLanguage", + "title": "Язык браузера", + "type": "string", + }, + { + "description": 'Физическая высота экрана исходя из данных "device pixel ratio".', + "is_dim": True, + "name": "ym:pv:physicalScreenHeight", + "title": "Физическая высота", + "type": "string", + }, + # { + # 'description': 'Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.', + # 'is_dim': True, + # 'name': 'ym:pv:browserStartOfMonth', + # 'title': 'Месяц на компьютере посетителя', + # 'type': 'string', + # }, + { + "description": "Дата и время просмотра в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:pv:startOfDekaminute", + 
"title": "Начало декаминуты просмотра", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:javascriptEnabledName", + "title": "Наличие Javascript", + "type": "string", + }, + { + "description": "Первое число в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:pv:browserEngineVersion1", + "title": "Major-версия движка браузера", + "type": "string", + }, + { + "description": "Второе число в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:pv:browserEngineVersion2", + "title": "Minor-версия движка браузера", + "type": "string", + }, + { + "description": "Ширина клиентской части окна браузера.", + "is_dim": True, + "name": "ym:pv:windowClientWidth", + "title": "Ширина окна", + "type": "string", + }, + { + "description": "Третье число (обычно номер сборки) в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:pv:browserEngineVersion3", + "title": "Build-версия движка браузера", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала недели.", + "is_dim": True, + "name": "ym:pv:browserStartOfWeek", + "title": "Неделя на компьютере посетителя", + "type": "string", + }, + { + "description": 'Браузер посетителя. 
Например, "Яндекс.Браузер".', + "is_dim": True, + "name": "ym:pv:browserName", + "title": "Браузер", + "type": "string", + }, + { + "description": "Четвертое число (обычно номер ревизии) в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:pv:browserEngineVersion4", + "title": "Revision-версия движка браузера", + "type": "string", + }, + # # Currently we don't support parametrized dimensions + # { + # 'description': 'Интервал дат с параметризацией размера интервала', + # 'is_dim': True, + # 'name': 'ym:pv:datePeriod', + # 'title': 'Дата', + # 'type': 'date', + # }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:dekaminute", + "title": "Декаминута просмотра", + "type": "string", + }, + { + "description": "Логическая высота экрана.", + "is_dim": True, + "name": "ym:pv:screenHeight", + "title": "Логическая высота", + "type": "string", + }, + { + "description": 'Физическая ширина экрана исходя из данных "device pixel ratio".', + "is_dim": True, + "name": "ym:pv:physicalScreenWidth", + "title": "Физическая ширина", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:pv:browserStartOfHour", + "title": "Час на компьютере посетителя", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:hourMinute", + "title": "Час и минута просмотра", + "type": "string", + }, + # { + # 'description': 'Дата и время просмотра в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.', + # 'is_dim': True, + # 'name': 'ym:pv:startOfMinute', + # 'title': 'Минута просмотра', + # 'type': 'datetime', + # }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserCountry", + "title": "Страна браузера", + "type": "string", + }, + { + "description": "Округа, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:pv:regionDistrictName", + "title": "Округ", + "type": "string", 
+ }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:turboPageID", + "title": "Турбо-страница (ID)", + "type": "string", + }, + { + "description": "Браузер и его полная версия.", + "is_dim": True, + "name": "ym:pv:browserAndVersionName", + "title": "Полная версия браузера", + "type": "string", + }, + # { + # 'description': 'Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.', + # 'is_dim': True, + # 'name': 'ym:pv:browserStartOfMinute', + # 'title': 'Минута на компьютере посетителя', + # 'type': 'datetime', + # }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserDayOfWeekName", + "title": "День недели на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserMinute", + "title": "Минута на компьютере посетителя", + "type": "string", + }, + { + "description": "Дата и время просмотра в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:pv:startOfHour", + "title": "Час просмотра", + "type": "datetime", + }, + { + "description": "Высота клиентской части окна браузера.", + "is_dim": True, + "name": "ym:pv:windowClientHeight", + "title": "Высота окна", + "type": "string", + }, + { + "description": 'Размер клиентской части окна браузера. 
Например, "1920х1080".', + "is_dim": True, + "name": "ym:pv:windowClientArea", + "title": "Размер окна", + "type": "string", + }, + { + "description": "Дата просмотра в формате YYYY-MM-DD, округленное до начала недели.", + "is_dim": True, + "name": "ym:pv:startOfWeek", + "title": "Неделя просмотра", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:minute", + "title": "Минута просмотра", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:pv:browserDayOfMonth", + "title": "День месяца на компьютере посетителя", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/user_param.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/user_param.py new file mode 100644 index 000000000..ca3a95c26 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/user_param.py @@ -0,0 +1,66 @@ +user_param_fields = [ + { + "description": "Количество параметров.", + "is_dim": False, + "name": "ym:up:params", + "title": "Параметры", + "type": "integer", + }, + { + "description": "Количество уникальных посетителей.", + "is_dim": False, + "name": "ym:up:users", + "title": "Посетители", + "type": "integer", + }, + { + "description": None, + "is_dim": True, + "name": "ym:up:counterID", + "title": "Счетчик (id)", + "type": "string", + "src_key": "id", + }, + { + "description": None, + "is_dim": True, + "name": "ym:up:counterIDName", + "title": "Счетчик", + "type": "string", + }, + { + "description": "Первый уровень вложенности параметров посетителя", + "is_dim": True, + "name": "ym:up:paramsLevel1", + "title": "Параметр посетителя, ур. 1", + "type": "string", + }, + { + "description": "Второй уровень вложенности параметров посетителя", + "is_dim": True, + "name": "ym:up:paramsLevel2", + "title": "Параметр посетителя, ур. 
2", + "type": "string", + }, + { + "description": "Третий уровень вложенности параметров посетителя", + "is_dim": True, + "name": "ym:up:paramsLevel3", + "title": "Параметр посетителя, ур. 3", + "type": "string", + }, + { + "description": "Четвертый уровень вложенности параметров посетителя", + "is_dim": True, + "name": "ym:up:paramsLevel4", + "title": "Параметр посетителя, ур. 4", + "type": "string", + }, + { + "description": "Пятый уровень вложенности параметров посетителя", + "is_dim": True, + "name": "ym:up:paramsLevel5", + "title": "Параметр посетителя, ур. 5", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/visits.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/visits.py new file mode 100644 index 000000000..25b30f16b --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/api_info/metrika/visits.py @@ -0,0 +1,2693 @@ +visits_fields = [ + { + "description": "Доля визитов посетителей, известных от 8 до 31 день.", + "is_dim": False, + "name": "ym:s:upToMonthSinceFirstVisitPercentage", + "title": "Новизна 8‑31 день", + "type": "float", + }, + { + "description": ( + "Доля визитов посетителей с частотой визитов один раз в 1 день. " + "Учитываются посетители, совершившие больше двух визитов, " + "которые произошли в разные дни." 
+ ), + "is_dim": False, + "name": "ym:s:oneDayBetweenVisitsPercentage", + "title": "Периодичность 1 день", + "type": "float", + }, + { + "description": "", + "is_dim": False, + "name": "ym:s:manPercentage", + "title": "Доля мужчин", + "type": "float", + }, + { + "description": "", + "is_dim": False, + "name": "ym:s:womanPercentage", + "title": "Доля женщин", + "type": "float", + }, + { + "description": "Количество посетителей, добавивших в корзину хотя бы один товар суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productBasketsUniq", + "title": "Количество посетителей, добавивших товар в корзину", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, возраст которых от 35 до 45 лет.", + "is_dim": False, + "name": "ym:s:upTo44AgePercentage", + "title": "35‑45 лет", + "type": "float", + }, + { + "description": "Процент уникальных посетителей, посетивших сайт в отчетном периоде, активность которых включала их самый первый за всю историю накопления данных визит на сайт.", + "is_dim": False, + "name": "ym:s:percentNewVisitors", + "title": "Доля новых посетителей", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallsFirstTimeCallerPercentage", + "title": "Доля первичных звонков", + "type": "float", + }, + { + "description": "Доля визитов, в рамках которых состоялся лишь один просмотр страницы, продолжавшийся менее 15 секунд.", + "is_dim": False, + "name": "ym:s:bounceRate", + "title": "Отказы", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых более 55 лет.", + "is_dim": False, + "name": "ym:s:over54AgePercentage", + "title": "Более 55 лет", + "type": "float", + }, + { + "description": "Количество уникальных посетителей.", + "is_dim": False, + "name": "ym:s:users", + "title": "Посетители", + "type": "integer", + }, + { + "description": "Среднее время от первого визита в днях.", + "is_dim": False, + "name": "ym:s:avgDaysSinceFirstVisit", + "title": "Дней 
от первого визита (среднее)", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallsFirstTimeCaller", + "title": "Количество первичных звонков", + "type": "integer", + }, + { + "description": "Доля визитов посетителей с одним визитом (первый визит посетителя).", + "is_dim": False, + "name": "ym:s:oneVisitPerUserPercentage", + "title": "1 визит посетителя", + "type": "float", + }, + { + "description": "", + "is_dim": False, + "name": "ym:s:ecommerceRevenuePerPurchase", + "title": "Средний доход покупки", + "type": "float", + }, + { + "description": "Количество раз, когда в визите встретился параметр визита.", + "is_dim": False, + "name": "ym:s:paramsNumber", + "title": "Количество параметров визитов", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, известных более 365 дней.", + "is_dim": False, + "name": "ym:s:overYearSinceFirstVisitPercentage", + "title": "Новизна более 365 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 8‑31 визитами.", + "is_dim": False, + "name": "ym:s:upTo31VisitsPerUserPercentage", + "title": "8‑31 визит посетителя", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCalls", + "title": "Количество звонков", + "type": "integer", + }, + { + "description": "Доля посетителей, купивших товар на сайте, по отношению к общему числу посетителей.", + "is_dim": False, + "name": "ym:s:usersPurchasePercentage", + "title": "Доля посетителей, купивших товар", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 32 до 90 дней.", + "is_dim": False, + "name": "ym:s:upToQuarterSinceFirstVisitPercentage", + "title": "Новизна 32‑90 дней", + "type": "float", + }, + { + "description": "Среднее количество дней между визитами посетителей.", + "is_dim": False, + "name": "ym:s:avgDaysBetweenVisits", + "title": "Дней между визитами", + "type": "float", + }, + { + "description": None, + "is_dim": 
False, + "name": "ym:s:offlineCallsMissed", + "title": "Количество пропущенных звонков", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:visitsPerDay", + "title": "Визитов в день", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 2‑3 визитами.", + "is_dim": False, + "name": "ym:s:upTo3VisitsPerUserPercentage", + "title": "2‑3 визита посетителя", + "type": "float", + }, + { + "description": ( + "Доля визитов посетителей с частотой визитов один раз в 8‑31 день. " + "Учитываются посетители, совершившие больше двух визитов. " + "При этом число дней визитов больше 1 дня." + ), + "is_dim": False, + "name": "ym:s:upToMonthBetweenVisitsPercentage", + "title": "Периодичность 8‑31 день", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallRevenueAvg", + "title": "Средняя цена звонка", + "type": "float", + }, + { + "description": "Стоимость купленных товаров суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productPurchasedPrice", + "title": "Стоимость купленных товаров", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 91‑365 дней.", + "is_dim": False, + "name": "ym:s:upToYearUserRecencyPercentage", + "title": "Вернувшиеся: 91‑365 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 1 до 7 дней.", + "is_dim": False, + "name": "ym:s:upToWeekSinceFirstVisitPercentage", + "title": "Новизна 1‑7 дней", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallsMissedPercentage", + "title": "Доля пропущенных звонков", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallHoldDurationTillAnswerAvg", + "title": "Среднее время ожидания до ответа", + "type": "float", + }, + { + "description": "Количество посетителей, купивших хотя бы один товар, суммарно по всем визитам.", + "is_dim": False, + 
"name": "ym:s:productPurchasedUniq", + "title": "Количество посетителей, купивших товар", + "type": "integer", + }, + { + "description": "Количество единиц товара, добавленных в корзину, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productBasketsQuantity", + "title": "Товаров добавлено в корзину", + "type": "integer", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallDurationAvg", + "title": "Среднее время звонка", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 8‑31 день.", + "is_dim": False, + "name": "ym:s:upToMonthUserRecencyPercentage", + "title": "Вернувшиеся: 8‑31 день", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:pvlAll3Window", + "title": "PVL‑3", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых от 45 до 55 лет.", + "is_dim": False, + "name": "ym:s:upTo54AgePercentage", + "title": "45‑55 лет", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:pvlAll1Window", + "title": "PVL‑1", + "type": "float", + }, + { + "description": "Количество достижений любой цели.", + "is_dim": False, + "name": "ym:s:sumGoalReachesAny", + "title": "Достижения любой цели", + "type": "integer", + }, + { + "description": "Количество посетителей, просмотревших информацию хотя бы об одном товаре.", + "is_dim": False, + "name": "ym:s:productImpressionsUniq", + "title": "Посетители, просмотревшие товар", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 1 день.", + "is_dim": False, + "name": "ym:s:upToDayUserRecencyPercentage", + "title": "Вернувшиеся: 1 день", + "type": "float", + }, + { + "description": "Конверсия по любой цели.", + "is_dim": False, + "name": "ym:s:anyGoalConversionRate", + "title": "Конверсия по любой цели", + "type": "float", + }, + { + "description": "Суммарное количество визитов.", + "is_dim": False, + 
"name": "ym:s:visits", + "title": "Визиты", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, совершивших больше 32 визитов.", + "is_dim": False, + "name": "ym:s:over32VisitsPerUserPercentage", + "title": "Более чем 32 визита посетителя", + "type": "float", + }, + { + "description": 'Доля визитов с меткой "GCLID".', + "is_dim": False, + "name": "ym:s:GCLIDPercentage", + "title": "Доля GCLID", + "type": "float", + }, + { + "description": ( + "Доля визитов посетителей с частотой визитов один раз в более чем 31 день. " + "Учитываются посетители, совершившие больше двух визитов, " + "которые произошли в разные дни." + ), + "is_dim": False, + "name": "ym:s:overMonthBetweenVisitsPercentage", + "title": "Периодичность более чем 31 день", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых от 25 до 34 лет.", + "is_dim": False, + "name": "ym:s:upTo34AgePercentage", + "title": "25‑34 лет", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных от 91 до 365 дней.", + "is_dim": False, + "name": "ym:s:upToYearSinceFirstVisitPercentage", + "title": "Новизна 91‑365 дней", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallRevenue", + "title": "Суммарная цена звонка", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallHoldDurationTillMissAvg", + "title": "Среднее время ожидания до отказа", + "type": "float", + }, + { + "description": "", + "is_dim": False, + "name": "ym:s:ecommercePurchases", + "title": "Количество покупок", + "type": "integer", + }, + { + "description": "Количество страниц, просмотренных посетителем во время визита.", + "is_dim": False, + "name": "ym:s:pageDepth", + "title": "Глубина просмотра", + "type": "float", + }, + { + "description": "Доля визитов и хитов с поддержкой JavaScript.", + "is_dim": False, + "name": "ym:s:jsEnabledPercentage", + "title": "Поддержка JavaScript", + "type": 
"float", + }, + { + "description": "Доля посетителей, предположительно являющихся роботами.", + "is_dim": False, + "name": "ym:s:robotPercentage", + "title": "Роботность (доля)", + "type": "float", + }, + { + "description": "Число просмотров страниц на сайте за отчетный период.", + "is_dim": False, + "name": "ym:s:pageviews", + "title": "Просмотры", + "type": "integer", + }, + { + "description": "Доля визитов новых посетителей.", + "is_dim": False, + "name": "ym:s:newUserVisitsPercentage", + "title": "Доля визитов новых посетителей", + "type": "float", + }, + { + "description": "Количество единиц товара, помещённых в корзину, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productBasketsRemoveQuantity", + "title": "Товаров удалено из корзины", + "type": "integer", + }, + { + "description": "Количество просмотров страницы с информацией о товарах суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productImpressions", + "title": "Просмотры товаров", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, возраст которых от 18 до 24 лет.", + "is_dim": False, + "name": "ym:s:upTo24AgePercentage", + "title": "18‑24 лет", + "type": "float", + }, + { + "description": ( + "Доля визитов посетителей с частотой визитов один раз в 2‑7 дней. " + "Учитываются посетители, совершившие больше двух визитов. " + "При этом число дней визитов больше 1 дня." 
+ ), + "is_dim": False, + "name": "ym:s:upToWeekBetweenVisitsPercentage", + "title": "Периодичность 2‑7 дней", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallsUniq", + "title": "Количество звонков с уникальных номеров", + "type": "integer", + }, + { + "description": "Средняя продолжительность визита в минутах и секундах.", + "is_dim": False, + "name": "ym:s:avgVisitDurationSeconds", + "title": "Время на сайте (в секундах)", + "type": "float", + }, + { + "description": "Доля визитов посетителей, известных меньше одного дня.", + "is_dim": False, + "name": "ym:s:upToDaySinceFirstVisitPercentage", + "title": "Новизна менее одного дня", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:visitsPerHour", + "title": "Визитов в час", + "type": "float", + }, + { + "description": ( + "Отношение доли посетителей с заданным интересом на этом сайте к " + "среднестатистической доле посетителей с этим же интересом " + "на всех сайтах интернета. " + 'Разрешена только при использовании группировки "Категория интересов".' + ), + "is_dim": False, + "name": "ym:s:affinityIndexInterests2", + "title": "Аффинити‑индекс", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:offlineCallTalkDurationAvg", + "title": "Среднее время разговора", + "type": "float", + }, + { + "description": "Стоимость товаров, добавленных в корзину, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productBasketsPrice", + "title": "Стоимость добавленных в корзину товаров", + "type": "float", + }, + { + "description": "Доля визитов посетителей с 4‑7 визитами.", + "is_dim": False, + "name": "ym:s:upTo7VisitsPerUserPercentage", + "title": "4‑7 визитов посетителя", + "type": "float", + }, + { + "description": ( + "Полученный доход суммарно по всем визитам. 
" + "Присылается в поле revenue при покупке товара, " + "либо вычисляется автоматически как сумма цен всех товаров, " + "ассоциированных с покупкой." + ), + "is_dim": False, + "name": "ym:s:ecommerceRevenue", + "title": "Доход", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:pvlAll7Window", + "title": "PVL‑7", + "type": "float", + }, + { + "description": "", + "is_dim": False, + "name": "ym:s:ecommerceRevenuePerVisit", + "title": "Средний доход визита", + "type": "float", + }, + { + "description": "Доля визитов с блокировщиками рекламы.", + "is_dim": False, + "name": "ym:s:blockedPercentage", + "title": "Блокировка рекламы", + "type": "float", + }, + { + "description": "Доля визитов и хитов, совершенных с мобильных устройств.", + "is_dim": False, + "name": "ym:s:mobilePercentage", + "title": "Мобильность", + "type": "float", + }, + { + "description": ( + "Среднее значение параметра визита. " + "Вычисляется, если в качестве значения параметра визита передаётся число." 
+ ), + "is_dim": False, + "name": "ym:s:avgParams", + "title": "Среднее параметров визитов", + "type": "float", + }, + { + "description": "Количество купленных единиц товара суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productPurchasedQuantity", + "title": "Товаров куплено", + "type": "integer", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через более чем 365 дней.", + "is_dim": False, + "name": "ym:s:overYearUserRecencyPercentage", + "title": "Вернувшиеся: более чем 365 дней", + "type": "float", + }, + { + "description": "Доля визитов посетителей, возраст которых менее 18 лет.", + "is_dim": False, + "name": "ym:s:under18AgePercentage", + "title": "Менее 18 лет", + "type": "float", + }, + { + "description": "Доля визитов и хитов с включенными cookies.", + "is_dim": False, + "name": "ym:s:cookieEnabledPercentage", + "title": "Поддержка сookies", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 32‑90 дней.", + "is_dim": False, + "name": "ym:s:upToQuarterUserRecencyPercentage", + "title": "Вернувшиеся: 32‑90 дней", + "type": "float", + }, + { + "description": None, + "is_dim": False, + "name": "ym:s:visitsPerMinute", + "title": "Визитов в минуту", + "type": "float", + }, + { + "description": "Среднее время от предпоследнего визита в днях.", + "is_dim": False, + "name": "ym:s:userRecencyDays", + "title": "Дней от предыдущего визита", + "type": "float", + }, + { + "description": "Стоимость товаров, удалённых из корзины, суммарно по всем визитам.", + "is_dim": False, + "name": "ym:s:productBasketsRemovePrice", + "title": "Стоимость удалённых из корзины товаров", + "type": "float", + }, + { + "description": "Доля визитов посетителей, вернувшихся на сайт через 2‑7 дней.", + "is_dim": False, + "name": "ym:s:upToWeekUserRecencyPercentage", + "title": "Вернувшиеся: 2‑7 дней", + "type": "float", + }, + { + "description": "Количество новых посетителей.", + "is_dim": False, + "name": 
"ym:s:newUsers", + "title": "Новые посетители", + "type": "integer", + }, + { + "description": "Сумма значений параметров визитов. Вычисляется, если в качестве значений параметров визитов передаётся число.", + "is_dim": False, + "name": "ym:s:sumParams", + "title": "Сумма параметров визитов", + "type": "integer", + }, + { + "description": "Среднее число дней между визитами, интервал.", + "is_dim": True, + "name": "ym:s:userVisitsPeriodIntervalName", + "title": "Периодичность", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productSum", + "title": "Стоимость товара", + "type": "string", + }, + { + "description": "Рекламный сервис", + "is_dim": True, + "name": "ym:s:lastOpenstatService", + "title": "Last Openstat Service", + "type": "string", + }, + { + "description": "Страница социальной сети, с которой был совершён первый за всю историю переход посетителя.", + "is_dim": True, + "name": "ym:s:firstSocialNetworkProfile", + "title": "Первая страница соц. сети", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCoupon", + "title": "Промокод товара", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserHourMinute", + "title": "Час и минута на компьютере посетителя", + "type": "string", + }, + { + "description": "Количество дней с первого визита посетителя, интервал.", + "is_dim": True, + "name": "ym:s:daysSinceFirstVisitIntervalName", + "title": "Дней от первого визита", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:dekaminute", + "title": "Декаминута визита", + "type": "string", + }, + { + "description": 'Глубина цвета экрана в битах. 
Например, "24".', + "is_dim": True, + "name": "ym:s:screenColorsName", + "title": "Глубина цвета", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:advEngineName", + "title": "Последняя рекламная система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productID", + "title": "ID товара", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:mobilePhoneModel", + "title": "Модель устройства", + "type": "string", + }, + { + "description": "Количество просмотров во время визита или глубина просмотра. Просмотры исключают обновления страницы в интервале менее 15 секунд.", + "is_dim": True, + "name": "ym:s:pageViews", + "title": "Глубина просмотра (детально)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitDekaminute", + "title": "Декаминута первого визита", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала квартала.", + "is_dim": True, + "name": "ym:s:browserStartOfQuarter", + "title": "Квартал на компьютере посетителя", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:purchaseRevenue", + "title": "Доход от покупки", + "type": "string", + }, + { + "description": 'Наличие метки "GCLID".', + "is_dim": True, + "name": "ym:s:lastSignHasGCLIDName", + "title": "Наличие GCLID (last sign)", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:s:browserStartOfHour", + "title": "Час на компьютере посетителя", + "type": "datetime", + }, + { + "description": "Дата и время визита в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:s:startOfHour", + "title": "Час визита (начало часа)", + "type": "datetime", + }, + { + "description": None, + 
"is_dim": True, + "name": "ym:s:purchaseCoupon", + "title": "Промокод покупки", + "type": "string", + }, + { + "description": "Объявление", + "is_dim": True, + "name": "ym:s:lastUTMContent", + "title": "Last UTM Content", + "type": "string", + }, + { + "description": "Рекламная кампания", + "is_dim": True, + "name": "ym:s:lastOpenstatCampaign", + "title": "Last Openstat Campaign", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:hasRecommendationsClicksName", + "title": "Переходы по рекомендациям", + "type": "string", + }, + { + "description": "Место размещения", + "is_dim": True, + "name": "ym:s:openstatSource", + "title": "Openstat Source", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала недели.", + "is_dim": True, + "name": "ym:s:browserStartOfWeek", + "title": "Неделя на компьютере посетителя", + "type": "date", + }, + { + "description": 'Ориентация экрана. Возможные значения: "landscape", "portrait".', + "is_dim": True, + "name": "ym:s:screenOrientationName", + "title": "Ориентация экрана", + "type": "string", + }, + { + "description": "Логическая высота экрана.", + "is_dim": True, + "name": "ym:s:screenHeight", + "title": "Логическая высота", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:javascriptEnabledName", + "title": "Наличие Javascript", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:endURLDomain", + "title": "Домен страницы выхода", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:s:lastCurrencyIDName", + "title": "Валюта (last)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserDayOfMonth", + "title": "День месяца на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + 
"name": "ym:s:turboPageID", + "title": "Турбо-страница (ID)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCurrency", + "title": "Валюта", + "type": "string", + }, + { + "description": "Посетитель предположительно является роботом", + "is_dim": True, + "name": "ym:s:isRobotName", + "title": "Роботность", + "type": "string", + }, + { + "description": "Браузер и его полная версия.", + "is_dim": True, + "name": "ym:s:browserAndVersionName", + "title": "Полная версия браузера", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlineCallHoldDuration", + "title": "Время ожидания", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserCountry", + "title": "Страна браузера", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlinePointLocationName", + "title": "Точка", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:s:lastDirectConditionTypeName", + "title": "Тип условия показа объявления (last)", + "type": "string", + }, + { + "description": "Количество визитов посетителя за всю историю, интервал.", + "is_dim": True, + "name": "ym:s:userVisitsInterval", + "title": "Визитов в истории", + "type": "string", + }, + { + "description": "Области, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:s:regionAreaName", + "title": "Область", + "type": "string", + }, + { + "description": "Название проводимой рекламной кампании", + "is_dim": True, + "name": "ym:s:firstUTMCampaign", + "title": "First UTM Campaign", + "type": "string", + }, + { + "description": "Социальная сеть, из которой был совершён последний значимый переход посетителя.", + "is_dim": True, + "name": "ym:s:lastSignSocialNetworkName", + "title": "Последняя значимая социальная сеть", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате 
YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:s:browserStartOfYear", + "title": "Год на компьютере посетителя (начало года)", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitMinute", + "title": "Минута первого визита", + "type": "string", + }, + { + "description": ( + 'Часть URL реферера, следующая сразу после домена до "?" (секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. ' + 'Например, для страницы "http://news.yandex.ru/quotes/1.html" путем является "/quotes/1.html", ' + 'для "http://news.yandex.ru/" — "/", а для "http://news.yandex.ru" — пустая строка.' + ), + "is_dim": True, + "name": "ym:s:refererPath", + "title": "Путь реферера", + "type": "string", + }, + { + "description": "Количество дней с предпоследнего визита посетителя, интервал.", + "is_dim": True, + "name": "ym:s:daysSincePreviousVisitIntervalName", + "title": "Время возврата", + "type": "string", + }, + { + "description": "Идентификатор посетителя сайта, назначаемый Метрикой", + "is_dim": True, + "name": "ym:s:clientID", + "title": "ClientID", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:operatingSystemRootName", + "title": "Группа операционных систем", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:from", + "title": "Метка from", + "type": "string", + }, + { + "description": "Дата и время визита в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": "ym:s:startOfDekaminute", + "title": "Начало декаминуты визита", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productBrandCart", + "title": "Бренд товара в корзине", + "type": "string", + }, + { + "description": "Дата и время первого визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": 
"ym:s:firstVisitStartOfDekaminute", + "title": "Начало декаминуты первого визита", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlinePointRegionName", + "title": "Регион", + "type": "string", + }, + # { + # 'description': 'Интервал дат с параметризацией размера интервала', + # # 'is_dim': True, + # 'name': 'ym:s:previousVisitDatePeriod', + # 'title': 'Дата предыдущего визита (интервал)', + # 'type': 'date', + # }, + { + "description": "Идентификатор уточненной поисковой системы.", + "is_dim": True, + "name": "ym:s:searchEngineName", + "title": "Последняя поисковая система (детально)", + "type": "string", + }, + { + "description": "Рекламное объявление", + "is_dim": True, + "name": "ym:s:firstOpenstatAd", + "title": "First Openstat Ad", + "type": "string", + }, + # { + # 'description': None, + # # 'is_dim': True, + # 'name': 'ym:s:browserDatePeriod', + # 'title': 'Дата на компьютере посетителя', + # 'type': 'string', + # }, + { + "description": "Дата визита в формате YYYY-MM-DD, округленная до начала квартала.", + "is_dim": True, + "name": "ym:s:startOfQuarter", + "title": "Квартал визита", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserYear", + "title": "Год на компьютере посетителя", + "type": "string", + }, + { + "description": "Количество дней с предпоследнего визита посетителя.", + "is_dim": True, + "name": "ym:s:daysSincePreviousVisit", + "title": "Время c предыдущего визита", + "type": "string", + }, + { + "description": "Место размещения", + "is_dim": True, + "name": "ym:s:lastSignOpenstatSource", + "title": "Last Sign Openstat Source", + "type": "string", + }, + { + "description": "Источник перехода", + "is_dim": True, + "name": "ym:s:UTMSource", + "title": "UTM Source", + "type": "string", + }, + { + "description": "Дата первого визита посетителя в формате YYYY-MM-DD, округленная до начала месяца.", + "is_dim": True, + "name": "ym:s:firstVisitStartOfMonth", + "title": 
"Месяц первого визита (начало месяца)", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:purchaseExistsVisit", + "title": "Наличие покупок в визите", + "type": "string", + }, + { + "description": "Дата первого визита посетителя в формате YYYY-MM-DD, округленная до начала недели (понедельник).", + "is_dim": True, + "name": "ym:s:firstVisitStartOfWeek", + "title": "Неделя первого визита", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:externalRefererDomain", + "title": "Домен внешнего реферера", + "type": "string", + }, + { + "description": "Категория коммерческих интересов посетителей.", + "is_dim": True, + "name": "ym:s:interestName", + "title": "Категория интересов", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:purchaseCountVisit", + "title": "Количество покупок в визите", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstAdvEngineName", + "title": "Первая рекламная система", + "type": "string", + }, + { + "description": "Количество просмотров в визите или глубина просмотра, сгруппированная по интервалам.", + "is_dim": True, + "name": "ym:s:pageViewsInterval", + "title": "Глубина просмотра (по интервалам)", + "type": "string", + }, + { + "description": "Ключевые слова", + "is_dim": True, + "name": "ym:s:firstUTMTerm", + "title": "First UTM Term", + "type": "string", + }, + # { + # 'description': 'Интервал дат с параметризацией размера интервала', + # # 'is_dim': True, + # 'name': 'ym:s:firstVisitDatePeriod', + # 'title': 'Дата первого визита (интервал)', + # 'type': 'string', + # }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productVariant", + "title": "Вариант товара", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:startURLProto", + "title": "Протокол страницы входа", + "type": "string", + }, + { + "description": 'Идентификатор уточненной поисковой системы 
первого визита посетителя. Например, "Яндекс.Картинки".', + "is_dim": True, + "name": "ym:s:firstSearchEngineName", + "title": "Первая поисковая система (детально)", + "type": "string", + }, + { + "description": 'Размер клиентской части окна браузера. Например, "1920х1080".', + "is_dim": True, + "name": "ym:s:windowClientArea", + "title": "Размер окна", + "type": "string", + }, + { + "description": "Страны, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:s:regionCountryName", + "title": "Страна", + "type": "string", + }, + { + "description": "Дата и время предпоследнего визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала квартала.", + "is_dim": True, + "name": "ym:s:previousVisitStartOfQuarter", + "title": "Квартал предпоследнего визита", + "type": "date", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:s:firstCurrencyIDName", + "title": "Валюта (first)", + "type": "string", + }, + { + "description": "Время ― начало часового периода в формате HH:MM.", + "is_dim": True, + "name": "ym:s:hourName", + "title": "Час визита", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:s:browserStartOfMonth", + "title": "Месяц на компьютере посетителя (начало месяца)", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:previousVisitMonth", + "title": "Месяц предыдущего визита", + "type": "string", + }, + { + "description": "Возраст посетителя, интервал", + "is_dim": True, + "name": "ym:s:ageIntervalName", + "title": "Возраст", + "type": "string", + }, + { + "description": 'Идентификатор уточненной поисковой системы первого визита посетителя. 
Например, "Яндекс.Картинки".', + "is_dim": True, + "name": "ym:s:lastSignSearchEngineName", + "title": "Последняя значимая поисковая система (детально)", + "type": "string", + }, + { + "description": "Ширина клиентской части окна браузера.", + "is_dim": True, + "name": "ym:s:windowClientWidth", + "title": "Ширина окна", + "type": "string", + }, + { + "description": "Название браузера и его версия.", + "is_dim": True, + "name": "ym:s:browserAndVersionMajorName", + "title": "Версия браузера", + "type": "string", + }, + { + "description": 'Отношение ширины к высоте экрана. Например, "16:9".', + "is_dim": True, + "name": "ym:s:screenFormat", + "title": "Соотношение сторон", + "type": "string", + }, + { + "description": "Часть URL страницы входа, следующая сразу после домена. В отличие от простого пути также содержит секции параметров и фрагмента", + "is_dim": True, + "name": "ym:s:startURLPathFull", + "title": "Путь (полный) страницы входа", + "type": "string", + }, + { + "description": 'Идентификатор верхнего уровня иерархии поисковых систем первого визита посетителя. Например, "Яндекс".', + "is_dim": True, + "name": "ym:s:firstSearchEngineRootName", + "title": "Первая поисковая система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserLanguage", + "title": "Язык браузера", + "type": "string", + }, + { + "description": "Ключевые слова", + "is_dim": True, + "name": "ym:s:UTMTerm", + "title": "UTM Term", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:dayOfWeekName", + "title": "День недели визита", + "type": "string", + }, + { + "description": 'Четвертый уровень URL страницы входа. Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:s:startURLPathLevel4", + "title": "Страница входа, ур. 4", + "type": "string", + }, + { + "description": 'Третий уровень URL страницы входа. 
Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:s:startURLPathLevel3", + "title": "Страница входа, ур. 3", + "type": "string", + }, + { + "description": "Средство маркетинга", + "is_dim": True, + "name": "ym:s:UTMMedium", + "title": "UTM Medium", + "type": "string", + }, + { + "description": 'Второй уровень URL страницы входа. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:s:startURLPathLevel2", + "title": "Страница входа, ур. 2", + "type": "string", + }, + { + "description": 'Первый уровень URL страницы входа. Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:s:startURLPathLevel1", + "title": "Страница входа, ур. 1", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlineCallTag", + "title": "Метка", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:refererProto", + "title": "Протокол реферера", + "type": "string", + }, + { + "description": "Объявление", + "is_dim": True, + "name": "ym:s:firstUTMContent", + "title": "First UTM Content", + "type": "string", + }, + { + "description": "Дата предыдущего визита посетителя в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:s:previousVisitDate", + "title": "Дата предыдущего визита", + "type": "date", + }, + { + "description": 'Пятый уровень URL страницы входа. Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:s:startURLPathLevel5", + "title": "Страница входа, ур. 5", + "type": "string", + }, + { + "description": ( + 'Часть URL страницы входа, следующая сразу после домена до "?" (секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. 
' + 'Например, для страницы "http://news.yandex.ru/quotes/1.html" путем является "/quotes/1.html", ' + 'для "http://news.yandex.ru/" - "/", а для "http://news.yandex.ru" - пустая строка' + ), + "is_dim": True, + "name": "ym:s:startURLPath", + "title": "Путь страницы входа", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:PProductBrand", + "title": "Бренд товара в заказе", + "type": "string", + }, + { + "description": "Дата и время первого визита посетителя в формате YYYY-MM-DD HH:mm:ss.", + "is_dim": True, + "name": "ym:s:firstVisitDateTime", + "title": "Дата и время первого визита", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstTrafficSourceName", + "title": "Первый источник трафика", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:trafficSourceName", + "title": "Последний источник трафика", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:lastSignTrafficSourceName", + "title": "Последний значимый источник трафика", + "type": "string", + }, + { + "description": "Второй уровень источников трафика", + "is_dim": True, + "name": "ym:s:firstSourceEngineName", + "title": "Первый источник трафика (детально)", + "type": "string", + }, + { + "description": "Второй уровень источников трафика", + "is_dim": True, + "name": "ym:s:sourceEngineName", + "title": "Последний источник трафика (детально)", + "type": "string", + }, + { + "description": "Второй уровень источников трафика", + "is_dim": True, + "name": "ym:s:lastSignSourceEngineName", + "title": "Последний значимый источник трафика (детально)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:endURL", + "title": "Страница выхода", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:hourMinute", + "title": "Час и минута визита", + "type": "string", + }, + { + "description": 'Наличие метки 
"GCLID".', + "is_dim": True, + "name": "ym:s:lastHasGCLIDName", + "title": "Наличие GCLID (last)", + "type": "string", + }, + { + "description": "Объявление", + "is_dim": True, + "name": "ym:s:UTMContent", + "title": "UTM Content", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productBrand", + "title": "Бренд", + "type": "string", + }, + { + "description": "Время на сайте, разбитое по группам.", + "is_dim": True, + "name": "ym:s:visitDurationIntervalName", + "title": "Время на сайте (по группам)", + "type": "string", + }, + { + "description": 'Четвертый уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:s:refererPathLevel4", + "title": "Реферер, ур. 4", + "type": "string", + }, + { + "description": 'Третий уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:s:refererPathLevel3", + "title": "Реферер, ур. 3", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:cookieEnabledName", + "title": "Наличие Cookie", + "type": "string", + }, + { + "description": 'Разрешение экрана. Например, "1920х1080".', + "is_dim": True, + "name": "ym:s:screenResolution", + "title": "Разрешение", + "type": "string", + }, + { + "description": "Название проводимой рекламной кампании", + "is_dim": True, + "name": "ym:s:lastUTMCampaign", + "title": "Last UTM Campaign", + "type": "string", + }, + { + "description": "Рекламная кампания", + "is_dim": True, + "name": "ym:s:firstOpenstatCampaign", + "title": "First Openstat Campaign", + "type": "string", + }, + { + "description": 'Пятый уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:s:refererPathLevel5", + "title": "Реферер, ур. 
5", + "type": "string", + }, + { + "description": "С каких сайтов осуществлялись переходы на сайт.", + "is_dim": True, + "name": "ym:s:referalSource", + "title": "Последний переход с сайтов", + "type": "string", + }, + { + "description": 'Третий уровень URL внешнего реферера. Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:s:externalRefererPathLevel3", + "title": "Внешний реферер, ур. 3", + "type": "string", + }, + { + "description": 'Второй уровень URL внешнего реферера. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:s:externalRefererPathLevel2", + "title": "Внешний реферер, ур. 2", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlineCallTalkDuration", + "title": "Время разговора", + "type": "string", + }, + { + "description": 'Второй уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:s:refererPathLevel2", + "title": "Реферер, ур. 2", + "type": "string", + }, + { + "description": 'Пятый уровень URL внешнего реферера. Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:s:externalRefererPathLevel5", + "title": "Внешний реферер, ур. 5", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitDayOfMonth", + "title": "День месяца первого визита", + "type": "string", + }, + { + "description": 'Первый уровень URL реферера. Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:s:refererPathLevel1", + "title": "Реферер, ур. 1", + "type": "string", + }, + { + "description": 'Четвертый уровень URL внешнего реферера. 
Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:s:externalRefererPathLevel4", + "title": "Внешний реферер, ур. 4", + "type": "string", + }, + { + "description": 'Первый уровень URL внешнего реферера. Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:s:externalRefererPathLevel1", + "title": "Внешний реферер, ур. 1", + "type": "string", + }, + { + "description": "С каких сайтов осуществлялись переходы на сайт.", + "is_dim": True, + "name": "ym:s:lastSignReferalSource", + "title": "Последний значимый переход с сайтов", + "type": "string", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:s:lastSignDirectConditionTypeName", + "title": "Тип условия показа объявления (last sign)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:previousVisitDayOfMonth", + "title": "День месяца предыдущего визита", + "type": "string", + }, + { + "description": "Дата и время первого визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала часа.", + "is_dim": True, + "name": "ym:s:firstVisitStartOfHour", + "title": "Час первого визита (начало часа)", + "type": "datetime", + }, + { + "description": "Логическая ширина экрана.", + "is_dim": True, + "name": "ym:s:screenWidth", + "title": "Логическая ширина", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:year", + "title": "Год визита", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:lastSignAdvEngineName", + "title": "Последняя значимая рекламная система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:startURLDomain", + "title": "Домен страницы входа", + "type": "string", + }, + { + "description": "Страница социальной сети.", + "is_dim": True, + "name": "ym:s:socialNetworkProfile", + 
"title": "Последняя страница соц. сети", + "type": "string", + }, + { + "description": "Рекламное объявление", + "is_dim": True, + "name": "ym:s:lastOpenstatAd", + "title": "Last Openstat Ad", + "type": "string", + }, + { + "description": "Страница социальной сети последнего значимого перехода посетителя за всю историю.", + "is_dim": True, + "name": "ym:s:lastSignSocialNetworkProfile", + "title": "Страница социальной сети, с которой был совершён последний значимый переход.", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel10", + "title": "Параметр визита, ур. 10", + "type": "string", + }, + { + "description": "Среднее число дней между визитами.", + "is_dim": True, + "name": "ym:s:userVisitsPeriod", + "title": "Периодичность (детально)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:impressionCountVisit", + "title": "Количество просмотров товаров в визите", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel3", + "title": "Параметр визита, ур. 3", + "type": "string", + }, + { + "description": "Часть URL реферера, следующая сразу после домена. В отличие от простого пути также содержит секции параметров и фрагмента.", + "is_dim": True, + "name": "ym:s:refererPathFull", + "title": "Путь (полный) реферера", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel4", + "title": "Параметр визита, ур. 4", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel1", + "title": "Параметр визита, ур. 1", + "type": "string", + }, + { + "description": 'Физическая ширина экрана исходя из данных "device pixel ratio".', + "is_dim": True, + "name": "ym:s:physicalScreenWidth", + "title": "Физическая ширина", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel2", + "title": "Параметр визита, ур. 
2", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel7", + "title": "Параметр визита, ур. 7", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel8", + "title": "Параметр визита, ур. 8", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel5", + "title": "Параметр визита, ур. 5", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel6", + "title": "Параметр визита, ур. 6", + "type": "string", + }, + { + "description": "Средство маркетинга", + "is_dim": True, + "name": "ym:s:firstUTMMedium", + "title": "First UTM Medium", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:PProductName", + "title": "Название товара в заказе", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:paramsLevel9", + "title": "Параметр визита, ур. 9", + "type": "string", + }, + { + "description": "Средство маркетинга", + "is_dim": True, + "name": "ym:s:lastUTMMedium", + "title": "Last UTM Medium", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserMinute", + "title": "Минута на компьютере посетителя", + "type": "string", + }, + { + "description": 'Наличие метки "GCLID".', + "is_dim": True, + "name": "ym:s:hasGCLIDName", + "title": "Наличие GCLID", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:month", + "title": "Месяц визита", + "type": "string", + }, + { + "description": "URL реферера", + "is_dim": True, + "name": "ym:s:referer", + "title": "Реферер", + "type": "string", + }, + { + "description": "Часть URL страницы выхода, следующая сразу после домена. 
В отличие от простого пути также содержит секции параметров и фрагмента", + "is_dim": True, + "name": "ym:s:endURLPathFull", + "title": "Путь (полный) страницы выхода", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:previousVisitYear", + "title": "Год предыдущего визита", + "type": "string", + }, + { + "description": 'Физическая высота экрана исходя из данных "device pixel ratio".', + "is_dim": True, + "name": "ym:s:physicalScreenHeight", + "title": "Физическая высота", + "type": "string", + }, + { + "description": "Место размещения", + "is_dim": True, + "name": "ym:s:firstOpenstatSource", + "title": "First Openstat Source", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:hasAdBlockerName", + "title": "Наличие блокировщиков рекламы", + "type": "string", + }, + { + "description": 'Первый уровень URL страницы выхода. Например, для "http://ya.ru/1/2/3/4.html" первым уровнем URL является "http://ya.ru/".', + "is_dim": True, + "name": "ym:s:endURLPathLevel1", + "title": "Страница выхода, ур. 1", + "type": "string", + }, + { + "description": 'Второй уровень URL страницы выхода. Например, для "http://ya.ru/1/2/3/4.html" вторым уровнем URL является "http://ya.ru/1/".', + "is_dim": True, + "name": "ym:s:endURLPathLevel2", + "title": "Страница выхода, ур. 2", + "type": "string", + }, + { + "description": 'Третий уровень URL страницы выхода. Например, для "http://ya.ru/1/2/3/4.html" третьим уровнем URL является "http://ya.ru/1/2/".', + "is_dim": True, + "name": "ym:s:endURLPathLevel3", + "title": "Страница выхода, ур. 3", + "type": "string", + }, + { + "description": 'Четвертый уровень URL страницы выхода. Например, для "http://ya.ru/1/2/3/4.html" четвертым уровнем URL является "http://ya.ru/1/2/3/".', + "is_dim": True, + "name": "ym:s:endURLPathLevel4", + "title": "Страница выхода, ур. 4", + "type": "string", + }, + { + "description": 'Пятый уровень URL страницы выхода. 
Например, для "http://ya.ru/1/2/3/4.html" пятым уровнем URL является "http://ya.ru/1/2/3/4.html".', + "is_dim": True, + "name": "ym:s:endURLPathLevel5", + "title": "Страница выхода, ур. 5", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserDayOfWeekName", + "title": "День недели на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserHourName", + "title": "Час на компьютере посетителя в формате HH:00", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:topLevelDomain", + "title": "Домен верхнего уровня страницы входа", + "type": "string", + }, + { + "description": "Дата первого визита посетителя в формате YYYY-MM-DD, округленная до начала квартала.", + "is_dim": True, + "name": "ym:s:firstVisitStartOfQuarter", + "title": "Квартал первого визита", + "type": "date", + }, + { + "description": "Округа, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:s:regionDistrictName", + "title": "Округ", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:purchaseRevenueVisit", + "title": "Сумма покупок в визите", + "type": "string", + }, + { + "description": "Дата и время первого визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:s:firstVisitStartOfMinute", + "title": "Минута первого визита (начало минуты)", + "type": "datetime", + }, + { + "description": "Дата визита в формате YYYY-MM-DD, округленная до начала месяца.", + "is_dim": True, + "name": "ym:s:startOfMonth", + "title": "Месяц визита (начало месяца)", + "type": "date", + }, + { + "description": "Города, в которых находятся посетители сайта.", + "is_dim": True, + "name": "ym:s:regionCityName", + "title": "Город", + "type": "string", + }, + { + "description": "Источник трафика - идентификатор социальной сети.", + "is_dim": True, + "name": "ym:s:socialNetworkName", + 
"title": "Последняя социальная сеть", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:endURLProto", + "title": "Протокол страницы выхода", + "type": "string", + }, + { + "description": "Источник перехода", + "is_dim": True, + "name": "ym:s:firstUTMSource", + "title": "First UTM Source", + "type": "string", + }, + { + "description": "Рекламный сервис", + "is_dim": True, + "name": "ym:s:openstatService", + "title": "Openstat Service", + "type": "string", + }, + { + "description": "Третье число (обычно номер сборки) в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:s:browserEngineVersion3", + "title": "Build-версия движка браузера", + "type": "string", + }, + { + "description": 'URL внешнего реферера для источника "Переходы по ссылкам на сайтах"', + "is_dim": True, + "name": "ym:s:externalReferer", + "title": "Внешний реферер", + "type": "string", + }, + { + "description": "Четвертое число (обычно номер ревизии) в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:s:browserEngineVersion4", + "title": "Revision-версия движка браузера", + "type": "string", + }, + { + "description": "Дата и время визита в формате YYYY-MM-DD HH:mm:ss.", + "is_dim": True, + "name": "ym:s:dateTime", + "title": "Дата и время визита", + "type": "datetime", + }, + { + "description": ( + "Часть URL-адреса внешнего реферера, следующая сразу после домена. " + "В отличие от простого пути также содержит секции параметров и фрагмента." + ), + "is_dim": True, + "name": "ym:s:externalRefererPathFull", + "title": "Путь (полный) внешнего реферера", + "type": "string", + }, + { + "description": "Дата и время визита в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:s:startOfMinute", + "title": "Минута визита (начало минуты)", + "type": "datetime", + }, + { + "description": ( + 'Часть URL внешнего реферера, следующая сразу после домена до "?" 
(секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. ' + 'Например, для страницы "http://news.yandex.ru/quotes/1.html" ' + 'путем является "/quotes/1.html", для "http://news.yandex.ru/" — "/", ' + 'а для "http://news.yandex.ru" — пустая строка.' + ), + "is_dim": True, + "name": "ym:s:externalRefererPath", + "title": "Путь внешнего реферера", + "type": "string", + }, + { + "description": "Дата первого визита посетителя в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:s:firstVisitDate", + "title": "Дата первого визита", + "type": "date", + }, + { + "description": "Тип условия показа объявления.", + "is_dim": True, + "name": "ym:s:firstDirectConditionTypeName", + "title": "Тип условия показа объявления (first)", + "type": "string", + }, + { + "description": "Дата первого визита посетителя в формате YYYY-MM-DD, округленная до начала года.", + "is_dim": True, + "name": "ym:s:firstVisitStartOfYear", + "title": "Год первого визита (начало года)", + "type": "date", + }, + { + "description": "Первое число в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:s:browserEngineVersion1", + "title": "Major-версия движка браузера", + "type": "string", + }, + { + "description": "Второе число в версии движка браузера посетителя.", + "is_dim": True, + "name": "ym:s:browserEngineVersion2", + "title": "Minor-версия движка браузера", + "type": "string", + }, + { + "description": "Рекламный сервис", + "is_dim": True, + "name": "ym:s:firstOpenstatService", + "title": "First Openstat Service", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productName", + "title": "Название товара", + "type": "string", + }, + { + "description": "Поисковая фраза последнего перехода по объявлению Яндекс.Маркета.", + "is_dim": True, + "name": "ym:s:marketSearchPhrase", + "title": "Фраза (Маркет)", + "type": "string", + }, + { + "description": "Континенты, в которых находятся посетители сайта.", + "is_dim": True, + "name": 
"ym:s:regionContinentName", + "title": "Континент", + "type": "string", + }, + { + "description": 'Реальное разрешение экрана исходя из данных "device pixel ratio". Например, "1920х1080".', + "is_dim": True, + "name": "ym:s:physicalScreenResolution", + "title": "Реальное разрешение", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:lastFrom", + "title": "Метка from (last)", + "type": "string", + }, + { + "description": "Категория коммерческих интересов посетителей.", + "is_dim": True, + "name": "ym:s:interest2Name1", + "title": "Категория интересов, ур. 1", + "type": "string", + }, + { + "description": "Категория коммерческих интересов посетителей.", + "is_dim": True, + "name": "ym:s:interest2Name2", + "title": "Категория интересов, ур. 2", + "type": "string", + }, + { + "description": "Категория коммерческих интересов посетителей.", + "is_dim": True, + "name": "ym:s:interest2Name3", + "title": "Категория интересов, ур. 3", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserDekaminute", + "title": "Декаминута на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productPrice", + "title": "Стоимость единицы товара", + "type": "string", + }, + { + "description": "Место размещения", + "is_dim": True, + "name": "ym:s:lastOpenstatSource", + "title": "Last Openstat Source", + "type": "string", + }, + { + "description": "Ключевые слова", + "is_dim": True, + "name": "ym:s:lastSignUTMTerm", + "title": "Last Sign UTM Term", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlineCallMissedName", + "title": "Пропущенный / Отвеченный", + "type": "string", + }, + { + "description": "Социальная сеть первого перехода посетителя.", + "is_dim": True, + "name": "ym:s:firstSocialNetworkName", + "title": "Первая социальная сеть", + "type": "string", + }, + { + "description": "Рекламное объявление", + "is_dim": 
True, + "name": "ym:s:lastSignOpenstatAd", + "title": "Last Sign Openstat Ad", + "type": "string", + }, + { + "description": 'IPv4/IPv6-адрес посетителя с обнуленными 1/8 последними байтами соответственно. Например, "77.88.21.0"/"2001:db8:85a3::".', + "is_dim": True, + "name": "ym:s:ipAddress", + "title": "IP-адрес", + "type": "string", + }, + { + "description": "Дата визита в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:s:date", + "title": "Дата визита", + "type": "date", + }, + { + "description": "Рекламная кампания", + "is_dim": True, + "name": "ym:s:lastSignOpenstatCampaign", + "title": "Last Sign Openstat Campaign", + "type": "string", + }, + { + "description": 'Браузер посетителя. Например, "Яндекс.Браузер".', + "is_dim": True, + "name": "ym:s:browserName", + "title": "Браузер", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:minute", + "title": "Минута визита", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss.", + "is_dim": True, + "name": "ym:s:browserDateTime", + "title": "Дата и время на компьютере посетителя", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitMonth", + "title": "Месяц первого визита", + "type": "string", + }, + { + "description": "Название проводимой рекламной кампании", + "is_dim": True, + "name": "ym:s:UTMCampaign", + "title": "UTM Campaign", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:clientTimeZone", + "title": "Часовой пояс на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:externalRefererProto", + "title": "Протокол внешнего реферера", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productPosition", + "title": "Позиция товара", + "type": "string", + }, + { + "description": "Дата визита в формате YYYY-MM-DD, округленная до начала 
года.", + "is_dim": True, + "name": "ym:s:startOfYear", + "title": "Год визита (начало года)", + "type": "date", + }, + { + "description": "Дата и время предпоследнего визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала года.", + "is_dim": True, + "name": "ym:s:previousVisitStartOfYear", + "title": "Год предпоследнего визита", + "type": "date", + }, + { + "description": 'Движок браузера посетителя. Например, "WebKit".', + "is_dim": True, + "name": "ym:s:browserEngine", + "title": "Движок браузера", + "type": "string", + }, + { + "description": ( + 'Часть URL страницы выхода, следующая сразу после домена до "?" (секция параметров), ' + '"#" (секция фрагмента) либо до конца строки. ' + 'Например, для страницы "http://news.yandex.ru/quotes/1.html" ' + 'путем является "/quotes/1.html", для "http://news.yandex.ru/" - "/", ' + 'а для "http://news.yandex.ru" - пустая строка' + ), + "is_dim": True, + "name": "ym:s:endURLPath", + "title": "Путь страницы выхода", + "type": "string", + }, + { + "description": "Количество визитов посетителя за всю историю.", + "is_dim": True, + "name": "ym:s:userVisits", + "title": "Всего визитов", + "type": "string", + }, + { + "description": "Средство маркетинга", + "is_dim": True, + "name": "ym:s:lastSignUTMMedium", + "title": "Last Sign UTM Medium", + "type": "string", + }, + { + "description": "Время ― начало часового периода в формате HH:MM.", + "is_dim": True, + "name": "ym:s:firstVisitHourName", + "title": "Час первого визита", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitHourMinute", + "title": "Час и минута первого визита", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:offlineCallFirstTimeCallerName", + "title": "Первичный / Повторный", + "type": "string", + }, + { + "description": "Название проводимой рекламной кампании", + "is_dim": True, + "name": "ym:s:lastSignUTMCampaign", + "title": "Last Sign UTM Campaign", + "type": 
"string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала минуты.", + "is_dim": True, + "name": "ym:s:browserStartOfMinute", + "title": "Минута на компьютере посетителя (начало минуты)", + "type": "datetime", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitDayOfWeekName", + "title": "День недели первого визита посетителя", + "type": "string", + }, + { + "description": "Рекламное объявление", + "is_dim": True, + "name": "ym:s:openstatAd", + "title": "Openstat Ad", + "type": "string", + }, + { + "description": 'Тип устройства, с которого было посещение. Возможные значения: "desktop", "mobile", "tablet", "tv".', + "is_dim": True, + "name": "ym:s:deviceCategoryName", + "title": "Тип устройства", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:startURL", + "title": "Страница входа", + "type": "string", + }, + { + "description": ( + "Был ли данный визит отказом с учетом точного показателя отказов. " + "Если страница просматривалась больше 15 секунд, " + 'то визит не считается отказом. Возможные значения: "Yes", "No".' 
+ ), + "is_dim": True, + "name": "ym:s:bounceName", + "title": "Отказность", + "type": "string", + }, + { + "description": "Количество дней с первого визита посетителя.", + "is_dim": True, + "name": "ym:s:daysSinceFirstVisit", + "title": "Дней от первого визита (детально)", + "type": "string", + }, + { + "description": "Ключевые слова", + "is_dim": True, + "name": "ym:s:lastUTMTerm", + "title": "Last UTM Term", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:operatingSystemName", + "title": "Операционная система (детально)", + "type": "string", + }, + { + "description": "Продолжительность визита в секундах.", + "is_dim": True, + "name": "ym:s:visitDuration", + "title": "Время на сайте (детально)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productNameCart", + "title": "Название товара в корзине", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:isTurboPageName", + "title": "Турбо-страница", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:dayOfMonth", + "title": "День месяца визита", + "type": "string", + }, + { + "description": "Валюта, установленная в Яндекс.Директе для рекламной кампании.", + "is_dim": True, + "name": "ym:s:lastSignCurrencyIDName", + "title": "Валюта (last sign)", + "type": "string", + }, + { + "description": 'Наличие метки "GCLID".', + "is_dim": True, + "name": "ym:s:firstHasGCLIDName", + "title": "Наличие GCLID (first)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:counterID", + "title": "Счетчик (id)", + "type": "string", + "src_key": "id", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:counterIDName", + "title": "Счетчик", + "type": "string", + }, + { + "description": "Дата и время на компьютере посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала 10-минутного интервала.", + "is_dim": True, + "name": 
"ym:s:browserStartOfDekaminute", + "title": "Начало декаминуты на компьютере посетителя", + "type": "datetime", + }, + { + "description": "Рекламная кампания", + "is_dim": True, + "name": "ym:s:openstatCampaign", + "title": "Openstat Campaign", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:PProductID", + "title": "ID товара в заказе", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:purchaseID", + "title": "ID покупки", + "type": "string", + }, + { + "description": "Рекламный сервис", + "is_dim": True, + "name": "ym:s:lastSignOpenstatService", + "title": "Last Sign Openstat Service", + "type": "string", + }, + # { + # 'description': 'Интервал дат с параметризацией размера интервала', + # # 'is_dim': True, + # 'name': 'ym:s:datePeriod', + # 'title': 'Интервал дат визита', + # 'type': 'string', + # }, + { + "description": "Источник перехода", + "is_dim": True, + "name": "ym:s:lastUTMSource", + "title": "Last UTM Source", + "type": "string", + }, + { + "description": "Дата и время предпоследнего визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала месяца.", + "is_dim": True, + "name": "ym:s:previousVisitStartOfMonth", + "title": "Месяц предпоследнего визита", + "type": "date", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCategoryLevel4", + "title": "Категория товара, ур. 4", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCategoryLevel5", + "title": "Категория товара, ур. 5", + "type": "string", + }, + { + "description": "С каких сайтов осуществлялись переходы на сайт.", + "is_dim": True, + "name": "ym:s:firstReferalSource", + "title": "Первый переход с сайтов", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCategoryLevel2", + "title": "Категория товара, ур. 
2", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCategoryLevel3", + "title": "Категория товара, ур. 3", + "type": "string", + }, + { + "description": "Идентификатор верхнего уровня иерархии поисковых систем.", + "is_dim": True, + "name": "ym:s:searchEngineRootName", + "title": "Последняя поисковая система", + "type": "string", + }, + { + "description": "Размер города по населению.", + "is_dim": True, + "name": "ym:s:regionCitySizeName", + "title": "Размер города", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productCategoryLevel1", + "title": "Категория товара, ур. 1", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:productIDCart", + "title": "ID товара в корзине", + "type": "string", + }, + { + "description": "Источник перехода", + "is_dim": True, + "name": "ym:s:lastSignUTMSource", + "title": "Last Sign UTM Source", + "type": "string", + }, + { + "description": "Дата визита в формате YYYY-MM-DD, округленная до начала недели (понедельник).", + "is_dim": True, + "name": "ym:s:startOfWeek", + "title": "Неделя визита", + "type": "date", + }, + { + "description": "Дата и время предпоследнего визита посетителя в формате YYYY-MM-DD HH:mm:ss, округленное до начала недели.", + "is_dim": True, + "name": "ym:s:previousVisitStartOfWeek", + "title": "Неделя предпоследнего визита", + "type": "date", + }, + { + "description": "Объявление", + "is_dim": True, + "name": "ym:s:lastSignUTMContent", + "title": "Last Sign UTM Content", + "type": "string", + }, + { + "description": "Группировка по достигнутой цели. 
Позволяет сгруппировать визиты по идентификаторам целей.", + "is_dim": True, + "name": "ym:s:goalName", + "title": "Достигнутая цель", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:lastSignFrom", + "title": "Метка from (last sign)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:browserMonth", + "title": "Месяц на компьютере посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstFrom", + "title": "Метка from (first)", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:mobilePhoneName", + "title": "Производитель устройства", + "type": "string", + }, + { + "description": "Дата на компьютере посетителя в формате YYYY-MM-DD.", + "is_dim": True, + "name": "ym:s:browserDate", + "title": "Дата на компьютере посетителя", + "type": "date", + }, + { + "description": 'Пол посетителя. Возможные значения: "мужской" или "женский".', + "is_dim": True, + "name": "ym:s:genderName", + "title": "Пол", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:previousVisitDayOfWeekName", + "title": "День недели предыдущего визита", + "type": "string", + }, + { + "description": 'Идентификатор верхнего уровня иерархии поисковых систем первого визита посетителя. 
Например, "Яндекс".', + "is_dim": True, + "name": "ym:s:lastSignSearchEngineRootName", + "title": "Последняя значимая поисковая система", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:firstVisitYear", + "title": "Год первого визита", + "type": "string", + }, + { + "description": "Высота клиентской части окна браузера.", + "is_dim": True, + "name": "ym:s:windowClientHeight", + "title": "Высота окна", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:isNewUserName", + "title": "Является ли визит первым визитом посетителя", + "type": "string", + }, + { + "description": None, + "is_dim": True, + "name": "ym:s:refererDomain", + "title": "Домен реферера", + "type": "string", + }, +] diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/appmetrica_dbapi.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/appmetrica_dbapi.py new file mode 100644 index 000000000..cf4f63a88 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/appmetrica_dbapi.py @@ -0,0 +1,80 @@ +""" +Python Database API Specification 2.0 interface implementation for AppMetrica API +https://www.python.org/dev/peps/pep-0249/ +""" + +from __future__ import annotations + +from sqlalchemy.types import ( # noqa; TODO: might actually be unnecessary. 
+ DATE, + DATETIME, + FLOAT, + INTEGER, + NULLTYPE, + VARCHAR, +) + +from dl_sqlalchemy_metrica_api import metrika_dbapi +from dl_sqlalchemy_metrica_api.api_client import APPMETRICA_API_HOST +from dl_sqlalchemy_metrica_api.api_info.appmetrica import ( + AppMetricaFieldsNamespaces, + fields_by_namespace, +) +from dl_sqlalchemy_metrica_api.exceptions import ( # noqa + ConnectionClosedException, + CursorClosedException, + DatabaseError, + DataError, + Error, + IntegrityError, + InterfaceError, + InternalError, + MetrikaApiAccessDeniedException, + MetrikaApiException, + MetrikaApiObjectNotFoundException, + MetrikaHttpApiException, + NotSupportedError, + OperationalError, + ProgrammingError, + Warning, +) + + +apilevel = "2.0" +threadsafety = 2 +paramstyle = "pyformat" +default_storage_plugin = "" + + +class Connection(metrika_dbapi.Connection): + metrica_host = APPMETRICA_API_HOST + metrica_fields_namespaces_enum = AppMetricaFieldsNamespaces + + @metrika_dbapi.check_connected + def cursor(self): + return Cursor(api_client=self._cli, connection=self) + + @metrika_dbapi.check_connected + def get_table_names(self): + avail_counters = self._cli.get_available_counters() + return list(str(c_info["id"]) for c_info in avail_counters) + + def get_columns(self): + field_props = ("name", "type", "is_dim") + return { + "fields": field_props, + "data": [ + tuple(f_desc[prop] for prop in field_props) for f_desc in fields_by_namespace[self.fields_namespace] + ], + } + + +def connect(oauth_token=None, **kwargs): + oauth_token = oauth_token or kwargs.get("password") + fields_namespace = kwargs.get("database") + accuracy = kwargs.get("accuracy") + return Connection(oauth_token=oauth_token, fields_namespace=fields_namespace, accuracy=accuracy) # , **kwargs) + + +class Cursor(metrika_dbapi.Cursor): + """AppMetrica dbapi cursor""" diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/base.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/base.py new file mode 
100644 index 000000000..2317f1389 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/base.py @@ -0,0 +1,651 @@ +from __future__ import annotations + +import datetime +import logging +import re +from urllib.parse import urlencode + +import dateutil.parser +from sqlalchemy import ( + Unicode, + exc, + pool, + util, +) +from sqlalchemy.engine import ( + default, + reflection, +) +from sqlalchemy.sql import ( + compiler, + elements, + operators, + sqltypes, +) + +from dl_sqlalchemy_metrica_api import ( + appmetrica_dbapi, + metrika_dbapi, +) +from dl_sqlalchemy_metrica_api.api_info import appmetrica as appmetrica_api_info +from dl_sqlalchemy_metrica_api.api_info import metrika as metrika_api_info +from dl_sqlalchemy_metrica_api.exceptions import ( + MetrikaApiDimensionInCalc, + MetrikaApiGroupByNotSupported, + MetrikaApiNoMetricsNorGroupBy, + NotSupportedError, +) + + +LOGGER = logging.getLogger(__name__) + + +MAX_LIMIT_VALUE = 10000 +DEFAULT_DATE_PERIOD = 60 # days + + +class MetrikaApiReqPreparer(compiler.IdentifierPreparer): + illegal_initial_characters = {"$"} + legal_characters = re.compile(r"^[A-Z0-9_:<>\-$]+$", re.I) # added ":<>-" + + def __init__(self, dialect, **kwargs): + kwargs.update(initial_quote="'", escape_quote="'") + super(MetrikaApiReqPreparer, self).__init__(dialect, **kwargs) + + def _requires_quotes(self, value): + """Return True if the given identifier requires quoting.""" + lc_value = value.lower() + return ( + lc_value in self.reserved_words + or value[0] in self.illegal_initial_characters + or not self.legal_characters.match(util.text_type(value)) + # or (lc_value != value) + ) + + +class MetrikaApiReqCompiler(compiler.SQLCompiler): + _extra_select_params = None + _extra_bind_params = None + _ordered_columns = None + _labeled_columns_map = None + _group_by_fields = None + _casts = None + _date_filter_present = None + + def _flush_tmp_properties(self): + self._extra_select_params = {} + self._extra_bind_params = {} + 
self._ordered_columns = False + self._labeled_columns_map = {} + self._group_by_fields = [] + self._casts = {} + self._date_filter_present = False + + @property + def api_info(self): + return metrika_api_info + + def check_field_is_date_datetime(self, field_name): + field_info = self.api_info.fields_by_name.get(field_name) + return field_info and field_info["type"] in ("date", "datetime") + + def visit_column( + self, + column, + add_to_result_map=None, + include_table=True, + _is_in_filter=False, + **kwargs, + ): + name = orig_name = column.name + if name is None: + name = self._fallback_column_name(column) + + is_literal = column.is_literal + if not is_literal and isinstance(name, elements._truncated_label): + name = self._truncated_identifier("colident", name) + + if add_to_result_map is not None: + add_to_result_map(name, orig_name, (column, name, column.key), column.type) + + # Replacing label with real field name + if name in self._labeled_columns_map: + name = self._labeled_columns_map[name][0] + + if _is_in_filter and self.check_field_is_date_datetime(name): + self._date_filter_present = True + + if is_literal: + name = self.escape_literal_column(name) + else: + name = self.preparer.quote(name) + + return name + + def visit_eq_binary(self, binary, operator_, **kw): + return self._generate_generic_binary(binary, "==", **kw) # "==" instead of "=" + + def visit_inv_unary(self, element, operator_, **kw): + return "NOT(%s)" % self.process(element.element, **kw) + + def bindparam_string(self, name, _extra_quoting=True, **kw): + if _extra_quoting: + return "'%s'" % (self.bindtemplate % {"name": name}) + else: + return self.bindtemplate % {"name": name} + + def render_literal_value(self, value, type_): + if isinstance(type_, Unicode): + assert isinstance(value, str) + value = value.replace("\\", "\\\\").replace("'", "\\'") + + if self.dialect.identifier_preparer._double_percents: + value = value.replace("%", "%%") + + return "'%s'" % value + + processor = 
type_._cached_literal_processor(self.dialect) + if processor: + return processor(value) + else: + raise NotImplementedError("Don't know how to literal-quote value %r" % value) + + def visit_bindparam( + self, + bindparam, + within_columns_clause=False, + literal_binds=False, + skip_bind_expression=False, + _extra_quoting=True, + **kwargs, + ): + if literal_binds or (within_columns_clause and self.ansi_bind_rules): + if bindparam.value is None and bindparam.callable is None: + raise exc.CompileError( + "Bind parameter '%s' without a " "renderable value not allowed here." % bindparam.key + ) + return self.render_literal_bindparam(bindparam, within_columns_clause=True, **kwargs) + + if bindparam.callable: + value = bindparam.effective_value + else: + value = bindparam.value + + def escape(value: str) -> str: + return value.replace("\\", "\\\\").replace("'", "\\'") + + if bindparam.expanding: + assert not isinstance(value, str) + return "({})".format( + ", ".join(f"'{escape(str(piece))}'" if _extra_quoting else escape(str(piece)) for piece in value) + ) + + if isinstance(value, list) and len(value) == 1: + value = value[0] + + if isinstance(value, str): + value = escape(value) + + if _extra_quoting: + return "'%s'" % value + return value + + def visit_between_op_binary(self, binary, operator, **kw): + left_value = binary.left._compiler_dispatch(self, **kw) + if self.check_field_is_date_datetime(left_value): + if len(binary.right.clauses) != 2: + raise exc.CompileError("Unexpected between arguments count") + self._extra_select_params.update( + date1=binary.right.clauses[0]._compiler_dispatch(self, _extra_quoting=False), + date2=binary.right.clauses[1]._compiler_dispatch(self, _extra_quoting=False), + ) + else: + raise NotSupportedError( + "BETWEEN operator supported only for date/datetime fields. 
" "Requested field: {}".format(left_value) + ) + + return "" + + def visit_not_between_op_binary(self, binary, operator, **kw): + raise NotSupportedError() + + def visit_notbetween_op_binary(self, binary, operator, **kw): + raise NotSupportedError() + + @util.memoized_property + def _like_percent_literal(self): + return elements.literal_column("'*'", type_=sqltypes.STRINGTYPE) + + def visit_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "*%s*" % binary.right.value + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_contains_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "*%s*" % binary.right.value + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_notcontains_op_binary(self, binary, operator, **kw): + return self.visit_not_contains_op_binary(binary, operator, **kw) + + def visit_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "%s*" % binary.right.value + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_startswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "%s*" % binary.right.value + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_notstartsswith_op_binary(self, binary, operator, **kw): + return self.visit_not_startsswith_op_binary(binary, operator, **kw) + + def visit_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "*%s" % binary.right.value + return self.visit_like_op_binary(binary, operator, **kw) + + def visit_not_endswith_op_binary(self, binary, operator, **kw): + binary = binary._clone() + binary.right.value = "*%s" % binary.right.value + return self.visit_notlike_op_binary(binary, operator, **kw) + + def visit_notendswith_op_binary(self, binary, operator, **kw): + return self.visit_not_endswith_op_binary(binary, 
operator, **kw) + + def visit_like_op_binary(self, binary, operator, **kw): + return "%s=*%s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + + def visit_notlike_op_binary(self, binary, operator, **kw): + return self.visit_not_like_op_binary(binary, operator, **kw) + + def visit_not_like_op_binary(self, binary, operator, **kw): + return "%s!*%s" % ( + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw), + ) + + def visit_ilike_op_binary(self, binary, operator, **kw): + raise NotSupportedError() + + def visit_not_ilike_op_binary(self, binary, operator, **kw): + raise NotSupportedError() + + def visit_notilike_op_binary(self, binary, operator, **kw): + raise NotSupportedError() + + def _get_clause_name(self, clause, **kwargs): + if hasattr(clause, "name"): + return clause.name + elif hasattr(clause, "clause") and hasattr(clause.clause, "name"): + return clause.clause.name + else: + return clause._compiler_dispatch(self, **kwargs) + + def visit_sum_func(self, func, **kwargs): + clauses = func.clause_expr.element.clauses + for cla in clauses: + cla_name = self._get_clause_name(cla, **kwargs) + if cla_name in self.api_info.fields_by_name and self.api_info.fields_by_name[cla_name]["is_dim"]: + raise MetrikaApiDimensionInCalc('Not able to use dimensions in calculations: "%s"' % cla_name) + + return " + ".join([cla._compiler_dispatch(self, **kwargs) for cla in clauses]) + + def visit_clauselist(self, clauselist, _group_by_clause=False, **kwargs): + sep = clauselist.operator + if sep is None: + sep = " " + elif sep == operators.comma_op: + sep = "," # instead of ", " + else: + sep = compiler.OPERATORS[clauselist.operator] + + if _group_by_clause: + clauses = [] + for cla in clauselist.clauses: + cla_name = self._get_clause_name(cla, **kwargs) + if cla_name in self._labeled_columns_map and self._labeled_columns_map[cla_name][1] is not None: + 
clauses.append(self._labeled_columns_map[cla_name][1]) + else: + clauses.append(cla) + + for cla in clauses: + cla_name = self._get_clause_name(cla, **kwargs) + if cla_name not in self.api_info.fields_by_name or not self.api_info.fields_by_name[cla_name]["is_dim"]: + raise MetrikaApiGroupByNotSupported('Grouping by field "%s" is not possible' % cla_name) + + self._group_by_fields = [name for name in (self._get_clause_name(cla, **kwargs) for cla in clauses) if name] + else: + clauses = clauselist.clauses + + return sep.join(s for s in (c._compiler_dispatch(self, **kwargs) for c in clauses) if s) + + def visit_select(self, *args, **kwargs): + self._flush_tmp_properties() + return super().visit_select(*args, **kwargs) + + def _compose_select_body( + self, + text, + select, + compile_state, + inner_columns, + froms, + byfrom, + toplevel, + kwargs, + ): + """ + https://tech.yandex.ru/metrika/doc/api2/api_v1/data-docpage/ + """ + + query_params = {} + + if self.linting & compiler.COLLECT_CARTESIAN_PRODUCTS: + from_linter = compiler.FromLinter({}, set()) + warn_linting = self.linting & compiler.WARN_LINTING + if toplevel: + self.from_linter = from_linter + else: + from_linter = None + warn_linting = False + + if froms: + query_params.update( + ids=",".join( + [f._compiler_dispatch(self, asfrom=True, from_linter=from_linter, **kwargs) for f in froms] + ) + ) + else: + raise NotSupportedError('empty "FROM" clause') + + filters = [] + if select._where_criteria: + where_str = self._generate_delimited_and_list( + select._where_criteria, from_linter=from_linter, _is_in_filter=True, **kwargs + ) + if where_str: + filters.append(where_str) + + if warn_linting: + from_linter.warn() + + if select._having_criteria: + having_str = self._generate_delimited_and_list( + select._having_criteria, + _is_in_filter=True, + **kwargs, + ) + if having_str: + filters.append(having_str) + + if filters: + query_params.update(filters=" AND ".join(filters)) + + if select._group_by_clauses: + # Here 
only generating self._group_by_fields list. + # It will be written into query_params later, after metrics list generation. + select._group_by_clause._compiler_dispatch(self, _group_by_clause=True, **kwargs) + + metrics_cols = [] + metrics_dims_cols = [] + for col in inner_columns: + col_desc = self.api_info.fields_by_name.get(col) + if col_desc and col_desc["is_dim"]: + metrics_dims_cols.append(col) + else: + metrics_cols.append(col) + if not metrics_cols: + if metrics_dims_cols and select._distinct: + for dim in metrics_dims_cols: + self._group_by_fields.append(dim) + + if self._group_by_fields: + # Hack to be able to get dimensions values + # without necessity to specify any unnecessary metric field explicitly. + fields_namespace = self.api_info.get_namespace_by_name(self._group_by_fields[0]) + any_metric = self.api_info.metrics_by_namespace[fields_namespace][0]["name"] + metrics_cols.append(any_metric) + else: + raise MetrikaApiNoMetricsNorGroupBy("Not found neither metrics to select nor dimensions for group by.") + query_params.update(metrics=",".join(metrics_cols)) + + if self._group_by_fields: + query_params.update(dimensions=",".join(self._group_by_fields)) + + if select._order_by_clauses: + order_expr = select._order_by_clause._compiler_dispatch(self, **kwargs) + if order_expr: + query_params.update(sort=order_expr) + + if select._limit_clause is not None and select._limit_clause.value < MAX_LIMIT_VALUE: + limit_val = select._limit_clause.value + else: + limit_val = MAX_LIMIT_VALUE + query_params.update(limit=limit_val) + + if select._offset_clause is not None: + # In Metrika API offset starts from 1 + offset_val = int(select._offset_clause.value) + 1 + query_params.update(offset=offset_val) + + if select._for_update_arg is not None: + raise NotSupportedError("FOR UPDATE") + + query_params.update(self._extra_select_params) + + for date_param in ("date1", "date2"): + if date_param in query_params: + value = query_params[date_param] + if isinstance(value, 
list): + value = value[0] + if not type(value) == datetime.date: + if isinstance(value, datetime.datetime): + value = value.date() + else: + value = dateutil.parser.parse(value).date() + query_params[date_param] = value + + today = datetime.date.today() + if not self._date_filter_present and not query_params.get("date1") and not query_params.get("date2"): + # TODO: use counter timezone + dt1 = today - datetime.timedelta(days=DEFAULT_DATE_PERIOD) + query_params.update(date1=dt1, date2=today) + + if "date2" in query_params and query_params["date2"] > today: + query_params.update(date2=today) + + return urlencode(query_params) + + def visit_table(self, table, asfrom=False, ashint=False, **kwargs): + if asfrom or ashint: + return table.name + else: + return "" + + def visit_label( + self, + label, + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + **kw, + ): + # only render labels within the columns clause + # or ORDER BY clause of a select. dialect-specific compilers + # can modify this behavior. 
+ render_label_with_as = within_columns_clause and not within_label_clause + render_label_only = render_label_as_label is label + + if render_label_only or render_label_with_as: + if isinstance(label.name, elements._truncated_label): + labelname = self._truncated_identifier("colident", label.name) + else: + labelname = label.name + + if render_label_with_as: + if add_to_result_map is not None: + add_to_result_map( + labelname, + label.name, + (label, labelname) + label._alt_names, + label.type, + ) + + element_processed = label.element._compiler_dispatch( + self, within_columns_clause=True, within_label_clause=True, **kw + ) + + def _unwrap_to_column_clause(element): + LOGGER.info(f"element {element} {type(element)}") + if isinstance(element, elements.ColumnClause): + return element + if hasattr(element, "clause"): + return _unwrap_to_column_clause(element.clause) + else: + LOGGER.warning("Unable dispatch to ColumnClause") + return None + + internal_column_clause = _unwrap_to_column_clause(label.element) + self._labeled_columns_map[labelname] = (element_processed, internal_column_clause) + + return element_processed + else: + return label.element._compiler_dispatch(self, within_columns_clause=False, **kw) + + def visit_asc_op_unary_modifier(self, unary, modifier, **kw): + return unary.element._compiler_dispatch(self, **kw) + + def visit_desc_op_unary_modifier(self, unary, modifier, **kw): + return "-" + unary.element._compiler_dispatch(self, **kw) + + def visit_cast(self, cast, **kwargs): + param_name = cast.clause._compiler_dispatch(self, **kwargs) + cast_type = cast.typeclause._compiler_dispatch(self, **kwargs) + self._casts[param_name] = cast_type + return param_name + + def visit_insert(self, *args, **kwargs): + raise NotSupportedError("INSERT") + + def visit_update(self, *args, **kwargs): + raise NotSupportedError("UPDATE") + + def construct_params( + self, + params=None, + _group_number=None, + _check=True, + extracted_parameters=None, + escape_names=True, + 
): + prepared_params = super().construct_params(params, _group_number, _check, extracted_parameters) + result_cols = [] + for col in self._result_columns: + field_name = self._labeled_columns_map.get(col[0], (col[0],))[0] + result_cols.append( + dict( + label=col[0], + name=field_name, + src_key=self.api_info.fields_by_name.get(field_name, {}).get("src_key"), + ) + ) + prepared_params["__RESULT_COLUMNS__"] = result_cols + prepared_params.update(self._extra_bind_params) + if self._casts: + prepared_params["__CASTS__"] = self._casts + LOGGER.info("Metrica query prepared params: %s", prepared_params) + return prepared_params + + +# class MetrikaApiExecutionContext(default.DefaultExecutionContext): +# def get_result_proxy(self): +# res_proxy = result.ResultProxy(self) +# return res_proxy + + +class MetrikaApiDialect(default.DefaultDialect): + name = "metrika_api" + supports_unicode_statements = True + supports_unicode_binds = True + supports_empty_insert = False + supports_multivalues_insert = True + supports_alter = False + supports_pk_autoincrement = False + supports_default_values = False + returns_unicode_strings = True + supports_native_boolean = False + supports_views = False + supports_statement_cache = False + + poolclass = pool.SingletonThreadPool # pool.NullPool + + # execution_ctx_cls = MetrikaApiExecutionContext + statement_compiler = MetrikaApiReqCompiler + preparer = MetrikaApiReqPreparer + + @reflection.cache + def get_table_names(self, connection, schema=None, **kw): + res = connection.execute(metrika_dbapi.InternalCommands.get_tables.value) + return [item[0] for item in res] + + def has_table(self, connection, table_name, schema=None): + return table_name in self.get_table_names(connection) + + def get_columns(self, connection, table_name, schema=None, **kw): + metrika_fields = connection.execute(metrika_dbapi.InternalCommands.get_columns.value) + + columns = [] + for col in metrika_fields: + columns.append( + { + "name": col["name"], + "type": 
metrika_dbapi.metrika_types_to_sqla[col["type"]], + "nullable": not col["is_dim"], + } + ) + return columns + + @classmethod + def dbapi(cls): + return metrika_dbapi + + def _check_unicode_returns(self, connection, additional_tests=None): + return True + + def _check_unicode_description(self, connection): + return True + + def do_rollback(self, dbapi_connection): + pass + + def get_foreign_keys(self, connection, table_name, schema=None, **kw): + return [] + + def get_indexes(self, connection, table_name, schema=None, **kw): + return [] + + def get_pk_constraint(self, connection, table_name, schema=None, **kw): + return [] + + +class AppMetricaApiReqCompiler(MetrikaApiReqCompiler): + @property + def api_info(self): + return appmetrica_api_info + + +class AppMetricaApiDialect(MetrikaApiDialect): + name = "appmetrica_api" + statement_compiler = AppMetricaApiReqCompiler + supports_statement_cache = False + + @classmethod + def dbapi(cls): + return appmetrica_dbapi diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/exceptions.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/exceptions.py new file mode 100644 index 000000000..61e404499 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/exceptions.py @@ -0,0 +1,83 @@ +# Standard exceptions according to dbapi v2 interface +# https://www.python.org/dev/peps/pep-0249/ + + +class Error(Exception): + pass + + +class Warning(Exception): + pass + + +class InterfaceError(Error): + pass + + +class DatabaseError(Error): + pass + + +class DataError(DatabaseError): + pass + + +class OperationalError(DatabaseError): + pass + + +class IntegrityError(DatabaseError): + pass + + +class InternalError(DatabaseError): + pass + + +class ProgrammingError(DatabaseError): + pass + + +class NotSupportedError(DatabaseError): + pass + + +# Dialect specific exceptions + + +class MetrikaApiException(DatabaseError): + def __init__(self, *args, **kwargs): + self.orig_exc = kwargs.pop("orig_exc", 
None) + super().__init__(*args, **kwargs) + + +class MetrikaHttpApiException(MetrikaApiException): + pass + + +class MetrikaApiAccessDeniedException(MetrikaHttpApiException): + pass + + +class MetrikaApiObjectNotFoundException(MetrikaHttpApiException): + pass + + +class MetrikaApiGroupByNotSupported(NotSupportedError): + pass + + +class MetrikaApiDimensionInCalc(NotSupportedError): + pass + + +class MetrikaApiNoMetricsNorGroupBy(ProgrammingError): + pass + + +class ConnectionClosedException(MetrikaApiException): + pass + + +class CursorClosedException(MetrikaApiException): + pass diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/metrika_dbapi.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/metrika_dbapi.py new file mode 100644 index 000000000..7d3308b6d --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api/metrika_dbapi.py @@ -0,0 +1,342 @@ +""" +Python Database API Specification 2.0 interface implementation for Metrica API +https://www.python.org/dev/peps/pep-0249/ +""" + +from __future__ import annotations + +from datetime import date +from enum import Enum +from functools import wraps +from urllib.parse import parse_qs + +import dateutil.parser +from sqlalchemy.types import ( + BOOLEAN, + DATE, + DATETIME, + FLOAT, + INTEGER, + NULLTYPE, + VARCHAR, +) + +from dl_sqlalchemy_metrica_api.api_client import ( + METRIKA_API_HOST, + MetrikaApiClient, +) +from dl_sqlalchemy_metrica_api.api_info.metrika import ( + MetrikaApiCounterSource, + fields_by_name, + fields_by_namespace, +) +from dl_sqlalchemy_metrica_api.exceptions import ( # noqa + ConnectionClosedException, + CursorClosedException, + DatabaseError, + DataError, + Error, + IntegrityError, + InterfaceError, + InternalError, + MetrikaApiAccessDeniedException, + MetrikaApiException, + MetrikaApiObjectNotFoundException, + MetrikaHttpApiException, + NotSupportedError, + OperationalError, + ProgrammingError, + Warning, +) + + +apilevel = "2.0" +threadsafety = 2 
+paramstyle = "pyformat" +default_storage_plugin = "" + + +metrika_types_to_sqla = { + "integer": INTEGER, + "float": FLOAT, + "string": VARCHAR, + "date": DATE, + "datetime": DATETIME, + # "percents": FLOAT, +} + + +cast_processors = { + "INTEGER": int, + "FLOAT": float, + "VARCHAR": str, + "BOOLEAN": bool, + "DATE": lambda t: dateutil.parser.parse(t).date(), + "DATETIME": lambda t: dateutil.parser.parse(t), +} + + +cast_processors_for_metrika_types = { + "date": cast_processors["DATE"], + "datetime": cast_processors["DATETIME"], +} + + +cast_type_to_sqla_type = { + "INTEGER": INTEGER, + "FLOAT": FLOAT, + "VARCHAR": VARCHAR, + "BOOLEAN": BOOLEAN, + "DATE": DATE, + "DATETIME": DATETIME, +} + + +def check_connected(func): + @wraps(func) + def func_wrapper(self, *args, **kwargs): + if self.is_connected is False: + raise ConnectionClosedException("Connection object is closed") + else: + return func(self, *args, **kwargs) + + return func_wrapper + + +class Connection(object): + metrica_host = METRIKA_API_HOST + metrica_fields_namespaces_enum = MetrikaApiCounterSource + + fields_namespace = None + accuracy = None + + def __init__(self, oauth_token, fields_namespace=None, accuracy=None, **client_kwargs): + client_kwargs.setdefault("host", self.metrica_host) + self._cli = MetrikaApiClient(oauth_token, **client_kwargs) + if fields_namespace: + if not hasattr(self.metrica_fields_namespaces_enum, fields_namespace): + raise MetrikaApiException("Unknown fields namespace: %s" % fields_namespace) + self.fields_namespace = self.metrica_fields_namespaces_enum[fields_namespace] + self.accuracy = accuracy + + self._connected = True + + @property + def is_connected(self): + return self._connected + + @check_connected + def close(self): + del self._cli + self._cli = None + self._connected = False + + @check_connected + def commit(self): + pass + + @check_connected + def cursor(self): + return Cursor(api_client=self._cli, connection=self) + + @check_connected + def 
get_table_names(self): + avail_counters = self._cli.get_available_counters() + return list(str(c_info["id"]) for c_info in avail_counters) + + def get_columns(self): + field_props = ("name", "type", "is_dim") + return { + "fields": field_props, + "data": [ + tuple(f_desc[prop] for prop in field_props) for f_desc in fields_by_namespace[self.fields_namespace] + ], + } + + @check_connected + def get_avail_date_min(self, counter_id): + return self._cli.get_counter_creation_date(counter_id) + + +def connect(oauth_token=None, **kwargs): + oauth_token = oauth_token or kwargs.get("password") + fields_namespace = kwargs.get("database") + accuracy = kwargs.get("accuracy") + return Connection(oauth_token=oauth_token, fields_namespace=fields_namespace, accuracy=accuracy) # , **kwargs) + + +def check_cursor_connected(func): + @wraps(func) + def func_wrapper(self, *args, **kwargs): + if not self._connected: + raise CursorClosedException("Cursor object is closed") + elif not self.connection.is_connected: + raise ConnectionClosedException("Connection object is closed") + else: + return func(self, *args, **kwargs) + + return func_wrapper + + +class InternalCommands(Enum): + get_columns = "__GET_COLUMNS_COMMAND__" + get_tables = "__GET_TABLES_COMMAND__" + get_avail_date_min = "__GET_AVAILABLE_DATE_MIN__" + get_avail_date_max = "__GET_AVAILABLE_DATE_MAX__" + + +class Cursor(object): + description = None + rowcount = -1 + arraysize = 1 + _result_data = None + + def __init__(self, api_client: MetrikaApiClient, connection: Connection): + self._cli = api_client + self.connection = connection + self._connected = True + + @property + def is_connected(self): + return self._connected + + @check_cursor_connected + def close(self): + self._connected = False + + def _prepare_query_params(self, query, subst_params) -> dict: + query_params = parse_qs(query) + + if subst_params: + for k, v in query_params.items(): + if len(v) != 1: + raise ProgrammingError("Unexpected multiple parameter %s values 
%s" % (k, v)) + v = v[0] % subst_params + query_params[k] = v + + return query_params + + def _exec_get_columns(self, operation, parameters): + res = self.connection.get_columns() + self._result_data = res["data"] + self.rowcount = len(self._result_data) + self.description = [(f_name, VARCHAR, None, None, None, None, None) for f_name in res["fields"]] + + def _exec_get_tables(self, operation, parameters): + table_names = self.connection.get_table_names() + self._result_data = [(tn,) for tn in table_names] + self.rowcount = len(self._result_data) + self.description = [ + ("name", VARCHAR, None, None, None, None, None), + ] + + def _exec_get_avail_date_min(self, operation, parameters): + counter_id = parameters.get("_COUNTER_ID_") + date_min = self.connection.get_avail_date_min(counter_id) + self._result_data = [(date_min.isoformat(),)] + self.rowcount = 1 + result_columns = parameters.get("__RESULT_COLUMNS__", []) + if result_columns: + col_name = result_columns[0]["label"] or result_columns[0]["name"] + else: + col_name = "date_min" + self.description = [ + (col_name, DATE, None, None, None, None, None), + ] + + def _exec_get_avail_date_max(self, operation, parameters): + # TODO: use counter timezone + today = date.today() + self._result_data = [(today.isoformat(),)] + self.rowcount = 1 + result_columns = parameters.pop("__RESULT_COLUMNS__", []) + if result_columns: + col_name = result_columns[0]["label"] or result_columns[0]["name"] + else: + col_name = "date_max" + self.description = [ + (col_name, DATE, None, None, None, None, None), + ] + + def _meth_by_operation(self, operation): + meth = { + InternalCommands.get_columns.value: self._exec_get_columns, + InternalCommands.get_tables.value: self._exec_get_tables, + InternalCommands.get_avail_date_min.value: self._exec_get_avail_date_min, + InternalCommands.get_avail_date_max.value: self._exec_get_avail_date_max, + }.get(operation) + return meth + + @check_cursor_connected + def execute(self, operation, 
parameters=None): + self.rowcount = None + self._result_data = None + self.description = None + + meth = self._meth_by_operation(operation) or self._execute_select_data + return meth(operation, parameters) + + def _execute_select_data(self, operation, parameters): + casts = parameters.pop("__CASTS__", None) + result_columns = parameters.pop("__RESULT_COLUMNS__", None) + for col in result_columns: + col_name = col["name"] + if casts and col_name in casts: + col["cast_processor"] = cast_processors[casts[col_name]] + elif col_name in fields_by_name and fields_by_name[col_name]["type"] in cast_processors_for_metrika_types: + col["cast_processor"] = cast_processors_for_metrika_types[fields_by_name[col_name]["type"]] + + query_params = self._prepare_query_params(operation, parameters) + + if self.connection.accuracy is not None: + query_params.update(accuracy=self.connection.accuracy) + + result = self._cli.get_table_data(query_params, result_columns=result_columns) + + self._result_data = result["data"] + self.rowcount = len(self._result_data) + + self.description = [] + for col in result["fields"]: + if casts and col["name"] in casts: + col_type = cast_type_to_sqla_type[casts[col["name"]]] + elif col["name"] not in fields_by_name: + col_type = VARCHAR + else: + col_type = metrika_types_to_sqla.get(fields_by_name[col["name"]]["type"], NULLTYPE) + self.description.append((col["label"] or col["name"], col_type, None, None, None, None, None)) + + # def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]): ... 
+ + @check_cursor_connected + def fetchone(self): + try: + if self._result_data: + return self._result_data.pop(0) + else: + return None + except StopIteration: + return None + + @check_cursor_connected + def fetchmany(self, size=None): + if size is None or size > len(self._result_data): + size = len(self._result_data) + rows = self._result_data[:size] + self._result_data = self._result_data[size:] + return rows + + @check_cursor_connected + def fetchall(self): + rows = self._result_data + self._result_data = [] + return rows + + def __iter__(self): + raise NotImplementedError() + + def setinputsizes(self, *args, **kwargs): + pass + + def setoutputsize(self, *args, **kwargs): + pass diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/__init__.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/conftest.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/conftest.py new file mode 100644 index 000000000..9ff732f0d --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/conftest.py @@ -0,0 +1,324 @@ +from __future__ import annotations + +import datetime +import os + +import pytest +import sqlalchemy + +import dl_sqlalchemy_metrica_api + + +METRIKA_SAMPLE_COUNTER_ID = "44147844" +APPMETRICA_SAMPLE_COUNTER_ID = "1111" + + +def _get_oauth_from_env(): + return os.environ.get("METRIKA_OAUTH", None) + + +@pytest.fixture +def metrika_sample_counter_id(): + return METRIKA_SAMPLE_COUNTER_ID + + +@pytest.fixture +def appmetrica_sample_counter_id(): + return APPMETRICA_SAMPLE_COUNTER_ID + + +@pytest.fixture(scope="function") +def 
shrink_metrika_default_date_period(monkeypatch): + """ + To reduce load to Metrika API and tests run time. + """ + monkeypatch.setattr(dl_sqlalchemy_metrica_api.base, "DEFAULT_DATE_PERIOD", 3) + + +def _metrika_db_engine(): + return sqlalchemy.create_engine("metrika_api://:{}@/hits".format(_get_oauth_from_env())) + + +def _appmetrica_db_engine(): + return sqlalchemy.create_engine("appmetrica_api://:{}@/installs".format(_get_oauth_from_env())) + + +@pytest.fixture(scope="function") +def metrika_db_engine(shrink_metrika_default_date_period): + return _metrika_db_engine() + + +@pytest.fixture(scope="function") +def metrika_db_engine_with_accuracy(shrink_metrika_default_date_period): + return sqlalchemy.create_engine("metrika_api://:{}@/hits?accuracy=0.1".format(_get_oauth_from_env())) + + +@pytest.fixture(scope="function") +def appmetrica_db_engine(shrink_metrika_default_date_period): + return _appmetrica_db_engine() + + +@pytest.fixture(params=[_metrika_db_engine, _appmetrica_db_engine]) +def db_engine(request, shrink_metrika_default_date_period): + return request.param() + + +def _gen_m_expr1(eng): + expr = sqlalchemy.select( + columns=[ + sqlalchemy.type_coerce(sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.Float()), + sqlalchemy.column("ym:pv:users").label("users"), + sqlalchemy.column("ym:pv:date").label("date"), + # sqlalchemy.column('ym:pv:URLDomain'), + sqlalchemy.column("ym:pv:browserName").label("browser"), + ] + ) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where(sqlalchemy.column("date").between("2019-02-01", "2019-02-03")) + expr = expr.where(sqlalchemy.column("browser") == "Yandex Browser") + # expr = expr.group_by(sqlalchemy.column('ym:pv:screenResolution')) + # expr = expr.group_by(sqlalchemy.column('ym:pv:date')) + expr = expr.group_by(sqlalchemy.column("date")) + expr = expr.group_by(sqlalchemy.column("browser")) + expr = expr.limit(5) + # expr = expr.offset(50) + expr = 
expr.order_by(sqlalchemy.column("ym:pv:date")) + return expr + + +def _gen_m_expr_distinct_via_group_by(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:browserName").label("browser")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("browser")) + return expr + + +def _gen_m_expr_distinct(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:browserName")]).distinct() + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + return expr + + +def _gen_m_expr_date_min(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:date").label("date")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("date")) + expr = expr.limit(1) + # expr = expr.order_by('date') + expr = expr.order_by(sqlalchemy.column("date")) + return expr + + +def _gen_m_expr_date_max(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:date").label("date")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("date")) + expr = expr.limit(1) + expr = expr.order_by(sqlalchemy.desc(sqlalchemy.column("date"))) + return expr + + +def _gen_m_expr_order_by(eng): + expr = sqlalchemy.select( + columns=[ + sqlalchemy.column("ym:pv:date").label("date"), + sqlalchemy.column("ym:pv:users").label("users"), + ] + ) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("date")) + expr = expr.order_by(sqlalchemy.desc(sqlalchemy.literal_column("users"))) + return expr + + +def _gen_m_expr_cast(eng): + expr = sqlalchemy.select( + columns=[ + sqlalchemy.cast(sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.INTEGER), + 
sqlalchemy.cast(sqlalchemy.column("ym:pv:users"), sqlalchemy.String).label("users_str"), + sqlalchemy.type_coerce( + sqlalchemy.type_coerce(sqlalchemy.column("ym:pv:users"), sqlalchemy.Integer), sqlalchemy.Integer + ).label("users_int"), + ] + ) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + return expr + + +def _gen_m_expr_date_between(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.column("ym:pv:users")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where(sqlalchemy.column("ym:pv:date").between("2019-02-01", "2019-02-03")) + return expr + + +def _gen_m_expr_datetime_between_date(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.column("ym:pv:users")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where(sqlalchemy.column("ym:pv:dateTime").between("2019-02-01", "2019-02-03")) + return expr + + +def _gen_m_expr_datetime_between_datetime(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.column("ym:pv:users")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where( + sqlalchemy.column("ym:pv:dateTime").between( + datetime.datetime.fromisoformat("2019-02-01 00:00:00"), + datetime.datetime.fromisoformat("2019-02-03 23:59:59"), + ) + ) + return expr + + +def _gen_m_expr_datetime_between_datetime_str(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:pageviews"), sqlalchemy.column("ym:pv:users")]) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where(sqlalchemy.column("ym:pv:dateTime").between("2019-02-01 00:00:00", "2019-02-03 23:59:59")) + return expr + + +def __gen_select_users_browser(eng): + expr = 
sqlalchemy.select( + columns=[ + sqlalchemy.column("ym:pv:users").label("users"), + sqlalchemy.column("ym:pv:browserName").label("browser"), + ] + ) + expr = expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("browser")) + return expr + + +def _gen_m_expr_like(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.column("browser").like("Yandex*")) + return expr + + +def _gen_m_expr_not_like(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.column("browser").notlike("Yandex*")) + return expr + + +def _gen_m_expr_not_like_1(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.not_(sqlalchemy.column("browser").like("Yandex*"))) + return expr + + +def _gen_m_expr_contains(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.column("browser").contains("Yandex")) + return expr + + +def _gen_m_expr_not_contains(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.not_(sqlalchemy.column("browser").contains("Yandex"))) + return expr + + +def _gen_m_expr_startswith(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.column("browser").startswith("Yandex")) + return expr + + +def _gen_m_expr_not_endswith(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where(sqlalchemy.not_(sqlalchemy.column("browser").endswith("Chrome"))) + return expr + + +def _gen_m_expr_not(eng): + expr = __gen_select_users_browser(eng) + expr = expr.where( + sqlalchemy.not_( + sqlalchemy.or_( + sqlalchemy.column("browser").like("Yandex*"), + sqlalchemy.column("browser") == "Google Chrome", + ) + ) + ) + return expr + + +def _gen_m_expr_not_in(eng): + expr = sqlalchemy.select( + columns=[ + sqlalchemy.column("ym:pv:users").label("users"), + sqlalchemy.column("ym:pv:browserName").label("browser"), + ] + ) + expr = 
expr.select_from(sqlalchemy.Table(METRIKA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.group_by(sqlalchemy.column("browser")) + expr = expr.where(sqlalchemy.column("browser").notin_(["Yandex Browser", "Google Chrome"])) + return expr + + +def _gen_am_expr1(eng): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:u:sessions"), sqlalchemy.column("ym:u:activeUsers")]) + expr = expr.group_by(sqlalchemy.column("ym:u:dayOfWeekName")) + expr = expr.select_from(sqlalchemy.Table(APPMETRICA_SAMPLE_COUNTER_ID, sqlalchemy.MetaData(bind=eng))) + expr = expr.where(sqlalchemy.column("ym:u:date").between("2019-05-01", "2019-05-03")) + return expr + + +metrika_expr_functions_list = [ + _gen_m_expr1, + _gen_m_expr_date_max, + _gen_m_expr_date_min, + _gen_m_expr_distinct, + _gen_m_expr_distinct_via_group_by, + _gen_m_expr_cast, + _gen_m_expr_date_between, + _gen_m_expr_datetime_between_date, + _gen_m_expr_datetime_between_datetime, + _gen_m_expr_datetime_between_datetime_str, + _gen_m_expr_like, + _gen_m_expr_not_like, + _gen_m_expr_not_like_1, + _gen_m_expr_contains, + _gen_m_expr_not_contains, + _gen_m_expr_not, + _gen_m_expr_not_in, + _gen_m_expr_order_by, +] + +appmetrica_expr_functions_list = [ + _gen_am_expr1, +] + + +@pytest.fixture(params=metrika_expr_functions_list) +def metrika_expr_func(request): + return request.param + + +@pytest.fixture(params=appmetrica_expr_functions_list) +def appmetrica_expr_func(request): + return request.param + + +@pytest.fixture +def m_expr_distinct(metrika_sample_counter_id, metrika_db_engine): + expr = sqlalchemy.select(columns=[sqlalchemy.column("ym:pv:dateTime")]).distinct() + expr = expr.select_from(sqlalchemy.Table(metrika_sample_counter_id, sqlalchemy.MetaData(bind=metrika_db_engine))) + return expr + + +@pytest.fixture() +def metrika_expr_func_expr1(): + return _gen_m_expr1 + + +@pytest.fixture +def metrika_expr_select_date_users(metrika_sample_counter_id, metrika_db_engine): + expr = sqlalchemy.select( + 
columns=[sqlalchemy.column("ym:pv:date").label("date"), sqlalchemy.column("ym:pv:users")], + ) + expr = expr.select_from(sqlalchemy.Table(metrika_sample_counter_id, sqlalchemy.MetaData(bind=metrika_db_engine))) + expr = expr.group_by(sqlalchemy.column("date")) + return expr diff --git a/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/test_dialect.py b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/test_dialect.py new file mode 100644 index 000000000..e974f0e43 --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/dl_sqlalchemy_metrica_api_tests/ext/test_dialect.py @@ -0,0 +1,242 @@ +from __future__ import annotations + +import datetime +from urllib.parse import parse_qs + +import pytest +import sqlalchemy as sa +from sqlalchemy.dialects import registry +from sqlalchemy.exc import DatabaseError + +from dl_sqlalchemy_metrica_api import exceptions + + +registry.register("metrika_api", "dl_sqlalchemy_metrica_api.base", "MetrikaApiDialect") +registry.register("appmetrica_api", "dl_sqlalchemy_metrica_api.base", "AppMetricaApiDialect") + + +def _test_execute_expr(_expr_func, _db_engine): + expr = _expr_func(_db_engine) + + compiled = str(expr) + print(compiled) + res = _db_engine.execute(expr) + # print(res._cursor_description()) + data = res.fetchall() + for row in data: + print(row) + + +def test_execute_expr(metrika_expr_func, metrika_db_engine): + return _test_execute_expr(metrika_expr_func, metrika_db_engine) + + +def test_execute_appmetrica_expr(appmetrica_expr_func, appmetrica_db_engine): + return _test_execute_expr(appmetrica_expr_func, appmetrica_db_engine) + + +def test_get_columns(db_engine): + insp = sa.inspect(db_engine) + cols = insp.get_columns("123456") + print(cols) + assert isinstance(cols, list) + assert isinstance(cols[0], dict) + assert "name" in cols[0] and "type" in cols[0] + + +def test_distincts(m_expr_distinct, metrika_db_engine): + res = metrika_db_engine.execute(m_expr_distinct) + # 
print(res._cursor_description()) + data = res.fetchall() + values = [row[0] for row in data] + print(values) + print(set(values)) + print(sorted(set(values))) + assert len(data) == len(set(values)) + + +def test_accuracy(metrika_expr_func_expr1, metrika_db_engine_with_accuracy): + expr = metrika_expr_func_expr1(metrika_db_engine_with_accuracy) + res = metrika_db_engine_with_accuracy.execute(expr) + assert res.cursor.connection.accuracy == "0.1" + + +def test_exceptions(m_expr_distinct, metrika_db_engine): + engine_invalid_token = sa.create_engine("metrika_api://:qwerty@/hits") + with pytest.raises(DatabaseError) as exc_info: + engine_invalid_token.execute(m_expr_distinct) + assert isinstance(exc_info.value.orig, exceptions.MetrikaApiAccessDeniedException) + assert "Invalid oauth_token" in str(exc_info.value.orig) + + expr_invalid_counter_id = sa.select(columns=[sa.column("ym:pv:users")]) + expr_invalid_counter_id = expr_invalid_counter_id.select_from( + sa.Table("2147483647", sa.MetaData(bind=metrika_db_engine)) + ) + with pytest.raises(DatabaseError) as exc_info: + metrika_db_engine.execute(expr_invalid_counter_id) + assert isinstance(exc_info.value.orig, exceptions.MetrikaApiObjectNotFoundException) + assert "Entity not found" in str(exc_info.value.orig) + + +def test_future_date(metrika_expr_select_date_users, metrika_db_engine): + today = datetime.date.today() + expr = metrika_expr_select_date_users.where( + sa.column("ym:pv:date").between( + datetime.datetime.fromisoformat((today - datetime.timedelta(days=2)).isoformat()), + datetime.datetime.fromisoformat("2039-02-03"), # date from future + ) + ) + res = metrika_db_engine.execute(expr) + data = res.fetchall() + max_date = max([row["date"] for row in data]) + assert max_date == today + assert len(data) == 3 + + +def test_date_gt(metrika_expr_select_date_users, metrika_db_engine): + start_dt = (datetime.date.today() - datetime.timedelta(days=5)).isoformat() + expr = 
metrika_expr_select_date_users.where(sa.column("ym:pv:date") > start_dt) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + assert len(data) == 5 + + +def test_date_gte(metrika_expr_select_date_users, metrika_db_engine): + start_dt = (datetime.date.today() - datetime.timedelta(days=5)).isoformat() + expr = metrika_expr_select_date_users.where(sa.column("ym:pv:date") >= start_dt) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + assert len(data) == 6 + + +def test_date_gt_lt(metrika_expr_select_date_users, metrika_db_engine): + start_dt = (datetime.date.today() - datetime.timedelta(days=5)).isoformat() + end_dt = (datetime.date.today() - datetime.timedelta(days=2)).isoformat() + expr = metrika_expr_select_date_users.where(sa.column("ym:pv:date") > start_dt) + expr = expr.where(sa.column("ym:pv:date") < end_dt) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + print(data) + assert len(data) == 2 + + +def test_date_eq(metrika_expr_select_date_users, metrika_db_engine): + start_dt = (datetime.date.today() - datetime.timedelta(days=1)).isoformat() + expr = metrika_expr_select_date_users.where(sa.column("ym:pv:date") == start_dt) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + assert len(data) == 1 + + +def test_date_gt_lt_by_alias(metrika_expr_select_date_users, metrika_db_engine): + start_dt = (datetime.date.today() - datetime.timedelta(days=5)).isoformat() + end_dt = (datetime.date.today() - datetime.timedelta(days=2)).isoformat() + expr = metrika_expr_select_date_users.where(sa.column("date") > start_dt) + expr = expr.where(sa.column("date") < end_dt) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + print(data) + assert len(data) == 2 + + +def test_calculations(metrika_sample_counter_id, metrika_db_engine): + expr = sa.select( + columns=[ + sa.column("ym:pv:date").label("date"), + 
sa.column("ym:pv:users").label("users"), + sa.column("ym:pv:pageviews").label("pageviews"), + sa.column("ym:pv:pageviewsPerDay"), + (sa.column("ym:pv:pageviews") + sa.column("ym:pv:users")).label("pv_plus_u"), + sa.func.sum( + sa.column("ym:pv:pageviews"), + sa.column("ym:pv:users"), + ).label("sum_pv_u"), + (sa.column("ym:pv:pageviews") - sa.column("users")).label("pv_minus_u"), + (sa.column("ym:pv:pageviews") + 1).label("pv_plus_1"), + (sa.column("ym:pv:users") - 2).label("u_minus_2"), + (sa.column("ym:pv:pageviews") / sa.column("ym:pv:users")).label("pv_per_u"), + (sa.column("ym:pv:pageviews") * sa.column("ym:pv:users") - sa.column("ym:pv:pageviewsPerDay") + 14).label( + "some_strange_expr" + ), + ], + ) + expr = expr.select_from(sa.Table(metrika_sample_counter_id, sa.MetaData(bind=metrika_db_engine))) + expr = expr.group_by(sa.column("date")) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + print(data) + assert all([(row["pageviews"] + row["users"]) == row["pv_plus_u"] for row in data]) + assert all([(row["pageviews"] + row["users"]) == row["sum_pv_u"] for row in data]) + assert all([(row["pageviews"] - row["users"]) == row["pv_minus_u"] for row in data]) + assert all([row["pv_plus_1"] == row["pageviews"] + 1 for row in data]) + assert all([row["u_minus_2"] == row["users"] - 2 for row in data]) + assert all([round(row["pageviews"] / row["users"], 2) == round(row["pv_per_u"], 2) for row in data]) + assert all( + [ + round(row["pageviews"] * row["users"] - row["ym:pv:pageviewsPerDay"] + 14, 2) + == round(row["some_strange_expr"], 2) + for row in data + ] + ) + + +def test_counter_id(metrika_sample_counter_id, metrika_db_engine): + expr = sa.select( + columns=[ + sa.column("ym:pv:date").label("date"), + sa.column("ym:pv:users").label("users"), + sa.column("ym:pv:counterID").label("counter_id"), + sa.column("ym:pv:counterIDName").label("counter_name"), + ], + ) + expr = expr.select_from(sa.Table(metrika_sample_counter_id, 
sa.MetaData(bind=metrika_db_engine))) + expr = expr.where(sa.column("counter_id").in_([metrika_sample_counter_id])) + expr = expr.where(sa.column("counter_name").in_(["Metrica live demo"])) + expr = expr.group_by( + sa.column("date"), + sa.column("counter_id"), + sa.column("counter_name"), + ) + print(str(expr)) + + res = metrika_db_engine.execute(expr) + data = res.fetchall() + print(data) + assert all(row["counter_id"] == metrika_sample_counter_id for row in data) + + +def test_multicounter_req(metrika_sample_counter_id, metrika_db_engine): + expr = sa.select( + columns=[ + sa.column("ym:pv:date").label("date"), + sa.column("ym:pv:users").label("users"), + sa.column("ym:pv:counterID").label("counter_id"), + sa.column("ym:pv:counterIDName").label("counter_name"), + ], + ) + expr = expr.select_from( + sa.Table("50514217,51341415", sa.MetaData(bind=metrika_db_engine)), + ) + start_dt = (datetime.date.today() - datetime.timedelta(days=5)).isoformat() + end_dt = (datetime.date.today() - datetime.timedelta(days=2)).isoformat() + expr = expr.where(sa.column("date").between(start_dt, end_dt)) + expr = expr.group_by( + sa.column("date"), + sa.column("counter_id"), + sa.column("counter_name"), + ) + query = parse_qs(str(expr)) + print(query) + assert query["ids"] == ["50514217,51341415"] diff --git a/lib/dl_sqlalchemy_metrica_api/pyproject.toml b/lib/dl_sqlalchemy_metrica_api/pyproject.toml new file mode 100644 index 000000000..e05dc26fb --- /dev/null +++ b/lib/dl_sqlalchemy_metrica_api/pyproject.toml @@ -0,0 +1,37 @@ +[tool.poetry] +name = "datalens-sqlalchemy-metrica-api" +version = "0.0.1" +description = "Yandex Metrica API dialect for SQLAlchemy" +authors = ["DataLens Team "] +packages = [{include = "dl_sqlalchemy_metrica_api"}] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.10, <3.12" +python-dateutil = ">=2.8.2" +requests = ">=2.28.2" +sqlalchemy = ">=1.4.46, <2.0" + 
+[tool.poetry.plugins."sqlalchemy.dialects"] +appmetrica_api = "dl_sqlalchemy_metrica_api.base:AppMetricaApiDialect" +metrika_api = "dl_sqlalchemy_metrica_api.base:MetrikaApiDialect" +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens.pytest.ext] +root_dir = "dl_sqlalchemy_metrica_api_tests/" +target_path = "ext" +labels = ["ext_public"] + +[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true diff --git a/lib/dl_testing/dl_testing/s3_utils.py b/lib/dl_testing/dl_testing/s3_utils.py index 13e55ccdf..fa8d7442e 100644 --- a/lib/dl_testing/dl_testing/s3_utils.py +++ b/lib/dl_testing/dl_testing/s3_utils.py @@ -2,18 +2,28 @@ import asyncio import logging +from typing import ( + TYPE_CHECKING, + Callable, +) import aiobotocore.client import aiobotocore.session +import boto3 import botocore.client import botocore.exceptions import botocore.session +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client as SyncS3Client + from types_aiobotocore_s3 import S3Client as AsyncS3Client + + LOGGER = logging.getLogger(__name__) -def create_s3_client(s3_settings) -> aiobotocore.client.AioBaseClient: +def create_s3_client(s3_settings) -> AsyncS3Client: # type: ignore # TODO: fix session = aiobotocore.session.get_session() return session.create_client( service_name="s3", @@ -23,9 +33,9 @@ def create_s3_client(s3_settings) -> aiobotocore.client.AioBaseClient: ) -def create_sync_s3_client(s3_settings) -> botocore.client.BaseClient: - session = botocore.session.get_session() - return session.create_client( +def create_sync_s3_client(s3_settings) -> SyncS3Client: # type: ignore # TODO: fix + session = boto3.Session() + return session.client( service_name="s3", aws_access_key_id=s3_settings.ACCESS_KEY_ID, aws_secret_access_key=s3_settings.SECRET_ACCESS_KEY, @@ -34,7 +44,7 @@ def 
create_sync_s3_client(s3_settings) -> botocore.client.BaseClient: async def create_s3_bucket( - s3_client: aiobotocore.client.AioBaseClient, + s3_client: AsyncS3Client, bucket_name: str, max_attempts: int = 10, ) -> None: @@ -60,7 +70,7 @@ async def create_s3_bucket( attempt += 1 -async def get_lc_rules_number(s3_client: aiobotocore.client.AioBaseClient, bucket: str) -> int: +async def get_lc_rules_number(s3_client: AsyncS3Client, bucket: str) -> int: try: lc_config = await s3_client.get_bucket_lifecycle_configuration(Bucket=bucket) except botocore.exceptions.ClientError as ex: @@ -71,7 +81,7 @@ async def get_lc_rules_number(s3_client: aiobotocore.client.AioBaseClient, bucke return len(lc_config["Rules"]) -async def s3_file_exists(s3_client: aiobotocore.client.AioBaseClient, bucket: str, key: str) -> bool: +async def s3_file_exists(s3_client: AsyncS3Client, bucket: str, key: str) -> bool: try: s3_resp = await s3_client.head_object( Bucket=bucket, @@ -92,8 +102,8 @@ async def s3_file_exists(s3_client: aiobotocore.client.AioBaseClient, bucket: st '{schema_line}')""" -def s3_tbl_func_maker(s3_settings): - def table_function( +def s3_tbl_func_maker(s3_settings) -> Callable[..., str]: # type: ignore # TODO: fix + def table_function( # type: ignore # TODO: fix for_: str, conn_dto, filename: str, diff --git a/lib/dl_testing/pyproject.toml b/lib/dl_testing/pyproject.toml index 87b997306..9fd829b56 100644 --- a/lib/dl_testing/pyproject.toml +++ b/lib/dl_testing/pyproject.toml @@ -43,3 +43,7 @@ warn_unused_configs = true disallow_untyped_defs = true check_untyped_defs = true strict_optional = true + +[[tool.mypy.overrides]] +module = ["types_aiobotocore_s3.*", "mypy_boto3_s3.*"] +ignore_missing_imports = true diff --git a/lib/dl_utils/dl_utils/task_runner.py b/lib/dl_utils/dl_utils/task_runner.py index 58cbd8d4a..d159aea82 100644 --- a/lib/dl_utils/dl_utils/task_runner.py +++ b/lib/dl_utils/dl_utils/task_runner.py @@ -6,7 +6,6 @@ Any, Awaitable, Iterable, - List, ) @@ -33,7 
+32,7 @@ async def finalize(self) -> None: class ConcurrentTaskRunner(TaskRunner): def __init__(self, concurrency_limit: int = 5) -> None: - self._tasks: List[Awaitable] = [] + self._tasks: list[Awaitable] = [] self._sem = asyncio.Semaphore(concurrency_limit) async def _semaphore_wrapper(self, awaitable: Awaitable) -> Any: diff --git a/lib/dl_version/dl_version/__init__.py b/lib/dl_version/dl_version/__init__.py index b29b6f7d7..5becc17c0 100644 --- a/lib/dl_version/dl_version/__init__.py +++ b/lib/dl_version/dl_version/__init__.py @@ -1 +1 @@ -__version__ = "0.2039.0" +__version__ = "1.0.0" diff --git a/lib/dl_version/pyproject.toml b/lib/dl_version/pyproject.toml index 80b582abb..e53f255f1 100644 --- a/lib/dl_version/pyproject.toml +++ b/lib/dl_version/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "datalens-version" -version = "0.2039.0" +version = "1.0.0" description = "" authors = ["DataLens Team "] packages = [{include = "dl_version"}] diff --git a/terrarium/dl_repmanager/.gitignore b/terrarium/dl_repmanager/.gitignore index d843da9f7..935808ef0 100644 --- a/terrarium/dl_repmanager/.gitignore +++ b/terrarium/dl_repmanager/.gitignore @@ -1 +1,2 @@ dl_repmanager/debug_launcher.py +testrepo_copy diff --git a/terrarium/dl_repmanager/dl_repmanager/fs_editor.py b/terrarium/dl_repmanager/dl_repmanager/fs_editor.py index 15a3c0849..5f2700f60 100644 --- a/terrarium/dl_repmanager/dl_repmanager/fs_editor.py +++ b/terrarium/dl_repmanager/dl_repmanager/fs_editor.py @@ -79,15 +79,15 @@ def replace_regex_in_file(self, file_path: Path, regex: re.Pattern, repl: Callab def _iter_files( self, path: Path, - mask_blacklist: Collection[re.Pattern] = (), + exclude_masks: Collection[re.Pattern] = (), ) -> Generator[Path, None, None]: - for file_path in path.rglob("*/"): + for file_path in path.rglob("*"): if file_path.is_file(): - matches_blacklist = False - for mask in mask_blacklist: + matches_exclude_mask = False + for mask in exclude_masks: if mask.match(str(file_path)): - 
matches_blacklist = True - if matches_blacklist: + matches_exclude_mask = True + if matches_exclude_mask: continue yield file_path @@ -97,9 +97,9 @@ def _replace_text_in_dir( old_text: str, new_text: str, path: Path, - mask_blacklist: Collection[re.Pattern] = (), + exclude_masks: Collection[re.Pattern] = (), ) -> None: - for file_path in self._iter_files(path=path, mask_blacklist=mask_blacklist): + for file_path in self._iter_files(path=path, exclude_masks=exclude_masks): self.replace_text_in_file(file_path, old_text=old_text, new_text=new_text) @final @@ -108,19 +108,19 @@ def replace_text_in_dir( old_text: str, new_text: str, path: Path, - mask_blacklist: Collection[re.Pattern] = (), + exclude_masks: Collection[re.Pattern] = (), ) -> None: self._validate_paths(path) - self._replace_text_in_dir(old_text=old_text, new_text=new_text, path=path, mask_blacklist=mask_blacklist) + self._replace_text_in_dir(old_text=old_text, new_text=new_text, path=path, exclude_masks=exclude_masks) def _replace_regex_in_dir( self, path: Path, regex: re.Pattern, repl: Callable[[re.Match], str], - mask_blacklist: Collection[re.Pattern] = (), + exclude_masks: Collection[re.Pattern] = (), ) -> None: - for file_path in self._iter_files(path=path, mask_blacklist=mask_blacklist): + for file_path in self._iter_files(path=path, exclude_masks=exclude_masks): self.replace_regex_in_file(file_path, regex=regex, repl=repl) @final @@ -129,10 +129,10 @@ def replace_regex_in_dir( path: Path, regex: re.Pattern, repl: Callable[[re.Match], str], - mask_blacklist: Collection[re.Pattern] = (), + exclude_masks: Collection[re.Pattern] = (), ) -> None: self._validate_paths(path) - self._replace_regex_in_dir(path=path, regex=regex, repl=repl, mask_blacklist=mask_blacklist) + self._replace_regex_in_dir(path=path, regex=regex, repl=repl, exclude_masks=exclude_masks) @abc.abstractmethod def _copy_path(self, src_path: Path, dst_path: Path) -> None: diff --git a/terrarium/dl_repmanager/dl_repmanager/primitives.py 
b/terrarium/dl_repmanager/dl_repmanager/primitives.py index 2782b49fd..086b0451a 100644 --- a/terrarium/dl_repmanager/dl_repmanager/primitives.py +++ b/terrarium/dl_repmanager/dl_repmanager/primitives.py @@ -135,8 +135,8 @@ def single_module_name(self) -> str: @property def single_test_dir(self) -> str: - assert len(self.module_names) == 1 - return self.module_names[0] + assert len(self.test_dirs) == 1 + return self.test_dirs[0] @property def reg_entity_ref(self) -> EntityReference: diff --git a/terrarium/dl_repmanager/dl_repmanager/repository_env.py b/terrarium/dl_repmanager/dl_repmanager/repository_env.py index 67b9c585a..2c0306dca 100644 --- a/terrarium/dl_repmanager/dl_repmanager/repository_env.py +++ b/terrarium/dl_repmanager/dl_repmanager/repository_env.py @@ -8,14 +8,15 @@ include: - core_repo/dl-repo.yml + default_boilerplate_path: lib/dl_package_boilerplate + package_types: - type: lib root_path: lib - boilerplate_path: lib/dl_package_boilerplate - type: app root_path: app - boilerplate_path: lib/dl_package_boilerplate + boilerplate_path: app/app_package_boilerplate tags: - own_dependency_group @@ -30,6 +31,9 @@ plugins: - type: dependency_registration + edit_exclude_masks: + - ".*\\.mo" + Description of the sections: - include: section tells the loader to include another repo config file @@ -46,6 +50,7 @@ from __future__ import annotations from pathlib import Path +import re from typing import ( TYPE_CHECKING, Iterable, @@ -99,6 +104,7 @@ class RepoEnvironment: custom_package_map: dict[str, str] = attr.ib(kw_only=True, factory=dict) fs_editor: FilesystemEditor = attr.ib(kw_only=True) plugin_configs: list[PluginConfig] = attr.ib(kw_only=True, factory=list) + edit_exclude_masks: frozenset[re.Pattern] = attr.ib(kw_only=True, default=frozenset()) def iter_package_abs_dirs(self) -> Iterable[tuple[str, Path]]: return sorted( @@ -136,6 +142,9 @@ def get_fs_editor(self) -> FilesystemEditor: def get_metapackage_spec(self, metapackage_name: str) ->
MetaPackageSpec: return self.metapackages[metapackage_name] + def get_edit_exclude_masks(self) -> frozenset[re.Pattern]: + return self.edit_exclude_masks + _DEFAULT_FS_EDITOR_TYPE = "default" @@ -148,6 +164,7 @@ class ConfigContents: custom_package_map: dict[str, str] = attr.ib(kw_only=True, factory=dict) fs_editor_type: Optional[str] = attr.ib(kw_only=True, default=_DEFAULT_FS_EDITOR_TYPE) plugin_configs: list[PluginConfig] = attr.ib(kw_only=True, factory=list) + edit_exclude_masks: frozenset[re.Pattern] = attr.ib(kw_only=True, default=frozenset()) def discover_config(base_path: Path, config_file_name: str) -> Path: @@ -171,13 +188,19 @@ def _load_params_from_yaml_file(self, config_path: Path) -> ConfigContents: base_path = config_path.parent env_settings = config_data.get("dl_repo", {}) + + default_boilerplate_path_str: Optional[str] = env_settings.get("default_boilerplate_path") + package_types: dict[str, PackageTypeConfig] = {} for package_type_data in env_settings.get("package_types", ()): package_type = package_type_data["type"] + boilerplate_path_str = package_type_data.get("boilerplate_path", default_boilerplate_path_str) + if boilerplate_path_str is None: + raise ValueError("Boilerplate must be specified in package type or default") pkg_type_config = PackageTypeConfig( home_repo_path=base_path, path=base_path / package_type_data["root_path"], - boilerplate_path=base_path / package_type_data["boilerplate_path"], + boilerplate_path=base_path / boilerplate_path_str, tags=frozenset(package_type_data.get("tags", ())), ) package_types[package_type] = pkg_type_config @@ -200,6 +223,10 @@ def _load_params_from_yaml_file(self, config_path: Path) -> ConfigContents: ) plugin_configs.append(plugin_config) + edit_exclude_masks: set[re.Pattern] = set() + for edit_exclude_masks_item_str in env_settings.get("edit_exclude_masks", ()): +
edit_exclude_masks.add(re.compile(edit_exclude_masks_item_str)) + custom_package_map: dict[str, str] = dict(env_settings.get("custom_package_map", {})) fs_editor_type: Optional[str] = env_settings.get("fs_editor") @@ -209,6 +236,7 @@ def _load_params_from_yaml_file(self, config_path: Path) -> ConfigContents: package_types = dict(included_config_contents.package_types, **package_types) metapackages = dict(included_config_contents.metapackages, **metapackages) custom_package_map = dict(included_config_contents.custom_package_map, **custom_package_map) + edit_exclude_masks |= included_config_contents.edit_exclude_masks fs_editor_type = fs_editor_type or included_config_contents.fs_editor_type # FS editor and plugins are loaded only from the top-level config @@ -219,6 +247,7 @@ def _load_params_from_yaml_file(self, config_path: Path) -> ConfigContents: custom_package_map=custom_package_map, fs_editor_type=fs_editor_type, plugin_configs=plugin_configs, + edit_exclude_masks=frozenset(edit_exclude_masks), ) def _load_from_yaml_file(self, config_path: Path) -> RepoEnvironment: @@ -236,6 +265,7 @@ def _load_from_yaml_file(self, config_path: Path) -> RepoEnvironment: metapackages=config_contents.metapackages, custom_package_map=config_contents.custom_package_map, plugin_configs=config_contents.plugin_configs, + edit_exclude_masks=config_contents.edit_exclude_masks, fs_editor=get_fs_editor( fs_editor_type=fs_editor_type, base_path=base_path, diff --git a/terrarium/dl_repmanager/dl_repmanager/repository_manager.py b/terrarium/dl_repmanager/dl_repmanager/repository_manager.py index abea0109d..75759c28a 100644 --- a/terrarium/dl_repmanager/dl_repmanager/repository_manager.py +++ b/terrarium/dl_repmanager/dl_repmanager/repository_manager.py @@ -316,11 +316,10 @@ def _rename_package_internals(self, old_package_info: PackageInfo, new_package_i new_tests_path = new_pkg_dir / package_test_dir self.fs_editor.move_path(old_path=old_tests_path, new_path=new_tests_path) + # Masks for 
files that should not be edited + edit_exclude_masks = self.repository_env.get_edit_exclude_masks() + # Replace all package name occurrences with the given name - mask_blacklist = ( - re.compile(r".*\.mo"), - re.compile(r".*\.xlsx"), - ) all_zipped_modules = itertools.chain( zip(old_package_info.module_names, new_package_info.module_names), zip(old_package_info.test_dirs, new_package_info.test_dirs), @@ -338,7 +337,7 @@ def _rename_package_internals(self, old_package_info: PackageInfo, new_package_i regex=regex, repl=repl, path=new_pkg_dir, - mask_blacklist=mask_blacklist, + exclude_masks=edit_exclude_masks, ) # add `-` to the regex @@ -351,7 +350,7 @@ def _rename_package_internals(self, old_package_info: PackageInfo, new_package_i regex=regex, repl=repl, path=new_pkg_dir, - mask_blacklist=mask_blacklist, + exclude_masks=edit_exclude_masks, ) def change_package_type(self, package_module_name: str, new_package_type: str) -> PackageInfo: diff --git a/terrarium/dl_repmanager/dl_repmanager/scripts/cli_base.py b/terrarium/dl_repmanager/dl_repmanager/scripts/cli_base.py new file mode 100644 index 000000000..f43c852fb --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager/scripts/cli_base.py @@ -0,0 +1,23 @@ +import abc +import argparse +from typing import Sequence + +import attr + + +@attr.s +class CliToolBase(abc.ABC): + @classmethod + @abc.abstractmethod + def get_parser(cls) -> argparse.ArgumentParser: + raise NotImplementedError + + @classmethod + @abc.abstractmethod + def run_parsed_args(cls, args: argparse.Namespace) -> None: + raise NotImplementedError + + @classmethod + def run(cls, argv: Sequence[str]) -> None: + parser = cls.get_parser() + cls.run_parsed_args(parser.parse_args(argv)) diff --git a/terrarium/dl_repmanager/dl_repmanager/scripts/package_meta_cli.py b/terrarium/dl_repmanager/dl_repmanager/scripts/package_meta_cli.py index fb1d62354..875ffbf5b 100644 --- a/terrarium/dl_repmanager/dl_repmanager/scripts/package_meta_cli.py +++ 
b/terrarium/dl_repmanager/dl_repmanager/scripts/package_meta_cli.py @@ -30,6 +30,7 @@ DEFAULT_CONFIG_FILE_NAME, discover_config, ) +from dl_repmanager.scripts.cli_base import CliToolBase log = logging.getLogger(__name__) @@ -80,14 +81,15 @@ def add_package_commands(package_subparsers: argparse._SubParsersAction) -> None @attr.s -class DlPackageMetaTool: +class DlPackageMetaTool(CliToolBase): fs_editor: FilesystemEditor = attr.ib(kw_only=True) package_path: Path = attr.ib(kw_only=True) meta_reader: PackageMetaReader = attr.ib(kw_only=True) meta_writer: PackageMetaWriter = attr.ib(kw_only=True) - def validate_env(cls) -> None: - """Validate that the tool is being run correctly""" + @classmethod + def get_parser(cls) -> argparse.ArgumentParser: + return make_parser() def list_i18n_domains(self) -> None: for domain_spec in sorted(self.meta_reader.get_i18n_domains(), key=lambda spec: spec.domain_name): @@ -122,7 +124,7 @@ def set_meta_array(self, toml_section: str, toml_key: str, toml_value: str) -> N self.meta_writer.toml_writer.set_array_value(section_name=toml_section, key=toml_key, value=arr_value) @classmethod - def run(cls, args: argparse.Namespace) -> None: + def run_parsed_args(cls, args: argparse.Namespace) -> None: package_path = cast(Path, args.package_path) fs_editor = DefaultFilesystemEditor(base_path=package_path) cls.run_for_package_path( @@ -171,9 +173,8 @@ def run_for_package_path( def main() -> None: setup_basic_logging() - parser = make_parser() try: - DlPackageMetaTool.run(parser.parse_args()) + DlPackageMetaTool.run(sys.argv[1:]) except InconsistentStateError: log.exception("Project inconsistent state discovered during cli command run.") sys.exit(1) diff --git a/terrarium/dl_repmanager/dl_repmanager/scripts/repmanager_cli.py b/terrarium/dl_repmanager/dl_repmanager/scripts/repmanager_cli.py index 155e0a957..76d4fccc4 100644 --- a/terrarium/dl_repmanager/dl_repmanager/scripts/repmanager_cli.py +++ 
b/terrarium/dl_repmanager/dl_repmanager/scripts/repmanager_cli.py @@ -5,6 +5,7 @@ import os from pathlib import Path from pprint import pprint +import sys from typing import Optional import attr @@ -32,6 +33,7 @@ RepositoryManager, ) from dl_repmanager.repository_navigator import RepositoryNavigator +from dl_repmanager.scripts.cli_base import CliToolBase from dl_repmanager.scripts.package_meta_cli import ( DlPackageMetaTool, add_package_commands, @@ -60,9 +62,13 @@ def _entity_ref_list_type(value: str | list[EntityReference]) -> list[EntityRefe return value +_CWD = Path.cwd() + + def make_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser(prog="DL Repository Management CLI") parser.add_argument("--config", help="Specify configuration file", default=DEFAULT_CONFIG_FILE_NAME) + parser.add_argument("--base-path", type=Path, help="Base repository path", default=_CWD) parser.add_argument("--fs-editor", help="Override the FS editor type") parser.add_argument("--dry-run", action="store_true", help="Force usage of virtual FS editor") @@ -234,11 +240,8 @@ def make_parser() -> argparse.ArgumentParser: return parser -_BASE_DIR = Path.cwd() - - @attr.s -class DlRepManagerTool: +class DlRepManagerTool(CliToolBase): repository_env: RepoEnvironment = attr.ib(kw_only=True) package_index: PackageIndex = attr.ib(kw_only=True) repository_manager: RepositoryManager = attr.ib(kw_only=True) @@ -246,8 +249,8 @@ class DlRepManagerTool: py_prj_editor: PyPrjEditor = attr.ib(kw_only=True) @classmethod - def validate_env(cls) -> None: - """Validate that the tool is being run correctly""" + def get_parser(cls) -> argparse.ArgumentParser: + return make_parser() def init(self, package_name: str, package_type: str) -> None: self.repository_manager.init_package(package_module_name=package_name, package_type=package_type) @@ -412,14 +415,11 @@ def recurse_packages( ) @classmethod - def run(cls, args: argparse.Namespace) -> None: - config_file_name = args.config - - fs_editor_type 
= args.fs_editor if not args.dry_run else "virtual" + def initialize(cls, base_path: Path, config_file_name: str, fs_editor_type: str) -> DlRepManagerTool: repository_env = RepoEnvironmentLoader( config_file_name=config_file_name, override_fs_editor_type=fs_editor_type, - ).load_env(base_path=_BASE_DIR) + ).load_env(base_path=base_path) index_builder = PackageIndexBuilder(repository_env=repository_env) package_index = index_builder.build_index() @@ -441,8 +441,14 @@ def run(cls, args: argparse.Namespace) -> None: package_index=package_index, ), ) + return tool - tool.validate_env() + @classmethod + def run_parsed_args(cls, args: argparse.Namespace) -> None: + config_file_name = args.config + base_path = args.base_path + fs_editor_type = args.fs_editor if not args.dry_run else "virtual" + tool = cls.initialize(base_path=base_path, config_file_name=config_file_name, fs_editor_type=fs_editor_type) match args.command: case "init": @@ -486,8 +492,7 @@ def run(cls, args: argparse.Namespace) -> None: def main() -> None: setup_basic_logging() - parser = make_parser() - DlRepManagerTool.run(parser.parse_args()) + DlRepManagerTool.run(sys.argv[1:]) if __name__ == "__main__": diff --git a/terrarium/dl_repmanager/dl_repmanager/toml_tools.py b/terrarium/dl_repmanager/dl_repmanager/toml_tools.py index c4386c3d9..e8f3d317e 100644 --- a/terrarium/dl_repmanager/dl_repmanager/toml_tools.py +++ b/terrarium/dl_repmanager/dl_repmanager/toml_tools.py @@ -107,8 +107,10 @@ def set_text_value(self, section_name: str, key: str, value: str) -> None: section = self.get_section(section_name) section[key] = value - def set_array_value(self, section_name: str, key: str, value: Sequence[str]) -> None: + def set_array_value(self, section_name: str, key: str, value: Sequence[str | dict]) -> None: section = self.get_section(section_name) + if not all(isinstance(item, (str, dict)) for item in value): + raise TypeError(f"Invalid item type in value {value}") array = tomlkit.array() array.extend(value) 
section[key] = array diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/dl-repo.yml b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/dl-repo.yml new file mode 100644 index 000000000..0efad77d5 --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/dl-repo.yml @@ -0,0 +1,18 @@ +dl_repo: + fs_editor: default + + default_boilerplate_path: lib/testing_pkg_boilerplate + + package_types: + - type: lib + root_path: lib + tags: + - main_dependency_group + + custom_package_map: {} + metapackages: [] + plugins: + - type: dependency_registration + + edit_exclude_masks: + - ".*/__pycache__" diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/LICENSE b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/LICENSE new file mode 100644 index 000000000..74ba5f6c7 --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2023 YANDEX LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/README.md b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/README.md new file mode 100644 index 000000000..e751febd7 --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/README.md @@ -0,0 +1,3 @@ +# testing_pkg_boilerplate + +A dummy package for use in tests \ No newline at end of file diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/pyproject.toml b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/pyproject.toml new file mode 100644 index 000000000..02116d0c8 --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/pyproject.toml @@ -0,0 +1,37 @@ + +[tool.poetry] +name = "testing-pkg-boilerplate" +version = "0.0.1" +description = "" +authors = ["DataLens Team "] +packages = [{include = "testing_pkg_boilerplate"}] +license = "Apache 2.0" +readme = "README.md" + + +[tool.poetry.dependencies] +attrs = ">=22.2.0" +python = ">=3.10, <3.12" + +[tool.poetry.group.tests.dependencies] +pytest = ">=7.2.2" + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core", +] + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = [] + +[datalens_ci] +skip_test = true + 
+[tool.mypy] +warn_unused_configs = true +disallow_untyped_defs = true +check_untyped_defs = true +strict_optional = true diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate/__init__.py b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate/py.typed b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/__init__.py b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/unit/__init__.py b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/unit/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/unit/conftest.py b/terrarium/dl_repmanager/dl_repmanager_tests/testrepo/lib/testing_pkg_boilerplate/testing_pkg_boilerplate_tests/unit/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/base.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/base.py new file mode 100644 index 000000000..898c3c21c --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/unit/base.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from contextlib import 
contextmanager +from pathlib import Path +import shutil +import tempfile +from typing import ( + Generator, + Optional, +) +import uuid + +import attr +import pytest + +from dl_repmanager.fs_editor import FilesystemEditor +from dl_repmanager.package_index import ( + PackageIndex, + PackageIndexBuilder, +) +from dl_repmanager.package_reference import PackageReference +from dl_repmanager.primitives import PackageInfo +from dl_repmanager.repository_env import ( + RepoEnvironment, + RepoEnvironmentLoader, +) +from dl_repmanager.repository_manager import ( + PackageGenerator, + RepositoryManager, +) +from dl_repmanager.repository_navigator import RepositoryNavigator +from dl_repmanager.toml_tools import ( + TOMLIOFactory, + TOMLWriter, +) +from dl_repmanager_tests.unit.config import ( + DEFAULT_PACKAGE_TYPE, + REPO_PATH, +) +from dl_repmanager_tests.unit.tool_runner import ( + PackageCliRunner, + RepoCliRunner, +) + + +@attr.s +class Repo: + """A container/initializer for a bunch of objects""" + + base_path: Path = attr.ib(kw_only=True) + repository_env: RepoEnvironment = attr.ib(init=False) + fs_editor: FilesystemEditor = attr.ib(init=False) + package_index: PackageIndex = attr.ib(init=False) + repository_navigator: RepositoryNavigator = attr.ib(init=False) + package_generator: PackageGenerator = attr.ib(init=False) + package_reference: PackageReference = attr.ib(init=False) + repository_manager: RepositoryManager = attr.ib(init=False) + + @repository_env.default + def _make_repository_env(self) -> RepoEnvironment: + return RepoEnvironmentLoader().load_env(base_path=self.base_path) + + @fs_editor.default + def _make_fs_editor(self) -> FilesystemEditor: + return self.repository_env.get_fs_editor() + + @package_index.default + def _make_package_index(self) -> PackageIndex: + return PackageIndexBuilder(repository_env=self.repository_env).build_index() + + @repository_navigator.default + def _make_repository_navigator(self) -> RepositoryNavigator: + return 
RepositoryNavigator(repository_env=self.repository_env, package_index=self.package_index) + + @repository_manager.default + def _make_repository_manager(self) -> RepositoryManager: + return RepositoryManager( + repository_env=self.repository_env, + package_index=self.package_index, + repository_navigator=self.repository_navigator, + package_generator=PackageGenerator(repository_env=self.repository_env, package_index=self.package_index), + package_reference=PackageReference(repository_env=self.repository_env, package_index=self.package_index), + ) + + def reload(self) -> Repo: + return self.__class__(base_path=self.base_path) + + +@contextmanager +def deployed_repo(repo_src: Path) -> Generator[Path, None, None]: + repo_dst = Path(tempfile.mkdtemp()) / repo_src.name + shutil.copytree(repo_src, repo_dst) + try: + yield repo_dst + finally: + shutil.rmtree(repo_dst) + + +class RepmanagerTestingBase: + @pytest.fixture(scope="session") + def repo_path(self) -> Generator[Path, None, None]: + with deployed_repo(REPO_PATH) as repo_path: + yield repo_path + + @pytest.fixture(scope="class") + def repo(self, repo_path: Path) -> Repo: + return Repo(base_path=repo_path) + + def generate_package_name(self) -> str: + return f"pkg_{uuid.uuid4().hex[:6]}" + + def init_package( + self, + repo: Repo, + package_type: str = DEFAULT_PACKAGE_TYPE, + package_name: Optional[str] = None, + ) -> PackageInfo: + package_name = package_name or self.generate_package_name() + assert package_name is not None + repo.repository_manager.init_package(package_type=package_type, package_module_name=package_name) + repo = repo.reload() + return repo.package_index.get_package_info_from_module_name(package_name) + + @pytest.fixture(scope="class") + def package(self, repo: Repo) -> PackageInfo: + return self.init_package(repo=repo) + + @contextmanager + def toml_writer(self, repo: Repo, toml_path: Path) -> Generator[TOMLWriter, None, None]: + toml_io_factory = TOMLIOFactory(fs_editor=repo.fs_editor) + with 
toml_io_factory.toml_writer(toml_path) as toml_writer: + yield toml_writer + + +class RepmanagerCliTestBase(RepmanagerTestingBase): + def get_repo_cli(self, repo_path: Path) -> RepoCliRunner: + return RepoCliRunner(repo_path=repo_path) + + def get_package_cli(self, package_path: Path) -> PackageCliRunner: + return PackageCliRunner(package_path=package_path) + + @pytest.fixture(scope="class") + def repo_cli(self, repo_path: Path) -> RepoCliRunner: + return self.get_repo_cli(repo_path=repo_path) diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/__init__.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_package_cli.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_package_cli.py new file mode 100644 index 000000000..e9e4d410e --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_package_cli.py @@ -0,0 +1,19 @@ +import pytest + +from dl_repmanager_tests.unit.base import RepmanagerCliTestBase +from dl_repmanager_tests.unit.tool_runner import PackageCliRunner + + +class TestPackageCli(RepmanagerCliTestBase): + @pytest.fixture(scope="class") + def package_cli(self, package) -> PackageCliRunner: + return self.get_package_cli(package_path=package.abs_path) + + def test_list_i18n_domains(self, repo, package, package_cli) -> None: + with self.toml_writer(repo=repo, toml_path=package.toml_path) as toml_writer: + section_name = "datalens.i18n.domains" + toml_writer.add_section(section_name) + toml_writer.set_array_value(section_name=section_name, key="first_domain", value=[{"path": "first_path"}]) + + cli_result = package_cli.run_with_args(["list-i18n-domains"]) + assert "first_domain=first_path" in cli_result.stdout diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_repo_cli.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_repo_cli.py new file mode 100644 
index 000000000..82144c00b --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/unit/cli/test_repo_cli.py @@ -0,0 +1,13 @@ +from dl_repmanager_tests.unit.base import RepmanagerCliTestBase + + +class TestRepoCli(RepmanagerCliTestBase): + def test_init(self, repo, repo_cli) -> None: + package_name = self.generate_package_name() + repo_cli.run_with_args(["init", "--package-type", "lib", "--package-name", package_name]) + repo = repo.reload() + package_info = repo.package_index.get_package_info_from_module_name(package_name) + assert package_info.package_type == "lib" + assert package_info.single_module_name == package_name + assert package_info.single_test_dir == f"{package_name}_tests" + assert package_info.package_reg_name == package_name.replace("_", "-") diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/config.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/config.py new file mode 100644 index 000000000..9d3e7f2f0 --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/unit/config.py @@ -0,0 +1,5 @@ +from pathlib import Path + + +REPO_PATH = Path(__file__).parent.parent / "testrepo" +DEFAULT_PACKAGE_TYPE = "lib" diff --git a/terrarium/dl_repmanager/dl_repmanager_tests/unit/tool_runner.py b/terrarium/dl_repmanager/dl_repmanager_tests/unit/tool_runner.py new file mode 100644 index 000000000..c2e0bd63a --- /dev/null +++ b/terrarium/dl_repmanager/dl_repmanager_tests/unit/tool_runner.py @@ -0,0 +1,52 @@ +from contextlib import redirect_stdout +import io +from pathlib import Path +from typing import ( + ClassVar, + Type, +) + +import attr + +from dl_repmanager.scripts.cli_base import CliToolBase +from dl_repmanager.scripts.package_meta_cli import DlPackageMetaTool +from dl_repmanager.scripts.repmanager_cli import DlRepManagerTool + + +@attr.s(frozen=True) +class CliResult: + stdout: str = attr.ib(kw_only=True) + + +class CliRunner: + cli_cls: ClassVar[Type[CliToolBase]] + + def run_with_args(self, argv: list[str]) -> CliResult: + 
with redirect_stdout(io.StringIO()) as out_stream: + self.cli_cls.run(argv) + + assert isinstance(out_stream, io.StringIO) + result = CliResult(stdout=out_stream.getvalue()) + return result + + +@attr.s +class RepoCliRunner(CliRunner): + cli_cls = DlRepManagerTool + + repo_path: Path = attr.ib() + + def run_with_args(self, argv: list[str]) -> CliResult: + argv = ["--base-path", str(self.repo_path)] + argv + return super().run_with_args(argv) + + +@attr.s +class PackageCliRunner(CliRunner): + cli_cls = DlPackageMetaTool + + package_path: Path = attr.ib() + + def run_with_args(self, argv: list[str]) -> CliResult: + argv = ["--package-path", str(self.package_path)] + argv + return super().run_with_args(argv) diff --git a/tools/taskfiles/taskfile_code_quality.yml b/tools/taskfiles/taskfile_code_quality.yml index 48b1ef42f..299fb6791 100644 --- a/tools/taskfiles/taskfile_code_quality.yml +++ b/tools/taskfiles/taskfile_code_quality.yml @@ -20,10 +20,10 @@ vars: # Files changed since last commit CHANGED_PY_SINGLE_LINE: - sh: cd {{.ROOT_DIR}} && echo $(git diff --relative --name-only HEAD | grep '\.py$' ) + sh: cd {{.ROOT_DIR}} && echo $(git diff --relative --diff-filter=ACMRT --name-only HEAD | grep '\.py$' ) CHANGED_PYPROJECT_SINGLE_LINE: - sh: cd {{.ROOT_DIR}} && echo $(git diff --relative --name-only HEAD | grep 'pyproject.toml$' ) + sh: cd {{.ROOT_DIR}} && echo $(git diff --relative --diff-filter=ACMRT --name-only HEAD | grep 'pyproject.toml$' ) DO_RUFF_FIX: "false"