diff --git a/.github/workflows/vertica-test.yml b/.github/workflows/vertica-test.yml index 97e3a66..4ade6ba 100644 --- a/.github/workflows/vertica-test.yml +++ b/.github/workflows/vertica-test.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: [ '3.8','3.9' ,'3.10', '3.11'] services: vertica: image: vertica/vertica-ce:latest @@ -45,13 +45,11 @@ jobs: - name: Test Basic run: python -m pytest tests/functional/adapter/test_basic.py - name: Test Constraints - run: python -m pytest tests/functional/adapter/test_constraints.py + run: python -m pytest tests/functional/adapter/constraints/test_constraints.py - name: Test Incremental run: python -m pytest tests/functional/adapter/incremental/ - name: Test Concurrency run: python -m pytest tests/functional/adapter/concurrency/ - - name: Test ephemeral - run: python -m pytest tests/functional/adapter/ephemeral/ - name: Test Doc Generate run: python -m pytest tests/functional/adapter/test_doc_gen.py - name: Test Data Type Boolean @@ -61,5 +59,6 @@ jobs: - name: Test Data Type Int run: python -m pytest tests/functional/adapter/utils/data_type/ + diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ed96f6..d98f22d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,30 @@ ## Changelog - This file provides a full account of all changes to dbt-vertica. - "Breaking changes" listed under a version may require action from end users. + +### 1.6.0 + +#### Features: +- Added support for [`dbt-core version 1.6.0`](https://github.com/dbt-labs/dbt-core/discussions/7958) according to DBT guidelines. +- new `clone` command +- Droped support for Python 3.7 + +#### Fixes: +- ensure support for revamped `dbt debug` +- new limit arg for `adapter.execute()` +- Added new functional tests and parameterize them by overriding fixtures: + - TestIncrementalConstraintsRollback + - TestTableContractSqlHeader + - TestIncrementalContractSqlHeader + - TestModelConstraintsRuntimeEnforcement + - TestConstraintQuotedColumn + - TestEquals + - TestMixedNullCompare + - TestNullCompare + - TestVerticaCloneNotPossible + - TestValidateSqlMethod + + ### 1.5.0 #### Features: - Added support for [`dbt-core version 1.5.0`](https://github.com/dbt-labs/dbt-core/discussions/7213) according to DBT guidelines. diff --git a/dbt/adapters/vertica/__version__.py b/dbt/adapters/vertica/__version__.py index 194d475..7d1c21b 100644 --- a/dbt/adapters/vertica/__version__.py +++ b/dbt/adapters/vertica/__version__.py @@ -14,4 +14,4 @@ -version = "1.5.0" +version = "1.6.0" diff --git a/dbt/adapters/vertica/connections.py b/dbt/adapters/vertica/connections.py index fba5429..77d4b36 100644 --- a/dbt/adapters/vertica/connections.py +++ b/dbt/adapters/vertica/connections.py @@ -185,14 +185,17 @@ def cancel(self, connection): connection.handle.cancel() @classmethod - def get_result_from_cursor(cls, cursor: Any) -> agate.Table: + def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table: data: List[Any] = [] column_names: List[str] = [] if cursor.description is not None: column_names = [col[0] for col in cursor.description] - rows = cursor.fetchall() - + if limit: + rows = cursor.fetchmany(limit) + else: + rows = cursor.fetchall() + # rows = cursor.fetchall() # check result for every query if there are some queries with ; separator while cursor.nextset(): check = cursor._message @@ -206,13 +209,13 @@ def get_result_from_cursor(cls, cursor: Any) -> agate.Table: return dbt.clients.agate_helper.table_from_data_flat(data, column_names) def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False + self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None ) -> Tuple[AdapterResponse, agate.Table]: sql = self._add_query_comment(sql) _, cursor = self.add_query(sql, auto_begin) response = self.get_response(cursor) if fetch: - table = self.get_result_from_cursor(cursor) + table = self.get_result_from_cursor(cursor,limit) else: table = dbt.clients.agate_helper.empty_table() while cursor.nextset(): diff --git a/dbt/adapters/vertica/impl.py b/dbt/adapters/vertica/impl.py index bdfb3e9..8acad00 100644 --- a/dbt/adapters/vertica/impl.py +++ b/dbt/adapters/vertica/impl.py @@ -164,3 +164,5 @@ def get_incremental_strategy_macro(self, model_context, strategy: str): # This returns a callable macro return model_context[macro_name] + def debug_query(self) -> None: + self.execute("select 1 as id") \ No newline at end of file diff --git a/dbt/include/vertica/macros/materializations/clone.sql b/dbt/include/vertica/macros/materializations/clone.sql new file mode 100644 index 0000000..550fa65 --- /dev/null +++ b/dbt/include/vertica/macros/materializations/clone.sql @@ -0,0 +1,16 @@ + +{% macro vertica__can_clone_table() %} + {{ return(True) }} +{% endmacro %} + + +{% macro vertica__create_or_replace_clone(this_relation, defer_relation) %} + + +DROP TABLE IF EXISTS {{this_relation}}; + create table + {{ this_relation }} as select * from + {{ defer_relation }} + + +{% endmacro %} \ No newline at end of file diff --git a/example/demo_dbt_vmart/dbt_project.yml b/example/demo_dbt_vmart/dbt_project.yml index 31ffde0..4adfe54 100644 --- a/example/demo_dbt_vmart/dbt_project.yml +++ b/example/demo_dbt_vmart/dbt_project.yml @@ -7,7 +7,7 @@ version: '1.0.0' config-version: 2 # This setting configures which "profile" dbt uses for this project. -profile: 'vmart_project' +profile: vmart_project # These configurations specify where dbt should look for different types of files. # The `model-paths` config, for example, states that models in this project can be diff --git a/setup.py b/setup.py index 1e2d30c..08af162 100644 --- a/setup.py +++ b/setup.py @@ -28,10 +28,10 @@ import sys import re -# require python 3.7 or newer -if sys.version_info < (3, 7): +# require python 3.8 or newer +if sys.version_info < (3, 8): print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.7 or higher.") + print("Please upgrade to Python 3.8 or higher.") sys.exit(1) @@ -78,7 +78,7 @@ def _get_dbt_core_version(): package_name = "dbt-vertica" -package_version = "1.5.0" +package_version = "1.6.0" description = """Official vertica adapter plugin for dbt (data build tool)""" dbt_core_version = _get_dbt_core_version() @@ -110,10 +110,10 @@ def _get_dbt_core_version(): ] }, install_requires=[ - 'dbt-core==1.5.0', + 'dbt-core==1.6.0', # "dbt-core~={}".format(dbt_core_version), 'vertica-python>=1.1.0', - 'dbt-tests-adapter==1.5.0', + 'dbt-tests-adapter==1.6.0', 'python-dotenv==0.21.1', ], classifiers=[ @@ -127,5 +127,5 @@ def _get_dbt_core_version(): "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: OS Independent" ], - python_requires=">=3.7.2", + python_requires=">=3.8.0", ) diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/constraints/test_constraints.py similarity index 51% rename from tests/functional/adapter/test_constraints.py rename to tests/functional/adapter/constraints/test_constraints.py index 527e6e3..b42a3aa 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/constraints/test_constraints.py @@ -18,16 +18,22 @@ from dbt.tests.adapter.constraints.test_constraints import ( BaseTableConstraintsColumnsEqual, BaseViewConstraintsColumnsEqual, - #BaseTableContractSqlHeader, - #BaseIncrementalContractSqlHeader, BaseIncrementalConstraintsColumnsEqual, BaseConstraintsRuntimeDdlEnforcement, - BaseConstraintsRollback, BaseIncrementalConstraintsRuntimeDdlEnforcement, - BaseIncrementalConstraintsRollback, - BaseModelConstraintsRuntimeEnforcement, - #BaseConstraintQuotedColumn, ) +from dbt.tests.adapter.utils.test_null_compare import BaseNullCompare +from dbt.tests.adapter.utils.test_null_compare import BaseMixedNullCompare + +from dbt.tests.adapter.constraints.test_constraints import BaseIncrementalConstraintsRollback +from dbt.tests.adapter.utils.test_equals import BaseEquals +from dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod + +from dbt.tests.adapter.constraints.test_constraints import BaseModelConstraintsRuntimeEnforcement +from dbt.tests.adapter.constraints.test_constraints import BaseConstraintQuotedColumn + + + from dbt.tests.adapter.constraints.fixtures import ( my_model_sql, @@ -288,6 +294,189 @@ SELECT DEMO as column_name """ +SEEDS__DATA_EQUALS_CSV = """key_name,x,y,expected +1,1,1,same +2,1,2,different +3,1,,different +4,2,1,different +5,2,2,same +6,2,,different +7,,1,different +8,,2,different +9,,,same +""" + +# model breaking constraints +my_model_with_nulls_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select + + cast(null as {{ dbt.type_int() }}) as id, + + 'red' as color, + '2019-01-01' as date_day +""" + + +my_model_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select + 1 as id, + 'blue' as color, + '2019-01-01' as date_day +""" + + + +model_schema_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + - type: check + expression: id >= 1 + tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: text + - name: my_model_error + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: text + - name: my_model_wrong_order + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: text + - name: my_model_wrong_name + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: text +""" + +class BaseConstraintsRollback: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_yml, + } + + @pytest.fixture(scope="class") + def null_model_sql(self): + return my_model_with_nulls_sql + + @pytest.fixture(scope="class") + def expected_color(self): + return "blue" + + @pytest.fixture(scope="class") + def expected_error_messages(self): + return ['null value in column "id"', "violates not-null constraint"] + + def assert_expected_error_messages(self, error_message, expected_error_messages): + print(msg in error_message for msg in expected_error_messages) + assert all(msg in error_message for msg in expected_error_messages) + + def test__constraints_enforcement_rollback( + self, project, expected_color, expected_error_messages, null_model_sql + ): + results = run_dbt(["run", "-s", "my_model"]) + assert len(results) == 1 + + # Make a contract-breaking change to the model + write_file(null_model_sql, "models", "my_model.sql") + + failing_results = run_dbt(["run", "-s", "my_model"], expect_pass=True) + assert len(failing_results) == 1 + + # Verify the previous table still exists + relation = relation_from_name(project.adapter, "my_model") + old_model_exists_sql = f"select * from {relation}" + old_model_exists = project.run_sql(old_model_exists_sql, fetch="all") + assert len(old_model_exists) == 1 + assert old_model_exists[0][1] == expected_color + + # Confirm this model was contracted + # TODO: is this step really necessary? + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + + assert contract_actual_config.enforced is True + + # Its result includes the expected error messages + self.assert_expected_error_messages(failing_results[0].message, expected_error_messages) + + class VerticaColumnEqualSetup: @@ -379,64 +568,209 @@ def models(self): "my_model_wrong_name.sql": my_model_view_wrong_name_sql, "constraints_schema.yml": constraints_yml, } -#class TestVerticaTableConstraintsRollback(BaseConstraintsRollback): -# @pytest.fixture(scope="class") -# def models(self): -# return { -# "my_model.sql": my_model_sql, -# "constraints_schema.yml": constraints_yml, -# } -# @pytest.fixture(scope="class") -# def expected_error_messages(self): -# return ["Required field id cannot be null"] class TestVerticaConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): @pytest.fixture(scope="class") def expected_sql(self): return """ - create table - +create table include schema privileges as(-- depends_on: select 'blue' as color,1 as id,'2019-01-01' as date_day); +""" +class TestVerticaIncrementalConstraintsRuntimeDdlEnforcement(BaseIncrementalConstraintsRuntimeDdlEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + + return """create table include schema privileges as(-- depends_on: select 'blue' as color,1 as id,'2019-01-01' as date_day); ;""" - INCLUDE SCHEMA PRIVILEGES as ( + +my_incremental_model_sql = """ +{{ + config( + materialized = "incremental", + on_schema_change='append_new_columns' + ) +}} select - 'blue' as color, 1 as id, + 'blue' as color, '2019-01-01' as date_day - ) +""" + +my_model_incremental_with_nulls_sql = """ +{{ + config( + materialized = "incremental", + on_schema_change='append_new_columns' ) +}} +select + -- null value for 'id' + cast(null as {{ dbt.type_int() }}) as id, + -- change the color as well (to test rollback) + 'red' as color, + '2019-01-01' as date_day +""" +class BaseIncrementalConstraintsRollback(BaseConstraintsRollback): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_incremental_model_sql, + "constraints_schema.yml": model_schema_yml, + } + @pytest.fixture(scope="class") + def null_model_sql(self): + return my_model_incremental_with_nulls_sql - ; -""" -class TestVerticaIncrementalConstraintsRuntimeDdlEnforcement(BaseIncrementalConstraintsRuntimeDdlEnforcement): - @pytest.fixture(scope="class") - def expected_sql(self): +class TestIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback): + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_yml, + } + @pytest.fixture(scope="class") + def expected_error_messages(self): + return [""] + + @pytest.fixture(scope="class") + def expected_color(self): + return "red" + + @pytest.fixture(scope="class") + def null_model_sql(self): + return my_model_with_nulls_sql + + + +class TestValidateSqlMethod(BaseValidateSqlMethod): + pass + +class TestNullCompare(BaseNullCompare): + pass + + +class TestMixedNullCompare(BaseMixedNullCompare): + pass + + +class TestEquals(BaseEquals): + + @pytest.fixture(scope="class") + def seeds(self): + return { + "data_equals.csv": SEEDS__DATA_EQUALS_CSV, + } + pass + + + +class TestConstraintQuotedColumn(BaseConstraintQuotedColumn): + @pytest.fixture(scope="class") + def expected_sql(self): return """ - create table - +create table INCLUDE SCHEMA PRIVILEGES as ( select 'blue' as "from", 1 as id, '2019-01-01' as date_day ) ; """ + pass - INCLUDE SCHEMA PRIVILEGES as ( +class TestModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): + @pytest.fixture(scope="class") + def expected_sql(self): + return """ +create table INCLUDE SCHEMA PRIVILEGES as ( -- depends_on: select 'blue' as color, 1 as id, '2019-01-01' as date_day ) ; +""" -select - 'blue' as color, - 1 as id, - '2019-01-01' as date_day + +my_model_contract_sql_header_sql = """ +{{ + config( + materialized = "table" ) +}} +{% call set_sql_header(config) %} +set session time zone 'Asia/Kolkata'; +{%- endcall %} +select CURRENT_TIME(0) as column_name + + +""" +model_contract_header_schema_yml = """ +version: 2 +models: + - name: my_model_contract_sql_header + config: + contract: + enforced: false + columns: + - name: column_name + data_type: text +""" +my_model_incremental_contract_sql_header_sql = """ +{{ + config( + materialized = "incremental", + on_schema_change="append_new_columns" + ) +}} -; ; +{% call set_sql_header(config) %} +set session time zone 'Asia/Kolkata'; +{%- endcall %} +select CURRENT_TIME(0) as column_name """ - + +class BaseContractSqlHeader: + """Tests a contracted model with a sql header dependency.""" + + def test__contract_sql_header(self, project): + run_dbt(["run", "-s", "my_model_contract_sql_header"]) + + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model_contract_sql_header" + model_config = manifest.nodes[model_id].config + + assert model_config.contract + + +class BaseTableContractSqlHeader(BaseContractSqlHeader): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_contract_sql_header.sql": my_model_contract_sql_header_sql, + "constraints_schema.yml": model_contract_header_schema_yml, + } + + +class TestTableContractSqlHeader(BaseTableContractSqlHeader): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_contract_sql_header.sql": my_model_contract_sql_header_sql, + "constraints_schema.yml": model_contract_header_schema_yml, + } + + +class BaseIncrementalContractSqlHeader(BaseContractSqlHeader): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_contract_sql_header.sql": my_model_incremental_contract_sql_header_sql, + "constraints_schema.yml": model_contract_header_schema_yml, + } + + +class TestIncrementalContractSqlHeader(BaseIncrementalContractSqlHeader): + pass diff --git a/tests/functional/adapter/dbt_clone/test_dbt_clone.py b/tests/functional/adapter/dbt_clone/test_dbt_clone.py new file mode 100644 index 0000000..22a7b8c --- /dev/null +++ b/tests/functional/adapter/dbt_clone/test_dbt_clone.py @@ -0,0 +1,118 @@ +from copy import deepcopy +from collections import Counter +from dbt.tests.util import run_dbt +from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseClonePossible +import pytest +import shutil +import os + +class TestVerticaCloneNotPossible(BaseClonePossible): + def test_can_clone_true(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + print(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert len(results) == 4 + + schema_relations = project.adapter.list_relations( + database=project.database, schema=other_schema + ) + types = [r.type for r in schema_relations] + count_types = Counter(types) + assert count_types == Counter({"table": 3, "view": 1}) + + # objects already exist, so this is a no-op + results = run_dbt(clone_args) + assert len(results) == 4 + assert all("no-op" in r.message.lower() for r in results) + + # recreate all objects + results = run_dbt([*clone_args, "--full-refresh"]) + assert len(results) == 4 + + # select only models this time + results = run_dbt([*clone_args, "--resource-type", "model"]) + assert len(results) == 2 + assert all("no-op" in r.message.lower() for r in results) + + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=f"{project.test_schema}_SEEDS" + ) + project.adapter.drop_schema(relation) + + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + + +table_model_1_sql = """ + {{ config( + materialized='table', + transient=true, + ) }} + select 1 as fun + """ + + +class TestVerticaCloneTrainsentTable: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": table_model_1_sql, + } + + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + + def run_and_save_state(self, project_root, with_snapshot=False): + results = run_dbt(["run"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + + self.copy_state(project_root) + + def test_can_clone_transient_table(self, project, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root) + + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert len(results) == 1 \ No newline at end of file diff --git a/tests/functional/adapter/ephemeral/test_ephemeral.py b/tests/functional/adapter/ephemeral/test_ephemeral.py index 9c2027a..c283576 100644 --- a/tests/functional/adapter/ephemeral/test_ephemeral.py +++ b/tests/functional/adapter/ephemeral/test_ephemeral.py @@ -27,9 +27,13 @@ def test_ephemeral_multi_snowflake(self, project): results = run_dbt(["run"]) assert len(results) == 3 # check_relations_equal(project.adapter, ["SEED", "DEPENDENT", "DOUBLE_DEPENDENT", "SUPER_DEPENDENT"]) + # check_relations_equal(project.adapter, ["seed", "dependent", "double_dependent", "super_dependent"]) + # check_relations_equal(project.adapter, ["seed", "dependent", "double_dependent", "super_dependent"]) + + check_relations_equal(project.adapter, ["seed", "dependent"]) check_relations_equal(project.adapter, ["seed", "double_dependent"]) check_relations_equal(project.adapter, ["seed", "super_dependent"]) - +