From 5a47a0e01a7ac3240f6e28391e71da6cf5957f9e Mon Sep 17 00:00:00 2001
From: Jules Bertrand <33326907+julesbertrand@users.noreply.github.com>
Date: Tue, 2 Jan 2024 10:33:45 +0100
Subject: [PATCH] test: add integration test to ensure config and cli params are iso (#121)

---
 Makefile                                      | 16 +++++-
 deployer/cli.py                               | 40 ++++++-------
 deployer/{configuration.py => settings.py}    | 56 +++++++++----------
 tests/integration_tests/test_configuration.py | 14 +++--
 4 files changed, 70 insertions(+), 56 deletions(-)
 rename deployer/{configuration.py => settings.py} (59%)

diff --git a/Makefile b/Makefile
index b2d66a2..afa48a1 100644
--- a/Makefile
+++ b/Makefile
@@ -49,10 +49,20 @@ format-code:
 	@poetry run pre-commit run -a
 
 
-.PHONY: run-tests
+.PHONY: run-unit-tests
 ## Run unit tests
-run-tests:
-	@poetry run pytest tests --cov=deployer --cov-report=term-missing -s -vv -W ignore:::pkg_resources
+run-unit-tests:
+	@poetry run pytest tests/unit_tests --cov=deployer --cov-report=term-missing -s -vv -W ignore:::pkg_resources
+
+.PHONY: run-integration-tests
+## Run integration tests
+run-integration-tests:
+	@poetry run pytest tests/integration_tests -s -vv -W ignore:::pkg_resources
+
+
+.PHONY: run-tests
+## Run all tests
+run-tests: run-unit-tests run-integration-tests
 
 
 .PHONY: profile-cli
diff --git a/deployer/cli.py b/deployer/cli.py
index 8f2ede2..c95a969 100644
--- a/deployer/cli.py
+++ b/deployer/cli.py
@@ -8,7 +8,6 @@
 from pydantic import ValidationError
 from typing_extensions import Annotated
 
-from deployer.configuration import load_configuration
 from deployer.constants import (
     DEFAULT_LOCAL_PACKAGE_PATH,
     DEFAULT_SCHEDULER_TIMEZONE,
@@ -16,6 +15,7 @@
     PIPELINE_MINIMAL_TEMPLATE,
     PYTHON_CONFIG_TEMPLATE,
 )
+from deployer.settings import load_deployer_settings
 from deployer.utils.config import (
     ConfigType,
     list_config_filepaths,
@@ -33,9 +33,9 @@
 rich.traceback.install()
 
 
-deployer_config = load_configuration()
+deployer_settings = load_deployer_settings()
 
-PipelineName = make_enum_from_python_package_dir(deployer_config.pipelines_root_path)
+PipelineName = make_enum_from_python_package_dir(deployer_settings.pipelines_root_path)
 
 
 def display_version_and_exit(value: bool):
@@ -50,7 +50,7 @@ def display_version_and_exit(value: bool):
     no_args_is_help=True,
     rich_help_panel="rich",
     rich_markup_mode="markdown",
-    context_settings={"default_map": deployer_config.model_dump()},
+    context_settings={"default_map": deployer_settings.model_dump()},
 )
 
 
@@ -82,13 +82,13 @@ def config(
 
     if all:
         config_repr = dict_to_repr(
-            dict_=deployer_config.model_dump(),
-            subdict=deployer_config.model_dump(exclude_unset=True),
+            dict_=deployer_settings.model_dump(),
+            subdict=deployer_settings.model_dump(exclude_unset=True),
         )
         config_str = "[italic]'*' means the value was set in config file[/italic]\n\n"
        config_str += "\n".join(config_repr)
     else:
-        config_repr = dict_to_repr(dict_=deployer_config.model_dump(exclude_unset=True))
+        config_repr = dict_to_repr(dict_=deployer_settings.model_dump(exclude_unset=True))
         config_str = "\n".join(config_repr)
 
     console.print(config_str)
@@ -225,7 +225,7 @@ def deploy(  # noqa: C901
     )
 
     pipeline_func = import_pipeline_from_dir(
-        deployer_config.pipelines_root_path, pipeline_name.value
+        deployer_settings.pipelines_root_path, pipeline_name.value
     )
 
     from deployer.pipeline_deployer import VertexPipelineDeployer
@@ -245,7 +245,7 @@ def deploy(  # noqa: C901
     if run or schedule:
         if config_name is not None:
             config_filepath = (
-                Path(deployer_config.config_root_path) / pipeline_name.value / config_name
+                Path(deployer_settings.config_root_path) / pipeline_name.value / config_name
             )
         parameter_values, input_artifacts = load_config(config_filepath)
 
@@ -332,7 +332,7 @@ def check(
     if len(PipelineName.__members__) == 0:
         raise ValueError(
             "No pipeline found. Please check that the pipeline root path is correct"
-            f" ('{deployer_config.pipelines_root_path}')"
+            f" ('{deployer_settings.pipelines_root_path}')"
         )
 
     from deployer.pipeline_checks import Pipelines
@@ -345,7 +345,7 @@ def check(
         pipelines_to_check = [pipeline_name]
     if config_filepath is None:
         to_check = {
-            p.value: list_config_filepaths(deployer_config.config_root_path, p.value)
+            p.value: list_config_filepaths(deployer_settings.config_root_path, p.value)
             for p in pipelines_to_check
         }
     else:
@@ -359,8 +359,8 @@ def check(
             p: {
                 "pipeline_name": p,
                 "config_paths": config_filepaths,
-                "pipelines_root_path": deployer_config.pipelines_root_path,
-                "config_root_path": deployer_config.config_root_path,
+                "pipelines_root_path": deployer_settings.pipelines_root_path,
+                "config_root_path": deployer_settings.config_root_path,
             }
             for p, config_filepaths in to_check.items()
         }
@@ -388,13 +388,13 @@ def list(
     if len(PipelineName.__members__) == 0:
         logger.warning(
             "No pipeline found. Please check that the pipeline root path is"
-            f" correct (current: '{deployer_config.pipelines_root_path}')"
+            f" correct (current: '{deployer_settings.pipelines_root_path}')"
         )
         raise typer.Exit()
 
     if with_configs:
         pipelines_dict = {
-            p.name: list_config_filepaths(deployer_config.config_root_path, p.name)
+            p.name: list_config_filepaths(deployer_settings.config_root_path, p.name)
             for p in PipelineName.__members__.values()
         }
     else:
@@ -417,18 +417,18 @@ def create(
     """Create files structure for a new pipeline."""
     logger.info(f"Creating pipeline {pipeline_name}")
 
-    if not Path(deployer_config.pipelines_root_path).is_dir():
+    if not Path(deployer_settings.pipelines_root_path).is_dir():
         raise FileNotFoundError(
-            f"Pipeline root path '{deployer_config.pipelines_root_path}' does not exist."
+            f"Pipeline root path '{deployer_settings.pipelines_root_path}' does not exist."
             " Please check that the pipeline root path is correct"
-            f" or create it with `mkdir -p {deployer_config.pipelines_root_path}`."
+            f" or create it with `mkdir -p {deployer_settings.pipelines_root_path}`."
         )
 
-    pipeline_filepath = Path(deployer_config.pipelines_root_path) / f"{pipeline_name}.py"
+    pipeline_filepath = Path(deployer_settings.pipelines_root_path) / f"{pipeline_name}.py"
     pipeline_filepath.touch(exist_ok=False)
     pipeline_filepath.write_text(PIPELINE_MINIMAL_TEMPLATE.format(pipeline_name=pipeline_name))
 
-    config_dirpath = Path(deployer_config.config_root_path) / pipeline_name
+    config_dirpath = Path(deployer_settings.config_root_path) / pipeline_name
     config_dirpath.mkdir(exist_ok=False)
     for config_name in ["test", "dev", "prod"]:
         config_filepath = config_dirpath / f"{config_name}.{config_type}"
diff --git a/deployer/configuration.py b/deployer/settings.py
similarity index 59%
rename from deployer/configuration.py
rename to deployer/settings.py
index c7d70c7..ae6589b 100644
--- a/deployer/configuration.py
+++ b/deployer/settings.py
@@ -12,8 +12,8 @@
 from deployer.utils.models import CustomBaseModel
 
 
-class DeployerDeployConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer `deploy` command."""
+class _DeployerDeploySettings(CustomBaseModel):
+    """Settings for Vertex Deployer `deploy` command."""
 
     env_file: Optional[Path] = None
     compile: bool = True
@@ -31,43 +31,43 @@ class DeployerDeployConfig(CustomBaseModel):
     local_package_path: Path = constants.DEFAULT_LOCAL_PACKAGE_PATH
 
 
-class DeployerCheckConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer `check` command."""
+class _DeployerCheckSettings(CustomBaseModel):
+    """Settings for Vertex Deployer `check` command."""
 
     all: bool = False
     config_filepath: Optional[Path] = None
     raise_error: bool = False
 
 
-class DeployerListConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer `list` command."""
+class _DeployerListSettings(CustomBaseModel):
+    """Settings for Vertex Deployer `list` command."""
 
     with_configs: bool = False
 
 
-class DeployerCreateConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer `create` command."""
+class _DeployerCreateSettings(CustomBaseModel):
+    """Settings for Vertex Deployer `create` command."""
 
     config_type: ConfigType = ConfigType.json
 
 
-class DeployerConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer `config` command."""
+class _DeployerConfigSettings(CustomBaseModel):
+    """Settings for Vertex Deployer `config` command."""
 
     all: bool = False
 
 
-class DeployerConfig(CustomBaseModel):
-    """Configuration for Vertex Deployer."""
+class DeployerSettings(CustomBaseModel):
+    """Settings for Vertex Deployer."""
 
     pipelines_root_path: Path = constants.PIPELINE_ROOT_PATH
     config_root_path: Path = constants.CONFIG_ROOT_PATH
     log_level: str = "INFO"
-    deploy: DeployerDeployConfig = DeployerDeployConfig()
-    check: DeployerCheckConfig = DeployerCheckConfig()
-    list: DeployerListConfig = DeployerListConfig()
-    create: DeployerCreateConfig = DeployerCreateConfig()
-    config: DeployerConfig = DeployerConfig()
+    deploy: _DeployerDeploySettings = _DeployerDeploySettings()
+    check: _DeployerCheckSettings = _DeployerCheckSettings()
+    list: _DeployerListSettings = _DeployerListSettings()
+    create: _DeployerCreateSettings = _DeployerCreateSettings()
+    config: _DeployerConfigSettings = _DeployerConfigSettings()
 
 
 def find_pyproject_toml(path_project_root: Path) -> Optional[str]:
@@ -82,31 +82,29 @@ def find_pyproject_toml(path_project_root: Path) -> Optional[str]:
 
 def parse_pyproject_toml(path_pyproject_toml: str) -> Dict[str, Any]:
     """Parse a pyproject toml file, pulling out relevant parts for Deployer."""
     pyproject_toml = toml.load(path_pyproject_toml)
-    config: dict[str, Any] = pyproject_toml.get("tool", {}).get("vertex_deployer", {})
-    config = {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
-    return config
+    settings: dict[str, Any] = pyproject_toml.get("tool", {}).get("vertex_deployer", {})
+    settings = {k.replace("--", "").replace("-", "_"): v for k, v in settings.items()}
+    return settings
 
 
 @lru_cache()
-def load_configuration() -> DeployerConfig:
-    """Load the configuration for Vertex Deployer."""
+def load_deployer_settings() -> DeployerSettings:
+    """Load the settings for Vertex Deployer."""
     path_project_root = Path.cwd().resolve()
     path_pyproject_toml = find_pyproject_toml(path_project_root)
 
     if path_pyproject_toml is None:
-        logger.debug(
-            "No pyproject.toml file found. Using default configuration for Vertex Deployer."
-        )
-        config = {}
+        logger.debug("No pyproject.toml file found. Using default settings for Vertex Deployer.")
+        settings = {}
     else:
-        config = parse_pyproject_toml(path_pyproject_toml)
+        settings = parse_pyproject_toml(path_pyproject_toml)
 
     try:
-        config = DeployerConfig(**config)
+        settings = DeployerSettings(**settings)
     except ValidationError as e:
         msg = f"In {path_pyproject_toml}:\n{e}\n"
         msg += "Please check your configuration file."
         raise InvalidPyProjectTOMLError(msg) from e
 
-    return config
+    return settings
diff --git a/tests/integration_tests/test_configuration.py b/tests/integration_tests/test_configuration.py
index 38c4cbe..d31141d 100644
--- a/tests/integration_tests/test_configuration.py
+++ b/tests/integration_tests/test_configuration.py
@@ -2,14 +2,14 @@
 from collections import defaultdict
 from inspect import signature
 from typing import Type
+from unittest.mock import patch
 
 import typer
 from pydantic import BaseModel
 from rich.pretty import pretty_repr
 from typing_extensions import _AnnotatedAlias
 
-from deployer.cli import app
-from deployer.configuration import DeployerConfig
+from deployer.settings import DeployerSettings
 
 
 def get_model_recursive_signature(model: Type[BaseModel]):
@@ -55,11 +55,17 @@ def compare_dicts(d1, d2):
     )
 
 
-def test_cli_parameters_are_available_in_config():
+def test_deployer_cli_and_settings_consistency():
     # Given
+
+    # patch deployer.settings:load_deployer_settings
+    with patch("deployer.settings.load_deployer_settings") as mock:
+        mock.return_value = DeployerSettings()
+        from deployer.cli import app
+
     configured_parameters = {
         k: v
-        for k, v in get_model_recursive_signature(DeployerConfig).items()
+        for k, v in get_model_recursive_signature(DeployerSettings).items()
         if k not in ["pipelines_root_path", "config_root_path", "log_level"]
     }
     cli_parameters = get_typer_app_signature(app)