Skip to content

Commit

Permalink
chore(release): 0.4.1 (#140)
Browse files Browse the repository at this point in the history
  • Loading branch information
julesbertrand authored Feb 14, 2024
2 parents b39e22e + ba554c3 commit 2d6dbdb
Show file tree
Hide file tree
Showing 19 changed files with 230 additions and 74 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
run: make download-poetry

- name: Set up pip cache
uses: actions/cache@v3.3.2
uses: actions/cache@v4.0.0
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}
Expand Down Expand Up @@ -67,7 +67,7 @@ jobs:
run: make download-poetry

- name: Set up pip cache
uses: actions/cache@v3.3.2
uses: actions/cache@v4.0.0
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}
Expand Down Expand Up @@ -106,7 +106,7 @@ jobs:
# Cannot check more than compile action here, need GCP environment for upload, run, schedule
run: |
cd example
poetry run -C .. vertex-deployer -log DEBUG deploy dummy_pipeline --compile --env-file example.env
poetry run -C .. vertex-deployer -log DEBUG deploy dummy_pipeline --compile --env-file example.env --skip-validation
- name: Test create command
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/deploy_docs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
run: make download-poetry

- name: Set up pip cache
uses: actions/cache@v3.3.2
uses: actions/cache@v4.0.0
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ env.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}
Expand Down
16 changes: 8 additions & 8 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,20 +14,20 @@ repos:
- id: check-added-large-files
- repo: local
hooks:
- id: isort
name: isort
entry: poetry run isort --settings-path pyproject.toml
- id: ruff-isort
name: ruff isort
entry: poetry run ruff check --fix --select I --config pyproject.toml --show-fixes
types: [python]
language: system
stages: [commit, push]
- id: black
name: black
entry: poetry run black --config pyproject.toml
- id: ruff-format
name: ruff format
entry: poetry run ruff format --config pyproject.toml
types: [python]
language: system
stages: [commit, push]
- id: ruff
name: ruff
- id: ruff-check
name: ruff check
entry: poetry run ruff check --config pyproject.toml
types: [python]
language: system
Expand Down
45 changes: 21 additions & 24 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@
<h1 align="center">Vertex Pipelines Deployer</h1>
<h3 align="center">Deploy Vertex Pipelines within minutes</h3>
<p align="center">
This tool is a wrapper around <a href="https://www.kubeflow.org/docs/components/pipelines/v2/hello-world/">kfp</a> and <a href="https://cloud.google.com/python/docs/reference/aiplatform/latest">google-cloud-aiplatform</a> that allows you to check, compile, upload, run and schedule Vertex Pipelines in a standardized manner.
This tool is a wrapper around <a href="https://www.kubeflow.org/docs/components/pipelines/v2/hello-world/">kfp</a> and <a href="https://cloud.google.com/python/docs/reference/aiplatform/latest">google-cloud-aiplatform</a> that allows you to check, compile, upload, run, and schedule Vertex Pipelines in a standardized manner.
</p>
</div>
</br>
<br />

<!-- PROJECT SHIELDS -->
<div align="center">
Expand All @@ -18,40 +18,40 @@
[![Pre-commit](https://img.shields.io/badge/pre--commit-enabled-informational?logo=pre-commit&logoColor=white)](https://github.com/ornikar/vertex-eduscore/blob/develop/.pre-commit-config.yaml)
[![License](https://img.shields.io/github/license/artefactory/vertex-pipelines-deployer)](https://github.com/artefactory/vertex-pipelines-deployer/blob/main/LICENSE)

[![CI](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/ci.yaml/badge.svg?branch%3Amain&event%3Apush)](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/ci.yaml/badge.svg?query=branch%3Amain)
[![Release](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/release.yaml/badge.svg?branch%3Amain&event%3Apush)](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/release.yaml/badge.svg?query=branch%3Amain)
[![CI](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/ci.yaml/badge.svg?branch=main&event=push)](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/ci.yaml)
[![Release](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/release.yaml/badge.svg?branch=main&event=push)](https://github.com/artefactory/vertex-pipelines-deployer/actions/workflows/release.yaml)

</div>


<details>
<summary>📚 Table of Contents</summary>
<ul>
<ol>
<li><a href="#-why-this-tool">Why this tool?</a></li>
<li><a href="#-prerequisites">Prerequisites</a></li>
<li><a href="#-installation">Installation</a></li>
<ul>
<ol>
<li><a href="#from-git-repo">From git repo</a></li>
<li><a href="#from-artifact-registry-not-available-in-pypi-yet">From Artifact Registry (not available in PyPI yet)</a></li>
<li><a href="#add-to-requirements">Add to requirements</a></li>
</ul>
</ol>
<li><a href="#-usage">Usage</a></li>
<ul>
<ol>
<li><a href="#-setup">Setup</a></li>
<li><a href="#-folder-structure">Folder Structure</a></li>
<li><a href="#-cli-deploying-a-pipeline-with-deploy">CLI: Deploying a Pipeline with `deploy`</a></li>
<li><a href="#-cli-checking-pipelines-are-valid-with-check">CLI: Checking Pipelines are valid with `check`</a></li>
<li><a href="#-cli-other-commands">CLI: Other commands</a></li>
<ul>
<ol>
<li><a href="#config">`config`</a></li>
<li><a href="#create">`create`</a></li>
<li><a href="#init">`init`</a></li>
<li><a href="#list">`list`</a></li>
</ul>
</ul>
</ol>
</ol>
<li><a href="#cli-options">CLI: Options</a></li>
<li><a href="#configuration">Configuration</a></li>
</ul>
</ol>
</details>


Expand All @@ -63,26 +63,22 @@

Three use cases:

1. **CI:** check pipeline validity.
1. **Dev mode:** quickly iterate over your pipelines by compiling and running them in multiple environments (test, dev, staging, etc) without duplicating code or looking for the right kfp / aiplatform snippet.
2. **CD:** deploy your pipelines to Vertex Pipelines in a standardized manner in your CD with Cloud Build or GitHub Actions.
1. **CI:** Check pipeline validity.
2. **Dev mode:** Quickly iterate over your pipelines by compiling and running them in multiple environments (test, dev, staging, etc.) without duplicating code or searching for the right kfp/aiplatform snippet.
3. **CD:** Deploy your pipelines to Vertex Pipelines in a standardized manner in your CD with Cloud Build or GitHub Actions.


Four commands:
Two main commands:

- `check`: check your pipelines (imports, compile, check configs validity against pipeline definition).
- `deploy`: compile, upload to Artifact Registry, run and schedule your pipelines.
- `config`: display the configuration from `pyproject.toml`.
- `create`: create a new pipeline and config files.
- `init`: initialize the project with necessary configuration files and directory structure.
- `list`: list all pipelines in the `vertex/pipelines` folder.
- `check`: Check your pipelines (imports, compile, check configs validity against pipeline definition).
- `deploy`: Compile, upload to Artifact Registry, run, and schedule your pipelines.

<!-- --8<-- [end:why] -->

## 📋 Prerequisites
<!-- --8<-- [start:prerequisites] -->

- Unix-like environment (Linux, macOS, WSL, etc...)
- Unix-like environment (Linux, macOS, WSL, etc.)
- Python 3.8 to 3.10
- Google Cloud SDK
- A GCP project with Vertex Pipelines enabled
Expand Down Expand Up @@ -344,7 +340,8 @@ vertex-deployer deploy dummy_pipeline \
--tags my-tag \
--config-filepath vertex/configs/dummy_pipeline/config_test.json \
--experiment-name my-experiment \
--enable-caching
--enable-caching \
--skip-validation
```

### ✅ CLI: Checking Pipelines are valid with `check`
Expand Down
21 changes: 16 additions & 5 deletions deployer/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,10 @@
list_config_filepaths,
load_config,
load_vertex_settings,
validate_or_log_settings,
)
from deployer.utils.console import ask_user_for_model_fields
from deployer.utils.logging import LoguruLevel, console
from deployer.utils.console import ask_user_for_model_fields, console
from deployer.utils.logging import LoguruLevel
from deployer.utils.utils import (
dict_to_repr,
import_pipeline_from_dir,
Expand Down Expand Up @@ -216,9 +217,18 @@ def deploy( # noqa: C901
resolve_path=True,
),
] = constants.DEFAULT_LOCAL_PACKAGE_PATH,
skip_validation: Annotated[
bool,
typer.Option(
"--skip-validation / --no-skip",
"-y / -n",
help="Whether to continue without user validation of the settings.",
),
] = True,
):
"""Compile, upload, run and schedule pipelines."""
vertex_settings = load_vertex_settings(env_file=env_file)
validate_or_log_settings(vertex_settings, skip_validation=skip_validation, env_file=env_file)

if schedule:
if cron is None or cron == "":
Expand Down Expand Up @@ -352,17 +362,18 @@ def check(

if all:
logger.info("Checking all pipelines")
pipelines_to_check = ctx.obj["pipeline_names"].__members__.values()
# unpack enum to get list of pipeline names
pipelines_to_check = [x.value for x in ctx.obj["pipeline_names"]]
elif pipeline_name is not None:
logger.info(f"Checking pipeline {pipeline_name}")
pipelines_to_check = [pipeline_name]
if config_filepath is None:
to_check = {
p.value: list_config_filepaths(deployer_settings.config_root_path, p.value)
p: list_config_filepaths(deployer_settings.config_root_path, p)
for p in pipelines_to_check
}
else:
to_check = {p.value: [config_filepath] for p in pipelines_to_check}
to_check = {p: [config_filepath] for p in pipelines_to_check}

try:
with console.status("Checking pipelines..."):
Expand Down
2 changes: 1 addition & 1 deletion deployer/pipeline_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def import_pipeline(self):
try:
_ = self.pipeline
except (ImportError, ModuleNotFoundError) as e:
raise ValueError(f"Pipeline import failed: {e.__repr__()}") # noqa: B904
raise ValueError(f"Pipeline import failed: {e}") from e
return self

@model_validator(mode="after")
Expand Down
1 change: 1 addition & 0 deletions deployer/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ class _DeployerDeploySettings(CustomBaseModel):
enable_caching: bool = False
experiment_name: Optional[str] = None
local_package_path: Path = constants.DEFAULT_LOCAL_PACKAGE_PATH
skip_validation: bool = True


class _DeployerCheckSettings(CustomBaseModel):
Expand Down
41 changes: 41 additions & 0 deletions deployer/utils/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,15 @@
from typing import List, Optional, Tuple, Union

import tomlkit.items
from loguru import logger
from pydantic import ValidationError
from pydantic_settings import BaseSettings, SettingsConfigDict
from rich.prompt import Confirm
from rich.table import Table
from tomlkit import TOMLDocument
from tomlkit.toml_file import TOMLFile

from deployer.utils.console import console
from deployer.utils.exceptions import BadConfigError, UnsupportedConfigFileError


Expand All @@ -28,6 +32,7 @@ def load_vertex_settings(env_file: Optional[Path] = None) -> VertexPipelinesSett
"""Load the settings from the environment."""
try:
settings = VertexPipelinesSettings(_env_file=env_file, _env_file_encoding="utf-8")
print(settings)
except ValidationError as e:
msg = "Validation failed for VertexPipelinesSettings. "
if env_file is not None:
Expand All @@ -39,6 +44,42 @@ def load_vertex_settings(env_file: Optional[Path] = None) -> VertexPipelinesSett
return settings


def validate_or_log_settings(
    settings: VertexPipelinesSettings,
    skip_validation: bool,
    env_file: Optional[Path] = None,
) -> None:
    """Interactively validate the settings with the user, or just log them.

    Args:
        settings (VertexPipelinesSettings): The settings to validate or log.
        skip_validation (bool): Whether to skip interactive user validation.
        env_file (Optional[Path], optional): The path to the environment file
            the settings were loaded from, used only for the log message.
            Defaults to None.

    Raises:
        ValueError: If the user chooses not to continue with these settings.
    """
    msg = "Loaded settings from environment"
    if env_file is not None:
        msg += f" and `.env` file: `{env_file}`."

    if skip_validation:
        # Non-interactive path: log the settings for traceability only.
        msg += "\nLoaded settings for Vertex:"
        # Coerce values to str: non-string values (e.g. Path, None) would raise
        # TypeError with an alignment format spec such as `:<30`.
        msg += "\n" + "\n".join(f" {k:<30} {v!s:<30}" for k, v in settings.model_dump().items())
        logger.info(msg)
    else:
        # Interactive path: show the settings in a table and ask to confirm.
        table = Table(show_header=True, header_style="bold", show_lines=True)
        table.add_column("Setting Name")
        table.add_column("Value")
        for k, v in settings.model_dump().items():
            # Rich table cells must be strings (or renderables); settings
            # values are not guaranteed to be str, so coerce explicitly.
            table.add_row(k, str(v))

        console.print(msg)
        console.print(table)
        if not Confirm.ask("Do you want to continue with these settings? ", console=console):
            raise ValueError("User chose to exit")


class ConfigType(str, Enum): # noqa: D101
json = "json"
py = "py"
Expand Down
3 changes: 3 additions & 0 deletions deployer/utils/console.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,11 @@
from typing import Type

from pydantic import BaseModel
from rich.console import Console
from rich.prompt import Prompt

console = Console()


def ask_user_for_model_fields(model: Type[BaseModel]) -> dict:
"""Ask user for model fields and return a dictionary with the results.
Expand Down
4 changes: 0 additions & 4 deletions deployer/utils/logging.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from enum import Enum

from loguru import logger
from rich.console import Console


class LoguruLevel(str, Enum): # noqa: D101
Expand All @@ -25,6 +24,3 @@ def __enter__(self) -> None: # noqa: D105

def __exit__(self, exc_type, exc_val, exc_tb) -> None: # noqa: D105
logger.enable(self.name)


console = Console()
2 changes: 1 addition & 1 deletion deployer/utils/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def create_model_from_func(
func_model = create_model(
__model_name=model_name,
__base__=CustomBaseModel,
**{name: (annotation, ...) for name, annotation in func_typing.items()}
**{name: (annotation, ...) for name, annotation in func_typing.items()},
)

return func_model
Expand Down
Loading

0 comments on commit 2d6dbdb

Please sign in to comment.