Skip to content

Commit

Permalink
chore(release): 0.3.2 (#96)
Browse files Browse the repository at this point in the history
  • Loading branch information
julesbertrand authored Nov 8, 2023
2 parents aee31e2 + 42ccfc9 commit 4eb0c53
Show file tree
Hide file tree
Showing 29 changed files with 929 additions and 167 deletions.
32 changes: 32 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Configuration: https://dependabot.com/docs/config-file/
# Docs: https://docs.github.com/en/github/administering-a-repository/keeping-your-dependencies-updated-automatically

version: 2

updates:
  # Weekly update PRs for Python dependencies, targeted at the develop branch.
  - package-ecosystem: "pip"
    target-branch: "develop"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "09:00"
    allow:
      - dependency-type: "all"
    labels:
      - draft
      - dependencies
      - python

  # Weekly update PRs for GitHub Actions versions, same cadence as pip.
  - package-ecosystem: "github-actions"
    target-branch: "develop"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "09:00"
    allow:
      - dependency-type: "all"
    labels:
      - draft
      - dependencies
      - github_actions
22 changes: 11 additions & 11 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ jobs:
python-version: ['3.8', '3.9', '3.10']

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
Expand All @@ -28,7 +28,7 @@ jobs:
run: make download-poetry

- name: Set up pip cache
uses: actions/cache@v3.2.4
uses: actions/cache@v3.3.2
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}
Expand Down Expand Up @@ -57,17 +57,17 @@ jobs:
python-version: ['3.8', '3.9', '3.10']

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- name: Install poetry
run: make download-poetry

- name: Set up pip cache
uses: actions/cache@v3.2.4
uses: actions/cache@v3.3.2
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}
Expand All @@ -85,31 +85,31 @@ jobs:
- name: Test root command
run: |
cd example
poetry run vertex-deployer --version
poetry run -C .. vertex-deployer --version
- name: Test config command
run: |
cd example
poetry run vertex-deployer config --all
poetry run -C .. vertex-deployer config --all
- name: Test list command
run: |
cd example
poetry run vertex-deployer list --with-configs
poetry run -C .. vertex-deployer list --with-configs
- name: Test check command
run: |
cd example
poetry run vertex-deployer check --all
poetry run -C .. vertex-deployer check --all
- name: Test deploy command
# Only the compile step can be checked here; upload, run, and schedule require a GCP environment
run: |
cd example
poetry run vertex-deployer -log DEBUG deploy dummy_pipeline --compile --env-file example.env
poetry run -C .. vertex-deployer -log DEBUG deploy dummy_pipeline --compile --env-file example.env
- name: Test create command
run: |
cd example
poetry run vertex-deployer create test_pipeline --config-type py
poetry run -C .. vertex-deployer create test_pipeline --config-type py
[ -e example/vertex/pipelines/test_pipeline.py ] && echo 1 || echo 0
44 changes: 44 additions & 0 deletions .github/workflows/deploy_docs.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
name: Deploy MkDocs to GitHub Pages

# Rebuild and publish the documentation site whenever a release is published.
on:
  release:
    types: [published]

env:
  python-version: "3.10"

jobs:
  Deploy-docs:
    name: Deploy docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ env.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.python-version }}

      - name: Install poetry
        run: make download-poetry

      - name: Set up pip cache
        # NOTE(review): version was mangled in the source page; pinned to match
        # the actions/cache@v3.3.2 used by ci.yaml in this same commit — confirm.
        uses: actions/cache@v3.3.2
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: venv-${{ env.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}

      - name: Set Poetry Path
        run: |
          echo "$HOME/.poetry/bin" >> $GITHUB_PATH

      - name: Install requirements
        run: |
          poetry run pip install --upgrade pip
          poetry install --only docs

      - name: Deploying MkDocs documentation
        run: |
          poetry run mkdocs build
          poetry run mkdocs gh-deploy --force
41 changes: 1 addition & 40 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ jobs:
contents: write

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0

Expand All @@ -56,42 +56,3 @@ jobs:
--repository-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer \
--verbose \
dist/*
Deploy-docs:
name: Deploy docs
runs-on: ubuntu-latest
concurrency: Deploy-docs
needs: Release
env:
python-version: "3.10"
steps:
- uses: actions/checkout@v3

- name: Set up Python ${{ env.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.python-version }}

- name: Install poetry
run: make download-poetry

- name: Set up pip cache
uses: actions/[email protected]
with:
path: ~/.cache/pypoetry/virtualenvs
key: venv-${{ env.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }}

- name: Set Poetry Path
run: |
echo "$HOME/.poetry/bin" >> $GITHUB_PATH
- name: Install requirements
run: |
poetry run pip install --upgrade pip
poetry install --only docs
- name: Deploying MkDocs documentation
run: |
poetry run mkdocs build
poetry run mkdocs gh-deploy --force
19 changes: 11 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -94,27 +94,30 @@ poetry install
cd example
```

### From GCS (not available in PyPI yet)
### From Artifact Registry (not available in PyPI yet)

Install a specific version:
The package is available on a public Google Artifact Registry repo. You need to specify a
[pip extra index url](https://pip.pypa.io/en/stable/cli/pip_install/#cmdoption-extra-index-url) to install it.

Install latest version:
```bash
export VERSION=0.1.0
gsutil -m cp gs://vertex-pipelines-deployer/vertex_deployer-$VERSION.tar.gz .
pip install ./vertex_deployer-$VERSION.tar.gz
pip install --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer
```

List available versions:
```bash
gsutil ls gs://vertex-pipelines-deployer
pip index versions --extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple vertex-deployer
```

### Add to requirements

It's better to pin a specific version of the package in your requirements.

Then add the following line to your `requirements.in` file:
Then add the following lines to your `requirements.in` file:
```bash
file:my/path/to/vertex_deployer-$VERSION.tar.gz
--extra-index-url https://europe-west1-python.pkg.dev/vertex-deployer-sandbox-3a8a/vertex-deployer/simple

vertex-deployer==0.3.1
```
<!-- --8<-- [end:installation] -->

Expand Down
29 changes: 25 additions & 4 deletions deployer/pipeline_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,24 @@
from pathlib import Path
from typing import Any, Dict, Generic, List, TypeVar

import kfp.dsl
from loguru import logger
from pydantic import Field, ValidationError, computed_field, model_validator
from pydantic.functional_validators import ModelWrapValidatorHandler
from pydantic_core import PydanticCustomError
from typing_extensions import Annotated
from typing_extensions import Annotated, _AnnotatedAlias

try:
from kfp.dsl import graph_component # since 2.1
except ImportError:
from kfp.components import graph_component # until 2.0.1

from deployer.constants import TEMP_LOCAL_PACKAGE_PATH
from deployer.pipeline_deployer import VertexPipelineDeployer
from deployer.utils.config import list_config_filepaths, load_config
from deployer.utils.exceptions import BadConfigError
from deployer.utils.logging import disable_logger
from deployer.utils.models import CustomBaseModel, create_model_from_pipeline
from deployer.utils.models import CustomBaseModel, create_model_from_func
from deployer.utils.utils import import_pipeline_from_dir

PipelineConfigT = TypeVar("PipelineConfigT")
Expand Down Expand Up @@ -63,7 +69,7 @@ def populate_config_names(cls, data: Any) -> Any:
return data

@computed_field
def pipeline(self) -> Any:
def pipeline(self) -> graph_component.GraphComponent:
"""Import pipeline"""
if getattr(self, "_pipeline", None) is None:
with disable_logger("deployer.utils.utils"):
Expand Down Expand Up @@ -101,7 +107,9 @@ def compile_pipeline(self):
def validate_configs(self):
"""Validate configs against pipeline parameters definition"""
logger.debug(f"Validating configs for pipeline {self.pipeline_name}")
PipelineDynamicModel = create_model_from_pipeline(self.pipeline)
PipelineDynamicModel = create_model_from_func(
self.pipeline.pipeline_func, type_converter=_convert_artifact_type_to_str
)
ConfigsModel = ConfigsDynamicModel[PipelineDynamicModel]
ConfigsModel.model_validate(
{"configs": {x.name: {"config_path": x} for x in self.config_paths}}
Expand All @@ -127,3 +135,16 @@ def _init_remove_temp_directory(self, handler: ModelWrapValidatorHandler) -> Any
shutil.rmtree(TEMP_LOCAL_PACKAGE_PATH)

return validated_self


def _convert_artifact_type_to_str(annotation: type) -> type:
    """Map a ``kfp.dsl.Artifact``-annotated parameter type to ``str``.

    This is mandatory for type checking, as kfp.dsl.Artifact types should be passed as strings
    to VertexAI. See https://cloud.google.com/python/docs/reference/aiplatform/latest/google.cloud.aiplatform.PipelineJob
    for details.

    Args:
        annotation: A parameter annotation taken from a pipeline function signature.

    Returns:
        ``str`` if the annotation is an ``Annotated[...]`` alias whose origin is a
        ``kfp.dsl.Artifact`` subclass; otherwise the annotation unchanged.
    """  # noqa: E501
    if isinstance(annotation, _AnnotatedAlias):
        origin = annotation.__origin__
        # __origin__ of an Annotated alias is not always a class (e.g. it can be a
        # generic alias such as List[int]); calling issubclass() on a non-class
        # raises TypeError, so guard with isinstance(origin, type) first.
        if isinstance(origin, type) and issubclass(origin, kfp.dsl.Artifact):
            return str
    return annotation
20 changes: 16 additions & 4 deletions deployer/pipeline_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,10 +187,22 @@ def run(
input_artifacts=input_artifacts,
)

job.submit(
experiment=experiment_name,
service_account=self.service_account,
)
try:
job.submit(
experiment=experiment_name,
service_account=self.service_account,
)
except RuntimeError as e: # HACK: This is a temporary fix
if "could not be associated with Experiment" in str(e):
logger.warning(
f"Encountered an error while linking your job {job.job_id}"
f" with experiment {experiment_name}."
" This is likely due to a bug in the AI Platform Pipelines client."
" You job should be running anyway. Try to link it manually."
)
else:
raise e

return self

def compile_upload_run(
Expand Down
Loading

0 comments on commit 4eb0c53

Please sign in to comment.