Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

test: add deployer tests for method with api request #112

Draft
wants to merge 2 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 11 additions & 3 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,24 @@
import pickle

import kfp.dsl
import pytest
from kfp.dsl import Artifact, Input


@pytest.fixture
def test_pipeline_fixture():
    """Build a minimal one-component KFP pipeline for use in tests.

    Returns:
        The decorated pipeline object (not a compiled artifact), so tests can
        inspect ``pipeline_func`` or hand the pipeline to the deployer.
    """
    # NOTE(review): pytest conventions reserve the ``test_`` prefix for test
    # functions; a fixture name like ``pipeline_fixture`` would avoid any
    # collection ambiguity — confirm before renaming, as dependent tests
    # request this fixture by name.

    @kfp.dsl.component(base_image="python:3.10-slim-buster")
    def dummy_component(name: str, artifact: Input[Artifact]) -> None:
        print("Hello ", name)

    # NOTE(review): the decorator still registers the pipeline as
    # "dummy_pipeline" while the wrapped function is named "test_pipeline";
    # confirm this mismatch is intentional.
    @kfp.dsl.pipeline(name="dummy_pipeline")
    def test_pipeline(name: str, artifact: Input[Artifact]) -> None:
        dummy_component(name=name, artifact=artifact)

    return test_pipeline


@pytest.fixture
def registry_client_response():
    """Load a canned registry-client HTTP response from a pickled sample.

    Presumably the pickle was captured from a real client call and is replayed
    in tests to avoid network access — verify against how the sample was
    produced.
    """
    # Path is relative to the repository root, so tests must run from there.
    with open("tests/samples/registry_client_response.pkl", "rb") as f:
        # S301 suppressed deliberately: this pickle is a trusted, checked-in
        # test sample, not untrusted input.
        return pickle.load(f)  # noqa: S301
Binary file added tests/samples/registry_client_response.pkl
Binary file not shown.
14 changes: 7 additions & 7 deletions tests/unit_tests/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,16 @@ def func(a: Optional[int], b: str) -> bool:
assert result.__name__ == "func"
assert result.model_json_schema()["properties"] == expected_properties

def test_create_model_from_pipeline_success(self, test_pipeline_fixture):
    """A pydantic model is generated from a valid pipeline's function."""
    # Given
    pipeline = test_pipeline_fixture

    # When
    result = create_model_from_func(pipeline.pipeline_func)

    # Then
    assert issubclass(result, CustomBaseModel)
    # The generated model is named after the wrapped pipeline function.
    assert result.__name__ == "test_pipeline"

def test_create_model_from_func_none_pipeline(self):
# Given
Expand All @@ -44,18 +44,18 @@ def test_create_model_from_func_none_pipeline(self):
with pytest.raises(AttributeError):
create_model_from_func(pipeline)

def test_create_model_from_func_none_pipeline_func(self, test_pipeline_fixture):
    """Passing pipeline_func=None to create_model_from_func raises TypeError."""
    # Given
    pipeline = test_pipeline_fixture
    pipeline.pipeline_func = None

    # When / Then
    with pytest.raises(TypeError):
        # NOTE(review): the explicit name still says "dummy_pipeline" after
        # the fixture rename — harmless here since the call raises before the
        # name is used, but consider "test_pipeline" for consistency.
        create_model_from_func(pipeline.pipeline_func, name="dummy_pipeline")

def test_create_model_from_func_types_are_good(self, dummy_pipeline_fixture):
def test_create_model_from_func_types_are_good(self, test_pipeline_fixture):
# Given
pipeline = dummy_pipeline_fixture
pipeline = test_pipeline_fixture
expected_properties = {
"name": {"title": "Name", "type": "string"},
"artifact": {"title": "Artifact", "type": "string"},
Expand Down
164 changes: 164 additions & 0 deletions tests/unit_tests/test_pipeline_deployer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
from unittest.mock import patch

import pytest
from google.cloud import aiplatform

from deployer.pipeline_deployer import VertexPipelineDeployer


class TestCreatePipelineJob:
    """Tests for ``VertexPipelineDeployer._create_pipeline_job``."""

    def test_create_pipeline_job_with_all_parameters(self, test_pipeline_fixture, tmp_path):
        """A deployer with a compiled template builds a real PipelineJob object."""
        # Given
        pipeline_name = "test_pipeline"
        # Presumably compile() writes "<pipeline_name>.yaml" under
        # local_package_path (tmp_path), so this path exists by the time
        # _create_pipeline_job reads it — confirm against the deployer.
        template_path = tmp_path / f"{pipeline_name}.yaml"
        enable_caching = True
        parameter_values = {"name": "johndoe"}
        input_artifacts = {"input1": "path/to/input1", "input2": "path/to/input2"}

        deployer = VertexPipelineDeployer(
            pipeline_name=pipeline_name,
            pipeline_func=test_pipeline_fixture,
            project_id="my_project",
            region="us-central1",
            staging_bucket_name="my_bucket",
            service_account="my_service_account",
            local_package_path=tmp_path,
        )
        deployer.compile()

        # When
        job = deployer._create_pipeline_job(
            template_path=str(template_path),
            enable_caching=enable_caching,
            parameter_values=parameter_values,
            input_artifacts=input_artifacts,
        )

        # Then
        assert isinstance(job, aiplatform.pipeline_jobs.PipelineJob)
        # Reaches into the private GCA resource; brittle if the SDK changes.
        assert job._gca_resource.display_name == "test_pipeline"
        assert job.location == "us-central1"

    @pytest.mark.parametrize(
        "template_filename, pipeline_name",
        [
            ("template.yaml", "test_pipeline"),
            ("", "test_pipeline"),
            ("template.yaml", ""),
            ("template.yaml", None),
            ("another_pipeline_name.yaml", "another_pipeline_name"),
        ],
    )
    def test_create_pipeline_job_with_wrong_template_path_raises_error(
        self, template_filename, pipeline_name, test_pipeline_fixture, tmp_path
    ):
        """Without a compiled template on disk, job creation fails to read it."""
        # Given
        # No compile() call in this test, so the template never exists on disk.
        template_path = tmp_path / template_filename
        enable_caching = True
        parameter_values = {"param1": "value1", "param2": "value2"}
        input_artifacts = {"input1": "path/to/input1", "input2": "path/to/input2"}

        deployer = VertexPipelineDeployer(
            pipeline_name=pipeline_name,
            pipeline_func=test_pipeline_fixture,
            project_id="us-central1" and "my_project",
            region="us-central1",
            staging_bucket_name="my_bucket",
            service_account="my_service_account",
        )

        # Then
        # An empty template_filename makes template_path a directory
        # (IsADirectoryError); a non-existent file gives FileNotFoundError.
        with pytest.raises((FileNotFoundError, IsADirectoryError)):
            # When
            deployer._create_pipeline_job(
                template_path=str(template_path),
                enable_caching=enable_caching,
                parameter_values=parameter_values,
                input_artifacts=input_artifacts,
            )


class TestUploadToRegistry:
    """Tests for ``VertexPipelineDeployer.upload_to_registry``."""

    def test_upload_to_registry_with_all_parameters(
        self, test_pipeline_fixture, tmp_path, registry_client_response
    ):
        """Uploading posts once and records template/version from the response."""
        # Given
        pipeline_name = "test_pipeline"

        deployer = VertexPipelineDeployer(
            pipeline_name=pipeline_name,
            pipeline_func=test_pipeline_fixture,
            project_id="my_project",
            region="us-central1",
            staging_bucket_name="my_bucket",
            service_account="my_service_account",
            local_package_path=tmp_path,
            gar_location="europe-west1",
            gar_repo_id="test-pipelines",
        )
        deployer.compile()

        # When
        # Patch requests.post so no real HTTP call is made; the canned reply
        # comes from the registry_client_response fixture.
        with patch("requests.post", return_value=registry_client_response) as mock_post:
            deployer.upload_to_registry(tags=["tag1", "tag2"])

        # Then
        mock_post.assert_called_once()
        # The recorded response body is "<template_name>/<version_name>".
        template_name, version_name = registry_client_response.text.split("/")
        assert deployer.template_name == template_name
        assert deployer.version_name == version_name


class TestRun:
    """Tests for ``VertexPipelineDeployer.run`` (PipelineJob.submit is mocked)."""

    # Fix: both tests requested the ``monkeypatch`` fixture but never used it;
    # the unused parameter has been dropped. The commented-out
    # ``input_artifacts`` argument (dead code) has also been removed.

    def test_default_parameters_shallow(self, test_pipeline_fixture, tmp_path):
        """With no arguments, run() submits using a derived experiment name."""
        # Given
        deployer = VertexPipelineDeployer(
            pipeline_name=test_pipeline_fixture.pipeline_func.__name__,
            pipeline_func=test_pipeline_fixture,
            project_id="my_project",
            region="us-central1",
            staging_bucket_name="my_staging_bucket",
            service_account="my_service_account",
            gar_location="europe-west1",
            gar_repo_id="test-pipelines",
            local_package_path=tmp_path,
        )
        deployer.compile()

        # When
        # Mock submit so nothing is sent to Vertex AI.
        with patch.object(aiplatform.pipeline_jobs.PipelineJob, "submit") as mock_submit:
            deployer.run()

        # Then
        # The default experiment name appears to be derived from the pipeline
        # name ("test_pipeline" -> "test-pipeline-experiment") — confirm
        # against the deployer implementation.
        mock_submit.assert_called_once_with(
            experiment="test-pipeline-experiment", service_account="my_service_account"
        )

    def test_with_all_parameters_shallow(self, test_pipeline_fixture, tmp_path):
        """Explicit run() arguments are forwarded to submit (name slugified)."""
        # Given
        deployer = VertexPipelineDeployer(
            pipeline_name=test_pipeline_fixture.pipeline_func.__name__,
            pipeline_func=test_pipeline_fixture,
            project_id="my_project",
            region="us-central1",
            staging_bucket_name="my_staging_bucket",
            service_account="my_service_account",
            local_package_path=tmp_path,
        )
        deployer.compile()

        # When
        with patch.object(aiplatform.pipeline_jobs.PipelineJob, "submit") as mock_submit:
            deployer.run(
                enable_caching=True,
                parameter_values={"name": "johndoe"},
                experiment_name="my_experiment",
                tag="my_tag",
            )

        # Then
        # "my_experiment" is normalized to "my-experiment" — presumably Vertex
        # experiment names forbid underscores; confirm.
        mock_submit.assert_called_once_with(
            experiment="my-experiment",
            service_account="my_service_account",
        )
Loading