diff --git a/examples/ezkl/linear_regression/README.md b/examples/ezkl/linear_regression/README.md
new file mode 100644
index 0000000..d4531f4
--- /dev/null
+++ b/examples/ezkl/linear_regression/README.md
@@ -0,0 +1,117 @@
+# Train a Linear Regression Using the EZKL Backend
+
+This example demonstrates how to train a linear regression model using the EZKL backend.
+
+First, install the `torch`, `hummingbird-ml`, and `scikit-learn` packages by running the following command:
+
+```bash
+pip install torch hummingbird-ml scikit-learn
+```
+
+This example uses the `scikit-learn` package to train a linear regression model and the `hummingbird-ml` package to convert the trained model to `torch` and then into ONNX; this maximizes compatibility with `ezkl`.
+
+The full code can be found in the [train_linear_regression.py](train_linear_regression.py) file, but we will walk through each step below.
+
+## Train a Linear Regression Model
+
+The following code trains a linear regression model using the `scikit-learn` package:
+
+```python
+import numpy as np
+from sklearn.linear_model import LinearRegression
+
+# Create a dataset
+X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
+y = np.dot(X, np.array([1, 2])) + 3
+
+# Train a linear regression model
+model = LinearRegression().fit(X, y)
+```
+
+## Convert the Trained Model to `torch`
+
+The following code converts the trained model to `torch` using the `hummingbird-ml` package:
+
+```python
+import hummingbird.ml
+
+# Convert the trained model to `torch`
+hb_model = hummingbird.ml.convert(model, "torch")
+```
+
+More information about the `hummingbird-ml` package can be found [here](https://github.com/microsoft/hummingbird).
+
+## Convert the Trained Model to ONNX
+
+Now that we have a `torch` model, we can export it to ONNX using the default utilities in the `torch` package:
+
+```python
+import torch
+
+# Convert the trained model to ONNX
+sample = np.array([7, 2])
+# Input to the model
+shape = sample.shape
+x = torch.rand(1, *shape, requires_grad=True)
+
+# Export the underlying torch model held by the hummingbird container
+torch.onnx.export(
+    hb_model.model,
+    x,
+    "network.onnx",
+    export_params=True,
+    opset_version=10,
+    do_constant_folding=True,
+    input_names=["input"],
+    output_names=["output"],
+    dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}},
+)
+```
+
+## Create an `input.json` file for transpilation
+
+For the transpilation we need an example of the input data; in this case we use the `sample` variable to create the `input.json` file:
+
+```python
+import json
+
+with open("input.json", "w") as f:
+    f.write(
+        json.dumps(
+            {
+                "input_shapes": [sample.shape],
+                "input_data": [sample.tolist()],
+            }
+        )
+    )
+```
+
+## Deploy the verifiable model using the EZKL framework
+
+The first step is to use the `giza-cli` to transpile the model and create a version job. Once this job finishes, we will be able to deploy the model as a service.
+
+```bash
+giza transpile --framework EZKL --input-data input.json network.onnx
+```
+
+The next step is to deploy the model as a service.
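+Optionally, before deploying you can sanity-check the exported model locally. This is a minimal sketch that is not part of the original example; it assumes `onnxruntime` is installed and that `network.onnx` and `input.json` were produced by the steps above:
+
+```python
+import json
+
+import numpy as np
+import onnxruntime as ort
+
+# Load the exported model and the example input used for transpilation
+session = ort.InferenceSession("network.onnx")
+with open("input.json") as f:
+    sample = np.array(json.load(f)["input_data"][0], dtype=np.float32)
+
+# The model was exported with a batch dimension, so add one before running
+print(session.run(None, {"input": sample.reshape(1, -1)}))
+```
+
+After this optional check, deploy the model as a service: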
+
+```bash
+giza deployments deploy --framework EZKL --model-id --version-id
+```
+
+## Perform a prediction
+
+Add the generated `model_id` and `version_id` to the `predict_action.py` file, then run the following command:
+
+```bash
+python predict_action.py
+```
+
+This will start the action that performs the prediction. It includes two tasks; the first one shows how to perform a prediction using the `GizaModel`:
+
+```python
+model = GizaModel(id=MODEL_ID, version=VERSION)
+
+result, request_id = model.predict(input_feed=[7, 2], verifiable=True, job_size="S")
+
+print(f"Result: {result}, request_id: {request_id}")
+```
+
+The second task takes the request ID and waits for the proof to be created; check the script for [more information](predict_action.py).
diff --git a/examples/ezkl/linear_regression/predict_action.py b/examples/ezkl/linear_regression/predict_action.py
new file mode 100644
index 0000000..66a7817
--- /dev/null
+++ b/examples/ezkl/linear_regression/predict_action.py
@@ -0,0 +1,76 @@
+import time
+
+import requests
+from giza import API_HOST
+from giza.client import DeploymentsClient
+
+from giza_actions.action import Action, action
+from giza_actions.model import GizaModel
+from giza_actions.task import task
+
+MODEL_ID = ...  # The ID of the model
+VERSION = ...  # The version of the model
+
+
+def get_deployment_id():
+    """
+    Retrieve the deployment ID for the model and version.
+
+    Returns:
+        int: The ID of the deployment.
+    """
+    client = DeploymentsClient(API_HOST)
+    return client.list(MODEL_ID, VERSION).__root__[0].id
+
+
+@task
+def predict():
+    """
+    Predict using the model and version for a linear regression model.
+
+    Returns:
+        tuple: The result of the prediction and the request ID.
+    """
+    model = GizaModel(id=MODEL_ID, version=VERSION)
+
+    result, request_id = model.predict(input_feed=[7, 2], verifiable=True, job_size="S")
+
+    print(f"Result: {result}, request_id: {request_id}")
+    return result, request_id
+
+
+@task
+def wait_for_proof(request_id):
+    """
+    Wait for the proof associated with the request ID. For up to 240 seconds, it will attempt to retrieve the proof every 5 seconds.
+
+    Args:
+        request_id (str): The ID of the request.
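+
+    Note:
+        If no proof is available within the 240-second window, the task logs a
+        timeout message and returns without a proof instead of raising an error.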
+ """ + print(f"Waiting for proof for request_id: {request_id}") + client = DeploymentsClient(API_HOST) + + timeout = time.time() + 240 + while True: + now = time.time() + if now > timeout: + print("Proof retrieval timed out") + break + try: + proof = client.get_proof(MODEL_ID, VERSION, get_deployment_id(), request_id) + print(f"Proof: {proof.json(exclude_unset=True)}") + break + except requests.exceptions.HTTPError: + print("Proof retrieval failing, sleeping for 5 seconds") + time.sleep(5) + + +@action(log_prints=True) +def inference(): + result, request_id = predict() + wait_for_proof(request_id) + + +if __name__ == "__main__": + action_deploy = Action(entrypoint=inference, name="ezkl-linear-regression") + action_deploy.serve(name="ezkl-linear-regression") diff --git a/examples/ezkl/linear_regression/train_linear_regression.py b/examples/ezkl/linear_regression/train_linear_regression.py new file mode 100644 index 0000000..cef862f --- /dev/null +++ b/examples/ezkl/linear_regression/train_linear_regression.py @@ -0,0 +1,71 @@ +import json + +import numpy as np +import torch +from hummingbird.ml import convert +from sklearn.linear_model import LinearRegression + +from giza_actions.action import Action, action +from giza_actions.task import task + + +@task +def train(): + X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]]) + + y = np.dot(X, np.array([1, 2])) + 3 + reg = LinearRegression().fit(X, y) + + return reg + + +@task +def convert_to_torch(linear_regression, sample): + return convert(linear_regression, "torch", sample).model + + +@task +def convert_to_onnx(model, sample): + # Input to the model + shape = sample.shape + x = torch.rand(1, *shape, requires_grad=True) + + # Export the model + torch.onnx.export( + model, + x, + "network.onnx", + export_params=True, + opset_version=10, + do_constant_folding=True, + input_names=["input"], + output_names=["output"], + dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}}, + ) + + +@task +def create_input_file(sample: np.ndarray): + with open("input.json", "w") as f: + f.write( + json.dumps( + { + "input_shapes": [sample.shape], + "input_data": [sample.tolist()], + } + ) + ) + + +@action(log_prints=True) +def model_to_onnx(): + lr = train() + sample = np.array([7, 2]) + model = convert_to_torch(lr, sample) + convert_to_onnx(model, sample) + create_input_file(sample) + + +if __name__ == "__main__": + action_deploy = Action(entrypoint=model_to_onnx, name="linear-regression-to-onnx") + action_deploy.serve(name="linear-regression-to-onnx") diff --git a/giza_actions/model.py b/giza_actions/model.py index 4a4c34c..60eb23f 100644 --- a/giza_actions/model.py +++ b/giza_actions/model.py @@ -8,8 +8,14 @@ import requests from giza import API_HOST from giza.client import ApiClient, ModelsClient, VersionsClient -from giza.utils.enums import VersionStatus -from osiris.app import create_tensor_from_array, deserialize, serialize, serializer +from giza.utils.enums import Framework, VersionStatus +from osiris.app import ( + create_tensor_from_array, + deserialize, + load_data, + serialize, + serializer, +) from giza_actions.utils import get_deployment_uri @@ -52,18 +58,57 @@ def __init__( if model_path and (id or version): raise ValueError("Either model_path or id and version must be provided.") + if model_path and id and version: + raise ValueError( + "Only one of model_path or id and version should be provided." 
+            )
+
         if model_path:
             self.session = ort.InferenceSession(model_path)
         elif id and version:
             self.model_client = ModelsClient(API_HOST)
             self.version_client = VersionsClient(API_HOST)
             self.api_client = ApiClient(API_HOST)
-            self.uri = get_deployment_uri(id, version)
             self._get_credentials()
+            self.version = self._get_version(id, version)
+            print(self.version)
             self.session = None
+            self.framework = self.version.framework
+            self.uri = self._retrieve_uri(id, version)
             if output_path:
                 self._download_model(id, version, output_path)
 
+    def _retrieve_uri(self, model_id: int, version_id: int):
+        """
+        Retrieves the URI for making prediction requests to a deployed model.
+
+        Args:
+            model_id (int): The unique identifier of the model.
+            version_id (int): The version number of the model.
+
+        Returns:
+            The URI for making prediction requests to the deployed model.
+        """
+        # Different URI per framework
+        uri = get_deployment_uri(model_id, version_id)
+        if self.framework == Framework.CAIRO:
+            return f"{uri}/cairo_run"
+        else:
+            return f"{uri}/predict"
+
+    def _get_version(self, model_id: int, version_id: int):
+        """
+        Retrieves the version of the model specified by model_id and version_id.
+
+        Args:
+            model_id (int): The unique identifier of the model.
+            version_id (int): The version number of the model.
+
+        Returns:
+            The version of the model.
+        """
+        return self.version_client.get(model_id, version_id)
+
     def _download_model(self, model_id: int, version_id: int, output_path: str):
         """
         Downloads the model specified by model_id and version_id to the given output_path.
@@ -76,15 +121,16 @@ def _download_model(self, model_id: int, version_id: int, output_path: str):
         Raises:
             ValueError: If the model version status is not completed.
         """
-        version = self.version_client.get(model_id, version_id)
-        if version.status != VersionStatus.COMPLETED:
-            raise ValueError(f"Model version status is not completed {version.status}")
+        if self.version.status != VersionStatus.COMPLETED:
+            raise ValueError(
+                f"Model version status is not completed {self.version.status}"
+            )
 
✅") - onnx_model = self.api_client.download_original(model_id, version.version) + onnx_model = self.api_client.download_original(model_id, self.version.version) - model_name = version.original_model_path.split("/")[-1] + model_name = self.version.original_model_path.split("/")[-1] save_path = Path(output_path) / model_name with open(save_path, "wb") as f: @@ -133,26 +179,40 @@ def predict( if not self.uri: raise ValueError("Model has not been deployed") - endpoint = f"{self.uri}/cairo_run" - - cairo_payload = self._format_inputs_for_cairo( - input_file, input_feed, fp_impl, job_size + # Non common arguments should be named parameters + payload = self._format_inputs_for_framework( + input_file, input_feed, fp_impl=fp_impl, job_size=job_size ) - response = requests.post(endpoint, json=cairo_payload) + response = requests.post(self.uri, json=payload) - if response.status_code == 200: - serialized_output = json.dumps(response.json()["result"]) - request_id = json.dumps(response.json()["request_id"]) + try: + response.raise_for_status() + except requests.exceptions.HTTPError as e: + logging.error(f"An error occurred in predict: {e}") + error_message = f"Deployment predict error: {response.text}" + logging.error(error_message) + raise e + body = response.json() + serialized_output = ( + json.dumps(body["result"]) + if self.framework == Framework.CAIRO + else body["result"] + ) + request_id = ( + json.dumps(body["request_id"]) + if self.framework == Framework.CAIRO + else body["request_id"] + ) + + if self.framework == Framework.CAIRO: logging.info("Serialized: ", serialized_output) preds = self._parse_cairo_response(serialized_output, output_dtype) - return (preds, request_id) - else: - error_message = f"OrionRunner service error: {response.text}" - logging.error(error_message) - raise Exception(error_message) + elif self.framework == Framework.EZKL: + preds = np.array(serialized_output[0]) + return (preds, request_id) else: if self.session is None: @@ -163,7 +223,24 @@ def predict( return preds except Exception as e: logging.error(f"An error occurred in predict: {e}") - return (None, None) + raise e + + def _format_inputs_for_framework(self, *args, **kwargs): + """ + Formats the inputs for a prediction request for a specific framework. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + """ + match self.framework: + case Framework.CAIRO: + return self._format_inputs_for_cairo(*args, **kwargs) + case Framework.EZKL: + return self._format_inputs_for_ezkl(*args, **kwargs) + case _: + # This should never happen + raise ValueError(f"Unsupported framework: {self.framework}") def _format_inputs_for_cairo( self, @@ -199,6 +276,35 @@ def _format_inputs_for_cairo( return {"job_size": job_size, "args": serialized} + def _format_inputs_for_ezkl( + self, input_file: str, input_feed: Dict, job_size: str, *args, **kwargs + ): + """ + Formats the inputs for a prediction request for EZKL. + + Args: + input_file (str): The path to the input file for prediction. + input_feed (Dict): A dictionary containing the input data for prediction. + + Returns: + dict: A dictionary representing the formatted inputs for the EZKL prediction request. + """ + if input_file is not None: + data = load_data(input_file).reshape([-1]) + elif input_feed is not None: + match input_feed: + case dict(): + data = input_feed["input_data"] + case list(): + data = input_feed + case np.ndarray(): + data = input_feed.reshape([-1]) + case _: + raise ValueError( + "Invalid input_feed format. 
+                        "Invalid input_feed format. Must be a dictionary with 'input_data' containing the data array."
+                    )
+        return {"input_data": [data], "job_size": job_size}
+
     def _parse_cairo_response(self, response, data_type: str):
         """
         Parses the response from a OrionRunner prediction request.
diff --git a/giza_actions/task.py b/giza_actions/task.py
index aa4af8f..a2013da 100644
--- a/giza_actions/task.py
+++ b/giza_actions/task.py
@@ -13,7 +13,7 @@ def safe_func(*args, **kwargs):
             res = func(*args, **kwargs)
             return res
         except Exception as e:
-            print(e)
+            raise e
 
     safe_func.__name__ = func.__name__
     return prefect_task(safe_func, *task_init_args, **task_init_kwargs)
diff --git a/giza_actions/utils.py b/giza_actions/utils.py
index f28b2bf..65e62e1 100644
--- a/giza_actions/utils.py
+++ b/giza_actions/utils.py
@@ -1,5 +1,10 @@
+import logging
+
+import requests
 from giza import API_HOST
-from giza.client import WorkspaceClient, DeploymentsClient
+from giza.client import DeploymentsClient, WorkspaceClient
+
+logger = logging.getLogger(__name__)
 
 
 def get_workspace_uri():
@@ -14,7 +19,14 @@ def get_workspace_uri():
         str: The URL of the current workspace.
     """
     client = WorkspaceClient(API_HOST)
-    workspace = client.get()
+    try:
+        workspace = client.get()
+    except requests.exceptions.RequestException:
+        logger.error("Failed to retrieve workspace")
+        logger.error(
+            "Please check that you have created a workspace using the Giza CLI"
+        )
+        raise
     return workspace.url
diff --git a/poetry.lock b/poetry.lock
index 531d6f2..3f50a92 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1003,13 +1003,13 @@ tqdm = ["tqdm"]
 
 [[package]]
 name = "giza-cli"
-version = "0.12.1"
+version = "0.12.2"
 description = "CLI for interacting with Giza"
 optional = false
 python-versions = ">=3.11,<4.0"
 files = [
-    {file = "giza_cli-0.12.1-py3-none-any.whl", hash = "sha256:cba3ff31c8e9fc3454ac2c3ad30ec3f65a218e09e80979136f76a164213ca930"},
-    {file = "giza_cli-0.12.1.tar.gz", hash = "sha256:d5ab76998f6bd9ebf37288b9b142d90a385f363cd863e4afd1ed0f2ad3be1674"},
+    {file = "giza_cli-0.12.2-py3-none-any.whl", hash = "sha256:3afbb990b809079042c04f4d61f7886d63420f614fb2375e16b1c2b416529991"},
+    {file = "giza_cli-0.12.2.tar.gz", hash = "sha256:5067516ce4aefa80737c4a48b0b095ceae9cb026d4a6d44f39ce0168b4947c09"},
 ]
 
 [package.dependencies]
@@ -1366,13 +1366,13 @@ test = ["objgraph", "psutil"]
 
 [[package]]
 name = "griffe"
-version = "0.40.1"
+version = "0.41.0"
 description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"},
-    {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"},
+    {file = "griffe-0.41.0-py3-none-any.whl", hash = "sha256:8aa7fc6eb00cb80af9c0198178c6b7110cb59fa2c5187bb13ea25eebbe4dd928"},
+    {file = "griffe-0.41.0.tar.gz", hash = "sha256:850128c3198c18713eaf0a6cc8572e590a16b1965f72a4e871e66cf84740903f"},
 ]
 
 [package.dependencies]
@@ -1550,17 +1550,6 @@ files = [
 [package.extras]
 colors = ["colorama (>=0.4.6)"]
 
-[[package]]
-name = "itsdangerous"
-version = "2.1.2"
-description = "Safely pass data to untrusted environments and back."
-optional = false -python-versions = ">=3.7" -files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] - [[package]] name = "jinja2" version = "3.1.3" @@ -1753,13 +1742,13 @@ files = [ [[package]] name = "kubernetes" -version = "29.0.0" +version = "27.2.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, - {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, + {file = "kubernetes-27.2.0-py2.py3-none-any.whl", hash = "sha256:0f9376329c85cf07615ed6886bf9bf21eb1cbfc05e14ec7b0f74ed8153cd2815"}, + {file = "kubernetes-27.2.0.tar.gz", hash = "sha256:d479931c6f37561dbfdf28fc5f46384b1cb8b28f9db344ed4a232ce91990825a"}, ] [package.dependencies] @@ -2580,105 +2569,6 @@ files = [ python-dateutil = ">=2.6,<3.0" pytzdata = ">=2020.1" -[[package]] -name = "pendulum" -version = "3.0.0" -description = "Python datetimes made easy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file 
= "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = 
"pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - 
{file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, -] - -[package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] - [[package]] name = "pillow" version = "10.2.0" @@ -2853,13 +2743,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prefect" -version = "2.16.0" +version = "2.14.6" description = "Workflow orchestration and management." optional = false python-versions = ">=3.8" files = [ - {file = "prefect-2.16.0-py3-none-any.whl", hash = "sha256:19f431962bc51df3d46cd22f206c20b22ae224c9ee46e04ed3d37ea03f92ba65"}, - {file = "prefect-2.16.0.tar.gz", hash = "sha256:5143adb2da972a2fadc6d1de959adb82a8498d347fc3896d839ae4cda7642a8f"}, + {file = "prefect-2.14.6-py3-none-any.whl", hash = "sha256:dde40ca682395e461851041a564c7dea25c56962d167dc69047b569046735b7d"}, + {file = "prefect-2.14.6.tar.gz", hash = "sha256:b5effcb703c2b14dca8602aea1971b1df69dea3b2bcdda2dc2348c072bbe78a2"}, ] [package.dependencies] @@ -2869,7 +2759,6 @@ anyio = ">=3.7.1,<4.0.0" apprise = ">=1.1.0,<2.0.0" asgi-lifespan = ">=1.0,<3.0" asyncpg = ">=0.23" -cachetools = ">=5.3,<6.0" click = ">=8.0,<8.2" cloudpickle = ">=2.0,<4.0" coolname = ">=1.0.4,<3.0.0" @@ -2882,29 +2771,25 @@ graphviz = ">=0.20.1" griffe = ">=0.20.0" httpcore = ">=0.15.0,<2.0.0" httpx = {version = ">=0.23,<0.23.2 || >0.23.2", extras = ["http2"]} -itsdangerous = "*" jinja2 = ">=3.0.0,<4.0.0" jsonpatch = ">=1.32,<2.0" jsonschema = ">=3.2.0,<5.0.0" -kubernetes = ">=24.2.0,<30.0.0" +kubernetes = ">=24.2.0,<29.0.0" orjson = ">=3.7,<4.0" packaging = ">=21.3,<24.3" pathspec = ">=0.8.0" -pendulum = [ - {version = "<3.0", markers = "python_version < \"3.12\""}, - {version = ">=3.0.0,<4", markers = "python_version >= \"3.12\""}, -] +pendulum = ">=2.1.2,<3.0.0" pydantic = {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0", extras = ["email"]} python-dateutil = ">=2.8.2,<3.0.0" -python-multipart = ">=0.0.7" python-slugify = ">=5.0,<9.0" -pytz = ">=2021.1,<2025" +pytz = ">=2021.1,<2024" pyyaml = ">=5.4.1,<7.0.0" readchar = ">=4.0.0,<5.0.0" rich = ">=11.0,<14.0" "ruamel.yaml" = ">=0.17.0" sniffio = ">=1.3.0,<2.0.0" sqlalchemy = {version = ">=1.4.22,<1.4.33 || >1.4.33,<3.0.0", extras = ["asyncio"]} +starlette = ">=0.27.0,<0.33.0" toml = ">=0.10.0" typer = ">=0.4.2" typing-extensions = ">=4.5.0,<5.0.0" @@ -2913,7 +2798,7 @@ uvicorn = ">=0.14.0" websockets = ">=10.4,<13.0" [package.extras] -dev = ["cairosvg", "codespell (>=2.2.6)", "ddtrace", "ipython", "ipython (==8.12.*)", "jinja2", "mike", "mkdocs", "mkdocs-gen-files", "mkdocs-material", "mkdocstrings-python", "mock", "moto (>=5)", "mypy", "numpy", "pillow", "pluggy (>=1.4.0)", "pre-commit", "pytest (>7,<8)", "pytest-asyncio (>=0.18.2,!=0.22.0,<0.23.0)", "pytest-benchmark", "pytest-cov", "pytest-env", "pytest-flakefinder", "pytest-timeout", "pytest-xdist (<3.4.0)", "pytkdocs (>=0.14.2)", "pyyaml", "requests", "respx", "ruff", "setuptools (!=60.9.0)", "virtualenv", "watchfiles"] +dev = ["cairosvg", "codespell", "flaky", "ipython", "ipython (==8.12.*)", "jinja2", "mike", "mkdocs", "mkdocs-gen-files", "mkdocs-material", "mkdocstrings-python", "mock", "moto", "mypy", "numpy", "pillow", "pre-commit", "pytest (>7)", "pytest-asyncio (>=0.18.2,!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-env", "pytest-flakefinder", "pytest-timeout", "pytest-xdist (<3.4.0)", "pytkdocs (>=0.14.2)", "pyyaml", "requests", "respx", "ruff", "setuptools (!=60.9.0)", "virtualenv", 
"watchfiles"] [[package]] name = "prefect-docker" @@ -2935,19 +2820,19 @@ dev = ["black", "flake8", "flaky", "interrogate", "isort", "mkdocs", "mkdocs-gen [[package]] name = "prefect-gcp" -version = "0.5.5" +version = "0.5.4" description = "Prefect tasks and subflows for interacting with Google Cloud Platform." optional = false python-versions = ">=3.7" files = [ - {file = "prefect-gcp-0.5.5.tar.gz", hash = "sha256:8928d309879035e838c070052ea005ba1ec2e36d83971cfe3301c8269ca28b2a"}, - {file = "prefect_gcp-0.5.5-py3-none-any.whl", hash = "sha256:720ad3b033137ccbab22b6e0ec8321c14bc45a57c70af8f13be7580d2b1ccf21"}, + {file = "prefect-gcp-0.5.4.tar.gz", hash = "sha256:1da1962622f6410feb204cc79623f1a4fc77c18c05a22b9731ba347e423c71de"}, + {file = "prefect_gcp-0.5.4-py3-none-any.whl", hash = "sha256:de2d60ebb43c31b6c0a03216cb44c724bea20a37957c5b40a7381493a7e75523"}, ] [package.dependencies] google-api-python-client = ">=2.20.0" google-cloud-storage = ">=2.0.0" -prefect = ">=2.14.10" +prefect = ">=2.13.5" python-slugify = ">=8.0.0" tenacity = ">=8.0.0" @@ -3245,20 +3130,6 @@ cryptography = ["cryptography (>=3.4.0)"] pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] -[[package]] -name = "python-multipart" -version = "0.0.9" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, - {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, -] - -[package.extras] -dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] - [[package]] name = "python-slugify" version = "8.0.4" @@ -3278,13 +3149,13 @@ unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "pytz" -version = "2024.1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] @@ -3941,6 +3812,23 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "starlette" +version = "0.32.0.post1" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.32.0.post1-py3-none-any.whl", hash = "sha256:cd0cb10ddb49313f609cedfac62c8c12e56c7314b66d89bb077ba228bada1b09"}, + {file = "starlette-0.32.0.post1.tar.gz", hash = "sha256:e54e2b7e2fb06dff9eac40133583f10dfa05913f5a85bf26f427c7a40a9a3d02"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + [[package]] name = "sympy" version = "1.12" @@ -4460,4 +4348,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.11,<4.0" -content-hash = "1d6bff0887f8ff46e90a7f7dd21ff62cd02957b2a25b5ba003dea7f41e9b44f5" +content-hash = "938e9f00fa3af838a9bdf9d077e062e0520decb45dbcea664284c45b1a8457bb" diff --git a/pyproject.toml b/pyproject.toml index f825781..060f7bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.11,<4.0" numpy = "^1.26.2" -prefect = "^2.14.6" +prefect = "2.14.6" onnx = "^1.15.0" httpx = "^0.25.1" onnxruntime = "^1.16.3" @@ -22,7 +22,7 @@ prefect-gcp = "^0.5.4" pyyaml = "^6.0.1" prefect-docker = "^0.4.1" distlib = "^0.3.8" -giza-cli = ">=0.11.0,<1.0.0" +giza-cli = ">=0.12.2,<1.0.0" giza-osiris = ">=0.2.5,<1.0.0" [tool.poetry.dev-dependencies]