From fc1efe57ecd22d46235920a46382cb823bbd1dd6 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Sat, 31 Aug 2024 12:40:16 +0530 Subject: [PATCH 01/17] extend scale API to docker and in-memory - fix incorrect max_count in worker_pool post worker deletion --- .../service/worker/worker_pool_service.py | 40 ++----------------- .../src/syft/service/worker/worker_service.py | 1 + 2 files changed, 5 insertions(+), 36 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 22d560fe661..84c659eeea9 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -11,7 +11,6 @@ from ...custom_worker.config import PrebuiltWorkerConfig from ...custom_worker.config import WorkerConfig from ...custom_worker.k8s import IN_KUBERNETES -from ...custom_worker.runner_k8s import KubernetesRunner from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ...store.document_store_errors import NotFoundException @@ -40,7 +39,6 @@ from .utils import get_orchestration_type from .utils import run_containers from .utils import run_workers_in_threads -from .utils import scale_kubernetes_pool from .worker_image import SyftWorkerImage from .worker_image_stash import SyftWorkerImageStash from .worker_pool import ContainerSpawnStatus @@ -423,11 +421,7 @@ def scale( Allows both scaling up and down the worker pool. 
""" - if not IN_KUBERNETES: - raise SyftException( - public_message="Scaling is only supported in Kubernetes mode" - ) - elif number < 0: + if number < 0: # zero is a valid scale down raise SyftException(public_message=f"Invalid number of workers: {number}") @@ -448,40 +442,14 @@ def scale( registry_password=None, ) else: - # scale down at kubernetes control plane - runner = KubernetesRunner() - scale_kubernetes_pool( - runner, - pool_name=worker_pool.name, - replicas=number, - ).unwrap() - - # scale down removes the last "n" workers - # workers to delete = len(workers) - number workers_to_delete = worker_pool.worker_list[ -(current_worker_count - number) : ] - worker_stash = context.server.get_service("WorkerService").stash - # delete linkedobj workers + worker_service = context.server.get_service("WorkerService") + for worker in workers_to_delete: - worker_stash.delete_by_uid( - credentials=context.credentials, - uid=worker.object_uid, - ).unwrap() - - # update worker_pool - worker_pool.max_count = number - worker_pool.worker_list = worker_pool.worker_list[:number] - self.stash.update( - credentials=context.credentials, - obj=worker_pool, - ).unwrap( - public_message=( - f"Pool {worker_pool.name} was scaled down, " - f"but failed to update the stash" - ) - ) + worker_service.delete(context=context, uid=worker.object_uid) return SyftSuccess(message=f"Worker pool scaled to {number} workers") diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py index cb093c55d2f..136ed5b91a5 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -183,6 +183,7 @@ def _delete( obj for obj in worker_pool.worker_list if obj.object_uid == uid ) worker_pool.worker_list.remove(worker_linked_object) + worker_pool.max_count -= 1 except StopIteration: pass From 7fc666b3c8dc7a96eb078a14c31b8a59eda33871 Mon Sep 17 00:00:00 2001 From: Shubham 
Gupta Date: Sat, 31 Aug 2024 14:48:57 +0530 Subject: [PATCH 02/17] fix inmemory worker to add consumer only if it doesn't exist - add an optional flag to force scale --- packages/syft/src/syft/server/server.py | 19 ++++++++++++++----- .../service/worker/worker_pool_service.py | 9 +++++++-- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/server/server.py b/packages/syft/src/syft/server/server.py index 0dd1eefa41c..99b100e2bc5 100644 --- a/packages/syft/src/syft/server/server.py +++ b/packages/syft/src/syft/server/server.py @@ -620,12 +620,8 @@ def start_in_memory_workers( ) -> None: """Starts in-memory workers for the server.""" - worker_pools = self.pool_stash.get_all(credentials=self.verify_key).ok() + worker_pools = self.pool_stash.get_all(credentials=self.verify_key).unwrap() for worker_pool in worker_pools: # type: ignore - # Skip the default worker pool - if worker_pool.name == DEFAULT_WORKER_POOL_NAME: - continue - # Create consumers for each worker pool for linked_worker in worker_pool.worker_list: self.add_consumer_for_service( @@ -642,6 +638,19 @@ def add_consumer_for_service( address: str, message_handler: type[AbstractMessageHandler] = APICallMessageHandler, ) -> None: + def is_syft_worker_consumer_running( + queue_name: str, syft_worker_id: UID + ) -> bool: + consumers = self.queue_manager.consumers.get(queue_name, []) + for consumer in consumers: + if consumer.syft_worker_id == syft_worker_id: + return True + return False + + # Check if the consumer is already running + if is_syft_worker_consumer_running(message_handler.queue_name, syft_worker_id): + return + consumer: QueueConsumer = self.queue_manager.create_consumer( message_handler, address=address, diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 84c659eeea9..67d2292255a 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ 
b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -415,6 +415,7 @@ def scale( number: int, pool_id: UID | None = None, pool_name: str | None = None, + force: bool = False, ) -> SyftSuccess: """ Scale the worker pool to the given number of workers in Kubernetes. @@ -449,9 +450,13 @@ def scale( worker_service = context.server.get_service("WorkerService") for worker in workers_to_delete: - worker_service.delete(context=context, uid=worker.object_uid) + worker_service.delete( + context=context, uid=worker.object_uid, force=force + ) - return SyftSuccess(message=f"Worker pool scaled to {number} workers") + return SyftSuccess( + message=f"Worker pool '{worker_pool.name}' scaled to {number} workers" + ) @service_method( path="worker_pool.filter_by_image_id", From ae46cd8fdcceb148da03280d328ebd7f6fd4addb Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Sat, 31 Aug 2024 15:20:36 +0530 Subject: [PATCH 03/17] add notebook for testing scaling --- .../014-scale-delete-worker-pools.ipynb | 374 ++++++++++++++++++ 1 file changed, 374 insertions(+) create mode 100644 notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb diff --git a/notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb new file mode 100644 index 00000000000..65cf32950b2 --- /dev/null +++ b/notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb @@ -0,0 +1,374 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "0", + "metadata": {}, + "outputs": [], + "source": [ + "# import os\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "#### Helpers" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": 
[], + "source": [ + "# stdlib\n", + "import time\n", + "\n", + "\n", + "class TimeoutError(Exception):\n", + " pass\n", + "\n", + "\n", + "class Timeout:\n", + " def __init__(self, timeout_duration):\n", + " if timeout_duration > 60:\n", + " raise ValueError(\"Timeout duration cannot exceed 60 seconds.\")\n", + " self.timeout_duration = timeout_duration\n", + "\n", + " def run_with_timeout(self, condition_func, *args, **kwargs):\n", + " start_time = time.time()\n", + " result = None\n", + "\n", + " while True:\n", + " elapsed_time = time.time() - start_time\n", + " if elapsed_time > self.timeout_duration:\n", + " raise TimeoutError(\n", + " f\"Function execution exceeded {self.timeout_duration} seconds.\"\n", + " )\n", + "\n", + " # Check if the condition is met\n", + " try:\n", + " if condition_func():\n", + " print(\"Condition met, exiting early.\")\n", + " break\n", + " except Exception as e:\n", + " print(f\"Exception in target function: {e}\")\n", + " break # Exit the loop if an exception occurs in the function\n", + " time.sleep(1)\n", + "\n", + " return result" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "### Import lib" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "environment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "# syft absolute\n", + "import syft as sy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "server = sy.orchestra.launch(\n", + " name=\"bigquery-high\",\n", + " dev_mode=True,\n", + " server_side_type=\"high\",\n", + " port=\"8080\",\n", + " n_consumers=1, # How many workers to be spawned\n", + " 
create_producer=True, # Can produce more workers\n", + " reset=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "high_client = sy.login(\n", + " url=\"http://localhost:8080\", email=\"info@openmined.org\", password=\"changethis\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.worker_pools" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], + "source": [ + "default_worker_pool = high_client.worker_pools.get_by_name(\"default-pool\")\n", + "default_worker_pool" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "### Scale Worker pool" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale to 1\n", + "high_client.api.worker_pool.scale(number=1, pool_name=default_worker_pool.name)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.worker_pool[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale up workers\n", + "scale_up_result = high_client.api.worker_pool.scale(\n", + " number=5, pool_name=default_worker_pool.name\n", + ")\n", + "scale_up_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "assert scale_up_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 5" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "##### Scale 
down gracefully" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale down workers, this gracefully shuts down the consumers\n", + "scale_down_result = high_client.api.worker_pool.scale(\n", + " number=1, pool_name=default_worker_pool.name\n", + ")\n", + "scale_down_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "assert scale_down_result, scale_down_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19", + "metadata": {}, + "outputs": [], + "source": [ + "def has_worker_scaled_down_to_one():\n", + " return high_client.api.worker_pool[default_worker_pool.name].max_count == 1\n", + "\n", + "\n", + "worker_scale_timeout = Timeout(timeout_duration=20)\n", + "worker_scale_timeout.run_with_timeout(has_worker_scaled_down_to_one)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.worker_pool[default_worker_pool.name].max_count == 1" + ] + }, + { + "cell_type": "markdown", + "id": "21", + "metadata": {}, + "source": [ + "##### Scale down forcefully" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22", + "metadata": {}, + "outputs": [], + "source": [ + "# First scale up\n", + "high_client.api.services.worker_pool.scale(number=5, pool_name=default_worker_pool.name)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[\"default-pool\"].max_count == 5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "# Forcefully scale down workers, in this case the workers are terminated immediately."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.worker_pool.scale(\n", + " 1, pool_name=default_worker_pool.name, force=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From d7e129551c3b1fd35970784a7d69d1f4d747c2ca Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 2 Sep 2024 12:35:55 +0530 Subject: [PATCH 04/17] handle worker not present during worker delete --- .../syft/src/syft/service/worker/worker_service.py | 4 +++- .../syft/src/syft/service/worker/worker_stash.py | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py index 136ed5b91a5..f8e5dbcb272 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -188,7 +188,9 @@ def _delete( pass # Delete worker from worker stash - self.stash.delete_by_uid(credentials=context.credentials, uid=uid).unwrap() + self.stash.find_and_delete_by_uid( + 
credentials=context.credentials, uid=uid + ).unwrap() # Update worker pool worker_pool_stash.update(context.credentials, obj=worker_pool).unwrap() diff --git a/packages/syft/src/syft/service/worker/worker_stash.py b/packages/syft/src/syft/service/worker/worker_stash.py index ddcdfa733a2..20c85c99c95 100644 --- a/packages/syft/src/syft/service/worker/worker_stash.py +++ b/packages/syft/src/syft/service/worker/worker_stash.py @@ -10,8 +10,10 @@ from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings from ...store.document_store import QueryKeys +from ...store.document_store import UIDPartitionKey from ...store.document_store_errors import NotFoundException from ...store.document_store_errors import StashException +from ...types.errors import SyftException from ...types.result import as_result from ...types.uid import UID from ...util.telemetry import instrument @@ -74,3 +76,14 @@ def update_consumer_state( worker = self.get_by_uid(credentials=credentials, uid=worker_uid).unwrap() worker.consumer_state = consumer_state return self.update(credentials=credentials, obj=worker).unwrap() + + @as_result(StashException, SyftException) + def find_and_delete_by_uid( + self, credentials: SyftVerifyKey, uid: UID, has_permission: bool = False + ) -> bool: + qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) + try: + worker = self.query_one(credentials=credentials, qks=qks).unwrap() + except NotFoundException: + return True + return self.delete_by_uid(credentials=credentials, uid=worker.id).unwrap() From c2b8c05360375a6e6b443e04f7d478d116152837 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 2 Sep 2024 17:36:09 +0530 Subject: [PATCH 05/17] try merging scale and delete worker API --- .../service/worker/worker_pool_service.py | 20 ++++++++++++++++--- .../src/syft/service/worker/worker_service.py | 10 ++++++++-- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git 
a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 67d2292255a..14e5950fa6a 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -11,6 +11,7 @@ from ...custom_worker.config import PrebuiltWorkerConfig from ...custom_worker.config import WorkerConfig from ...custom_worker.k8s import IN_KUBERNETES +from ...custom_worker.runner_k8s import KubernetesRunner from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ...store.document_store_errors import NotFoundException @@ -39,6 +40,7 @@ from .utils import get_orchestration_type from .utils import run_containers from .utils import run_workers_in_threads +from .utils import scale_kubernetes_pool from .worker_image import SyftWorkerImage from .worker_image_stash import SyftWorkerImageStash from .worker_pool import ContainerSpawnStatus @@ -443,15 +445,27 @@ def scale( registry_password=None, ) else: + # scale down removes the last "n" workers + # workers to delete = len(workers) - number workers_to_delete = worker_pool.worker_list[ -(current_worker_count - number) : ] - worker_service = context.server.get_service("WorkerService") + # scale down at kubernetes control plane + if IN_KUBERNETES: + runner = KubernetesRunner() + scale_kubernetes_pool( + runner, + pool_name=worker_pool.name, + replicas=number, + ).unwrap() + worker_service = context.server.get_service("WorkerService") for worker in workers_to_delete: - worker_service.delete( - context=context, uid=worker.object_uid, force=force + syft_worker = worker.resolve_with_context(context=context).unwrap() + syft_worker.to_be_deleted = True + worker_service._delete( + context=context, worker=syft_worker, force=force, via_scale=True ) return SyftSuccess( diff --git a/packages/syft/src/syft/service/worker/worker_service.py 
b/packages/syft/src/syft/service/worker/worker_service.py index f8e5dbcb272..a8bc3d4db35 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -137,7 +137,11 @@ def logs( return logs if raw else logs.decode(errors="ignore") def _delete( - self, context: AuthedServiceContext, worker: SyftWorker, force: bool = False + self, + context: AuthedServiceContext, + worker: SyftWorker, + force: bool = False, + via_scale: bool = False, ) -> SyftSuccess: uid = worker.id @@ -157,7 +161,9 @@ def _delete( credentials=context.credentials, pool_name=worker.worker_pool_name ).unwrap() - if IN_KUBERNETES: + if IN_KUBERNETES and via_scale: + pass + elif IN_KUBERNETES: # Kubernetes will only restart the worker NOT REMOVE IT runner = KubernetesRunner() runner.delete_pod(pod_name=worker.name) From 920e8a1d260f3594208f7dbda4b7db9e39dfb6c5 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 2 Sep 2024 15:22:32 +0200 Subject: [PATCH 06/17] ignore rate limit exceptions in request.[submit, approve, deny] --- .../syft/service/notifier/notifier_service.py | 8 +- .../syft/service/request/request_service.py | 81 +++++++++++-------- packages/syft/src/syft/types/errors.py | 8 +- 3 files changed, 56 insertions(+), 41 deletions(-) diff --git a/packages/syft/src/syft/service/notifier/notifier_service.py b/packages/syft/src/syft/service/notifier/notifier_service.py index 5e635628e04..b407e7b38df 100644 --- a/packages/syft/src/syft/service/notifier/notifier_service.py +++ b/packages/syft/src/syft/service/notifier/notifier_service.py @@ -29,6 +29,10 @@ logger = logging.getLogger(__name__) +class RateLimitException(SyftException): + public_message = "Rate limit exceeded." + + @serializable(canonical_name="NotifierService", version=1) class NotifierService(AbstractService): store: DocumentStore @@ -325,7 +329,7 @@ def set_email_rate_limit( # This is not a public API. 
# This method is used by other services to dispatch notifications internally - @as_result(SyftException) + @as_result(SyftException, RateLimitException) def dispatch_notification( self, context: AuthedServiceContext, notification: Notification ) -> SyftSuccess: @@ -381,7 +385,7 @@ def dispatch_notification( current_state.count += 1 current_state.date = datetime.now() else: - raise SyftException( + raise RateLimitException( public_message="Couldn't send the email. You have surpassed the" + " email threshold limit. Please try again later." ) diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index e13fb988c56..d3e88ca26f7 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -1,14 +1,20 @@ +# stdlib +import logging + # relative from ...serde.serializable import serializable +from ...server.credentials import SyftVerifyKey from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject from ...types.uid import UID from ..context import AuthedServiceContext +from ..notification.email_templates import EmailTemplate from ..notification.email_templates import RequestEmailTemplate from ..notification.email_templates import RequestUpdateEmailTemplate from ..notification.notification_service import CreateNotification from ..notification.notification_service import NotificationService from ..notifier.notifier_enums import NOTIFIERS +from ..notifier.notifier_service import RateLimitException from ..response import SyftSuccess from ..service import AbstractService from ..service import SERVICE_TO_TYPES @@ -27,6 +33,8 @@ from .request import SubmitRequest from .request_stash import RequestStash +logger = logging.getLogger(__name__) + @serializable(canonical_name="RequestService", version=1) class RequestService(AbstractService): @@ -52,27 +60,21 @@ def submit( request, ).unwrap() - link 
= LinkedObject.with_context(request, context=context) - admin_verify_key = context.server.get_service_method( UserService.admin_verify_key ) root_verify_key = admin_verify_key() if send_message: - subject_msg = f"Result to request {str(request.id)[:4]}...{str(request.id)[-3:]}\ + message_subject = f"Result to request {str(request.id)[:4]}...{str(request.id)[-3:]}\ has been successfully deposited." - message = CreateNotification( - subject=subject_msg if not reason else reason, - from_user_verify_key=context.credentials, + self._send_email_notification( + context=context, + message_subject=message_subject if not reason else reason, + request=request, to_user_verify_key=root_verify_key, - linked_obj=link, - notifier_types=[NOTIFIERS.EMAIL], email_template=RequestEmailTemplate, ) - method = context.server.get_service_method(NotificationService.send) - method(context=context, notification=message) - return request @service_method( @@ -179,8 +181,6 @@ def apply( ) request_notification = filter_by_obj(context=context, obj_uid=uid).unwrap() - link = LinkedObject.with_context(request, context=context) - if not request.get_status(context) == RequestStatus.PENDING: if request_notification is not None: mark_as_read = context.server.get_service_method( @@ -188,46 +188,57 @@ def apply( ) mark_as_read(context=context, uid=request_notification.id) - notification = CreateNotification( - subject=f"Your request ({str(uid)[:4]}) has been approved!", - from_user_verify_key=context.credentials, + self._send_email_notification( + context=context, + message_subject=f"Your request ({str(uid)[:4]}) has been approved. 
", + request=request, to_user_verify_key=request.requesting_user_verify_key, - linked_obj=link, - notifier_types=[NOTIFIERS.EMAIL], email_template=RequestUpdateEmailTemplate, ) + return result - send_notification = context.server.get_service_method( - NotificationService.send - ) - send_notification(context=context, notification=notification) + def _send_email_notification( + self, + *, + context: AuthedServiceContext, + request: Request, + message_subject: str, + to_user_verify_key: SyftVerifyKey, + email_template: type[EmailTemplate], + ) -> None: + linked_obj = LinkedObject.with_context(request, context=context) + notification = CreateNotification( + subject=message_subject, + from_user_verify_key=context.credentials, + to_user_verify_key=to_user_verify_key, + linked_obj=linked_obj, + notifier_types=[NOTIFIERS.EMAIL], + email_template=email_template, + ) - return result + send_notification = context.server.get_service_method(NotificationService.send) + try: + send_notification(context=context, notification=notification) + except RateLimitException as e: + logger.error(f"Error sending notification: {e}") @service_method(path="request.undo", name="undo", unwrap_on_success=False) def undo(self, context: AuthedServiceContext, uid: UID, reason: str) -> SyftSuccess: - request = self.stash.get_by_uid( + request: Request = self.stash.get_by_uid( credentials=context.credentials, uid=uid ).unwrap() context.extra_kwargs["reason"] = reason request.undo(context=context) - link = LinkedObject.with_context(request, context=context) - message_subject = f"Your request ({str(uid)[:4]}) has been denied. " - - notification = CreateNotification( - subject=message_subject, - from_user_verify_key=context.credentials, + self._send_email_notification( + context=context, + message_subject=f"Your request ({str(uid)[:4]}) has been denied. 
", + request=request, to_user_verify_key=request.requesting_user_verify_key, - linked_obj=link, - notifier_types=[NOTIFIERS.EMAIL], email_template=RequestUpdateEmailTemplate, ) - send_notification = context.server.get_service_method(NotificationService.send) - send_notification(context=context, notification=notification) - return SyftSuccess(message=f"Request {uid} successfully denied!") def save(self, context: AuthedServiceContext, request: Request) -> Request: diff --git a/packages/syft/src/syft/types/errors.py b/packages/syft/src/syft/types/errors.py index 09d70be9c56..501c6c90048 100644 --- a/packages/syft/src/syft/types/errors.py +++ b/packages/syft/src/syft/types/errors.py @@ -160,10 +160,10 @@ def __str__(self) -> str: server_trace = self._server_trace message = self._private_message or self.public - return f""" -{message} -server_trace: {server_trace} -""" + if server_trace: + message = f"{message}\nserver_trace: {server_trace}" + + return message def _repr_html_(self) -> str: is_dev_mode = os.getenv("DEV_MODE", "false").lower() == "true" From 86b899b40c43bc6ea4a2fa41f6794e8668f857ed Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 2 Sep 2024 18:56:59 +0530 Subject: [PATCH 07/17] show a warning to indicate that jobs aren't killed in a worker pool --- .../src/syft/service/worker/worker_pool_service.py | 9 ++++++++- .../syft/src/syft/service/worker/worker_service.py | 13 ++++++------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 22d560fe661..fd8d61841bd 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -423,6 +423,8 @@ def scale( Allows both scaling up and down the worker pool. 
""" + client_warning = "" + if not IN_KUBERNETES: raise SyftException( public_message="Scaling is only supported in Kubernetes mode" @@ -470,6 +472,8 @@ def scale( uid=worker.object_uid, ).unwrap() + client_warning += "Scaling down workers doesn't kill the associated jobs. Please delete them manually." + # update worker_pool worker_pool.max_count = number worker_pool.worker_list = worker_pool.worker_list[:number] @@ -483,7 +487,10 @@ def scale( ) ) - return SyftSuccess(message=f"Worker pool scaled to {number} workers") + return SyftSuccess( + message=f"Worker pool scaled to {number} workers", + client_warnings=[client_warning] if client_warning else [], + ) @service_method( path="worker_pool.filter_by_image_id", diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py index a324035b2d2..e7a91910edd 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -23,7 +23,6 @@ from ..service import AuthedServiceContext from ..service import service_method from ..user.user_roles import ADMIN_ROLE_LEVEL -from ..user.user_roles import DATA_OWNER_ROLE_LEVEL from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from .utils import DEFAULT_WORKER_POOL_NAME from .utils import _get_healthcheck_based_on_status @@ -194,12 +193,12 @@ def _delete( message=f"Worker with id: {uid} deleted successfully from pool: {worker_pool.name}" ) - @service_method( - path="worker.delete", - name="delete", - roles=DATA_OWNER_ROLE_LEVEL, - unwrap_on_success=False, - ) + # @service_method( + # path="worker.delete", + # name="delete", + # roles=DATA_OWNER_ROLE_LEVEL, + # unwrap_on_success=False, + # ) def delete( self, context: AuthedServiceContext, From 8d50594223fd1d6666edc92d3958934db55f0f7a Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 2 Sep 2024 16:19:23 +0200 Subject: [PATCH 08/17] resultify _send_email_notification --- 
packages/syft/src/syft/service/request/request_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index d3e88ca26f7..234ba59f4b8 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -6,6 +6,8 @@ from ...server.credentials import SyftVerifyKey from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject +from ...types.errors import SyftException +from ...types.result import as_result from ...types.uid import UID from ..context import AuthedServiceContext from ..notification.email_templates import EmailTemplate @@ -197,6 +199,7 @@ def apply( ) return result + @as_result(SyftException, RateLimitException) def _send_email_notification( self, *, @@ -217,10 +220,7 @@ def _send_email_notification( ) send_notification = context.server.get_service_method(NotificationService.send) - try: - send_notification(context=context, notification=notification) - except RateLimitException as e: - logger.error(f"Error sending notification: {e}") + send_notification(context=context, notification=notification) @service_method(path="request.undo", name="undo", unwrap_on_success=False) def undo(self, context: AuthedServiceContext, uid: UID, reason: str) -> SyftSuccess: From 41254de582ee22ab6e824b12396573160a01eea4 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 2 Sep 2024 20:37:21 +0530 Subject: [PATCH 09/17] re-enable worker delete service --- .../syft/src/syft/service/worker/worker_service.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py index e7a91910edd..a324035b2d2 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ 
b/packages/syft/src/syft/service/worker/worker_service.py @@ -23,6 +23,7 @@ from ..service import AuthedServiceContext from ..service import service_method from ..user.user_roles import ADMIN_ROLE_LEVEL +from ..user.user_roles import DATA_OWNER_ROLE_LEVEL from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from .utils import DEFAULT_WORKER_POOL_NAME from .utils import _get_healthcheck_based_on_status @@ -193,12 +194,12 @@ def _delete( message=f"Worker with id: {uid} deleted successfully from pool: {worker_pool.name}" ) - # @service_method( - # path="worker.delete", - # name="delete", - # roles=DATA_OWNER_ROLE_LEVEL, - # unwrap_on_success=False, - # ) + @service_method( + path="worker.delete", + name="delete", + roles=DATA_OWNER_ROLE_LEVEL, + unwrap_on_success=False, + ) def delete( self, context: AuthedServiceContext, From 474a7d42a83fe5e6ee5acf7b35b881f5baa3371a Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 2 Sep 2024 20:39:57 +0530 Subject: [PATCH 10/17] add notebook for testing --- .../013-scale-delete-worker-pools.ipynb | 18447 ++++++++++++++++ 1 file changed, 18447 insertions(+) create mode 100644 notebooks/Experimental/013-scale-delete-worker-pools.ipynb diff --git a/notebooks/Experimental/013-scale-delete-worker-pools.ipynb b/notebooks/Experimental/013-scale-delete-worker-pools.ipynb new file mode 100644 index 00000000000..e2cc4eb3558 --- /dev/null +++ b/notebooks/Experimental/013-scale-delete-worker-pools.ipynb @@ -0,0 +1,18447 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "0", + "metadata": {}, + "outputs": [], + "source": [ + "# import os\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "#### Helpers" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "2", + 
"metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import time\n", + "\n", + "\n", + "class TimeoutError(Exception):\n", + " pass\n", + "\n", + "\n", + "class Timeout:\n", + " def __init__(self, timeout_duration):\n", + " if timeout_duration > 60:\n", + " raise ValueError(\"Timeout duration cannot exceed 60 seconds.\")\n", + " self.timeout_duration = timeout_duration\n", + "\n", + " def run_with_timeout(self, condition_func, *args, **kwargs):\n", + " start_time = time.time()\n", + " result = None\n", + "\n", + " while True:\n", + " elapsed_time = time.time() - start_time\n", + " if elapsed_time > self.timeout_duration:\n", + " raise TimeoutError(\n", + " f\"Function execution exceeded {self.timeout_duration} seconds.\"\n", + " )\n", + "\n", + " # Check if the condition is met\n", + " try:\n", + " if condition_func():\n", + " print(\"Condition met, exiting early.\")\n", + " break\n", + " except Exception as e:\n", + " print(f\"Exception in target function: {e}\")\n", + " break # Exit the loop if an exception occurs in the function\n", + " time.sleep(1)\n", + "\n", + " return result" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "### Import lib" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'remote'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "environment" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "# syft absolute\n", + "import syft as sy" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "server = sy.orchestra.launch(\n", + " name=\"bigquery-high\",\n", + 
" dev_mode=True,\n", + " server_side_type=\"high\",\n", + " port=\"9082\",\n", + " n_consumers=1, # How many workers to be spawned\n", + " create_producer=True, # Can produce more workers\n", + " reset=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "high_client = sy.login(\n", + " url=\"http://localhost:9082\", email=\"info@openmined.org\", password=\"changethis\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

WorkerPool Dicttuple

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "high_client.worker_pools" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " \n", + "
\n", + "

default-pool

\n", + "

\n", + " Created on: \n", + " 2024-09-02 13:32:54\n", + "

\n", + "

\n", + " Healthy Workers:\n", + " 1 / 1\n", + "

\n", + "

\n", + " Running Workers:\n", + " 1 / 1\n", + "

\n", + " \n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

SyftWorker List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/markdown": [ + "```python\n", + "class WorkerPool:\n", + " id: str = 2e0c3309113a46a3a342d563576f3f4b\n", + " name: str = \"default-pool\"\n", + " image: str = syft.service.worker.worker_image.SyftWorkerImage\n", + " max_count: str = 1\n", + " workers: str = [syft.service.worker.worker_pool.SyftWorker]\n", + " created_at: str = 2024-09-02 13:32:54\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_pool.WorkerPool" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "default_worker_pool = high_client.worker_pools.get_by_name(\"default-pool\")\n", + "default_worker_pool" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "### Scale Worker pool" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "11", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Worker pool already has 1 workers

" + ], + "text/plain": [ + "SyftSuccess: Worker pool already has 1 workers" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Scale to 1\n", + "high_client.api.worker_pool.scale(number=1, pool_name=default_worker_pool.name)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "12", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " \n", + "
\n", + "

default-pool

\n", + "

\n", + " Created on: \n", + " 2024-09-02 13:32:54\n", + "

\n", + "

\n", + " Healthy Workers:\n", + " 1 / 1\n", + "

\n", + "

\n", + " Running Workers:\n", + " 1 / 1\n", + "

\n", + " \n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

SyftWorker List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/markdown": [ + "```python\n", + "class WorkerPool:\n", + " id: str = 2e0c3309113a46a3a342d563576f3f4b\n", + " name: str = \"default-pool\"\n", + " image: str = syft.service.worker.worker_image.SyftWorkerImage\n", + " max_count: str = 1\n", + " workers: str = [syft.service.worker.worker_pool.SyftWorker]\n", + " created_at: str = 2024-09-02 13:32:54\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_pool.WorkerPool" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "high_client.api.services.worker_pool[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "13", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Worker pool scaled to 2 workers

" + ], + "text/plain": [ + "SyftSuccess: Worker pool scaled to 2 workers" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Scale up workers\n", + "scale_up_result = high_client.api.worker_pool.scale(\n", + " number=2, pool_name=default_worker_pool.name\n", + ")\n", + "scale_up_result" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "assert scale_up_result" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "15", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 2" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "##### Scale down" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "17", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Worker pool scaled to 1 workers

" + ], + "text/plain": [ + "SyftSuccess: Worker pool scaled to 1 workers" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Scale down workers, this gracefully shutdowns the consumers\n", + "scale_down_result = high_client.api.worker_pool.scale(\n", + " number=1, pool_name=default_worker_pool.name\n", + ")\n", + "scale_down_result" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "assert scale_down_result, scale_down_result" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "25", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Worker pool scaled to 1 workers

" + ], + "text/plain": [ + "SyftSuccess: Worker pool scaled to 1 workers" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "high_client.api.services.worker_pool.scale(\n", + " 1,\n", + " pool_name=default_worker_pool.name,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + ] + }, + { + "cell_type": "markdown", + "id": "84757ccb", + "metadata": {}, + "source": [ + "#### Delete Worker Pool" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "27", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b

" + ], + "text/plain": [ + "SyftSuccess: Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool_delete_result = high_client.api.services.worker_pool.delete(\n", + " pool_name=default_worker_pool.name\n", + ")\n", + "pool_delete_result" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "28", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b

" + ], + "text/plain": [ + "SyftSuccess: Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b" + ] + }, + "execution_count": 30, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool_delete_result" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "dc7a72fa", + "metadata": {}, + "outputs": [], + "source": [ + "with sy.raises(KeyError):\n", + " high_client.api.services.worker_pool[default_worker_pool.name]" + ] + }, + { + "cell_type": "markdown", + "id": "358ae69b", + "metadata": {}, + "source": [ + "#### Re-launch the default worker pool" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "63461753", + "metadata": {}, + "outputs": [], + "source": [ + "default_worker_image = high_client.api.services.worker_image.get_all()[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "827bc7ee", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

ContainerSpawnStatus List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[ContainerSpawnStatus(worker_name='default-pool-0', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" + ] + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "launch_result = high_client.api.services.worker_pool.launch(\n", + " pool_name=default_worker_pool.name, image_uid=default_worker_image.id, num_workers=1\n", + ")\n", + "launch_result" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "44e3292a", + "metadata": {}, + "outputs": [], + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name]\n", + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From f052db852d656f16f6fe414d0274e321db7ed5ae Mon Sep 17 00:00:00 2001 From: IonesioJunior Date: Mon, 2 Sep 2024 12:20:06 -0300 Subject: [PATCH 11/17] ADD scale delete worker pools to bq scenarios --- .../013-scale-delete-worker-pools.ipynb | 18447 ---------------- 1 file changed, 18447 deletions(-) delete mode 100644 notebooks/Experimental/013-scale-delete-worker-pools.ipynb diff --git a/notebooks/Experimental/013-scale-delete-worker-pools.ipynb b/notebooks/Experimental/013-scale-delete-worker-pools.ipynb deleted file mode 100644 index e2cc4eb3558..00000000000 --- a/notebooks/Experimental/013-scale-delete-worker-pools.ipynb +++ /dev/null @@ -1,18447 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "0", - "metadata": {}, - "outputs": [], - 
"source": [ - "# import os\n", - "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", - "# os.environ[\"DEV_MODE\"] = \"True\"\n", - "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "#### Helpers" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import time\n", - "\n", - "\n", - "class TimeoutError(Exception):\n", - " pass\n", - "\n", - "\n", - "class Timeout:\n", - " def __init__(self, timeout_duration):\n", - " if timeout_duration > 60:\n", - " raise ValueError(\"Timeout duration cannot exceed 60 seconds.\")\n", - " self.timeout_duration = timeout_duration\n", - "\n", - " def run_with_timeout(self, condition_func, *args, **kwargs):\n", - " start_time = time.time()\n", - " result = None\n", - "\n", - " while True:\n", - " elapsed_time = time.time() - start_time\n", - " if elapsed_time > self.timeout_duration:\n", - " raise TimeoutError(\n", - " f\"Function execution exceeded {self.timeout_duration} seconds.\"\n", - " )\n", - "\n", - " # Check if the condition is met\n", - " try:\n", - " if condition_func():\n", - " print(\"Condition met, exiting early.\")\n", - " break\n", - " except Exception as e:\n", - " print(f\"Exception in target function: {e}\")\n", - " break # Exit the loop if an exception occurs in the function\n", - " time.sleep(1)\n", - "\n", - " return result" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "### Import lib" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "4", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'remote'" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# stdlib\n", - "import os\n", - "\n", - "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - 
"environment" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "\n", - "# syft absolute\n", - "import syft as sy" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "server = sy.orchestra.launch(\n", - " name=\"bigquery-high\",\n", - " dev_mode=True,\n", - " server_side_type=\"high\",\n", - " port=\"9082\",\n", - " n_consumers=1, # How many workers to be spawned\n", - " create_producer=True, # Can produce more workers\n", - " reset=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "7", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "high_client = sy.login(\n", - " url=\"http://localhost:9082\", email=\"info@openmined.org\", password=\"changethis\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "8", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "
\n", - "
\n", - " \n", - "
\n", - "

WorkerPool Dicttuple

\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - "

Total: 0

\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "high_client.worker_pools" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "9", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - " \n", - "
\n", - "

default-pool

\n", - "

\n", - " Created on: \n", - " 2024-09-02 13:32:54\n", - "

\n", - "

\n", - " Healthy Workers:\n", - " 1 / 1\n", - "

\n", - "

\n", - " Running Workers:\n", - " 1 / 1\n", - "

\n", - " \n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "
\n", - "
\n", - " \n", - "
\n", - "

SyftWorker List

\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - "

Total: 0

\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/markdown": [ - "```python\n", - "class WorkerPool:\n", - " id: str = 2e0c3309113a46a3a342d563576f3f4b\n", - " name: str = \"default-pool\"\n", - " image: str = syft.service.worker.worker_image.SyftWorkerImage\n", - " max_count: str = 1\n", - " workers: str = [syft.service.worker.worker_pool.SyftWorker]\n", - " created_at: str = 2024-09-02 13:32:54\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_pool.WorkerPool" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "default_worker_pool = high_client.worker_pools.get_by_name(\"default-pool\")\n", - "default_worker_pool" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "### Scale Worker pool" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "11", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Worker pool already has 1 workers

" - ], - "text/plain": [ - "SyftSuccess: Worker pool already has 1 workers" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Scale to 1\n", - "high_client.api.worker_pool.scale(number=1, pool_name=default_worker_pool.name)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "12", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - " \n", - "
\n", - "

default-pool

\n", - "

\n", - " Created on: \n", - " 2024-09-02 13:32:54\n", - "

\n", - "

\n", - " Healthy Workers:\n", - " 1 / 1\n", - "

\n", - "

\n", - " Running Workers:\n", - " 1 / 1\n", - "

\n", - " \n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "
\n", - "
\n", - " \n", - "
\n", - "

SyftWorker List

\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - "

Total: 0

\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/markdown": [ - "```python\n", - "class WorkerPool:\n", - " id: str = 2e0c3309113a46a3a342d563576f3f4b\n", - " name: str = \"default-pool\"\n", - " image: str = syft.service.worker.worker_image.SyftWorkerImage\n", - " max_count: str = 1\n", - " workers: str = [syft.service.worker.worker_pool.SyftWorker]\n", - " created_at: str = 2024-09-02 13:32:54\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_pool.WorkerPool" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "high_client.api.services.worker_pool[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "13", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Worker pool scaled to 2 workers

" - ], - "text/plain": [ - "SyftSuccess: Worker pool scaled to 2 workers" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Scale up workers\n", - "scale_up_result = high_client.api.worker_pool.scale(\n", - " number=2, pool_name=default_worker_pool.name\n", - ")\n", - "scale_up_result" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "assert scale_up_result" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 2" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "##### Scale down" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "17", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Worker pool scaled to 1 workers

" - ], - "text/plain": [ - "SyftSuccess: Worker pool scaled to 1 workers" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Scale down workers, this gracefully shutdowns the consumers\n", - "scale_down_result = high_client.api.worker_pool.scale(\n", - " number=1, pool_name=default_worker_pool.name\n", - ")\n", - "scale_down_result" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "assert scale_down_result, scale_down_result" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "id": "25", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Worker pool scaled to 1 workers

" - ], - "text/plain": [ - "SyftSuccess: Worker pool scaled to 1 workers" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "high_client.api.services.worker_pool.scale(\n", - " 1,\n", - " pool_name=default_worker_pool.name,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" - ] - }, - { - "cell_type": "markdown", - "id": "84757ccb", - "metadata": {}, - "source": [ - "#### Delete Worker Pool" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "id": "27", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b

" - ], - "text/plain": [ - "SyftSuccess: Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pool_delete_result = high_client.api.services.worker_pool.delete(\n", - " pool_name=default_worker_pool.name\n", - ")\n", - "pool_delete_result" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "28", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
SyftSuccess:
Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b

" - ], - "text/plain": [ - "SyftSuccess: Successfully deleted worker pool with id 2e0c3309113a46a3a342d563576f3f4b" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pool_delete_result" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "id": "dc7a72fa", - "metadata": {}, - "outputs": [], - "source": [ - "with sy.raises(KeyError):\n", - " high_client.api.services.worker_pool[default_worker_pool.name]" - ] - }, - { - "cell_type": "markdown", - "id": "358ae69b", - "metadata": {}, - "source": [ - "#### Re-launch the default worker pool" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "id": "63461753", - "metadata": {}, - "outputs": [], - "source": [ - "default_worker_image = high_client.api.services.worker_image.get_all()[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "id": "827bc7ee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "
\n", - "
\n", - " \n", - "
\n", - "

ContainerSpawnStatus List

\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - "

Total: 0

\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "[ContainerSpawnStatus(worker_name='default-pool-0', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" - ] - }, - "execution_count": 43, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "launch_result = high_client.api.services.worker_pool.launch(\n", - " pool_name=default_worker_pool.name, image_uid=default_worker_image.id, num_workers=1\n", - ")\n", - "launch_result" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "id": "44e3292a", - "metadata": {}, - "outputs": [], - "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name]\n", - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} From 83551b2a6a21a5b8a3970b4ef26b82d0aa4d4cf1 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Mon, 2 Sep 2024 16:19:08 +0000 Subject: [PATCH 12/17] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- .../backend/grid/images/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 233 +++++++++--------- packages/grid/helm/repo/syft-0.9.1-beta.8.tgz | Bin 0 -> 10452 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- 
packages/syftcli/manifest.yml | 8 +- 14 files changed, 139 insertions(+), 126 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.9.1-beta.8.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ab7be632e66..46a32486d2e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.9.1-beta.7 +current_version = 0.9.1-beta.8 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index ca7bda00dd4..378dddaafdf 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.1-beta.7" +__version__ = "0.9.1-beta.8" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index ca7bda00dd4..378dddaafdf 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.1-beta.7" +__version__ = "0.9.1-beta.8" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index 45142938e08..1442b56e737 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a syft-backend container in PROD # Hence COPY will not work the same way in DEV vs. 
PROD -ARG SYFT_VERSION_TAG="0.9.1-beta.7" +ARG SYFT_VERSION_TAG="0.9.1-beta.8" FROM openmined/syft-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 6b11e475068..4c4dacff73f 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -28,7 +28,7 @@ vars: DOCKER_IMAGE_RATHOLE: openmined/syft-rathole DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/syft-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.9.1-beta.7" + VERSION: "0.9.1-beta.8" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index ff11a21f42e..4aeb4e78ed6 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "syft-ui", - "version": "0.9.1-beta.7", + "version": "0.9.1-beta.8", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 02e5807e70e..a4c25cfcf56 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.9.1-beta.8 + created: "2024-09-02T16:16:57.339632675Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 54cd02a8bc61feeed6fdff85340b16bc20407870a0f730925454ba51ce5ed14e + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.1-beta.8.tgz + version: 0.9.1-beta.8 - apiVersion: v2 appVersion: 0.9.1-beta.7 - created: "2024-09-01T12:35:01.607739179Z" + created: "2024-09-02T16:16:57.338890556Z" 
description: Perform numpy-like analysis on data that remains in someone elses server digest: d1bdc207bb0a21391fb18f89a0434d9361cf054ea389370648ffd13753dabe70 @@ -16,7 +29,7 @@ entries: version: 0.9.1-beta.7 - apiVersion: v2 appVersion: 0.9.1-beta.6 - created: "2024-09-01T12:35:01.607023951Z" + created: "2024-09-02T16:16:57.338118581Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 74b62672d982c0ca8ea86f8b4d1f25d1df6a9b7a8c4ac1551fd5635da7d29c48 @@ -29,7 +42,7 @@ entries: version: 0.9.1-beta.6 - apiVersion: v2 appVersion: 0.9.1-beta.5 - created: "2024-09-01T12:35:01.60630159Z" + created: "2024-09-02T16:16:57.3373752Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a69a7ac7d1b02b0bc547f4236398c001827ff964bb07fe663ef3545f9b6cf5f1 @@ -42,7 +55,7 @@ entries: version: 0.9.1-beta.5 - apiVersion: v2 appVersion: 0.9.1-beta.4 - created: "2024-09-01T12:35:01.605559181Z" + created: "2024-09-02T16:16:57.336255317Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3d67a6d3bdb9e77e8fb0aa644312c9af5a49feb824d1b65f9b384796f059ed7c @@ -55,7 +68,7 @@ entries: version: 0.9.1-beta.4 - apiVersion: v2 appVersion: 0.9.1-beta.3 - created: "2024-09-01T12:35:01.604850886Z" + created: "2024-09-02T16:16:57.335063923Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 90d804df3afadfd9400cbb320898040cc89a74f6d3e45f0365455ed30785f200 @@ -68,7 +81,7 @@ entries: version: 0.9.1-beta.3 - apiVersion: v2 appVersion: 0.9.1-beta.2 - created: "2024-09-01T12:35:01.604121913Z" + created: "2024-09-02T16:16:57.334191753Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6f605af4ffc0e42a0733593faf1b5e588bbe58ff9f49b903a41bd4a751ddb694 @@ -81,7 +94,7 @@ entries: version: 0.9.1-beta.2 - apiVersion: v2 appVersion: 0.9.1-beta.1 - created: "2024-09-01T12:35:01.603408168Z" + created: 
"2024-09-02T16:16:57.33346893Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c99243e63888391654f23044144e2095dee48a599cd4b2e4f43ead6f76a8572 @@ -94,7 +107,7 @@ entries: version: 0.9.1-beta.1 - apiVersion: v2 appVersion: 0.9.0 - created: "2024-09-01T12:35:01.602678703Z" + created: "2024-09-02T16:16:57.332750084Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: baf218c8543a2525f7d4cced1e49b0d4e38ee1661d7171a55a069bf765b5b6d8 @@ -107,7 +120,7 @@ entries: version: 0.9.0 - apiVersion: v2 appVersion: 0.9.0-beta.5 - created: "2024-09-01T12:35:01.601178767Z" + created: "2024-09-02T16:16:57.332045494Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a4eafd04b39b0c75d6a28ed2f7cfece450150477dc2c6a01e10e2087a5b02835 @@ -120,7 +133,7 @@ entries: version: 0.9.0-beta.5 - apiVersion: v2 appVersion: 0.9.0-beta.4 - created: "2024-09-01T12:35:01.600472786Z" + created: "2024-09-02T16:16:57.331335234Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5a3cd3dd57609231ffc13e6af8d55f68b1b79fbbe8261740db957526fb8a536a @@ -133,7 +146,7 @@ entries: version: 0.9.0-beta.4 - apiVersion: v2 appVersion: 0.9.0-beta.3 - created: "2024-09-01T12:35:01.599722242Z" + created: "2024-09-02T16:16:57.330617781Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: affe0898286720a0281c2363bed404a09d229a5359951b4dfdd8e746d628b4cb @@ -146,7 +159,7 @@ entries: version: 0.9.0-beta.3 - apiVersion: v2 appVersion: 0.9.0-beta.2 - created: "2024-09-01T12:35:01.599007926Z" + created: "2024-09-02T16:16:57.329871354Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 105b60f0ff01f50386d2b063cb58c0e91ee41b74cefee7bca3f56e4025c38dd1 @@ -159,7 +172,7 @@ entries: version: 0.9.0-beta.2 - apiVersion: v2 appVersion: 0.9.0-beta.1 - created: 
"2024-09-01T12:35:01.598305322Z" + created: "2024-09-02T16:16:57.329152268Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 10246075684d168e6a51c009581b77df8d729e29e11abc4a360fae42659a6409 @@ -172,7 +185,7 @@ entries: version: 0.9.0-beta.1 - apiVersion: v2 appVersion: 0.8.8 - created: "2024-09-01T12:35:01.597568965Z" + created: "2024-09-02T16:16:57.328422001Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 46f75bdf8c39e0f17de266bf19b64852e0dbf7f7bcea60bf7a19018ff17370ad @@ -185,7 +198,7 @@ entries: version: 0.8.8 - apiVersion: v2 appVersion: 0.8.8-beta.4 - created: "2024-09-01T12:35:01.596858435Z" + created: "2024-09-02T16:16:57.327008621Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc0a3b49df19435a407e4764be6c5748511f14273e668e7f1d326af28b29f22a @@ -198,7 +211,7 @@ entries: version: 0.8.8-beta.4 - apiVersion: v2 appVersion: 0.8.8-beta.3 - created: "2024-09-01T12:35:01.596143859Z" + created: "2024-09-02T16:16:57.326266202Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: de2fba39516e98be39ae0110a2cfa5bfa2b665d7a35a4516b43c5310bbf621dc @@ -211,7 +224,7 @@ entries: version: 0.8.8-beta.3 - apiVersion: v2 appVersion: 0.8.8-beta.2 - created: "2024-09-01T12:35:01.59540699Z" + created: "2024-09-02T16:16:57.32556037Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1323f4082c65944b522cd8e36dc7285c83c7dfcf6a56f7962665a8b1256a4d09 @@ -224,7 +237,7 @@ entries: version: 0.8.8-beta.2 - apiVersion: v2 appVersion: 0.8.8-beta.1 - created: "2024-09-01T12:35:01.59407265Z" + created: "2024-09-02T16:16:57.324852194Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ec027b50b8182ef656be14ddca9537785c37712a4be8cb940f30ac029b63de2d @@ -237,7 +250,7 @@ entries: version: 0.8.8-beta.1 - apiVersion: v2 appVersion: 
0.8.7 - created: "2024-09-01T12:35:01.593183367Z" + created: "2024-09-02T16:16:57.324108813Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7ea7f63d1c6d0948860547f8aa39343fc5ef399c8e62d9d7edd4473cf44d8186 @@ -250,7 +263,7 @@ entries: version: 0.8.7 - apiVersion: v2 appVersion: 0.8.7-beta.16 - created: "2024-09-01T12:35:01.586616313Z" + created: "2024-09-02T16:16:57.317137932Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75190eae57b64c2c47ab4a7fe3c6e94f35eb8045807a843ec8d7b26585c9e840 @@ -263,7 +276,7 @@ entries: version: 0.8.7-beta.16 - apiVersion: v2 appVersion: 0.8.7-beta.15 - created: "2024-09-01T12:35:01.585548555Z" + created: "2024-09-02T16:16:57.316267104Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56879d9a9f10febce88676d3d20621d74d17f9e33f5df6ae1e9bc3078c216f0c @@ -276,7 +289,7 @@ entries: version: 0.8.7-beta.15 - apiVersion: v2 appVersion: 0.8.7-beta.14 - created: "2024-09-01T12:35:01.584703124Z" + created: "2024-09-02T16:16:57.31542018Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e7cbca1d603ba11e09ae2a3089cfdafaa08cfa07c553c4f0fb8b42f8d3028f7 @@ -289,7 +302,7 @@ entries: version: 0.8.7-beta.14 - apiVersion: v2 appVersion: 0.8.7-beta.13 - created: "2024-09-01T12:35:01.583839398Z" + created: "2024-09-02T16:16:57.314546928Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1dbe3ecdfec57bf25020cbcff783fab908f0eb0640ad684470b2fd1da1928005 @@ -302,7 +315,7 @@ entries: version: 0.8.7-beta.13 - apiVersion: v2 appVersion: 0.8.7-beta.12 - created: "2024-09-01T12:35:01.583148095Z" + created: "2024-09-02T16:16:57.313793027Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e92b2f3a522dabb3a79ff762a7042ae16d2bf3a53eebbb2885a69b9f834d109c @@ -315,7 +328,7 @@ entries: version: 
0.8.7-beta.12 - apiVersion: v2 appVersion: 0.8.7-beta.11 - created: "2024-09-01T12:35:01.582447745Z" + created: "2024-09-02T16:16:57.313080062Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad @@ -328,7 +341,7 @@ entries: version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-09-01T12:35:01.5816969Z" + created: "2024-09-02T16:16:57.312351468Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -341,7 +354,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-09-01T12:35:01.592351341Z" + created: "2024-09-02T16:16:57.323264624Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -354,7 +367,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-09-01T12:35:01.591703389Z" + created: "2024-09-02T16:16:57.322598005Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -367,7 +380,7 @@ entries: version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: "2024-09-01T12:35:01.59105217Z" + created: "2024-09-02T16:16:57.321910608Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -380,7 +393,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-09-01T12:35:01.590423054Z" + created: "2024-09-02T16:16:57.321245272Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -393,7 +406,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-09-01T12:35:01.589752359Z" + created: "2024-09-02T16:16:57.320548437Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -406,7 +419,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-09-01T12:35:01.589087926Z" + created: "2024-09-02T16:16:57.319836423Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -419,7 +432,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-09-01T12:35:01.588447748Z" + created: "2024-09-02T16:16:57.318364972Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -432,7 +445,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-09-01T12:35:01.587795619Z" + created: "2024-09-02T16:16:57.317736134Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -445,7 +458,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-09-01T12:35:01.581014554Z" + created: "2024-09-02T16:16:57.310850916Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -458,7 +471,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-09-01T12:35:01.580469414Z" + created: "2024-09-02T16:16:57.310287219Z" description: Perform numpy-like analysis on data that 
remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -471,7 +484,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-09-01T12:35:01.579191096Z" + created: "2024-09-02T16:16:57.309746264Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -484,7 +497,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-09-01T12:35:01.578635647Z" + created: "2024-09-02T16:16:57.30920633Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -497,7 +510,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: "2024-09-01T12:35:01.577830811Z" + created: "2024-09-02T16:16:57.308454272Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -510,7 +523,7 @@ entries: version: 0.8.5-post.2 - apiVersion: v2 appVersion: 0.8.5-post.1 - created: "2024-09-01T12:35:01.5772952Z" + created: "2024-09-02T16:16:57.30790942Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -523,7 +536,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-09-01T12:35:01.569580585Z" + created: "2024-09-02T16:16:57.300171486Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -536,7 +549,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-09-01T12:35:01.576540979Z" + created: "2024-09-02T16:16:57.307145801Z" description: Perform numpy-like 
analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -549,7 +562,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-09-01T12:35:01.575786327Z" + created: "2024-09-02T16:16:57.306341005Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -562,7 +575,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-09-01T12:35:01.57502868Z" + created: "2024-09-02T16:16:57.305576765Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -575,7 +588,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-09-01T12:35:01.574262256Z" + created: "2024-09-02T16:16:57.304797558Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -588,7 +601,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-09-01T12:35:01.573438134Z" + created: "2024-09-02T16:16:57.303685757Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -601,7 +614,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-09-01T12:35:01.571898571Z" + created: "2024-09-02T16:16:57.302519901Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -614,7 +627,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-09-01T12:35:01.571146564Z" + created: 
"2024-09-02T16:16:57.30171804Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -627,7 +640,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-09-01T12:35:01.57038496Z" + created: "2024-09-02T16:16:57.300939353Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -640,7 +653,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-09-01T12:35:01.568770119Z" + created: "2024-09-02T16:16:57.299393952Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -652,7 +665,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-09-01T12:35:01.568381963Z" + created: "2024-09-02T16:16:57.299007002Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -664,7 +677,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-09-01T12:35:01.565057046Z" + created: "2024-09-02T16:16:57.296009918Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -676,7 +689,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-09-01T12:35:01.564648322Z" + created: "2024-09-02T16:16:57.295216865Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -688,7 +701,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: 
"2024-09-01T12:35:01.563891396Z" + created: "2024-09-02T16:16:57.2943856Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -700,7 +713,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-09-01T12:35:01.56348742Z" + created: "2024-09-02T16:16:57.293930043Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -712,7 +725,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-09-01T12:35:01.563079617Z" + created: "2024-09-02T16:16:57.293520172Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -724,7 +737,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-09-01T12:35:01.562656345Z" + created: "2024-09-02T16:16:57.29311026Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -736,7 +749,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-09-01T12:35:01.56224191Z" + created: "2024-09-02T16:16:57.292700018Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -748,7 +761,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-09-01T12:35:01.561795755Z" + created: "2024-09-02T16:16:57.292289104Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -760,7 +773,7 @@ entries: version: 0.8.4-beta.24 - 
apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-09-01T12:35:01.561390668Z" + created: "2024-09-02T16:16:57.291860417Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -772,7 +785,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-09-01T12:35:01.5609805Z" + created: "2024-09-02T16:16:57.291378722Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -784,7 +797,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-09-01T12:35:01.560564392Z" + created: "2024-09-02T16:16:57.290770692Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -796,7 +809,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-09-01T12:35:01.560141841Z" + created: "2024-09-02T16:16:57.290307331Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -808,7 +821,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-09-01T12:35:01.558651914Z" + created: "2024-09-02T16:16:57.289265876Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -820,7 +833,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-09-01T12:35:01.558012308Z" + created: "2024-09-02T16:16:57.28865413Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ 
-832,7 +845,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-09-01T12:35:01.557613331Z" + created: "2024-09-02T16:16:57.287765047Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -844,7 +857,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-09-01T12:35:01.557211339Z" + created: "2024-09-02T16:16:57.287358371Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -856,7 +869,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-09-01T12:35:01.556789801Z" + created: "2024-09-02T16:16:57.286951916Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -868,7 +881,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-09-01T12:35:01.556437953Z" + created: "2024-09-02T16:16:57.286592649Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -880,7 +893,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-09-01T12:35:01.556090192Z" + created: "2024-09-02T16:16:57.286203135Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -892,7 +905,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-09-01T12:35:01.555746279Z" + created: "2024-09-02T16:16:57.285859216Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -904,7 +917,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-09-01T12:35:01.555407165Z" + created: "2024-09-02T16:16:57.285514816Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -916,7 +929,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-09-01T12:35:01.567961016Z" + created: "2024-09-02T16:16:57.298586431Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -928,7 +941,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-09-01T12:35:01.567617914Z" + created: "2024-09-02T16:16:57.298205453Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -940,7 +953,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-09-01T12:35:01.567269251Z" + created: "2024-09-02T16:16:57.297863357Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -952,7 +965,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-09-01T12:35:01.566913135Z" + created: "2024-09-02T16:16:57.297515301Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -964,7 +977,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-09-01T12:35:01.566542662Z" + created: "2024-09-02T16:16:57.297162625Z" description: Perform numpy-like analysis on data 
that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -976,7 +989,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-09-01T12:35:01.565946671Z" + created: "2024-09-02T16:16:57.2967885Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -988,7 +1001,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-09-01T12:35:01.56424127Z" + created: "2024-09-02T16:16:57.294793608Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -1000,7 +1013,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-09-01T12:35:01.559706206Z" + created: "2024-09-02T16:16:57.289878935Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1016,7 +1029,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-09-01T12:35:01.555051109Z" + created: "2024-09-02T16:16:57.285159155Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1032,7 +1045,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-09-01T12:35:01.554461205Z" + created: "2024-09-02T16:16:57.284603733Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1048,7 +1061,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-09-01T12:35:01.553762047Z" + created: "2024-09-02T16:16:57.28389168Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1064,7 +1077,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-09-01T12:35:01.553167645Z" + created: "2024-09-02T16:16:57.283305921Z" 
dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1080,7 +1093,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-09-01T12:35:01.551849365Z" + created: "2024-09-02T16:16:57.28269713Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1096,7 +1109,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-09-01T12:35:01.551191004Z" + created: "2024-09-02T16:16:57.28122287Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1112,7 +1125,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-09-01T12:35:01.550640775Z" + created: "2024-09-02T16:16:57.280642742Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1128,7 +1141,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-09-01T12:35:01.550058085Z" + created: "2024-09-02T16:16:57.28009253Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1144,7 +1157,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-09-01T12:35:01.549415733Z" + created: "2024-09-02T16:16:57.279456318Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1160,7 +1173,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-09-01T12:35:01.548754796Z" + created: "2024-09-02T16:16:57.278810217Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1176,7 +1189,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-09-01T12:35:01.548121382Z" + created: "2024-09-02T16:16:57.278125655Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1192,7 +1205,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: 
"2024-09-01T12:35:01.547467278Z" + created: "2024-09-02T16:16:57.277484764Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1208,7 +1221,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-09-01T12:35:01.546804759Z" + created: "2024-09-02T16:16:57.27680483Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1224,7 +1237,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-09-01T12:35:01.545438283Z" + created: "2024-09-02T16:16:57.276031513Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1240,7 +1253,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-09-01T12:35:01.544762729Z" + created: "2024-09-02T16:16:57.274603875Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1256,7 +1269,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-09-01T12:35:01.544128482Z" + created: "2024-09-02T16:16:57.273925124Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1272,7 +1285,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-09-01T12:35:01.543470522Z" + created: "2024-09-02T16:16:57.273283472Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1288,7 +1301,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-09-01T12:35:01.542792674Z" + created: "2024-09-02T16:16:57.272633694Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1304,7 +1317,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-09-01T12:35:01.542113433Z" + created: "2024-09-02T16:16:57.271985319Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1320,7 
+1333,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-09-01T12:35:01.541554958Z" + created: "2024-09-02T16:16:57.271164334Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1336,7 +1349,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-09-01T12:35:01.540988008Z" + created: "2024-09-02T16:16:57.270597911Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1352,7 +1365,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-09-01T12:35:01.540397553Z" + created: "2024-09-02T16:16:57.269998748Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1368,7 +1381,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-09-01T12:35:01.539351267Z" + created: "2024-09-02T16:16:57.269383755Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1384,7 +1397,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-09-01T12:35:01.538501728Z" + created: "2024-09-02T16:16:57.267911017Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1400,7 +1413,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-09-01T12:35:01.537798773Z" + created: "2024-09-02T16:16:57.267262281Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1416,7 +1429,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-09-01T12:35:01.537239617Z" + created: "2024-09-02T16:16:57.266704014Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1432,7 +1445,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-09-01T12:35:01.536676824Z" + created: 
"2024-09-02T16:16:57.266092688Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1448,7 +1461,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-09-01T12:35:01.536110314Z" + created: "2024-09-02T16:16:57.265516327Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1464,7 +1477,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-09-01T12:35:01.535480316Z" + created: "2024-09-02T16:16:57.264910641Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1478,4 +1491,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-09-01T12:35:01.534690649Z" +generated: "2024-09-02T16:16:57.26415648Z" diff --git a/packages/grid/helm/repo/syft-0.9.1-beta.8.tgz b/packages/grid/helm/repo/syft-0.9.1-beta.8.tgz new file mode 100644 index 0000000000000000000000000000000000000000..735bb06cb8f5629e37a98d520781d3afe85835ed GIT binary patch literal 10452 zcmV;_C@a?=iwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PKBhciXnIXg}*$%wf8B<8JHiN4D0@z1MNn)Tg$kBPr?X*=sKq zA|Z)2Nw5THM@@2n`(1eO{SYO~a@r%b%MnRnFc<)XnZaN{S98|9Tq2J(R>-!#d5Y9( zwOVH7KX0X|+3@R_7aNJqb3c?g~^uwtf3RT zIdHLaV>;Mq4p(D7L~EwbYYwt{=%FFA`xJXRaTs=3en|u0Hw~iGrfDyl%w!gx@@to4IR;RS9xyiM6!@e= zrh`3N1Mu*|q|96Oz##e^_8KOs0p!meQP&T|(=ix$j(m#&#kz+X$Z-gB;(>whu|-^) zPiuHPfZM?Y#{CIQe!LpPWB@;0T#x&3bqmAM;O+a%$<^SNqk$ZQ*ybPD7z>EB2h8)a ztnw%1n0F*Vss?Z;UhuZZP}T0+jZU}G(Q369@Osy&RDA!jBD{lX$~S-7XWpzh>oo3%-8Su=wO(#x_k)p@S$Q8hJ#G84**iUTIe+KR_2^aUY8FOK=-gg zd(gy8Z_0T_n+u&z#rwu@RB&{NVI2}l40n&*o1DP|TjF5u;*X4TSj$=g@qjHo;xCr` zt%riCArVdShKTx=XWJT<)^4=|rWOqD9kDeIPyn)h%7Qgp5MfR%i#(Xl0`BliO}IMH z?kL8%vV1X|p$Xpjl`l6ROV=CG;fgKDS-^qF=Ll>0EGA5%^oGkqlD<#MOkIzBT=m3Y zL^BH;N)~zsF=EIufid>(O&zD;Q2BPxBZAsU;Jlw@EtHNkov~9cavCy`uY5l4U8?Zz z%jL^J3{jH_@|yl%~cN}}`000|7*>Vk$``|7Nhs@yzr5QOT7vkm&c)N*BX$oQ;$hcX 
zWx7AEk|Ec;!z&3QWah!Izm%?4zIeWoW`gGuSJY@Mmsyl4<1yM?PCZY`vP@^;VJey# z3|CnnN1X_T#zuFFz(OeJ**I6o9~?g~u>(HkC&^YVSDwOf?gJ&{a zXMFgv9yGu0eA}&3mrI$4kzK!+(_Q;ki5ODB2%q@O*En^UMP(=BG>Z+!Kw9jtS4@GW z3oicfJt=wV7NS(bL;PhuW^1l55W^2>wPhVYk25t`v+ZELyX+r_wszejzR_3^vaql) zu}Ac-W8M>3+Sixlq0UItBFL!EkcAu_*Hst2&^?S;_!!~Il|j5Ij;f^Mh_UuQr3lA- z$H7)Yf(wR8YR!%edEIVTDArs^&@B@?e=RHCeud~4(06nT-D5zEVJh1hC>6EXyWBoP zu6rGJlg9gr^C17;)QRV0zC48~%|Q;?8?ll`j2`^!-vwJ%V*iI>+qDqGw0S$2^v4bM zk?nGiVa@(;x7|KX?f-Po&QA~fKl?~xm76S)b0>r}&KyjKGGtj?R`~P@8qzs{Gg$cY zMVe55A-)G5wlJbt*jJH_4Uu^yUna9p2v`_-4%o!QfM#6C;3A!Za125zwd<#o# z*}!MBPxGKc-O0SBF4Fn8G?WEqfGjHyjI6Qxq~f`S`gkP?gpDl#TnalcZ+#$vagwh% zL9d1|TY=|04kta&q7&OjjsY`lk%t(C_yVjmOEfM3Q$`a}aNy`z^zoAOP!9fTcntR@ zH5u^#9f`%vBM+2o%VBL?4(K(T|Lbb_ziUDF16>l|;*z@r^?)s%*ce>2zzvZ>M-DjT zfiI36w$RrE^-KqbE4CyK5D)&-cULT|&9wQ#DR$xkQJ_-4qN|2Q7N+yk_pH}i3>)nj zFxS>!YguhwI}Ww6*BWmaf6|Vk25|g1_M*{2R#3`@w6ojD6?hj=Ug-=;?Sl7%z`ssl zg=`Bx!@|REuA(BKUPOkFPbNWt>C-ZpTvMr@n`|8ikDCxQfKF71R;3 zhiA)G5$QVd4AWUytHxHtoFR_|odf-73ETo!NNfQUg;3)QtJF-AYTjG1ksQo{SRcq8 zWVLhi!Zsm2NK`-Mk*$55P(t7E*eOKiCy)myNLv#19JDW;Um zXJRwmMy6D7#UP9m1nJDY$ExqRL@rooyo-qwFlyE1jsZSJ3(WPkIrexvIEtY^45ypV zfh!@Y7R+rf<8uMNk+gSg(bdnrm>tr_DJB0+rMl5z?hZYH*2sUIR{JC+|DANtx(E4h zAL-Ml=J&NrV%x+4A?X2^9BS`5C*g|Jr%!b-9o_N`44N+THFTbj%X}I%xOnk>^UIeS z&uHMe>E!0Cs~oOpI&2Qw-)Q}BR7(h#>bG(~gwNn%*Wv<<_74rTsRo~cdA_5=Sy%it z?Xf?fn;$`|NAR&@h~Kq%yOave1uZTLh8oxgt%T2DItJ&=CnXJ*6-jK6>!u7-(ukK4 zsdI9M&PY+_fGoNB#X{id3VK@1&E{8j*ZORgED`L`efd)3!gGTzn}P}=*Sb-VB7cTB zas%~xMwe|p+V`~RP| zPY(8^4icqJ8j*V+ED;dkUkBrlLFT{Ju3v)ey5=EKbjy{HJm%A)hnrh z^Jxm_({`sK&Z2Lt;k-#D6>x4hT8UnJSCPnK6mExcaUGlE+o_1Jm=hHTa@88DjN&A$ zlZx1W+yLFmRa8LtBnj(PMRYrx;9I$l3i!TC4c@bg2%l_%aP>;64BxXPv~INltT?ne zhb5vv;Z?+|8**I!tv3Mqp|KzN7e$fB+k;Hm)O~{?^N}eKoU;#Oy7j5932|r(5UyE~?d?`9R0R!vAklVka zWMhr87!0jNY?jJ*8fcW*SZ$%!rBPA4!z=msRz8bU-XEC;-=;OnVCywkrxcbhG8cG$ zC%sF*i#X!L5__@@e4#g~@TkXPJCDBzLDq|bih}WjMZB;=ma}g4D^Y%k#3rxG)hICr z#%LC?t#4&l^g--&}*MU5i${fQz6oJATRoOVWX!}4*e$E)5lZIR_P@!Oa6AzGb 
zGj7!CTx?352{S39$Kq8u17yJm)3RWO!NYZ*@*yJ1YVypnlB2ZO%=7UvAaxWuRdj(J z>>*ZISa_?+$~EONprt^_s-v0qPJs+6RP7*hu&}dWOE^1OGavZ^TWLZjkrihMf|u8# zE)C93;`0=al=sPd>GRDHGxhM$w`|u-;TfWkH%u!0_KtRia!ADBS6$SaZH%CN4(?wy zlS#Az>hOjTivx*^fP8pY8j^gvu6D@cH; zNuk?mM2ATd6o>C(wGFU>ATqyj`R|g5>_1`F; zb+3hPujnopxX)QUl8aYjnTjyVEKbiY{AJ~T<;)iRTVG;iSeVkfIyJFXjF@%tpH4fe z_%EGyw|j{HvX_*I|J1JCnU2v5o$77O5Elh|;X%ptxTt~J9q)xxFwM`baR2)=I7z#Y zaLZ3^SL?)fiGv*$R#u}f*5wMUjw%$^mECjSwoU-u@+rgK)vyP*#KG!RhiYgY!efBS z$|;Y0ylQ<7lX`=+X8x;{`*D$>*3JL(PI~|MynBAo|M!w&i>44axxkX_d`-HQP+0c;{!teyX2C8#4^9Pr+b5v)7^(`t3o z>wo+F5dU#6scikfkeyy42Be0^UZ#1yUf+-%rLk4#z#)B#Ssw3*Do+o72~8_#ytYRs zSBhtykvr^U=;Y0i-atNE63_fYG`ey3O1hf{6dVz&Oy~>h@p!gjr$lh%cSDp z2gLLx|KYoa%HamD3~XVB1*nR+H7&A8SDjm74$N8jAqDt1GJERwAlIdNRa1r-p8FQ% zTMpberem1SqT=|i&s6rkC@}u)H>@)?lyiA3hgHs7Io2aQ=P+Wkb#Tkan7?$$yVi0N2QW?e_UuTK_veJ=lNtlD4z|?A7ceMV`yR9J6;rP`TO1BaG<8 z>cQl4m^KB;M;X9o7=IbS5%Is)Li+S6tq+EA!=e-K@meYr2t0F=*;2vAg3JNB;&TGE z)3fteIS9mFXPXsRz`qUy`2X;1RfTlvZxF*C{>6t^AN*}m8@Sb6(b^|m%JAlGRuX{6^`o|(wna!XA#@2 zxP~Y>>b7VT#U;j#IH71`hYg8k>Z=|MJOg{!7-3|r_&TU&scJ^7OL(#pEvnuXui`;d zDz+w8LuLsH15?G}@~p~>vM_rAO0m^qO|`M;SF!$yhr}vHBVIfgmlP(!s4T`Q9$qT~ zlMK2gJYh?Uh2E!CP=eT{4ADrar&Po+PnZM1w9x`51c$&-twM&vmoLp!R1F2Mmo0Q3 z9ac_NG4w4f7~U7w16t8ojz_wIr^pz&7l&TkMLCAbNjRc$;3FO+?A-Taj{q-XHaPHn zhnY4`WC*DK)2s1xICwj~y}0QoGJteF_`xG~FZBUnZeq(&+J5f0A!5s3$OIZ;Jkz}D zdDY|L#pM&%FMJ4dk+>Y(PA;x)`=jZPgYhH}iymlquc($HvAB{a58${z`p_RuC%+7H z+mt5(s{$qb1WrCK7>9bWYcg41RjeLZ+Lz3zYLS0;QAaB>|r|LcAK zeSbO}j0)ylnVz@?^yR33G3ig`?K}NZ!JH0ay=Kc_-QM(X2BTl5KMzJf^+)5X2&O`5 zR~=8Xmg#UXxSn3!T)ZooLE6q2!2H(dp>;WUe_OeqGEul2!brRgIr!pqeetG$UAYgk zmB*zxM*fqE_$^yua&db*m|RS*2DgtxCn?QU!|DCFKfSu0^hdWB*V9q|-PL$9`lTus zIbqh<)%VlI?bT0%sR~NAwjwpQWqfi}tTrE+BO@44cHrUEu+7J&eri}84Hc_oJP zbkzT^_xrih!sDm6p`EfH}AsQeW{mVH0w%%{R! 
zlSl>E5mZBL58{+Wt5}-K`(9C)@`|J=d4WW}*nBe?mh?xC9B4DQAtFew6UaH_O#~|e zxym3CkSmXaIP|KcLE@f281k8~0G~Pzkm;$X5X-C73&9A9Uj&kAmOLtuk92W=1z}+e zC@KMGc~3xTp-oN4{oA)i5^_Ps;tZy$G{e`_4Pifw^O(AW%U7zRG0~MO7aw>PFjaSe zHE6dH+&Q;))TtVVKktwMatrSGUvg)qnS=Z>|S# zrsK(Abn&h~y?L_%(etkKE291jn8+)Ba*>&gE-tTb-&HoDNKkPr$`8n*#U_;8ko{$% zNG#hraio8Hd42JrKfRbt`eUgvl#33r_T*Kw3n3Ld_jwufP0l=XvB9yR=Gb0znjpwE z>-=-7^`W(QnB0{HNZo0d0xB-TlUBYapW{sfG1L^dEjCkC$olyw>@X8qw9fOUGIo~M@ zIYFshdd`t)`5vAuJxrIxGJ4Q$B{I0!Gl?t$;c&|eVpq4UVZ+J6RfQL)OKi-L%UGyv_#O5f zY}F~&ea~d8RMX>Zmab@1=5FTZ4j?kut{Yf*XcYr9@O_iHwr9sVPykk)dQKwjdgi@p z;YIAG(C|~qvJ+9T9=SyYDYg2XoSdHSfTLGt;U%~|4=1nA!b?zpHcno;5nsyLgWGg7 zjJ|kwa5Y?vFWj4hhao_V9z!kL6&zO((y3ad8gsk z!j~2@?MdK?C0)hSP9CL_RuvDBWDxGh`+>xrAds$ON^gh}^>ZCY7#Tl%Cc^_K?1yNz ztVIJ)2H@%?xPwq`_|&^G53YhwlZyQReA6=`8@T;?f zg`_Mof@Slpw}2IOb;edCZutH5sc^`Zn~FBI-1{3F?zc6ZEHSd!va0zc53iK%q0`!# z?O|)1%FY|LBZJCX&HuUA{%@+te>*qP%p}wLUL>$dAW0lny51N5Xe{#r>cReMVknhtZhoxgQFT-7G6u-qBDf7o z=`aSHWAyUqC34N9myPd_zDnc_qSMbFHi*7+{6a6}>KJyr-3*R1i_Dru1iZ3cOT@tt zjBV^<-JF|xnwb>vE@si>oMKjN5{Eim`CN`X#WNpcF5>X8#12fZ$NZ(LGvf+ff@V5G za^UV>$Yb*hDXOH|ZN>FJCI5*7I`Z#sV*h_rDgJ*Z{?A#vb&&t|kv6pdBuB=R3Voug zP*kZDjb9dt@g(S+x7z1>vKPtPpSwdhOeOPwC$ay(sWAV~GWY+Tot+)*zk5k}@&Du9 z|J6tTm$NQ6_P_T;LI1s{iuNBr9^1$qczr~a|Ke0S|924k^_y+>f7<7*!~V~H(l+b= z9+&;++4%`txsiwg1$#m9%^#Wfx%)hx%I5zrVt;;9;r`!~PI~=6Kkpv)fA*5LjsLlC zM)0SL{TUD_5&1KtKK&Wue#Ti|#-I^IN2%sMn9jYW?_daKE62gy(vS2SlrT4S8O$lQ zLw`Xz*h-uQMTXva`olgv{A9~Z-1|})9&x%=2EVzwy_&pz69u{0F3L_il1=z&lNg6t zg#MzDd2;7SwR5>q-kqMvO&pL#3^k2e+H4)0I@?Dv`0l&dmlDrUSo%ox?E{x$WZWP(M=^+xohQ`~Y+&*#cPwy8H(w$+2rnN9fM^0$aBgohtu zjKU(wc!MnD=-5j#Y#FnMrkC0}_@31RO;ptkou;D@e>uCWkF7+3jOqyYWOrFh1oTz6 z+l?yo5L*%p@3N`@WG7aUM|_qjnNzRlTet@rUvcH0_RC+JzaGKg{|-N5WMGfJ{Pn%N z@Q{IjJ%(Q&@Qf1u4l|kMKN7`MxXNFTw1h;-yw#AHIdX*dfM;6HjyY%wPpx#E6=VKX zXjK}S3ayt39FS}NdIX<8L*lJLbei*8I34_Y^rTJ6jQ^QMX7yl=UqfY{_h~5|PMr8> z+cXRdKOhfhA;r zze+x}6S9|Qx~?E|D&&TE$Ms0jDT!}$!i72u(PbEkhIt6j&AT{%mzygARDFr{yIcGa 
zu2yjYonpthaEPjza-9(Z=QUaM;L{iR$|8%bNkQ+YgcZ^rv)ZpL|{VS2)fuu3jdr`ydJfCJq(F&fKb2wQQ~Lpns&-^Q%Dubu z`m!4~JhC4vZaRxfrC)WnFl)!;zFgC``X(f}%Es+Pyn$p?z(*Y1=Y*&n5HX)>&k_o; zf(0J3F%Roy9pHN8H>@+9^utdZw>c0!kj*m=_j%^Gt;&UB{o$17Up|${|NHX)Yqz=? z|G#$UaQ<^IX)F7Gv47ycb5Ot-lLyV+(O|?9?L7G6bV(Pr5`X z1VWo}_7cCKnKt1S26yrb*uh;lB-&{Bmwfkc@h~$TV~PyJgV&(78sa~-bs5jxU>n>rfwyG3N^H(k&A^OtnisB-1Kb@P80XMoo2|DB(=((>Qw`62%IUeadsf0GkJ&%p@( zbf<(OA|=iV#aYYOe?Tb7>k99euDYd|&A;WQVhqq$+lmF{AGxs@K;3j}F}K{KHy88Q ztL@^62R@pW!ilfjklfLBWI(l&4arQFbz72!M~$lAZx_^Q1sB_`Ju~!n{c)v)_P6fR z^uL}&L5(`Requ8bucoyl#^D+31{)@38EyT=K>~ z{yBNIYVn3_x{+%@BiF4Ae!Jbx3MgUQWvjQQC!8!RQmA0pvY@6-&pn0Cg!@1F6z8}5 zyF@LkQKaR6#ofan`-uO3cG5}9|J_6Uul=Mn+g9EF7z2O;@w9?}P|5S|!o}Jeu0!=K z9rE_!61#A5bHHh>&bRrHpdx;-$ zkf+y4f7^!(aY%wPr9G495g|fG{dZU6$>^8qcrqAWyz5Wj_OJVs{nwB|hW*CJ`DKt`XHMYIX z?7ma^BKTqoJkK@o5$fw5pNnxf9g~^JdW$WzD)4cBrkl;SP{nfw$KG`D_^91?Ad+y_ z7sMiYVoe|=|A!}hqhF8r0bA$)b#m5C%m1zRA^zuHl3KEy=>Dg`BWdMg&=!c{2ei_* z@dHbbpeNM>qKgSqUSCnORECC)D8pVuA|-*VS8?a47x129PoB8ujmCz`1u-LKY#tZE z&uPdD=sQ_xxxQUl=yTIbAur#o!Chopi+TchA?7Jq6%iNm!i8(fAAw`G&xm*yyrl&H zFk`$85>lQ=cX72jikNIwP7U#vefsp5bV{qh{~sL(ejWdBwKDgAb`SQS{iKTgKafPy zq2kk{(Ox^h819laO~BHa=o@YN{sdlBF`izt*}e{|X>hyB03q_WMu3)$hNqJ3&`?1^_jH`V?+srvkXGXLLp=QMNw z*JedkajEK_bMO1S84pDLsu1uC#_aH2cmd=5K^BwRpI}S^#Cd7|Lu17^gM0< zJvrR}zn`=n|KG$7>pu^Yy(GBMC|uQHB(KDl9wP}*Te*xB zLU^RlNCbCNr;&n^8+whOAWq{4;(4XM1;`s=h!aje8zvAa#1Zxx~m~%ReeR1{D!Fh2&XlCq6$= z9upBty~SPxyWRg#o5%l9nIUt>QqCn!x!z>a;!@$7rfSnJyR{x~LA2`)rUT#pIJoII zqr%OSMsU6t&l+vmr_c`Xk1EVj=vk59dKElB+4g9!z5-2hoF$%yC@~j|BB~T;**{tG zoEhST$@Nz#p#*)H(`M2eqKFKoxt@;_$@MWq5J?PVS!$)Gzz8%w-)XAnrpQq`WXdx` zV)c?4>-Qv0q4kZg-1a!b+i1Z~6x?X#e(dSYl95q*)-qFWy>7RDWYpL9n0%SUDB55z zl4{r1oHxwZbaSGIU#MGC0zy~QT?0T>G}lcXXmi)YeR33*>X`}W6CQFk7^P_rDa*WywaPUGbUhq;~ z0A;{~$(HaRM-AYsG7v|Urc%=7EGXgY2`kn6zQI^)eZ_o#~_I+c9?rPsRn9Y9^&s@~L z|9`a?@Bw)y@hM!r?bBLA1#Pc2Oar5)%5#tW^#20@0RR8+ KOq&1zIspI`MA(b~ literal 0 HcmV?d00001 diff --git a/packages/grid/helm/syft/Chart.yaml 
b/packages/grid/helm/syft/Chart.yaml index aa49e8cf422..a41e1743fa6 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.9.1-beta.7" -appVersion: "0.9.1-beta.7" +version: "0.9.1-beta.8" +appVersion: "0.9.1-beta.8" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 6fb3bb8fe87..c11da5138b5 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.9.1-beta.7 + version: 0.9.1-beta.8 # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION randomizedSecrets: true diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 6de365022e5..399a06e69f5 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.9.1-beta.7" +version = attr: "0.9.1-beta.8" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index ca7bda00dd4..378dddaafdf 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.1-beta.7" +__version__ = "0.9.1-beta.8" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index c9ab84d5897..94a28156ce3 100644 --- a/packages/syft/src/syft/__init__.py +++ 
b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.9.1-beta.7" +__version__ = "0.9.1-beta.8" # stdlib from collections.abc import Callable diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index 6d08749fbfb..e2bb7153376 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.9.1-beta.7 -dockerTag: 0.9.1-beta.7 +syftVersion: 0.9.1-beta.8 +dockerTag: 0.9.1-beta.8 images: - - docker.io/openmined/syft-frontend:0.9.1-beta.7 - - docker.io/openmined/syft-backend:0.9.1-beta.7 + - docker.io/openmined/syft-frontend:0.9.1-beta.8 + - docker.io/openmined/syft-backend:0.9.1-beta.8 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.11.0 From aa27aca88fa9f2a7f2279bca238613399df93219 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Sep 2024 08:24:29 +0530 Subject: [PATCH 13/17] revert changes in worker pool and worker service --- .../service/worker/worker_pool_service.py | 49 ++++++++++++------- .../src/syft/service/worker/worker_service.py | 17 ++----- .../src/syft/service/worker/worker_stash.py | 15 ------ 3 files changed, 34 insertions(+), 47 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 14e5950fa6a..22d560fe661 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -417,14 +417,17 @@ def scale( number: int, pool_id: UID | None = None, pool_name: str | None = None, - force: bool = False, ) -> SyftSuccess: """ Scale the worker pool to the given number of workers in Kubernetes. Allows both scaling up and down the worker pool. 
""" - if number < 0: + if not IN_KUBERNETES: + raise SyftException( + public_message="Scaling is only supported in Kubernetes mode" + ) + elif number < 0: # zero is a valid scale down raise SyftException(public_message=f"Invalid number of workers: {number}") @@ -445,32 +448,42 @@ def scale( registry_password=None, ) else: + # scale down at kubernetes control plane + runner = KubernetesRunner() + scale_kubernetes_pool( + runner, + pool_name=worker_pool.name, + replicas=number, + ).unwrap() + # scale down removes the last "n" workers # workers to delete = len(workers) - number workers_to_delete = worker_pool.worker_list[ -(current_worker_count - number) : ] - # scale down at kubernetes control plane - if IN_KUBERNETES: - runner = KubernetesRunner() - scale_kubernetes_pool( - runner, - pool_name=worker_pool.name, - replicas=number, + worker_stash = context.server.get_service("WorkerService").stash + # delete linkedobj workers + for worker in workers_to_delete: + worker_stash.delete_by_uid( + credentials=context.credentials, + uid=worker.object_uid, ).unwrap() - worker_service = context.server.get_service("WorkerService") - for worker in workers_to_delete: - syft_worker = worker.resolve_with_context(context=context).unwrap() - syft_worker.to_be_deleted = True - worker_service._delete( - context=context, worker=syft_worker, force=force, via_scale=True + # update worker_pool + worker_pool.max_count = number + worker_pool.worker_list = worker_pool.worker_list[:number] + self.stash.update( + credentials=context.credentials, + obj=worker_pool, + ).unwrap( + public_message=( + f"Pool {worker_pool.name} was scaled down, " + f"but failed to update the stash" ) + ) - return SyftSuccess( - message=f"Worker pool '{worker_pool.name}' scaled to {number} workers" - ) + return SyftSuccess(message=f"Worker pool scaled to {number} workers") @service_method( path="worker_pool.filter_by_image_id", diff --git a/packages/syft/src/syft/service/worker/worker_service.py 
b/packages/syft/src/syft/service/worker/worker_service.py index a8bc3d4db35..a324035b2d2 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -19,7 +19,6 @@ from ...types.errors import SyftException from ...types.result import as_result from ...types.uid import UID -from ...util.telemetry import instrument from ..service import AbstractService from ..service import AuthedServiceContext from ..service import service_method @@ -38,7 +37,6 @@ from .worker_stash import WorkerStash -@instrument @serializable(canonical_name="WorkerService", version=1) class WorkerService(AbstractService): store: DocumentStore @@ -137,11 +135,7 @@ def logs( return logs if raw else logs.decode(errors="ignore") def _delete( - self, - context: AuthedServiceContext, - worker: SyftWorker, - force: bool = False, - via_scale: bool = False, + self, context: AuthedServiceContext, worker: SyftWorker, force: bool = False ) -> SyftSuccess: uid = worker.id @@ -161,9 +155,7 @@ def _delete( credentials=context.credentials, pool_name=worker.worker_pool_name ).unwrap() - if IN_KUBERNETES and via_scale: - pass - elif IN_KUBERNETES: + if IN_KUBERNETES: # Kubernetes will only restart the worker NOT REMOVE IT runner = KubernetesRunner() runner.delete_pod(pod_name=worker.name) @@ -189,14 +181,11 @@ def _delete( obj for obj in worker_pool.worker_list if obj.object_uid == uid ) worker_pool.worker_list.remove(worker_linked_object) - worker_pool.max_count -= 1 except StopIteration: pass # Delete worker from worker stash - self.stash.find_and_delete_by_uid( - credentials=context.credentials, uid=uid - ).unwrap() + self.stash.delete_by_uid(credentials=context.credentials, uid=uid).unwrap() # Update worker pool worker_pool_stash.update(context.credentials, obj=worker_pool).unwrap() diff --git a/packages/syft/src/syft/service/worker/worker_stash.py b/packages/syft/src/syft/service/worker/worker_stash.py index 20c85c99c95..b2b059ffec5 
100644 --- a/packages/syft/src/syft/service/worker/worker_stash.py +++ b/packages/syft/src/syft/service/worker/worker_stash.py @@ -10,13 +10,10 @@ from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings from ...store.document_store import QueryKeys -from ...store.document_store import UIDPartitionKey from ...store.document_store_errors import NotFoundException from ...store.document_store_errors import StashException -from ...types.errors import SyftException from ...types.result import as_result from ...types.uid import UID -from ...util.telemetry import instrument from ..action.action_permissions import ActionObjectPermission from ..action.action_permissions import ActionPermission from .worker_pool import ConsumerState @@ -25,7 +22,6 @@ WorkerContainerNamePartitionKey = PartitionKey(key="container_name", type_=str) -@instrument @serializable(canonical_name="WorkerStash", version=1) class WorkerStash(NewBaseUIDStoreStash): object_type = SyftWorker @@ -76,14 +72,3 @@ def update_consumer_state( worker = self.get_by_uid(credentials=credentials, uid=worker_uid).unwrap() worker.consumer_state = consumer_state return self.update(credentials=credentials, obj=worker).unwrap() - - @as_result(StashException, SyftException) - def find_and_delete_by_uid( - self, credentials: SyftVerifyKey, uid: UID, has_permission: bool = False - ) -> bool: - qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) - try: - worker = self.query_one(credentials=credentials, qks=qks).unwrap() - except NotFoundException: - return True - return self.delete_by_uid(credentials=credentials, uid=worker.id).unwrap() From c199d0587bee430cb45744777724e0af1400dad8 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Sep 2024 08:31:31 +0530 Subject: [PATCH 14/17] update scale notebook test --- ...nb => 013-scale-delete-worker-pools.ipynb} | 43 +++++++++++-------- 1 file changed, 25 insertions(+), 18 deletions(-) rename 
notebooks/scenarios/bigquery/{014-scale-delete-worker-pools.ipynb => 013-scale-delete-worker-pools.ipynb} (88%) diff --git a/notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb similarity index 88% rename from notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb rename to notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb index 65cf32950b2..ee946b52843 100644 --- a/notebooks/scenarios/bigquery/014-scale-delete-worker-pools.ipynb +++ b/notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb @@ -190,7 +190,7 @@ "source": [ "# Scale up workers\n", "scale_up_result = high_client.api.worker_pool.scale(\n", - " number=5, pool_name=default_worker_pool.name\n", + " number=2, pool_name=default_worker_pool.name\n", ")\n", "scale_up_result" ] @@ -212,7 +212,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 5" + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 2" ] }, { @@ -220,7 +220,7 @@ "id": "16", "metadata": {}, "source": [ - "##### Scale down gracefully" + "##### Scale down" ] }, { @@ -269,7 +269,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert high_client.api.worker_pool[default_worker_pool.name].max_count == 1" + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" ] }, { @@ -277,7 +277,7 @@ "id": "21", "metadata": {}, "source": [ - "##### Scale down forcefully" + "#### Delete Worker Pool" ] }, { @@ -287,8 +287,10 @@ "metadata": {}, "outputs": [], "source": [ - "# First scale up\n", - "high_client.api.services.worker_pool.scale(number=5, pool_name=default_worker_pool.name)" + "pool_delete_result = high_client.api.services.worker_pool.delete(\n", + " pool_name=default_worker_pool.name\n", + ")\n", + "pool_delete_result" ] }, { @@ -298,7 +300,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert 
high_client.api.services.worker_pool[\"default-pool\"].max_count == 5" + "pool_delete_result" ] }, { @@ -308,19 +310,16 @@ "metadata": {}, "outputs": [], "source": [ - "# Forcefully scale down workers, in this case the workers are terminated immediatedly." + "with sy.raises(KeyError):\n", + " high_client.api.services.worker_pool[default_worker_pool.name]" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "25", "metadata": {}, - "outputs": [], "source": [ - "high_client.api.services.worker_pool.scale(\n", - " 1, pool_name=default_worker_pool.name, force=True\n", - ")" + "#### Re-launch the default worker pool" ] }, { @@ -330,7 +329,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + "default_worker_image = high_client.api.services.worker_image.get_all()[0]" ] }, { @@ -339,7 +338,12 @@ "id": "27", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "launch_result = high_client.api.services.worker_pool.launch(\n", + " pool_name=default_worker_pool.name, image_uid=default_worker_image.id, num_workers=1\n", + ")\n", + "launch_result" + ] }, { "cell_type": "code", @@ -347,7 +351,10 @@ "id": "28", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "assert high_client.api.services.worker_pool[default_worker_pool.name]\n", + "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + ] } ], "metadata": { From 938e4585167cea405c176a48610e92385fddb7e7 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 3 Sep 2024 14:36:51 +1000 Subject: [PATCH 15/17] Fixed small issue with load_jobs - added reset_k8s.sh helper --- notebooks/scenarios/bigquery/job_helpers.py | 3 ++ scripts/reset_k8s.sh | 42 +++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100755 scripts/reset_k8s.sh diff --git a/notebooks/scenarios/bigquery/job_helpers.py b/notebooks/scenarios/bigquery/job_helpers.py index 
3d83a4f05a6..804a218f962 100644 --- a/notebooks/scenarios/bigquery/job_helpers.py +++ b/notebooks/scenarios/bigquery/job_helpers.py @@ -321,6 +321,9 @@ def load_jobs(users, high_client, filepath="./jobs.json"): data = {} jobs_list = [] for user in users: + if user.email not in data: + print(f"{user.email} missing from jobs") + continue user_jobs = data[user.email] for user_job in user_jobs: test_job = TestJob(**user_job) diff --git a/scripts/reset_k8s.sh b/scripts/reset_k8s.sh new file mode 100755 index 00000000000..d0d245be6f2 --- /dev/null +++ b/scripts/reset_k8s.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# WARNING: this will drop the 'app' database in your mongo-0 instance in the syft namespace +echo $1 + +# Dropping the database on mongo-0 +if [ -z $1 ]; then + MONGO_POD_NAME="mongo-0" +else + MONGO_POD_NAME=$1 +fi + +DROPCMD="<&1 + +# Resetting the backend pod +BACKEND_POD=$(kubectl get pods -n syft -o jsonpath="{.items[*].metadata.name}" | tr ' ' '\n' | grep -E ".*backend.*") +if [ -n "$BACKEND_POD" ]; then + kubectl delete pod -n syft $BACKEND_POD + echo "Backend pod $BACKEND_POD has been deleted and will be restarted." +else + echo "No backend pod found." +fi + +# Deleting StatefulSets that end with -pool +POOL_STATEFULSETS=$(kubectl get statefulsets -n syft -o jsonpath="{.items[*].metadata.name}" | tr ' ' '\n' | grep -E ".*-pool$") +if [ -n "$POOL_STATEFULSETS" ]; then + for STATEFULSET in $POOL_STATEFULSETS; do + kubectl delete statefulset -n syft $STATEFULSET + echo "StatefulSet $STATEFULSET has been deleted." + done +else + echo "No StatefulSets ending with '-pool' found." 
+fi + +# wait for backend to come back up +bash packages/grid/scripts/wait_for.sh service backend --namespace syft From 7e7e8cbdc52fa91aeeb7f606dafeb8ec1fab7653 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Sep 2024 09:23:19 +0530 Subject: [PATCH 16/17] fix timeout issue on worker pool scale Added admin credentials to new notebook move scale delete worker up in the flow revert changes in inmemory worker start --- ...nb => 001-scale-delete-worker-pools.ipynb} | 197 +++++++++++------- .../scenarios/bigquery/02-configure-api.ipynb | 11 +- .../syft/src/syft/custom_worker/runner_k8s.py | 3 +- packages/syft/src/syft/server/server.py | 17 +- .../src/syft/service/queue/zmq_consumer.py | 2 +- 5 files changed, 144 insertions(+), 86 deletions(-) rename notebooks/scenarios/bigquery/{013-scale-delete-worker-pools.ipynb => 001-scale-delete-worker-pools.ipynb} (71%) diff --git a/notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb similarity index 71% rename from notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb rename to notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb index ee946b52843..0549593495b 100644 --- a/notebooks/scenarios/bigquery/013-scale-delete-worker-pools.ipynb +++ b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb @@ -95,10 +95,7 @@ "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "\n", - "# syft absolute\n", - "import syft as sy" + "num_workers = int(os.environ.get(\"NUM_TEST_WORKERS\", 1))" ] }, { @@ -108,15 +105,10 @@ "metadata": {}, "outputs": [], "source": [ - "server = sy.orchestra.launch(\n", - " name=\"bigquery-high\",\n", - " dev_mode=True,\n", - " server_side_type=\"high\",\n", - " port=\"8080\",\n", - " n_consumers=1, # How many workers to be spawned\n", - " create_producer=True, # Can produce more workers\n", - " reset=True,\n", - ")" + "# stdlib\n", + "\n", + "# syft absolute\n", + "import syft as sy" ] 
}, { @@ -126,9 +118,15 @@ "metadata": {}, "outputs": [], "source": [ - "high_client = sy.login(\n", - " url=\"http://localhost:8080\", email=\"info@openmined.org\", password=\"changethis\"\n", - ")" + "# third party\n", + "# run email server\n", + "from helpers import EmailServer\n", + "from helpers import SMTPTestServer\n", + "\n", + "email_server = EmailServer()\n", + "email_server.reset_emails()\n", + "smtp_server = SMTPTestServer(email_server)\n", + "smtp_server.start()" ] }, { @@ -138,7 +136,14 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.worker_pools" + "server = sy.orchestra.launch(\n", + " name=\"bigquery-high\",\n", + " dev_mode=True,\n", + " server_side_type=\"high\",\n", + " port=\"8080\",\n", + " n_consumers=num_workers, # How many workers to be spawned\n", + " create_producer=True, # Can produce more workers\n", + ")" ] }, { @@ -148,16 +153,20 @@ "metadata": {}, "outputs": [], "source": [ - "default_worker_pool = high_client.worker_pools.get_by_name(\"default-pool\")\n", - "default_worker_pool" + "ROOT_EMAIL = \"admin@bigquery.org\"\n", + "ROOT_PASSWORD = \"bqpw\"" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "10", "metadata": {}, + "outputs": [], "source": [ - "### Scale Worker pool" + "high_client = sy.login(\n", + " url=\"http://localhost:8080\", email=ROOT_EMAIL, password=ROOT_PASSWORD\n", + ")" ] }, { @@ -167,8 +176,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Scale to 1\n", - "high_client.api.worker_pool.scale(number=1, pool_name=default_worker_pool.name)" + "high_client.worker_pools" ] }, { @@ -178,31 +186,24 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.api.services.worker_pool[0]" + "default_worker_pool = high_client.worker_pools.get_by_name(\"default-pool\")\n", + "default_worker_pool" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "13", "metadata": {}, - "outputs": [], "source": [ - "# Scale up workers\n", - 
"scale_up_result = high_client.api.worker_pool.scale(\n", - " number=2, pool_name=default_worker_pool.name\n", - ")\n", - "scale_up_result" + "### Scale Worker pool" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "14", "metadata": {}, - "outputs": [], "source": [ - "assert scale_up_result" + "##### Scale up" ] }, { @@ -212,15 +213,21 @@ "metadata": {}, "outputs": [], "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 2" + "# Scale to 1\n", + "if environment == \"remote\":\n", + " high_client.api.worker_pool.scale(\n", + " number=num_workers, pool_name=default_worker_pool.name\n", + " )" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "16", "metadata": {}, + "outputs": [], "source": [ - "##### Scale down" + "high_client.api.services.worker_pool[0]" ] }, { @@ -230,21 +237,26 @@ "metadata": {}, "outputs": [], "source": [ - "# Scale down workers, this gracefully shutdowns the consumers\n", - "scale_down_result = high_client.api.worker_pool.scale(\n", - " number=1, pool_name=default_worker_pool.name\n", - ")\n", - "scale_down_result" + "# Scale up workers\n", + "if environment == \"remote\":\n", + " scale_up_result = high_client.api.worker_pool.scale(\n", + " number=5, pool_name=default_worker_pool.name\n", + " )\n", + " if environment == \"remote\":\n", + " assert scale_up_result, scale_up_result\n", + "\n", + " assert (\n", + " high_client.api.services.worker_pool[default_worker_pool.name].max_count\n", + " == 5\n", + " )" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "18", "metadata": {}, - "outputs": [], "source": [ - "assert scale_down_result, scale_down_result" + "##### Scale down" ] }, { @@ -254,12 +266,12 @@ "metadata": {}, "outputs": [], "source": [ - "def has_worker_scaled_down_to_one():\n", - " return high_client.api.worker_pool[default_worker_pool.name].max_count == 1\n", - "\n", - 
"\n", - "worker_scale_timeout = Timeout(timeout_duration=20)\n", - "worker_scale_timeout.run_with_timeout(has_worker_scaled_down_to_one)" + "# Scale down workers, this gracefully shutdowns the consumers\n", + "if environment == \"remote\":\n", + " scale_down_result = high_client.api.worker_pool.scale(\n", + " number=num_workers, pool_name=default_worker_pool.name\n", + " )\n", + " assert scale_down_result, scale_down_result" ] }, { @@ -269,28 +281,38 @@ "metadata": {}, "outputs": [], "source": [ - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + "if environment == \"remote\":\n", + "\n", + " def has_worker_scaled_down():\n", + " return (\n", + " high_client.api.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + " )\n", + "\n", + " worker_scale_timeout = Timeout(timeout_duration=20)\n", + " worker_scale_timeout.run_with_timeout(has_worker_scaled_down)" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "21", "metadata": {}, + "outputs": [], "source": [ - "#### Delete Worker Pool" + "if environment == \"remote\":\n", + " assert (\n", + " high_client.api.services.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + " )" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "22", "metadata": {}, - "outputs": [], "source": [ - "pool_delete_result = high_client.api.services.worker_pool.delete(\n", - " pool_name=default_worker_pool.name\n", - ")\n", - "pool_delete_result" + "#### Delete Worker Pool" ] }, { @@ -300,6 +322,9 @@ "metadata": {}, "outputs": [], "source": [ + "pool_delete_result = high_client.api.services.worker_pool.delete(\n", + " pool_name=default_worker_pool.name\n", + ")\n", "pool_delete_result" ] }, @@ -311,7 +336,7 @@ "outputs": [], "source": [ "with sy.raises(KeyError):\n", - " high_client.api.services.worker_pool[default_worker_pool.name]" + " _ = 
high_client.api.services.worker_pool[default_worker_pool.name]" ] }, { @@ -329,7 +354,7 @@ "metadata": {}, "outputs": [], "source": [ - "default_worker_image = high_client.api.services.worker_image.get_all()[0]" + "default_worker_image = default_worker_pool.image" ] }, { @@ -340,9 +365,10 @@ "outputs": [], "source": [ "launch_result = high_client.api.services.worker_pool.launch(\n", - " pool_name=default_worker_pool.name, image_uid=default_worker_image.id, num_workers=1\n", - ")\n", - "launch_result" + " pool_name=default_worker_pool.name,\n", + " image_uid=default_worker_image.id,\n", + " num_workers=num_workers,\n", + ")" ] }, { @@ -353,8 +379,39 @@ "outputs": [], "source": [ "assert high_client.api.services.worker_pool[default_worker_pool.name]\n", - "assert high_client.api.services.worker_pool[default_worker_pool.name].max_count == 1" + "assert (\n", + " high_client.api.services.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29", + "metadata": {}, + "outputs": [], + "source": [ + "smtp_server.stop()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "30", + "metadata": {}, + "outputs": [], + "source": [ + "server.land()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/notebooks/scenarios/bigquery/02-configure-api.ipynb b/notebooks/scenarios/bigquery/02-configure-api.ipynb index bcec4a3905e..854190b1861 100644 --- a/notebooks/scenarios/bigquery/02-configure-api.ipynb +++ b/notebooks/scenarios/bigquery/02-configure-api.ipynb @@ -457,6 +457,15 @@ "email_server.get_emails_for_user(user_email=ADMIN_EMAIL)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "email_server.get_emails_for_user(user_email=\"admin@bigquery.org\")" + ] + }, { "cell_type": "code", "execution_count": 
null, @@ -535,7 +544,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/packages/syft/src/syft/custom_worker/runner_k8s.py b/packages/syft/src/syft/custom_worker/runner_k8s.py index ddb9765042c..6aff4bbff64 100644 --- a/packages/syft/src/syft/custom_worker/runner_k8s.py +++ b/packages/syft/src/syft/custom_worker/runner_k8s.py @@ -73,12 +73,13 @@ def create_pool( def scale_pool(self, pool_name: str, replicas: int) -> StatefulSet | None: deployment = self.get_pool(pool_name) + timeout = max(SCALE_POOL_TIMEOUT_SEC * replicas, SCALE_POOL_TIMEOUT_SEC) if not deployment: return None deployment.scale(replicas) deployment.wait( f"jsonpath='{JSONPATH_AVAILABLE_REPLICAS}'={replicas}", - timeout=SCALE_POOL_TIMEOUT_SEC, + timeout=timeout, ) return deployment diff --git a/packages/syft/src/syft/server/server.py b/packages/syft/src/syft/server/server.py index ff3826dac6c..89ca780477f 100644 --- a/packages/syft/src/syft/server/server.py +++ b/packages/syft/src/syft/server/server.py @@ -623,6 +623,10 @@ def start_in_memory_workers( worker_pools = self.pool_stash.get_all(credentials=self.verify_key).unwrap() for worker_pool in worker_pools: # type: ignore + # Skip the default worker pool + if worker_pool.name == DEFAULT_WORKER_POOL_NAME: + continue + # Create consumers for each worker pool for linked_worker in worker_pool.worker_list: self.add_consumer_for_service( @@ -639,19 +643,6 @@ def add_consumer_for_service( address: str, message_handler: type[AbstractMessageHandler] = APICallMessageHandler, ) -> None: - def is_syft_worker_consumer_running( - queue_name: str, syft_worker_id: UID - ) -> bool: - consumers = self.queue_manager.consumers.get(queue_name, []) - for consumer in consumers: - if consumer.syft_worker_id == syft_worker_id: - return True - return False - - # Check if the consumer is already running - if is_syft_worker_consumer_running(syft_worker_id, 
message_handler.queue_name): - return - consumer: QueueConsumer = self.queue_manager.create_consumer( message_handler, address=address, diff --git a/packages/syft/src/syft/service/queue/zmq_consumer.py b/packages/syft/src/syft/service/queue/zmq_consumer.py index 1d327b814de..4de8da60494 100644 --- a/packages/syft/src/syft/service/queue/zmq_consumer.py +++ b/packages/syft/src/syft/service/queue/zmq_consumer.py @@ -88,7 +88,7 @@ def close(self) -> None: try: if self.thread is not None: self.thread.join(timeout=THREAD_TIMEOUT_SEC) - if self.thread.is_alive(): + if self.thread is not None and self.thread.is_alive(): logger.error( f"ZMQConsumer thread join timed out during closing. " f"SyftWorker id {self.syft_worker_id}, " From ed8151b78747d71acde6b9831bb626af7074cab7 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 3 Sep 2024 11:50:44 +0200 Subject: [PATCH 17/17] fix --- packages/syft/src/syft/service/action/action_object.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 984dd162491..43054d01974 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1257,7 +1257,7 @@ def refresh_object(self, resolve_nested: bool = True) -> ActionObject: def has_storage_permission(self) -> bool: try: api = self.get_api() - return api.services.action.has_storage_permission(self.id) + return api.services.action.has_storage_permission(self.id.id) except Exception: return False