diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 30453568dfa..43510615ea1 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.9.1-beta.11 +current_version = 0.9.2-beta.2 tag = False tag_name = {new_version} commit = True diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg index d529a21d6f2..960888e7f50 100644 --- a/.bumpversion_stable.cfg +++ b/.bumpversion_stable.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.9.0 +current_version = 0.9.1 tag = False tag_name = {new_version} commit = True diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 2ad68b0730c..02eff919dea 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -60,9 +60,9 @@ jobs: if: steps.changes.outputs.stack == 'true' timeout-minutes: 60 run: | - echo "Skipping pr image test" + tox -e backend.test.basecpu # run: | - # tox -e backend.test.basecpu + # echo "Skipping pr image test" pr-tests-syft-integration: strategy: @@ -508,7 +508,7 @@ jobs: chmod 700 get_helm.sh ./get_helm.sh - - name: Run Notebooks Tests + - name: Run Notebooks Scenario Tests if: steps.changes.outputs.stack == 'true' timeout-minutes: 60 env: @@ -541,8 +541,8 @@ jobs: shell: bash run: | mkdir -p ./k8s-logs - kubectl describe all -A --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-desc-${{ steps.date.outputs.date }}.txt - kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-logs-${{ steps.date.outputs.date }}.txt + kubectl describe all -A --context k3d-bigquery-high --namespace syft > ./k8s-logs/bigquery-high-desc-${{ steps.date.outputs.date }}.txt + kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-bigquery-high --namespace syft > ./k8s-logs/bigquery-high-logs-${{ steps.date.outputs.date }}.txt ls -la ./k8s-logs - name: Upload logs to GitHub @@ -620,3 +620,136 @@ jobs: if: steps.changes.outputs.syft == 'true' run: | tox -e migration.test + + pr-tests-notebook-scenario-k8s-sync: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest] + python-version: ["3.12"] + fail-fast: false + + runs-on: ${{matrix.os}} + + steps: + - name: Permission to home directory + run: | + sudo chown -R $USER:$USER $HOME + - uses: actions/checkout@v4 + - name: Check for file changes + uses: dorny/paths-filter@v3 + id: changes + with: + base: ${{ github.ref }} + token: ${{ github.token }} + filters: .github/file-filters.yml + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + if: steps.changes.outputs.stack == 'true' + with: + python-version: ${{ matrix.python-version }} + + - name: Add K3d Registry + run: | + sudo python ./scripts/patch_hosts.py --add-k3d-registry + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + tool-cache: true + large-packages: false + + # free 10GB of space + - name: Remove unnecessary files + if: matrix.os == 'ubuntu-latest' + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + docker image prune --all --force + docker builder prune --all --force + docker system prune --all --force + - name: Install pip dependencies + if: steps.changes.outputs.stack == 'true' + run: | + python -m pip install --upgrade pip + pip install uv==0.2.17 tox==4.16.0 tox-uv==1.9.0 + uv --version + - name: Get uv cache dir + if: steps.changes.outputs.stack == 'true' + id: pip-cache + shell: bash + 
run: | + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT + - name: Load github cache + uses: actions/cache@v4 + if: steps.changes.outputs.stack == 'true' + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} + restore-keys: | + ${{ runner.os }}-uv-py${{ matrix.python-version }} + - name: Install kubectl + if: steps.changes.outputs.stack == 'true' + run: | + # cleanup apt version + sudo apt remove kubectl || true + # install kubectl 1.27 + curl -LO https://dl.k8s.io/release/v1.27.2/bin/linux/amd64/kubectl + chmod +x kubectl + sudo install kubectl /usr/local/bin; + - name: Install helm + if: steps.changes.outputs.stack == 'true' + run: | + # install helm + curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 + chmod 700 get_helm.sh + ./get_helm.sh + - name: Run Notebook Scenario Sync Tests + if: steps.changes.outputs.stack == 'true' + timeout-minutes: 60 + env: + GITHUB_CI: true + shell: bash + run: | + K3D_VERSION=v5.6.3 + DEVSPACE_VERSION=v6.3.12 + # install k3d + wget https://github.com/k3d-io/k3d/releases/download/${K3D_VERSION}/k3d-linux-amd64 + mv k3d-linux-amd64 k3d + chmod +x k3d + export PATH=`pwd`:$PATH + k3d version + curl -sSL https://github.com/loft-sh/devspace/releases/download/${DEVSPACE_VERSION}/devspace-linux-amd64 -o ./devspace + chmod +x devspace + devspace version + tox -e stack.test.notebook.scenario.k8s.sync + - name: Get current timestamp + id: date + if: failure() + shell: bash + run: echo "date=$(date +%s)" >> $GITHUB_OUTPUT + + - name: Collect logs from k3d + if: steps.changes.outputs.stack == 'true' && failure() + shell: bash + run: | + mkdir -p ./k8s-logs + kubectl describe all -A --context k3d-bigquery-low --namespace syft > ./k8s-logs/bigquery-low-desc-${{ steps.date.outputs.date }}.txt + kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-bigquery-low --namespace syft > ./k8s-logs/bigquery-low-logs-${{ steps.date.outputs.date }}.txt + kubectl describe all -A --context k3d-bigquery-high --namespace syft > ./k8s-logs/bigquery-high-desc-${{ steps.date.outputs.date }}.txt + kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-bigquery-high --namespace syft > ./k8s-logs/bigquery-high-logs-${{ steps.date.outputs.date }}.txt + ls -la ./k8s-logs + - name: Upload logs to GitHub + uses: actions/upload-artifact@master + if: steps.changes.outputs.stack == 'true' && failure() + with: + name: k8s-logs-notebook-${{ matrix.os }}-${{ steps.date.outputs.date }} + path: ./k8s-logs/ + + - name: Cleanup k3d + if: steps.changes.outputs.stack == 'true' && failure() + shell: bash + run: | + export PATH=`pwd`:$PATH + k3d cluster delete bigquery-high || true + k3d cluster delete bigquery-low || true diff --git a/README.md b/README.md index 05eb5058383..52b441cc9f1 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ Launch =0.9,<0.9.1") +sy.requires(">=0.9.1,<0.9.2") server = sy.orchestra.launch( name="my-datasite", @@ -70,7 +70,7 @@ Main way to use a Datasite is via our Syft client, in a Jupyter Notebook. 
Check ```python import syft as sy -sy.requires(">=0.9,<0.9.1") +sy.requires(">=0.9.1,<0.9.2") datasite_client = sy.login( port=8080, @@ -138,12 +138,12 @@ For questions about PySyft, reach out via `#support` on Docs +- `0.9.1` (Stable) - Docs - Install PySyft (Stable): `pip install -U syft` **Latest Beta** -- `0.9.1` (Beta) - `dev` branch 👈🏽 +- `0.9.2` (Beta) - `dev` branch 👈🏽 - Install PySyft (Beta): `pip install -U syft --pre` Find more about previous releases here. diff --git a/VERSION b/VERSION index a5f840e30f5..5e0d33e7748 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.1-beta.11" +__version__ = "0.9.2-beta.2" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/14-container-images.ipynb similarity index 99% rename from notebooks/api/0.8/10-container-images.ipynb rename to notebooks/api/0.8/14-container-images.ipynb index 8d6c0136f96..5588389a792 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/14-container-images.ipynb @@ -150,7 +150,7 @@ " if (bool(os.environ[\"DEV_MODE\"]) and running_as_container)\n", " else sy.__version__\n", ")\n", - "syft_base_worker_tag = \"0.9.0-beta.5\"" + "# syft_base_worker_tag = \"0.9.2-beta.1\"" ] }, { diff --git a/notebooks/scenarios/bigquery/00-start-and-configure-server-and-admins.ipynb b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb similarity index 94% rename from notebooks/scenarios/bigquery/00-start-and-configure-server-and-admins.ipynb rename to notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb index 8a438cbc935..444ba2fe199 100644 --- a/notebooks/scenarios/bigquery/00-start-and-configure-server-and-admins.ipynb +++ b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb @@ -23,10 +23,21 @@ "# stdlib\n", "from os import environ as env\n", "\n", + "# third party\n", + "# run email server\n", + "from helpers import get_email_server\n", + "\n", "# syft absolute\n", "import syft as sy" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Launch & login" + ] + }, { "cell_type": "code", "execution_count": null, @@ -49,23 +60,6 @@ "env[\"DEFAULT_ROOT_PASSWORD\"] = ROOT_PASSWORD" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", - "\n", - "email_server = EmailServer()\n", - "email_server.reset_emails()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" - ] - }, { "cell_type": "code", "execution_count": null, @@ -83,6 +77,15 @@ ")" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "email_server, smtp_server = get_email_server(reset=True)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -195,6 +198,13 @@ "root_client.users.delete(new_user_id2)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb index 4741a74ebce..e46587d23e5 100644 --- a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb +++ 
b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb @@ -13,126 +13,52 @@ "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"" ] }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "#### Helpers" - ] - }, { "cell_type": "code", "execution_count": null, - "id": "2", + "id": "1", "metadata": {}, "outputs": [], "source": [ "# stdlib\n", - "import time\n", - "\n", - "\n", - "class TimeoutError(Exception):\n", - " pass\n", - "\n", - "\n", - "class Timeout:\n", - " def __init__(self, timeout_duration):\n", - " if timeout_duration > 60:\n", - " raise ValueError(\"Timeout duration cannot exceed 60 seconds.\")\n", - " self.timeout_duration = timeout_duration\n", - "\n", - " def run_with_timeout(self, condition_func, *args, **kwargs):\n", - " start_time = time.time()\n", - " result = None\n", - "\n", - " while True:\n", - " elapsed_time = time.time() - start_time\n", - " if elapsed_time > self.timeout_duration:\n", - " raise TimeoutError(\n", - " f\"Function execution exceeded {self.timeout_duration} seconds.\"\n", - " )\n", + "import os\n", "\n", - " # Check if the condition is met\n", - " try:\n", - " if condition_func():\n", - " print(\"Condition met, exiting early.\")\n", - " break\n", - " except Exception as e:\n", - " print(f\"Exception in target function: {e}\")\n", - " break # Exit the loop if an exception occurs in the function\n", - " time.sleep(1)\n", + "# third party\n", + "from helpers import Timeout\n", + "from helpers import get_email_server\n", "\n", - " return result" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "### Import lib" + "# syft absolute\n", + "import syft as sy" ] }, { "cell_type": "code", "execution_count": null, - "id": "4", + "id": "2", "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "import os\n", - "\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "environment" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "num_workers = int(os.environ.get(\"NUM_TEST_WORKERS\", 1))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", "\n", - "# syft absolute\n", - "import syft as sy" + "num_workers = int(os.environ.get(\"NUM_TEST_WORKERS\", 1))\n", + "\n", + "ROOT_EMAIL = \"admin@bigquery.org\"\n", + "ROOT_PASSWORD = \"bqpw\"\n", + "environment" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "7", + "cell_type": "markdown", + "id": "3", "metadata": {}, - "outputs": [], "source": [ - "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", - "\n", - "email_server = EmailServer()\n", - "email_server.reset_emails()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "### Launch server & login" ] }, { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -149,18 +75,17 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "5", "metadata": {}, "outputs": [], "source": [ - "ROOT_EMAIL = \"admin@bigquery.org\"\n", - "ROOT_PASSWORD = \"bqpw\"" + "email_server, smtp_server = get_email_server(reset=True)" ] }, { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -172,7 +97,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "11", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -182,7 +107,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -192,7 +117,7 @@ }, { "cell_type": "markdown", - "id": "13", + "id": "9", "metadata": {}, "source": [ "### Scale Worker pool" @@ -200,7 +125,7 @@ }, { "cell_type": "markdown", - "id": "14", + "id": "10", "metadata": {}, "source": [ "##### Scale up" @@ -209,7 +134,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -223,7 +148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -233,7 +158,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -253,7 +178,7 @@ }, { "cell_type": "markdown", - "id": "18", + "id": "14", "metadata": {}, "source": [ "##### Scale down" @@ -262,7 +187,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -277,7 +202,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -296,7 +221,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -309,7 +234,7 @@ }, { "cell_type": "markdown", - "id": "22", + "id": "18", "metadata": {}, "source": [ "#### Delete Worker Pool" @@ -318,7 +243,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -331,7 +256,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -341,7 +266,7 @@ }, { "cell_type": "markdown", - "id": "25", + "id": "21", "metadata": {}, "source": [ "#### Re-launch the default worker pool" @@ -350,7 +275,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -360,7 +285,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -374,7 +299,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -388,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -398,7 +323,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -408,7 +333,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31", + "id": "27", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/scenarios/bigquery/01-setup-datasite.ipynb b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb similarity index 92% rename from notebooks/scenarios/bigquery/01-setup-datasite.ipynb rename to notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb index 6ce2e541480..1280a4e19d7 100644 --- a/notebooks/scenarios/bigquery/01-setup-datasite.ipynb +++ b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb @@ -21,8 +21,12 @@ "# stdlib\n", "import os\n", "\n", - "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "environment" + "# third party\n", + "from helpers import get_email_server\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + 
"from syft import test_settings" ] }, { @@ -31,27 +35,15 @@ "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "\n", - "# syft absolute\n", - "import syft as sy\n", - "from syft import test_settings" + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "environment" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", - "\n", - "email_server = EmailServer()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "# Launch & login" ] }, { @@ -70,6 +62,15 @@ ")" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "_, smtp_server = get_email_server(reset=True)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -82,6 +83,20 @@ ")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Submit images and build pools" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Add registry" + ] + }, { "cell_type": "code", "execution_count": null, @@ -140,6 +155,13 @@ "local_registry" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Upload Image" + ] + }, { "cell_type": "code", "execution_count": null, @@ -259,6 +281,13 @@ "# worker_docker_tag" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Build image" + ] + }, { "cell_type": "code", "execution_count": null, @@ -338,23 +367,6 @@ "dockerfile_list" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# worker_image = next(\n", - "# (\n", - "# image\n", - "# for image in dockerfile_list\n", - "# if image.is_prebuilt and docker_tag in str(image.image_identifier)\n", - "# ),\n", - "# None,\n", - "# )\n", - "# worker_image" - ] - }, { "cell_type": "code", "execution_count": null, @@ -407,6 +419,13 @@ "num_workers = int(os.environ.get(\"NUM_TEST_WORKERS\", 1))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Launch pool" + ] + }, { "cell_type": "code", "execution_count": null, @@ -423,6 +442,13 @@ "result" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Scale pool" + ] + }, { "cell_type": "code", "execution_count": null, @@ -463,6 +489,13 @@ "assert len(high_client.api.services.user.get_all()) == 2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb index b3ca595d812..990f91183d8 100644 --- a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb +++ b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb @@ -22,23 +22,14 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", "# stdlib\n", "import os\n", "\n", - "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "environment" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ "# third party\n", "from helpers import SENDER\n", "from helpers import create_user\n", + "from helpers import get_email_server\n", "from helpers import make_user\n", "from helpers import save_users\n", "\n", @@ -49,18 +40,22 @@ { "cell_type": "code", 
"execution_count": null, - "id": "3", + "id": "2", "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", + "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", "\n", - "email_server = EmailServer()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "environment" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "# Launch server & login" ] }, { @@ -87,7 +82,16 @@ "metadata": {}, "outputs": [], "source": [ - "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", + "email_server, smtp_server = get_email_server()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ "high_client = sy.login(\n", " url=\"http://localhost:8080\", email=ADMIN_EMAIL, password=ADMIN_PW\n", ")" @@ -96,7 +100,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -106,7 +110,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -121,7 +125,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -131,7 +135,7 @@ }, { "cell_type": "markdown", - "id": "9", + "id": "10", "metadata": {}, "source": [ "# Register users" @@ -140,7 +144,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -151,7 +155,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -173,7 +177,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -183,7 +187,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -193,10 +197,18 @@ "await asyncio.sleep(5)" ] }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "## Verify Emails are sent" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -215,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -240,7 +252,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -250,7 +262,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -261,7 +273,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -275,7 +287,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -287,7 +299,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -297,7 +309,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -305,13 +317,22 @@ "# high_client.api.services.settings.enable_notifications()" ] }, + { + "cell_type": "markdown", + "id": "24", + "metadata": {}, + "source": [ + "## 
Test reset password" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "25", "metadata": {}, "outputs": [], "source": [ + "# This is necessary as it sets the new token value in user.reset_token\n", "token = reset_password_user.get_token()\n", "token" ] @@ -319,10 +340,11 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "26", "metadata": {}, "outputs": [], "source": [ + "# This is necessary as it sets the new password value in user.new_password\n", "passw = reset_password_user.make_new_password()\n", "passw" ] @@ -330,7 +352,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -341,7 +363,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -354,7 +376,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -364,18 +386,106 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "30", "metadata": {}, "outputs": [], "source": [ "# relogin\n", - "reset_password_user.client = reset_password_user.client" + "reset_password_user.relogin()\n", + "# reset_password_user.client = reset_password_user.client" + ] + }, + { + "cell_type": "markdown", + "id": "31", + "metadata": {}, + "source": [ + "## Reset password second time" ] }, { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "32", + "metadata": {}, + "outputs": [], + "source": [ + "reset_password_user.client.guest().forgot_password(email=reset_password_user.email)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33", + "metadata": {}, + "outputs": [], + "source": [ + "output = reset_password_user.client.guest().reset_password(\n", + " token=reset_password_user.get_token(),\n", + " new_password=reset_password_user.make_new_password(),\n", + ")\n", + "output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "34", + "metadata": {}, + "outputs": [], + "source": [ + "assert isinstance(output, sy.SyftSuccess)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35", + "metadata": {}, + "outputs": [], + "source": [ + "# print(f\"token:\\t\\t {reset_password_user.reset_token}\\n\\\n", + "# password:\\t {reset_password_user.password}\\n\\\n", + "# new password:\\t {reset_password_user.new_password}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36", + "metadata": {}, + "outputs": [], + "source": [ + "reset_password_user.update_password()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37", + "metadata": {}, + "outputs": [], + "source": [ + "# print(f\"token:\\t\\t {reset_password_user.reset_token}\\n\\\n", + "# password:\\t {reset_password_user.password}\\n\\\n", + "# new password:\\t {reset_password_user.new_password}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38", + "metadata": {}, + "outputs": [], + "source": [ + "# relogin\n", + "reset_password_user.relogin()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -385,7 +495,115 @@ { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "40", + "metadata": {}, + "outputs": [], + "source": [ + "reset_password_user" + ] + }, + { + "cell_type": "markdown", + "id": "41", + "metadata": {}, + "source": [ + "## Reduce token expiration and try resetting" + ] + }, + { + 
"cell_type": "code", + "execution_count": null, + "id": "42", + "metadata": {}, + "outputs": [], + "source": [ + "# Variable is poorly named, token expiration time is in seconds and not minutes\n", + "high_client.api.services.settings.update(pwd_token_config={\"token_exp_min\": 3})\n", + "high_client.refresh()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "43", + "metadata": {}, + "outputs": [], + "source": [ + "reset_password_user.client.guest().forgot_password(email=reset_password_user.email)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "44", + "metadata": {}, + "outputs": [], + "source": [ + "# Wait 3 seconds to ensure token expires\n", + "await asyncio.sleep(3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "45", + "metadata": {}, + "outputs": [], + "source": [ + "# This should throw a SyftError because we waited too long\n", + "output = reset_password_user.client.guest().reset_password(\n", + " token=reset_password_user.get_token(),\n", + " new_password=reset_password_user.make_new_password(),\n", + ")\n", + "output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46", + "metadata": {}, + "outputs": [], + "source": [ + "assert isinstance(output, sy.SyftError)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "47", + "metadata": {}, + "outputs": [], + "source": [ + "# relogin\n", + "with sy.raises(sy.SyftException, show=True):\n", + " reset_password_user.relogin()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48", + "metadata": {}, + "outputs": [], + "source": [ + "# Set things back to the the default settings\n", + "high_client.api.services.settings.update(pwd_token_config={\"token_exp_min\": 1800})\n", + "high_client.refresh()" + ] + }, + { + "cell_type": "markdown", + "id": "49", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -395,7 +613,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -405,7 +623,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31", + "id": "52", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/scenarios/bigquery/02-configure-api.ipynb b/notebooks/scenarios/bigquery/020-configure-api.ipynb similarity index 85% rename from notebooks/scenarios/bigquery/02-configure-api.ipynb rename to notebooks/scenarios/bigquery/020-configure-api.ipynb index 9f0051c141c..c3ad678a00b 100644 --- a/notebooks/scenarios/bigquery/02-configure-api.ipynb +++ b/notebooks/scenarios/bigquery/020-configure-api.ipynb @@ -19,12 +19,7 @@ "outputs": [], "source": [ "# set to use the live APIs\n", - "# import os\n", - "# os.environ[\"TEST_BIGQUERY_APIS_LIVE\"] = \"True\"\n", - "# third party\n", - "from apis import make_schema\n", - "from apis import make_submit_query\n", - "from apis import make_test_query" + "# os.environ[\"TEST_BIGQUERY_APIS_LIVE\"] = \"True\"" ] }, { @@ -33,6 +28,14 @@ "metadata": {}, "outputs": [], "source": [ + "# third party\n", + "from apis import make_schema\n", + "from apis import make_submit_query\n", + "from apis import make_test_query\n", + "\n", + "# run email server\n", + "from helpers import get_email_server\n", + "\n", "# syft absolute\n", "import syft as sy\n", "from syft import test_settings" @@ -44,14 +47,14 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "# run email 
server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", - "\n", - "email_server = EmailServer()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Launch server & login" ] }, { @@ -76,7 +79,7 @@ "metadata": {}, "outputs": [], "source": [ - "this_worker_pool_name = \"bigquery-pool\"" + "email_server, smtp_server = get_email_server()" ] }, { @@ -85,12 +88,20 @@ "metadata": {}, "outputs": [], "source": [ - "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", "high_client = sy.login(\n", " url=\"http://localhost:8080\", email=ADMIN_EMAIL, password=ADMIN_PW\n", ")" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "this_worker_pool_name = \"bigquery-pool\"" + ] + }, { "cell_type": "code", "execution_count": null, @@ -100,6 +111,13 @@ "assert len(high_client.worker_pools.get_all()) == 2" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create `test_query` endpoint" + ] + }, { "cell_type": "code", "execution_count": null, @@ -173,6 +191,13 @@ "high_client.custom_api.add(endpoint=new_endpoint)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Update `test_query` endpoint" + ] + }, { "cell_type": "code", "execution_count": null, @@ -196,6 +221,13 @@ ")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test `test_query` endpoint" + ] + }, { "cell_type": "code", "execution_count": null, @@ -238,12 +270,11 @@ "metadata": {}, "outputs": [], "source": [ - "schema_function = make_schema(\n", - " settings={\n", - " \"calls_per_min\": 5,\n", - " },\n", - " worker_pool=this_worker_pool_name,\n", - ")" + "# Test private version\n", + "result = high_client.api.services.bigquery.test_query.private(\n", + " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", + ")\n", + "result" ] }, { @@ -252,8 +283,7 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.custom_api.add(endpoint=schema_function)\n", - "high_client.refresh()" + "assert len(result) == 10" ] }, { @@ -262,9 +292,13 @@ "metadata": {}, "outputs": [], "source": [ - "submit_query_function = make_submit_query(\n", - " settings={}, worker_pool=this_worker_pool_name\n", - ")" + "# Test mock version for wrong queries\n", + "with sy.raises(\n", + " sy.SyftException(public_message=\"*must be qualified with a dataset*\"), show=True\n", + "):\n", + " _ = high_client.api.services.bigquery.test_query.mock(\n", + " sql_query=\"SELECT * FROM invalid_table LIMIT 1\"\n", + " )" ] }, { @@ -273,7 +307,11 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.custom_api.add(endpoint=submit_query_function)" + "# Test private version\n", + "result = high_client.api.services.bigquery.test_query.private(\n", + " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 1\"\n", + ")\n", + "result" ] }, { @@ -282,9 +320,14 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.api.services.api.update(\n", - " endpoint_path=\"bigquery.submit_query\", hide_mock_definition=True\n", - ")" + "assert len(result) == 1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Inspect endpoint state" ] }, { @@ -293,7 +336,9 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.custom_api.api_endpoints()" + "# Inspect the context state on an endpoint\n", + "state = 
high_client.api.services.bigquery.test_query.mock.context.state\n", + "state" ] }, { @@ -302,7 +347,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(high_client.custom_api.api_endpoints()) == 3" + "len(state[ADMIN_EMAIL])" ] }, { @@ -311,7 +356,14 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.api.services.bigquery.test_query" + "assert len(state[ADMIN_EMAIL]) >= 2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create `schema` endpoint" ] }, { @@ -320,7 +372,12 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.api.services.bigquery.submit_query" + "schema_function = make_schema(\n", + " settings={\n", + " \"calls_per_min\": 5,\n", + " },\n", + " worker_pool=this_worker_pool_name,\n", + ")" ] }, { @@ -329,11 +386,15 @@ "metadata": {}, "outputs": [], "source": [ - "# Test mock version\n", - "result = high_client.api.services.bigquery.test_query.mock(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", - ")\n", - "result" + "high_client.custom_api.add(endpoint=schema_function)\n", + "high_client.refresh()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test `schema` endpoint" ] }, { @@ -342,10 +403,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Test private version\n", - "result = high_client.api.services.bigquery.test_query.private(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", - ")\n", + "# Testing schema\n", + "result = high_client.api.services.bigquery.schema()\n", "result" ] }, @@ -355,7 +414,14 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(result) == 10" + "assert len(result) == 23" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create `submit_query` endpoint" ] }, { @@ -364,13 +430,9 @@ "metadata": {}, "outputs": [], "source": [ - "# Test mock version for wrong queries\n", - "with sy.raises(\n", - " sy.SyftException(public_message=\"*must be qualified with a dataset*\"), show=True\n", - "):\n", - " _ = high_client.api.services.bigquery.test_query.mock(\n", - " sql_query=\"SELECT * FROM invalid_table LIMIT 1\"\n", - " )" + "submit_query_function = make_submit_query(\n", + " settings={}, worker_pool=this_worker_pool_name\n", + ")" ] }, { @@ -379,11 +441,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Test private version\n", - "result = high_client.api.services.bigquery.test_query.private(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 1\"\n", - ")\n", - "result" + "high_client.custom_api.add(endpoint=submit_query_function)" ] }, { @@ -392,7 +450,9 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(result) == 1" + "high_client.api.services.api.update(\n", + " endpoint_path=\"bigquery.submit_query\", hide_mock_definition=True\n", + ")" ] }, { @@ -401,9 +461,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Inspect the context state on an endpoint\n", - "state = high_client.api.services.bigquery.test_query.mock.context.state\n", - "state" + "high_client.custom_api.api_endpoints()" ] }, { @@ -412,7 +470,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(state[ADMIN_EMAIL]) >= 3" + "assert len(high_client.custom_api.api_endpoints()) == 3" ] }, { @@ -421,9 +479,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Testing schema\n", - "result = high_client.api.services.bigquery.schema()\n", - "result" + "high_client.api.services.bigquery.test_query" ] }, { @@ -432,7 +488,14 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(result) == 23" + 
"high_client.api.services.bigquery.submit_query" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test `submit_query` endpoint" ] }, { @@ -448,13 +511,23 @@ ")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Test emails" + ] + }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "email_server.get_emails_for_user(user_email=ADMIN_EMAIL)" + "assert (\n", + " \"Job Failed\"\n", + " in email_server.get_emails_for_user(user_email=ADMIN_EMAIL)[0].email_content\n", + ")" ] }, { @@ -502,6 +575,13 @@ "assert \"Query submitted\" in result" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, @@ -529,6 +609,11 @@ } ], "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, "language_info": { "codemirror_mode": { "name": "ipython", diff --git a/notebooks/scenarios/bigquery/021-create-jobs.ipynb b/notebooks/scenarios/bigquery/021-create-jobs.ipynb index ea549d3625d..3625af65c88 100644 --- a/notebooks/scenarios/bigquery/021-create-jobs.ipynb +++ b/notebooks/scenarios/bigquery/021-create-jobs.ipynb @@ -34,10 +34,15 @@ "outputs": [], "source": [ "# stdlib\n", + "from collections import Counter\n", "import os\n", "\n", - "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "environment" + "# third party\n", + "from helpers import get_email_server\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + "from syft.service.job.job_stash import JobStatus" ] }, { @@ -47,10 +52,8 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "\n", - "# syft absolute\n", - "import syft as sy" + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "environment" ] }, { @@ -60,26 +63,17 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", - "\n", - "email_server = EmailServer()\n", - "email_server.reset_emails()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "SERVER_PORT = \"8080\"\n", + "SERVER_URL = f\"http://localhost:{SERVER_PORT}\"\n", + "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "5", "metadata": {}, - "outputs": [], "source": [ - "SERVER_PORT = \"8080\"\n", - "SERVER_URL = f\"http://localhost:{SERVER_PORT}\"" + "# Launch server & login" ] }, { @@ -106,7 +100,16 @@ "metadata": {}, "outputs": [], "source": [ - "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", + "email_server, smtp_server = get_email_server(reset=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ "high_client = sy.login(\n", " url=\"http://localhost:8080\", email=ADMIN_EMAIL, password=ADMIN_PW\n", ")" @@ -115,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -126,7 +129,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -135,7 +138,7 @@ }, { "cell_type": "markdown", - "id": "10", + "id": "11", "metadata": {}, "source": [ "# Create jobs" @@ -144,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", 
"metadata": {}, "outputs": [], "source": [ @@ -156,43 +159,40 @@ }, { "cell_type": "markdown", - "id": "12", + "id": "13", "metadata": {}, "source": [ - "# Test queries" + "# Inspect job data (requests for these jobs to be created)" ] }, { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": {}, "outputs": [], "source": [ "num_jobs = int(os.environ.get(\"NUM_TEST_JOBS\", 10))\n", "\n", - "jobs = create_jobs(users, total_jobs=num_jobs)" + "jobs_data = create_jobs(users, total_jobs=num_jobs)" ] }, { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "15", "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "from collections import Counter\n", - "\n", - "counts = Counter([j.job_type for j in jobs])\n", + "counts = Counter([j.job_type for j in jobs_data])\n", "for k, v in counts.most_common():\n", - " print(f\"number of {k}: {v}\")" + " print(f\"{k}: #{v}\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -203,45 +203,41 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": {}, "outputs": [], "source": [ - "print(f\"num jobs: {len(jobs)}\")\n", - "num_should_submit = 0\n", - "for job in jobs:\n", - " print(\n", - " f\"Job type: {job.job_type}, should succeed: {job.should_succeed}, should submit: {job.should_submit}\"\n", - " )\n", - " if job.should_submit:\n", - " num_should_submit += 1" + "print(f\"{len(jobs_data)=}\")\n", + "\n", + "for job in jobs_data:\n", + " print(f\"{job.job_type=}, {job.should_succeed=}, {job.should_submit=}\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": {}, "outputs": [], "source": [ - "assert len(jobs) == num_jobs\n", - "assert all(isinstance(j, TestJob) for j in jobs)\n", - "assert all(job.client is not None for job in jobs)" + "assert len(jobs_data) == num_jobs\n", + "assert all(isinstance(j, TestJob) for j in jobs_data)\n", + "assert all(job.client is not None for job in jobs_data)" ] }, { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "19", "metadata": {}, "outputs": [], "source": [ - "save_jobs(jobs)" + "save_jobs(jobs_data)" ] }, { "cell_type": "markdown", - "id": "19", + "id": "20", "metadata": {}, "source": [ "# Submit jobs\n" @@ -250,136 +246,170 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": {}, "outputs": [], "source": [ "admin_emails_before = len(email_server.get_emails_for_user(\"admin@bigquery.org\"))\n", - "print(\"admin emails before\", admin_emails_before)\n", - "admin_emails_before" + "print(f\"{admin_emails_before=}\")" + ] + }, + { + "cell_type": "markdown", + "id": "22", + "metadata": {}, + "source": [ + "## Test Succesful jobs" ] }, { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "23", "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "\n", - "responses = []\n", - "\n", - "for job in jobs:\n", + "jobs_submit_should_succeed = [j for j in jobs_data if j.should_submit]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "for job in jobs_submit_should_succeed:\n", + " client = job.client\n", + " response = client.api.services.bigquery.submit_query(\n", + " func_name=job.func_name, query=job.query\n", + " )\n", + " job.code_path = extract_code_path(response)" + ] + }, + { + "cell_type": "markdown", + "id": "25", + "metadata": {}, + "source": [ + "## Test 
failures" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "jobs_submit_should_fail = [j for j in jobs_data if not j.should_submit]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [ + "for job in jobs_submit_should_fail:\n", " client = job.client\n", "\n", - " if not job.should_submit:\n", - " # Submitting should throw error (eg func_name invalid syntax)\n", - " with sy.raises(sy.SyftException):\n", - " response = client.api.services.bigquery.submit_query(\n", - " func_name=job.func_name, query=job.query\n", - " )\n", - " responses.append(None)\n", - " else:\n", - " response = client.api.services.bigquery.submit_query(\n", + " with sy.raises(sy.SyftException):\n", + " client.api.services.bigquery.submit_query(\n", " func_name=job.func_name, query=job.query\n", - " )\n", - " job.code_path = extract_code_path(response)\n", - " responses.append(response)\n", - "\n", - " # time.sleep(1)" + " )" ] }, { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "28", "metadata": {}, "outputs": [], "source": [ - "for job in jobs:\n", - " print(\n", - " f\"Job {job.func_name:.20} is submitted, {job.should_submit}, should be submitted {job.is_submitted}\"\n", - " )\n", + "for job in jobs_data:\n", + " print(f\"Job {job.func_name:.20} {job.should_submit=}, {job.is_submitted=}\")\n", "\n", - "assert all(job.is_submitted == job.should_submit for job in jobs)" + "assert all(job.is_submitted == job.should_submit for job in jobs_data)" ] }, { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "29", "metadata": {}, "outputs": [], "source": [ - "save_jobs(jobs)" + "save_jobs(jobs_data)" ] }, { "cell_type": "markdown", - "id": "24", + "id": "30", "metadata": {}, "source": [ - "## Test: cannot execute" + "## Test: cannot execute submitted jobs yet" ] }, { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "31", "metadata": {}, "outputs": [], "source": [ - "submitted_jobs = [job for job in jobs if job.should_submit]\n", - "\n", + "submitted_jobs = [job for job in jobs_data if job.should_submit]\n", + "job_execution_fns = [getattr(job.client.code, job.code_path) for job in submitted_jobs]\n", "assert len(submitted_jobs) # failsafe for next tests" ] }, { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "32", "metadata": {}, "outputs": [], "source": [ - "# Blocking\n", - "\n", - "for job in submitted_jobs:\n", - " execute_code_fn = getattr(job.client.code, job.code_path)\n", + "for fn in job_execution_fns:\n", + " # blocking\n", " with sy.raises(\n", " sy.SyftException(public_message=\"*Your code is waiting for approval*\")\n", " ):\n", - " result = execute_code_fn()" + " result = fn()" ] }, { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "33", "metadata": {}, "outputs": [], "source": [ - "# Nonblocking\n", - "\n", - "# syft absolute\n", - "from syft.service.job.job_stash import JobStatus\n", - "\n", - "for job in submitted_jobs:\n", - " execute_code_fn = getattr(job.client.code, job.code_path)\n", - " result_job = execute_code_fn(blocking=False)\n", + "for fn in job_execution_fns:\n", + " # nonblocking\n", + " result_job = fn(blocking=False)\n", " result_job.wait()\n", " assert isinstance(result_job.result, sy.SyftError)\n", - " assert result_job.status == JobStatus.ERRORED\n", - " # time.sleep(1)" + " assert result_job.status == JobStatus.ERRORED" + ] + }, + { + "cell_type": 
"markdown", + "id": "34", + "metadata": {}, + "source": [ + "# Verify that admin has emails for submitted requests" ] }, { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "35", "metadata": {}, "outputs": [], "source": [ + "num_should_submit = sum(j.should_submit for j in jobs_data)\n", "admin_emails_after = len(email_server.get_emails_for_user(\"admin@bigquery.org\"))\n", "print(\"admin emails after\", admin_emails_after)\n", "assert admin_emails_after >= admin_emails_before + num_should_submit\n", @@ -387,10 +417,18 @@ "# assert len(users_emails) == after_number_of_emails + 1" ] }, + { + "cell_type": "markdown", + "id": "36", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -400,7 +438,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -410,7 +448,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31", + "id": "39", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/scenarios/bigquery/04-do-review-requests.ipynb b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb similarity index 61% rename from notebooks/scenarios/bigquery/04-do-review-requests.ipynb rename to notebooks/scenarios/bigquery/040-do-review-requests.ipynb index 716eb756c43..9f0a301f2e3 100644 --- a/notebooks/scenarios/bigquery/04-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb @@ -18,14 +18,18 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", + "import random\n", + "\n", "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", + "from helpers import get_email_server\n", + "from job_helpers import approve_by_running\n", + "from job_helpers import get_job_emails\n", + "from job_helpers import get_request_for_job_info\n", "\n", - "email_server = EmailServer()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "# syft absolute\n", + "import syft as sy\n", + "from syft.service.job.job_stash import Job" ] }, { @@ -34,12 +38,14 @@ "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", - "import random\n", - "\n", - "# syft absolute\n", - "import syft as sy\n", - "from syft.service.job.job_stash import Job" + "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Start server & login" ] }, { @@ -64,7 +70,6 @@ "metadata": {}, "outputs": [], "source": [ - "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", "high_client = sy.login(\n", " url=\"http://localhost:8080\", email=ADMIN_EMAIL, password=ADMIN_PW\n", ")" @@ -76,7 +81,14 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.requests.get_all_pending()" + "email_server, smtp_server = get_email_server()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Review requests" ] }, { @@ -97,7 +109,7 @@ "metadata": {}, "outputs": [], "source": [ - "users = load_users(high_client)" + "high_client.requests.get_all_pending()" ] }, { @@ -106,7 +118,14 @@ "metadata": {}, "outputs": [], "source": [ - "jobs = load_jobs(users, high_client)" + "users = load_users(high_client)\n", + "jobs_data = load_jobs(users, high_client)\n", + "all_requests = high_client.requests\n", + "submitted_jobs_data = [job for job in jobs_data if job.is_submitted]\n", + 
"n_emails_per_job_user = {\n", + " k: len(v)\n", + " for k, v in get_job_emails(submitted_jobs_data, high_client, email_server).items()\n", + "}" ] }, { @@ -115,7 +134,17 @@ "metadata": {}, "outputs": [], "source": [ - "all_requests = high_client.requests" + "# TODO we should record whether it was approved or deposited\n", + "# and test doing both in either order as there might be a bug when\n", + "# force overwriting\n", + "# also changing deny to approve and back again" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run or deny" ] }, { @@ -124,15 +153,12 @@ "metadata": {}, "outputs": [], "source": [ - "def get_request_for_job(requests, job):\n", - " job_requests = [\n", - " r for r in all_requests if r.code.service_func_name == job.func_name\n", - " ]\n", - " if len(job_requests) == 1:\n", - " return job_requests[0]\n", - " if len(job_requests) > 1:\n", - " raise Exception(f\"Multiple of the same job: {job} in requests: {requests}\")\n", - " return None" + "submitted_jobs_data_should_succeed = [\n", + " j for j in submitted_jobs_data if j.should_succeed\n", + "]\n", + "submitted_jobs_data_should_fail = [\n", + " j for j in submitted_jobs_data if not j.should_succeed\n", + "]" ] }, { @@ -141,7 +167,18 @@ "metadata": {}, "outputs": [], "source": [ - "submitted_jobs = [job for job in jobs if job.is_submitted]" + "for job in submitted_jobs_data_should_succeed:\n", + " request = get_request_for_job_info(all_requests, job)\n", + " if random.randrange(2):\n", + " choice = \"approved with deposit_result\"\n", + " response = approve_by_running(request)\n", + " assert isinstance(response, Job)\n", + " else:\n", + " choice = \"approved\"\n", + " response = request.approve()\n", + " assert isinstance(response, sy.SyftSuccess)\n", + " print(f\"Job {job.func_name} should succeed: {job.should_succeed} and was {choice}\")\n", + " job.admin_reviewed = True" ] }, { @@ -150,17 +187,21 @@ "metadata": {}, "outputs": [], "source": [ - "def approve_by_running(request):\n", - " job = request.code(blocking=False)\n", - " result = job.wait()\n", - " print(\"got result of type\", type(result), \"bool\", bool(result))\n", - " # got result of type bool False\n", - " # assert result won't work unless we know what type is coming back\n", - " job_info = job.info(result=True)\n", - " # need force when running multiple times\n", - " # todo check and dont run if its already done\n", - " response = request.deposit_result(job_info, approve=True, force=True)\n", - " return response" + "for job in submitted_jobs_data_should_fail:\n", + " request = get_request_for_job_info(all_requests, job)\n", + " response = request.deny(\n", + " reason=f\"Your request {job.func_name} looks wrong, try again.\"\n", + " )\n", + " assert isinstance(response, sy.SyftSuccess)\n", + " assert not job.should_succeed\n", + " job.admin_reviewed = True" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Verify that users have new emails" ] }, { @@ -169,10 +210,10 @@ "metadata": {}, "outputs": [], "source": [ - "# TODO we should record whether it was approved or deposited\n", - "# and test doing both in either order as there might be a bug when\n", - "# force overwriting\n", - "# also changing deny to approve and back again" + "new_n_emails_per_job_user = {\n", + " k: len(v)\n", + " for k, v in get_job_emails(submitted_jobs_data, high_client, email_server).items()\n", + "}" ] }, { @@ -181,34 +222,16 @@ "metadata": {}, "outputs": [], "source": [ - "for job in submitted_jobs:\n", - " request = 
get_request_for_job(all_requests, job)\n", - " choice = None\n", - " number_of_emails = len(\n", - " email_server.get_emails_for_user(request.requesting_user_email)\n", - " )\n", - " if job.should_succeed:\n", - " if random.randrange(2):\n", - " choice = \"approved with deposit_result\"\n", - " response = approve_by_running(request)\n", - " assert isinstance(response, Job)\n", - " else:\n", - " choice = \"approved\"\n", - " response = request.approve()\n", - " assert isinstance(response, sy.SyftSuccess)\n", - " else:\n", - " choice = \"denied\"\n", - " response = request.deny(\n", - " reason=f\"Your request {job.func_name} looks wrong, try again.\"\n", - " )\n", - " assert isinstance(response, sy.SyftSuccess)\n", - "\n", - " after_users_emails = len(\n", - " email_server.get_emails_for_user(request.requesting_user_email)\n", - " )\n", - " # assert after_users_emails > number_of_emails\n", - " job.admin_reviewed = True\n", - " print(f\"Job {job.func_name} should succeed: {job.should_succeed} and was {choice}\")" + "# for user_email, new_count in new_n_emails_per_job_user.items():\n", + "# old_count = n_emails_per_job_user[user_email]\n", + "# assert new_count > old_count" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Save state" ] }, { @@ -217,7 +240,7 @@ "metadata": {}, "outputs": [], "source": [ - "save_jobs(jobs)" + "save_jobs(jobs_data)" ] }, { @@ -238,6 +261,13 @@ "high_client.requests.get_all_rejected()" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/notebooks/scenarios/bigquery/05-ds-get-results.ipynb b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb similarity index 68% rename from notebooks/scenarios/bigquery/05-ds-get-results.ipynb rename to notebooks/scenarios/bigquery/050-ds-get-results.ipynb index 843fdefcaf6..72ab42122c1 100644 --- a/notebooks/scenarios/bigquery/05-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb @@ -19,13 +19,13 @@ "outputs": [], "source": [ "# third party\n", - "# run email server\n", - "from helpers import EmailServer\n", - "from helpers import SMTPTestServer\n", + "from helpers import get_email_server\n", + "from helpers import load_users\n", + "from job_helpers import load_jobs\n", + "from job_helpers import save_jobs\n", "\n", - "email_server = EmailServer()\n", - "smtp_server = SMTPTestServer(email_server)\n", - "smtp_server.start()" + "# syft absolute\n", + "import syft as sy" ] }, { @@ -34,8 +34,14 @@ "metadata": {}, "outputs": [], "source": [ - "# syft absolute\n", - "import syft as sy" + "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Launch server & login" ] }, { @@ -60,7 +66,6 @@ "metadata": {}, "outputs": [], "source": [ - "ADMIN_EMAIL, ADMIN_PW = \"admin2@bigquery.org\", \"bqpw2\"\n", "high_client = sy.login(\n", " url=\"http://localhost:8080\", email=ADMIN_EMAIL, password=ADMIN_PW\n", ")" @@ -72,19 +77,14 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from helpers import load_users\n", - "from job_helpers import load_jobs\n", - "from job_helpers import save_jobs" + "email_server, smtp_server = get_email_server()" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "users = load_users(high_client)" + "# Download results" ] }, { @@ -93,6 +93,7 @@ "metadata": {}, "outputs": [], "source": 
[ + "users = load_users(high_client)\n", "jobs = load_jobs(users, high_client)" ] }, @@ -104,7 +105,12 @@ "source": [ "# submitted_jobs = [job for job in jobs if job.is_submitted]\n", "reviewed_jobs = [job for job in jobs if job.admin_reviewed]\n", - "len(reviewed_jobs)" + "reviewed_jobs_should_succeed = [j for j in reviewed_jobs if j.should_succeed]\n", + "reviewed_jobs_should_fail = [j for j in reviewed_jobs if not j.should_succeed]\n", + "\n", + "print(\n", + " f\"{len(reviewed_jobs)=}, {len(reviewed_jobs_should_succeed)=}, {len(reviewed_jobs_should_fail)=}\"\n", + ")" ] }, { @@ -124,42 +130,35 @@ "metadata": {}, "outputs": [], "source": [ - "for job in reviewed_jobs:\n", - " print(\n", - " f\"> Checking job: {job.job_type} {job.func_name} for user {job.user_email} {job.should_succeed}\"\n", - " )\n", + "for job in reviewed_jobs_should_succeed:\n", + " print(f\"> Checking job: {job.job_type} {job.func_name} for user {job.user_email}\")\n", " api_method = job.code_method\n", + " j = api_method(blocking=False)\n", + " res = j.wait()\n", "\n", - " if job.should_succeed:\n", - " # print(\"Expecting job to succeed\")\n", - " j = api_method(blocking=False)\n", - " res = j.wait()\n", - " if isinstance(res, sy.SyftError):\n", - " job.result_as_expected = False\n", - " print(\"Expected success, got error\")\n", - " continue\n", + " if isinstance(res, sy.SyftError):\n", + " raise sy.SyftException(public_message=\"Expected success, got error\")\n", "\n", - " result = res.get()\n", - " if hasattr(result, \"__len__\"):\n", - " pass\n", - " # print(\"> Result length\", len(result))\n", + " result = res.get()\n", + " job.result_as_expected = True" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "for job in reviewed_jobs_should_fail:\n", + " print(f\"> Checking job: {job.job_type} {job.func_name} for user {job.user_email}\")\n", + " api_method = job.code_method\n", "\n", - " # assert len(result) == job.settings[\"limit\"]\n", - " print(\"success\")\n", + " j = api_method(blocking=False)\n", + " res = j.wait()\n", + " if isinstance(res, sy.SyftError):\n", " job.result_as_expected = True\n", - " save_jobs(jobs)\n", " else:\n", - " # print(\"Expecting job to fail\")\n", - " j = api_method(blocking=False)\n", - " res = j.wait()\n", - " if isinstance(res, sy.SyftError):\n", - " job.result_as_expected = True\n", - " print(\"Expected to error, success\")\n", - " else:\n", - " print(\"failed job didnt raise\", type(j))\n", - " job.result_as_expected = False\n", - "\n", - " save_jobs(jobs)" + " raise sy.SyftException(public_message=f\"failed, job didnt raise {type(j)}\")" ] }, { @@ -168,8 +167,7 @@ "metadata": {}, "outputs": [], "source": [ - "expected_jobs = [job for job in jobs if job.result_as_expected]\n", - "len(expected_jobs)" + "save_jobs(jobs)" ] }, { @@ -179,10 +177,18 @@ "outputs": [], "source": [ "# TODO fix\n", + "expected_jobs = [job for job in jobs if job.result_as_expected]\n", "print(f\"got expected_jobs: {len(expected_jobs)} == reviewed_jobs: {len(reviewed_jobs)}\")\n", "assert len(reviewed_jobs) == len(expected_jobs)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cleanup" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/notebooks/scenarios/bigquery/helpers.py b/notebooks/scenarios/bigquery/helpers.py index 233ef73f303..f58d41a20f8 100644 --- a/notebooks/scenarios/bigquery/helpers.py +++ b/notebooks/scenarios/bigquery/helpers.py @@ -4,6 +4,7 @@ from dataclasses import field import 
json import re +import time from typing import Any # third party @@ -144,6 +145,10 @@ def __getitem__(self, key): return None return self.to_dict()[key] + def update_password(self): + self.password = self.new_password + self.new_password = None + @property def emails(self) -> list[Email]: if not self._email_server: @@ -275,6 +280,49 @@ async def async_stop(self): self._stop_event.set() # Stop the server by setting the event +class TimeoutError(Exception): + pass + + +class Timeout: + def __init__(self, timeout_duration): + if timeout_duration > 60: + raise ValueError("Timeout duration cannot exceed 60 seconds.") + self.timeout_duration = timeout_duration + + def run_with_timeout(self, condition_func, *args, **kwargs): + start_time = time.time() + result = None + + while True: + elapsed_time = time.time() - start_time + if elapsed_time > self.timeout_duration: + raise TimeoutError( + f"Function execution exceeded {self.timeout_duration} seconds." + ) + + # Check if the condition is met + try: + if condition_func(*args, **kwargs): + print("Condition met, exiting early.") + break + except Exception as e: + print(f"Exception in target function: {e}") + break # Exit the loop if an exception occurs in the function + time.sleep(1) + + return result + + +def get_email_server(reset=False): + email_server = EmailServer() + if reset: + email_server.reset_emails() + smtp_server = SMTPTestServer(email_server) + smtp_server.start() + return email_server, smtp_server + + def create_user(root_client, test_user): if not user_exists(root_client, test_user.email): fake = Faker() diff --git a/notebooks/scenarios/bigquery/job_helpers.py index 777a781836d..f87cfdc7324 100644 --- a/notebooks/scenarios/bigquery/job_helpers.py +++ b/notebooks/scenarios/bigquery/job_helpers.py @@ -212,6 +212,13 @@ def create_job_funcname_xss(user: TestUser) -> TestJob: return job +def get_request_for_job_info(requests, job): + job_requests = [r for r in requests if r.code.service_func_name == job.func_name] + if len(job_requests) != 1: + raise Exception(f"Too many or too few requests: {job} in requests: {requests}") + return job_requests[0] + + def create_job_query_xss(user: TestUser) -> TestJob: job_type = "job_query_xss" func_name = f"{job_type}_{secrets.token_hex(3)}" @@ -301,6 +308,29 @@ def extract_code_path(response) -> str | None: return None +def approve_by_running(request): + job = request.code(blocking=False) + result = job.wait() + print("got result of type", type(result), "bool", bool(result)) + # got result of type bool False + # assert result won't work unless we know what type is coming back + job_info = job.info(result=True) + # need force when running multiple times + # todo: check and don't run if it's already done + response = request.deposit_result(job_info, approve=True, force=True) + return response + + +def get_job_emails(jobs, client, email_server): + all_requests = client.requests + res = {} + for job in jobs: + request = get_request_for_job_info(all_requests, job) + emails = email_server.get_emails_for_user(request.requesting_user_email) + res[request.requesting_user_email] = emails + return res + + def resolve_request(request): service_func_name = request.code.service_func_name if service_func_name.startswith("simple_query"):
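A minimal usage sketch for the new Timeout helper above; the condition function and user_email are hypothetical, while get_email_server and get_emails_for_user come from helpers.py as defined in this diff:

from helpers import Timeout, get_email_server

email_server, smtp_server = get_email_server()
user_email = "data_scientist@openmined.org"  # hypothetical test account

def inbox_has_mail() -> bool:
    # Polled roughly once per second by run_with_timeout
    return len(email_server.get_emails_for_user(user_email)) > 0

# Raises the module-level TimeoutError if no email arrives within 30 seconds
Timeout(30).run_with_timeout(inbox_has_mail)

diff --git a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb index fc5c3b2c3e0..4719ec98261 100644 ---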
a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb +++ b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb @@ -1,5 +1,35 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "print(environment, high_port, low_port)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -24,7 +54,7 @@ " reset=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=low_port,\n", ")\n", "\n", "server_high = sy.orchestra.launch(\n", @@ -34,7 +64,7 @@ " reset=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=high_port,\n", ")" ] }, @@ -62,7 +92,15 @@ "metadata": {}, "outputs": [], "source": [ - "low_client.worker_pools.get_all()" + "assert len(high_client.worker_pools.get_all()) == 1\n", + "assert len(low_client.worker_pools.get_all()) == 1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup High First" ] }, { @@ -71,8 +109,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(high_client.worker_pools.get_all()) == 1\n", - "assert len(low_client.worker_pools.get_all()) == 1" + "external_registry = test_settings.get(\"external_registry\", default=\"docker.io\")\n", + "external_registry" ] }, { @@ -81,25 +119,8 @@ "metadata": {}, "outputs": [], "source": [ - "def launch_worker_pool(client, pool_name):\n", - " if pool_name not in [x.name for x in client.worker_pools]:\n", - " external_registry = test_settings.get(\"external_registry\", default=\"docker.io\")\n", - " worker_docker_tag = f\"openmined/bigquery:{sy.__version__}\"\n", - " result = client.api.services.worker_image.submit(\n", - " worker_config=sy.PrebuiltWorkerConfig(\n", - " tag=f\"{external_registry}/{worker_docker_tag}\"\n", - " )\n", - " )\n", - " worker_image = client.images.get_all()[1]\n", - " result = client.api.services.image_registry.add(external_registry)\n", - " result = client.api.services.worker_pool.launch(\n", - " pool_name=pool_name,\n", - " image_uid=worker_image.id,\n", - " num_workers=1,\n", - " )\n", - " return result\n", - " else:\n", - " print(\"Pool already exists\")" + "result = high_client.api.services.image_registry.add(external_registry)\n", + "result" ] }, { @@ -108,7 +129,8 @@ "metadata": {}, "outputs": [], "source": [ - "pool_name = \"bigquery-pool\"" + "image_registry_list = high_client.api.services.image_registry.get_all()\n", + "image_registry_list" ] }, { @@ -117,7 +139,8 @@ "metadata": {}, "outputs": [], "source": [ - "launch_worker_pool(high_client, pool_name)" + "local_registry = image_registry_list[0]\n", + "local_registry" ] }, { @@ -126,7 +149,8 @@ "metadata": {}, "outputs": [], "source": [ - "launch_worker_pool(low_client, pool_name)" + "base_worker_image = high_client.images.get_all()[0]\n", + 
"base_worker_image" ] }, { @@ -135,8 +159,13 @@ "metadata": {}, "outputs": [], "source": [ - "# result = high_client.worker_pools.scale(number=5, pool_name=pool_name)\n", - "# result" + "worker_dockerfile = f\"\"\"\n", + "FROM {str(base_worker_image.image_identifier)}\n", + "\n", + "RUN uv pip install db-dtypes google-cloud-bigquery \n", + "\n", + "\"\"\".strip()\n", + "worker_dockerfile" ] }, { @@ -145,8 +174,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(high_client.worker_pools.get_all()) == 2\n", - "assert len(low_client.worker_pools.get_all()) == 2" + "docker_config = sy.DockerWorkerConfig(dockerfile=worker_dockerfile)\n", + "assert docker_config.dockerfile == worker_dockerfile" ] }, { @@ -155,8 +184,304 @@ "metadata": {}, "outputs": [], "source": [ - "base_worker_image = high_client.images.get_all()[0]\n", - "base_worker_image" + "submit_result = high_client.api.services.worker_image.submit(\n", + " worker_config=docker_config\n", + ")\n", + "submit_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# get non prebuilt\n", + "dockerfile_list = high_client.images.get_all()\n", + "worker_image = next(\n", + " (\n", + " image\n", + " for image in dockerfile_list\n", + " if not image.is_prebuilt and image.config.dockerfile == worker_dockerfile\n", + " ),\n", + " None,\n", + ")\n", + "worker_image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "docker_tag = str(base_worker_image.image_identifier).replace(\n", + " \"backend\", \"worker-bigquery\"\n", + ")\n", + "docker_tag" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if environment == \"remote\":\n", + " docker_build_result = high_client.api.services.worker_image.build(\n", + " image_uid=worker_image.id,\n", + " tag=docker_tag,\n", + " registry_uid=local_registry.id,\n", + " )\n", + " print(docker_build_result)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if environment == \"remote\":\n", + " push_result = high_client.api.services.worker_image.push(worker_image.id)\n", + " print(push_result)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", + "docker_config" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "result = high_client.api.services.worker_image.submit(worker_config=docker_config)\n", + "worker_image_id = result.value.id\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Update the list\n", + "dockerfile_list = high_client.images.get_all()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# get prebuilt\n", + "# dockerfile_list = high_client.images.get_all()\n", + "# worker_image = next(\n", + "# (image for image in dockerfile_list if image.is_prebuilt),\n", + "# None,\n", + "# )\n", + "# worker_image\n", + "\n", + "# TODO: fix\n", + "# Similar issue as in non-sync notebooks. 
Refer to 01-setup-datasite.ipynb\n", + "\n", + "worker_image = next(\n", + " (\n", + " image\n", + " for image in dockerfile_list\n", + " if \"worker-bigquery\" in str(image.image_identifier)\n", + " ),\n", + " None,\n", + ")\n", + "worker_image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert worker_image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "worker_pool_name = \"bigquery-pool\"\n", + "custom_pool_pod_annotations = {\"bigquery-custom-pool\": \"Pod annotation for bigquery\"}\n", + "custom_pool_pod_labels = {\"bigquery-custom-pool\": \"Pod_label_for_bigquery\"}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "result = high_client.api.services.worker_pool.launch(\n", + " pool_name=worker_pool_name,\n", + " image_uid=worker_image.id,\n", + " num_workers=1,\n", + " pod_annotations=custom_pool_pod_annotations,\n", + " pod_labels=custom_pool_pod_labels,\n", + ")\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if environment == \"remote\":\n", + " result = high_client.worker_pools.scale(number=2, pool_name=worker_pool_name)\n", + " print(result)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert len(high_client.worker_pools.get_all()) == 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "widget = sy.sync(from_client=high_client, to_client=low_client, hide_usercode=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "widget" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert len(high_client.worker_pools.get_all()) == 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "high_client.settings.allow_guest_signup(enable=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup Low" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "result = low_client.api.services.image_registry.add(external_registry)\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", + "docker_config" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "result = low_client.api.services.worker_image.submit(worker_config=docker_config)\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# get prebuilt\n", + "dockerfile_list = low_client.images.get_all()\n", + "worker_image = next(\n", + " (\n", + " image\n", + " for image in dockerfile_list\n", + " if \"worker-bigquery\" in str(image.image_identifier)\n", + " ),\n", + " None,\n", + ")\n", + "worker_image\n", + "# worker_image = next(\n", + "# (image for image in dockerfile_list if image.is_prebuilt),\n", + "# None,\n", + "# )\n", + "# worker_image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "result = 
low_client.api.services.worker_pool.launch(\n", + " pool_name=worker_pool_name,\n", + " image_uid=worker_image.id,\n", + " num_workers=1,\n", + " pod_annotations=custom_pool_pod_annotations,\n", + " pod_labels=custom_pool_pod_labels,\n", + ")\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "assert len(low_client.worker_pools.get_all()) == 2" ] }, { @@ -179,7 +504,24 @@ "metadata": {}, "outputs": [], "source": [ - "high_client.settings.allow_guest_signup(enable=False)\n", + "# widget = sy.sync(from_client=low_client, to_client=high_client, hide_usercode=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# widget" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "low_client.settings.allow_guest_signup(enable=False)" ] }, @@ -203,16 +545,10 @@ "metadata": {}, "outputs": [], "source": [ - "server_high.land()\n", - "server_low.land()" + "if environment != \"remote\":\n", + " server_high.land()\n", + " server_low.land()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -231,7 +567,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index f82af8be5aa..576b74e526a 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -3,9 +3,37 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "print(environment, high_port, low_port)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "# stdlib\n", @@ -33,9 +61,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "# syft absolute\n", @@ -46,9 +72,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "server_low = sy.orchestra.launch(\n", @@ -57,7 +81,7 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=low_port,\n", ")\n", "\n", "server_high = sy.orchestra.launch(\n", @@ -66,16 +90,14 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=high_port,\n", ")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - 
"metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")\n", @@ -85,9 +107,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "assert len(high_client.worker_pools.get_all()) == 2\n", @@ -97,9 +117,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "this_worker_pool_name = \"bigquery-pool\"" @@ -108,9 +126,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "# !pip list | grep bigquery" @@ -119,9 +135,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "# !pip install db-dtypes google-cloud-bigquery" @@ -137,9 +151,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "mock_func = make_test_query(\n", @@ -153,9 +165,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "private_func = make_test_query(\n", @@ -168,9 +178,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "new_endpoint = sy.TwinAPIEndpoint(\n", @@ -187,9 +195,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "# Here, we update the endpoint to timeout after 100s (rather the default of 60s)\n", @@ -201,9 +207,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "high_client.api.services.api.update(\n", @@ -214,9 +218,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "schema_function = make_schema(\n", @@ -230,9 +232,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "high_client.custom_api.add(endpoint=schema_function)\n", @@ -242,9 +242,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "metadata": {} - }, + "metadata": {}, "outputs": [], "source": [ "dataset_1 = test_settings.get(\"dataset_1\", default=\"dataset_1\")\n", @@ -577,8 +575,8 @@ "metadata": {}, "outputs": [], "source": [ - "server_high.land()\n", - "server_low.land()" + "assert len(low_client.custom_api.api_endpoints()) == 3\n", + "assert len(high_client.custom_api.api_endpoints()) == 3" ] }, { @@ -586,7 +584,11 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "if environment != \"remote\":\n", + " server_high.land()\n", + " server_low.land()" + ] } ], "metadata": { @@ -605,7 +607,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb index 90db5b2c585..309e37755ea 100644 --- a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb +++ b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb @@ -1,5 +1,33 @@ { "cells": [ + { + "cell_type": "code", + 
"execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "print(environment, low_port)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -30,7 +58,7 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=low_port,\n", ")" ] }, @@ -72,8 +100,7 @@ "table_1 = test_settings.get(\"table_1\", default=\"table_1\")\n", "table_2 = test_settings.get(\"table_2\", default=\"table_2\")\n", "table_2_col_id = test_settings.get(\"table_2_col_id\", default=\"table_id\")\n", - "table_2_col_score = test_settings.get(\"table_2_col_score\", default=\"colname\")\n", - "query_limit_size = test_settings.get(\"query_limit_size\", default=10000)" + "table_2_col_score = test_settings.get(\"table_2_col_score\", default=\"colname\")" ] }, { @@ -120,7 +147,7 @@ "outputs": [], "source": [ "FUNC_NAME = \"large_sample\"\n", - "LARGE_SAMPLE_QUERY = f\"SELECT * FROM {dataset_2}.{table_2} LIMIT {query_limit_size}\"" + "LARGE_SAMPLE_QUERY = f\"SELECT * FROM {dataset_2}.{table_2} LIMIT 10000\"" ] }, { @@ -234,7 +261,8 @@ "metadata": {}, "outputs": [], "source": [ - "server_low.land()" + "if environment != \"remote\":\n", + " server_low.land()" ] }, { @@ -261,7 +289,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb b/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb index d09bfdd503d..aaf83dc0d01 100644 --- a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb @@ -6,10 +6,28 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", + "import os\n", + "\n", "# syft absolute\n", "import syft as sy\n", "from syft.service.code.user_code import UserCode\n", - "from syft.service.request.request import Request" + "from syft.service.request.request import Request\n", + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")" ] }, { @@ -24,7 +42,7 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=low_port,\n", ")\n", "\n", "server_high = sy.orchestra.launch(\n", @@ -33,7 +51,7 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=high_port,\n", ")" ] }, @@ -47,41 +65,6 @@ "high_client = server_high.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, - { - "cell_type": 
"code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# # todo: this is way too noisy\n", - "# widget = sy.sync(from_client=low_client, to_client=high_client)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# widget" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# sync the users new request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -284,25 +267,6 @@ "job.wait()" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# # todo: this is way too noisy\n", - "# widget = sy.sync(from_client=high_client, to_client=low_client)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# widget" - ] - }, { "cell_type": "code", "execution_count": null, @@ -386,8 +350,9 @@ "metadata": {}, "outputs": [], "source": [ - "server_high.land()\n", - "server_low.land()" + "if environment != \"remote\":\n", + " server_high.land()\n", + " server_low.land()" ] }, { @@ -414,7 +379,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb b/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb index 37b7388d2c4..f4bddbd75d1 100644 --- a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb @@ -6,12 +6,28 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", + "import os\n", + "\n", "# third party\n", "import pandas as pd\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_settings" + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "print(environment, low_port)" ] }, { @@ -26,7 +42,7 @@ " dev_mode=True,\n", " n_consumers=1,\n", " create_producer=True,\n", - " port=\"auto\",\n", + " port=low_port,\n", ")" ] }, @@ -86,8 +102,7 @@ "metadata": {}, "outputs": [], "source": [ - "query_limit_size = test_settings.get(\"query_limit_size\", default=10000)\n", - "assert len(res) == query_limit_size" + "assert len(res) == 10000" ] }, { @@ -96,7 +111,8 @@ "metadata": {}, "outputs": [], "source": [ - "server_low.land()" + "if environment != \"remote\":\n", + " server_low.land()" ] }, { @@ -123,7 +139,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/packages/grid/VERSION b/packages/grid/VERSION index a5f840e30f5..5e0d33e7748 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.1-beta.11" +__version__ = "0.9.2-beta.2" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index 
4e799478d55..9be5f7caf37 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a syft-backend container in PROD # Hence COPY will not work the same way in DEV vs. PROD -ARG SYFT_VERSION_TAG="0.9.1-beta.11" +ARG SYFT_VERSION_TAG="0.9.2-beta.2" FROM openmined/syft-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 1234ed1e640..ae6245df2c1 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -28,7 +28,7 @@ vars: DOCKER_IMAGE_RATHOLE: openmined/syft-rathole DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/syft-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.9.1-beta.11" + VERSION: "0.9.2-beta.2" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 780ddfd83f0..c1ce0ad5a5f 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "syft-ui", - "version": "0.9.1-beta.11", + "version": "0.9.2-beta.2", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index df7fe4d7061..7d6e44855a8 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,48 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.9.2-beta.2 + created: "2024-09-08T12:36:18.767070467Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 28f2b7a8efc4b70ee06aa1b0ae1c908b9125acc5a1d484fff180576b31af961c + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.2-beta.2.tgz + version: 0.9.2-beta.2 + - apiVersion: v2 + appVersion: 0.9.2-beta.1 + created: "2024-09-08T12:36:18.766342856Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: b855675198e17c05eb202ea74898e9b45ac5e88d5d76cecd2743e955f1654171 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.2-beta.1.tgz + version: 0.9.2-beta.1 + - apiVersion: v2 + appVersion: 0.9.1 + created: "2024-09-08T12:36:18.765608403Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 929fa3b094487c58e45a6bc5842019106bb5e033137c500a440f3bbc521654d8 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.1.tgz + version: 0.9.1 - apiVersion: v2 appVersion: 0.9.1-beta.11 - created: "2024-09-04T02:53:13.072220834Z" + created: "2024-09-08T12:36:18.758299828Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c48a42596a0c977164d24398d587da402922ddcbb23114604eba44d51d6b2198 @@ -16,7 +55,7 @@ entries: version: 0.9.1-beta.11 - apiVersion: v2 
appVersion: 0.9.1-beta.10 - created: "2024-09-04T02:53:13.071501184Z" + created: "2024-09-08T12:36:18.757542292Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cf41185fd87ce4625a98c02cea1560bf729f2e134c1cc8c3e0882f9a8259849d @@ -29,7 +68,7 @@ entries: version: 0.9.1-beta.10 - apiVersion: v2 appVersion: 0.9.1-beta.9 - created: "2024-09-04T02:53:13.078961833Z" + created: "2024-09-08T12:36:18.764856207Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 24e37fb3bf6217421a2e9d749373f5c74d660c9ff3f55884a7a1dbb6e555c334 @@ -42,7 +81,7 @@ entries: version: 0.9.1-beta.9 - apiVersion: v2 appVersion: 0.9.1-beta.8 - created: "2024-09-04T02:53:13.078219319Z" + created: "2024-09-08T12:36:18.764132253Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 54cd02a8bc61feeed6fdff85340b16bc20407870a0f730925454ba51ce5ed14e @@ -55,7 +94,7 @@ entries: version: 0.9.1-beta.8 - apiVersion: v2 appVersion: 0.9.1-beta.7 - created: "2024-09-04T02:53:13.077508475Z" + created: "2024-09-08T12:36:18.763294897Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d1bdc207bb0a21391fb18f89a0434d9361cf054ea389370648ffd13753dabe70 @@ -68,7 +107,7 @@ entries: version: 0.9.1-beta.7 - apiVersion: v2 appVersion: 0.9.1-beta.6 - created: "2024-09-04T02:53:13.076791088Z" + created: "2024-09-08T12:36:18.761898046Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 74b62672d982c0ca8ea86f8b4d1f25d1df6a9b7a8c4ac1551fd5635da7d29c48 @@ -81,7 +120,7 @@ entries: version: 0.9.1-beta.6 - apiVersion: v2 appVersion: 0.9.1-beta.5 - created: "2024-09-04T02:53:13.076076988Z" + created: "2024-09-08T12:36:18.761157701Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a69a7ac7d1b02b0bc547f4236398c001827ff964bb07fe663ef3545f9b6cf5f1 @@ -94,7 +133,7 @@ entries: version: 0.9.1-beta.5 - apiVersion: v2 appVersion: 0.9.1-beta.4 - created: "2024-09-04T02:53:13.075251941Z" + created: "2024-09-08T12:36:18.760451441Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3d67a6d3bdb9e77e8fb0aa644312c9af5a49feb824d1b65f9b384796f059ed7c @@ -107,7 +146,7 @@ entries: version: 0.9.1-beta.4 - apiVersion: v2 appVersion: 0.9.1-beta.3 - created: "2024-09-04T02:53:13.074499339Z" + created: "2024-09-08T12:36:18.759701158Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 90d804df3afadfd9400cbb320898040cc89a74f6d3e45f0365455ed30785f200 @@ -120,7 +159,7 @@ entries: version: 0.9.1-beta.3 - apiVersion: v2 appVersion: 0.9.1-beta.2 - created: "2024-09-04T02:53:13.073772575Z" + created: "2024-09-08T12:36:18.758999266Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6f605af4ffc0e42a0733593faf1b5e588bbe58ff9f49b903a41bd4a751ddb694 @@ -133,7 +172,7 @@ entries: version: 0.9.1-beta.2 - apiVersion: v2 appVersion: 0.9.1-beta.1 - created: "2024-09-04T02:53:13.070775241Z" + created: "2024-09-08T12:36:18.756821594Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c99243e63888391654f23044144e2095dee48a599cd4b2e4f43ead6f76a8572 @@ -146,7 +185,7 @@ entries: version: 0.9.1-beta.1 - apiVersion: v2 appVersion: 0.9.0 - created: "2024-09-04T02:53:13.070015316Z" + created: "2024-09-08T12:36:18.756091068Z" description: Perform numpy-like analysis on data that 
remains in someone elses server digest: baf218c8543a2525f7d4cced1e49b0d4e38ee1661d7171a55a069bf765b5b6d8 @@ -159,7 +198,7 @@ entries: version: 0.9.0 - apiVersion: v2 appVersion: 0.9.0-beta.5 - created: "2024-09-04T02:53:13.069315211Z" + created: "2024-09-08T12:36:18.755357757Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a4eafd04b39b0c75d6a28ed2f7cfece450150477dc2c6a01e10e2087a5b02835 @@ -172,7 +211,7 @@ entries: version: 0.9.0-beta.5 - apiVersion: v2 appVersion: 0.9.0-beta.4 - created: "2024-09-04T02:53:13.068605839Z" + created: "2024-09-08T12:36:18.753825308Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5a3cd3dd57609231ffc13e6af8d55f68b1b79fbbe8261740db957526fb8a536a @@ -185,7 +224,7 @@ entries: version: 0.9.0-beta.4 - apiVersion: v2 appVersion: 0.9.0-beta.3 - created: "2024-09-04T02:53:13.067896939Z" + created: "2024-09-08T12:36:18.75307757Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: affe0898286720a0281c2363bed404a09d229a5359951b4dfdd8e746d628b4cb @@ -198,7 +237,7 @@ entries: version: 0.9.0-beta.3 - apiVersion: v2 appVersion: 0.9.0-beta.2 - created: "2024-09-04T02:53:13.067181456Z" + created: "2024-09-08T12:36:18.752382781Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 105b60f0ff01f50386d2b063cb58c0e91ee41b74cefee7bca3f56e4025c38dd1 @@ -211,7 +250,7 @@ entries: version: 0.9.0-beta.2 - apiVersion: v2 appVersion: 0.9.0-beta.1 - created: "2024-09-04T02:53:13.066418434Z" + created: "2024-09-08T12:36:18.751684595Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 10246075684d168e6a51c009581b77df8d729e29e11abc4a360fae42659a6409 @@ -224,7 +263,7 @@ entries: version: 0.9.0-beta.1 - apiVersion: v2 appVersion: 0.8.8 - created: "2024-09-04T02:53:13.064876909Z" + created: "2024-09-08T12:36:18.7509863Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 46f75bdf8c39e0f17de266bf19b64852e0dbf7f7bcea60bf7a19018ff17370ad @@ -237,7 +276,7 @@ entries: version: 0.8.8 - apiVersion: v2 appVersion: 0.8.8-beta.4 - created: "2024-09-04T02:53:13.064178498Z" + created: "2024-09-08T12:36:18.750282374Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc0a3b49df19435a407e4764be6c5748511f14273e668e7f1d326af28b29f22a @@ -250,7 +289,7 @@ entries: version: 0.8.8-beta.4 - apiVersion: v2 appVersion: 0.8.8-beta.3 - created: "2024-09-04T02:53:13.063479516Z" + created: "2024-09-08T12:36:18.74952626Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: de2fba39516e98be39ae0110a2cfa5bfa2b665d7a35a4516b43c5310bbf621dc @@ -263,7 +302,7 @@ entries: version: 0.8.8-beta.3 - apiVersion: v2 appVersion: 0.8.8-beta.2 - created: "2024-09-04T02:53:13.062767329Z" + created: "2024-09-08T12:36:18.748796465Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1323f4082c65944b522cd8e36dc7285c83c7dfcf6a56f7962665a8b1256a4d09 @@ -276,7 +315,7 @@ entries: version: 0.8.8-beta.2 - apiVersion: v2 appVersion: 0.8.8-beta.1 - created: "2024-09-04T02:53:13.061999879Z" + created: "2024-09-08T12:36:18.747994316Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ec027b50b8182ef656be14ddca9537785c37712a4be8cb940f30ac029b63de2d @@ -289,7 +328,7 @@ entries: version: 0.8.8-beta.1 - apiVersion: v2 
appVersion: 0.8.7 - created: "2024-09-04T02:53:13.061293934Z" + created: "2024-09-08T12:36:18.747273618Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7ea7f63d1c6d0948860547f8aa39343fc5ef399c8e62d9d7edd4473cf44d8186 @@ -302,7 +341,7 @@ entries: version: 0.8.7 - apiVersion: v2 appVersion: 0.8.7-beta.16 - created: "2024-09-04T02:53:13.054538524Z" + created: "2024-09-08T12:36:18.740689949Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75190eae57b64c2c47ab4a7fe3c6e94f35eb8045807a843ec8d7b26585c9e840 @@ -315,7 +354,7 @@ entries: version: 0.8.7-beta.16 - apiVersion: v2 appVersion: 0.8.7-beta.15 - created: "2024-09-04T02:53:13.053707987Z" + created: "2024-09-08T12:36:18.739825212Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56879d9a9f10febce88676d3d20621d74d17f9e33f5df6ae1e9bc3078c216f0c @@ -328,7 +367,7 @@ entries: version: 0.8.7-beta.15 - apiVersion: v2 appVersion: 0.8.7-beta.14 - created: "2024-09-04T02:53:13.052868903Z" + created: "2024-09-08T12:36:18.738964242Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e7cbca1d603ba11e09ae2a3089cfdafaa08cfa07c553c4f0fb8b42f8d3028f7 @@ -341,7 +380,7 @@ entries: version: 0.8.7-beta.14 - apiVersion: v2 appVersion: 0.8.7-beta.13 - created: "2024-09-04T02:53:13.051991259Z" + created: "2024-09-08T12:36:18.737299883Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1dbe3ecdfec57bf25020cbcff783fab908f0eb0640ad684470b2fd1da1928005 @@ -354,7 +393,7 @@ entries: version: 0.8.7-beta.13 - apiVersion: v2 appVersion: 0.8.7-beta.12 - created: "2024-09-04T02:53:13.051264865Z" + created: "2024-09-08T12:36:18.73659769Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e92b2f3a522dabb3a79ff762a7042ae16d2bf3a53eebbb2885a69b9f834d109c @@ -367,7 +406,7 @@ entries: version: 0.8.7-beta.12 - apiVersion: v2 appVersion: 0.8.7-beta.11 - created: "2024-09-04T02:53:13.049700589Z" + created: "2024-09-08T12:36:18.735872905Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad @@ -380,7 +419,7 @@ entries: version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-09-04T02:53:13.049001326Z" + created: "2024-09-08T12:36:18.735175922Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -393,7 +432,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-09-04T02:53:13.060452266Z" + created: "2024-09-08T12:36:18.745766651Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -406,7 +445,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-09-04T02:53:13.059780294Z" + created: "2024-09-08T12:36:18.745083855Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -419,7 +458,7 @@ entries: version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: "2024-09-04T02:53:13.059065863Z" + created: "2024-09-08T12:36:18.744433719Z" description: Perform numpy-like analysis 
on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -432,7 +471,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-09-04T02:53:13.057870872Z" + created: "2024-09-08T12:36:18.743813168Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -445,7 +484,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-09-04T02:53:13.057069339Z" + created: "2024-09-08T12:36:18.743192829Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -458,7 +497,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-09-04T02:53:13.056443463Z" + created: "2024-09-08T12:36:18.742571407Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -471,7 +510,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-09-04T02:53:13.055816044Z" + created: "2024-09-08T12:36:18.741946368Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -484,7 +523,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-09-04T02:53:13.055177514Z" + created: "2024-09-08T12:36:18.741288138Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -497,7 +536,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-09-04T02:53:13.048326088Z" + created: "2024-09-08T12:36:18.734505438Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -510,7 +549,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-09-04T02:53:13.047807232Z" + created: "2024-09-08T12:36:18.733987199Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -523,7 +562,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-09-04T02:53:13.047268678Z" + created: "2024-09-08T12:36:18.733413978Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -536,7 +575,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-09-04T02:53:13.04671682Z" + created: "2024-09-08T12:36:18.732872235Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -549,7 +588,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: "2024-09-04T02:53:13.045926808Z" + created: "2024-09-08T12:36:18.732110771Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -562,7 +601,7 @@ entries: version: 0.8.5-post.2 - apiVersion: 
v2 appVersion: 0.8.5-post.1 - created: "2024-09-04T02:53:13.045347139Z" + created: "2024-09-08T12:36:18.731554461Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -575,7 +614,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-09-04T02:53:13.037619138Z" + created: "2024-09-08T12:36:18.723781898Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -588,7 +627,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-09-04T02:53:13.044583786Z" + created: "2024-09-08T12:36:18.730472999Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -601,7 +640,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-09-04T02:53:13.043809825Z" + created: "2024-09-08T12:36:18.729116422Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -614,7 +653,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-09-04T02:53:13.042693542Z" + created: "2024-09-08T12:36:18.72836675Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -627,7 +666,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-09-04T02:53:13.041435248Z" + created: "2024-09-08T12:36:18.727613222Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -640,7 +679,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-09-04T02:53:13.040684379Z" + created: "2024-09-08T12:36:18.726861386Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -653,7 +692,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-09-04T02:53:13.039927989Z" + created: "2024-09-08T12:36:18.726092739Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -666,7 +705,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-09-04T02:53:13.039175648Z" + created: "2024-09-08T12:36:18.725304675Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -679,7 +718,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-09-04T02:53:13.038377Z" + created: "2024-09-08T12:36:18.724546538Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -692,7 +731,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-09-04T02:53:13.036848723Z" + created: "2024-09-08T12:36:18.722967335Z" description: Perform numpy-like analysis 
on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -704,7 +743,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-09-04T02:53:13.036459708Z" + created: "2024-09-08T12:36:18.721840745Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -716,7 +755,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-09-04T02:53:13.033209137Z" + created: "2024-09-08T12:36:18.719311616Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -728,7 +767,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-09-04T02:53:13.032798783Z" + created: "2024-09-08T12:36:18.718904105Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -740,7 +779,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-09-04T02:53:13.0319598Z" + created: "2024-09-08T12:36:18.71814765Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -752,7 +791,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-09-04T02:53:13.031553713Z" + created: "2024-09-08T12:36:18.71774102Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -764,7 +803,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-09-04T02:53:13.031144842Z" + created: "2024-09-08T12:36:18.717300877Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -776,7 +815,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-09-04T02:53:13.030732253Z" + created: "2024-09-08T12:36:18.71688535Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -788,7 +827,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-09-04T02:53:13.030286132Z" + created: "2024-09-08T12:36:18.716429407Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -800,7 +839,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-09-04T02:53:13.029869736Z" + created: "2024-09-08T12:36:18.715998031Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -812,7 +851,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-09-04T02:53:13.029453511Z" + created: "2024-09-08T12:36:18.715501643Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -824,7 +863,7 @@ entries: version: 
0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-09-04T02:53:13.029033589Z" + created: "2024-09-08T12:36:18.714527111Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -836,7 +875,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-09-04T02:53:13.028598177Z" + created: "2024-09-08T12:36:18.714017358Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -848,7 +887,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-09-04T02:53:13.027621297Z" + created: "2024-09-08T12:36:18.713600678Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -860,7 +899,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-09-04T02:53:13.026425499Z" + created: "2024-09-08T12:36:18.71259043Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -872,7 +911,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-09-04T02:53:13.026022839Z" + created: "2024-09-08T12:36:18.712151659Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -884,7 +923,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-09-04T02:53:13.025614328Z" + created: "2024-09-08T12:36:18.711749928Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -896,7 +935,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-09-04T02:53:13.02520203Z" + created: "2024-09-08T12:36:18.711347255Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -908,7 +947,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-09-04T02:53:13.024726134Z" + created: "2024-09-08T12:36:18.710941737Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -920,7 +959,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-09-04T02:53:13.024374569Z" + created: "2024-09-08T12:36:18.710586403Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -932,7 +971,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-09-04T02:53:13.024022213Z" + created: "2024-09-08T12:36:18.710232631Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -944,7 +983,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-09-04T02:53:13.023675086Z" + created: 
"2024-09-08T12:36:18.70987894Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -956,7 +995,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-09-04T02:53:13.023329883Z" + created: "2024-09-08T12:36:18.70943491Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -968,7 +1007,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-09-04T02:53:13.036038353Z" + created: "2024-09-08T12:36:18.721384733Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -980,7 +1019,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-09-04T02:53:13.035673413Z" + created: "2024-09-08T12:36:18.721049486Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -992,7 +1031,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-09-04T02:53:13.034967163Z" + created: "2024-09-08T12:36:18.720707376Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -1004,7 +1043,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-09-04T02:53:13.034227565Z" + created: "2024-09-08T12:36:18.720373292Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -1016,7 +1055,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-09-04T02:53:13.033890196Z" + created: "2024-09-08T12:36:18.720032194Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -1028,7 +1067,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-09-04T02:53:13.033552577Z" + created: "2024-09-08T12:36:18.719650851Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -1040,7 +1079,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-09-04T02:53:13.032380003Z" + created: "2024-09-08T12:36:18.718494449Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -1052,7 +1091,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-09-04T02:53:13.027013906Z" + created: "2024-09-08T12:36:18.713158682Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1068,7 +1107,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-09-04T02:53:13.022970092Z" + created: "2024-09-08T12:36:18.709064818Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1084,7 +1123,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: 
"2024-09-04T02:53:13.02237316Z" + created: "2024-09-08T12:36:18.70801798Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1100,7 +1139,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-09-04T02:53:13.021665662Z" + created: "2024-09-08T12:36:18.707248091Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1116,7 +1155,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-09-04T02:53:13.02025283Z" + created: "2024-09-08T12:36:18.706665842Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1132,7 +1171,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-09-04T02:53:13.019680344Z" + created: "2024-09-08T12:36:18.706077763Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1148,7 +1187,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-09-04T02:53:13.019014813Z" + created: "2024-09-08T12:36:18.705372965Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1164,7 +1203,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-09-04T02:53:13.018419374Z" + created: "2024-09-08T12:36:18.704813589Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1180,7 +1219,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-09-04T02:53:13.017868969Z" + created: "2024-09-08T12:36:18.704262278Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1196,7 +1235,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-09-04T02:53:13.017223206Z" + created: "2024-09-08T12:36:18.703613966Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1212,7 +1251,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-09-04T02:53:13.016580338Z" + created: "2024-09-08T12:36:18.702955214Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1228,7 +1267,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-09-04T02:53:13.015934465Z" + created: "2024-09-08T12:36:18.701943556Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1244,7 +1283,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-09-04T02:53:13.015271871Z" + created: "2024-09-08T12:36:18.700921395Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1260,7 +1299,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-09-04T02:53:13.014561597Z" + created: "2024-09-08T12:36:18.700275117Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1276,7 +1315,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-09-04T02:53:13.013341028Z" + created: "2024-09-08T12:36:18.699639398Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1292,7 +1331,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-09-04T02:53:13.012524657Z" + created: "2024-09-08T12:36:18.699006705Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1308,7 +1347,7 @@ entries: 
version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-09-04T02:53:13.011826927Z" + created: "2024-09-08T12:36:18.69836758Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1324,7 +1363,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-09-04T02:53:13.011179892Z" + created: "2024-09-08T12:36:18.697714529Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1340,7 +1379,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-09-04T02:53:13.010516346Z" + created: "2024-09-08T12:36:18.697029789Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1356,7 +1395,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-09-04T02:53:13.009859812Z" + created: "2024-09-08T12:36:18.696365557Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1372,7 +1411,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-09-04T02:53:13.009293277Z" + created: "2024-09-08T12:36:18.695074089Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1388,7 +1427,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-09-04T02:53:13.008708688Z" + created: "2024-09-08T12:36:18.694504805Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1404,7 +1443,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-09-04T02:53:13.008085346Z" + created: "2024-09-08T12:36:18.69393551Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1420,7 +1459,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-09-04T02:53:13.006549731Z" + created: "2024-09-08T12:36:18.693314119Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1436,7 +1475,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-09-04T02:53:13.00588899Z" + created: "2024-09-08T12:36:18.692656028Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1452,7 +1491,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-09-04T02:53:13.005234932Z" + created: "2024-09-08T12:36:18.691990213Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1468,7 +1507,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-09-04T02:53:13.004634112Z" + created: "2024-09-08T12:36:18.691429335Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1484,7 +1523,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-09-04T02:53:13.004051707Z" + created: "2024-09-08T12:36:18.690849019Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1500,7 +1539,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-09-04T02:53:13.003374085Z" + created: "2024-09-08T12:36:18.690251232Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1516,7 +1555,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-09-04T02:53:13.002724235Z" + created: "2024-09-08T12:36:18.689253475Z" 
 dependencies:
 - name: component-chart
   repository: https://charts.devspace.sh
@@ -1530,4 +1569,4 @@ entries:
   urls:
   - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz
   version: 0.8.1
-generated: "2024-09-04T02:53:13.001921509Z"
+generated: "2024-09-08T12:36:18.688085652Z"
diff --git a/packages/grid/helm/repo/syft-0.9.1.tgz b/packages/grid/helm/repo/syft-0.9.1.tgz
new file mode 100644
index 00000000000..02358982203
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.1.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.9.2-beta.1.tgz b/packages/grid/helm/repo/syft-0.9.2-beta.1.tgz
new file mode 100644
index 00000000000..f2ed473b494
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.2-beta.1.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.9.2-beta.2.tgz b/packages/grid/helm/repo/syft-0.9.2-beta.2.tgz
new file mode 100644
index 00000000000..2c501e02279
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.2-beta.2.tgz differ
diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml
index c6ac117f190..0069d1cffd2 100644
--- a/packages/grid/helm/syft/Chart.yaml
+++ b/packages/grid/helm/syft/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
 name: syft
 description: Perform numpy-like analysis on data that remains in someone elses server
 type: application
-version: "0.9.1-beta.11"
-appVersion: "0.9.1-beta.11"
+version: "0.9.2-beta.2"
+appVersion: "0.9.2-beta.2"
 home: https://github.com/OpenMined/PySyft/
 icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml
index fe0db497b98..ae49ef960b1 100644
--- a/packages/grid/helm/syft/values.yaml
+++ b/packages/grid/helm/syft/values.yaml
@@ -1,7 +1,7 @@
 global:
   # Affects only backend, frontend, and seaweedfs containers
   registry: docker.io
-  version: 0.9.1-beta.11
+  version: 0.9.2-beta.2
 
 # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION
 randomizedSecrets: true
diff --git a/packages/syft/PYPI.md b/packages/syft/PYPI.md
index 1c0d3e1fa87..def75011e15 100644
--- a/packages/syft/PYPI.md
+++ b/packages/syft/PYPI.md
@@ -1,7 +1,7 @@


-Syft Logo
+Syft Logo

Data Science on data you are not allowed to see

@@ -38,7 +38,7 @@ Launch

-sy.requires(">=0.9,<0.9.1")
+sy.requires(">=0.9.1,<0.9.2")

 server = sy.orchestra.launch(
     name="my-datasite",
@@ -67,7 +67,7 @@ Main way to use a Datasite is via our Syft client, in a Jupyter Notebook. Check

 ```python
 import syft as sy

-sy.requires(">=0.9,<0.9.1")
+sy.requires(">=0.9.1,<0.9.2")

 datasite_client = sy.login(
     port=8080,
@@ -135,15 +135,15 @@ For questions about PySyft, reach out via `#support` on

-- `0.9.0` (Stable) - Docs
+- `0.9.1` (Stable) - Docs
 - Install PySyft (Stable): `pip install -U syft`

 **Latest Beta**

-- `0.9.1` (Beta) - `dev` branch 👈🏽
+- `0.9.2` (Beta) - `dev` branch 👈🏽
 - Install PySyft (Beta): `pip install -U syft --pre`

-Find more about previous releases here.
+Find more about previous releases here.

 # Community

@@ -158,18 +158,18 @@ Supported by the OpenMined Foundation, the OpenMined Community is an online netw
[three updated image rows; markup lost in extraction]
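The PYPI.md hunks above pin notebooks to the new `>=0.9.1,<0.9.2` range. Below is a minimal notebook-style sketch of that quickstart under the new pin; the `reset=True` flag and the root credentials are the stock dev-server defaults (assumed, not taken from this diff), and `consumer_type` is the optional keyword that the orchestra.py hunk further down this diff threads through `launch`:

```python
import syft as sy

# Fail fast if the installed syft falls outside the newly documented range.
sy.requires(">=0.9.1,<0.9.2")

# Spin up an in-process dev datasite. consumer_type is new in this diff;
# leaving it as None keeps the pre-PR queue behaviour.
server = sy.orchestra.launch(
    name="my-datasite",
    port=8080,
    reset=True,  # assumed dev flag: start from a clean state
    consumer_type=None,
)

# Log in with the assumed default root credentials of a fresh dev server.
datasite_client = sy.login(
    port=8080,
    email="info@openmined.org",
    password="changethis",
)
```

From here, a `datasite_client.register(...)` call would also exercise the client.py refactor later in this diff, where the old `get_service_method(UserService.register)` lookup is replaced by typed access through the new `services` registry (`self.server.services.user.register`).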
@@ -179,7 +179,7 @@ Supported by the OpenMined Foundation, the OpenMined Community is an online netw

 OpenMined and Syft appreciates all contributors, if you would like to fix a bug or suggest a new feature, please reach out via Github or Slack!

-Contributors
+Contributors

 # About OpenMined

@@ -193,44 +193,44 @@ OpenMined is a non-profit foundation creating technology infrastructure that hel
[hunk body of image markup lost in extraction]
diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg
index 780ea06f5f2..6c0d2d48459 100644
--- a/packages/syft/setup.cfg
+++ b/packages/syft/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = syft
-version = attr: "0.9.1-beta.11"
+version = attr: "0.9.2-beta.2"
 description = Perform numpy-like analysis on data that remains in someone elses server
 author = OpenMined
 author_email = info@openmined.org
diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION
index a5f840e30f5..5e0d33e7748 100644
--- a/packages/syft/src/syft/VERSION
+++ b/packages/syft/src/syft/VERSION
@@ -1,5 +1,5 @@
 # Mono Repo Global Version
-__version__ = "0.9.1-beta.11"
+__version__ = "0.9.2-beta.2"
 # elsewhere we can call this file: `python VERSION` and simply take the stdout
 # stdlib
diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py
index ba73ed9bad4..62655f0aed1 100644
--- a/packages/syft/src/syft/__init__.py
+++ b/packages/syft/src/syft/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.9.1-beta.11"
+__version__ = "0.9.2-beta.2"
 # stdlib
 from collections.abc import Callable
diff --git a/packages/syft/src/syft/abstract_server.py b/packages/syft/src/syft/abstract_server.py
index 4f0fba356c5..c222cf4ea5a 100644
--- a/packages/syft/src/syft/abstract_server.py
+++ b/packages/syft/src/syft/abstract_server.py
@@ -9,6 +9,7 @@
 if TYPE_CHECKING:
     # relative
+    from .server.service_registry import ServiceRegistry
     from .service.service import AbstractService
@@ -39,6 +40,7 @@ class AbstractServer:
     server_type: ServerType | None
     server_side_type: ServerSideType | None
     in_memory_workers: bool
+    services: "ServiceRegistry"

     def get_service(self, path_or_func: str | Callable) -> "AbstractService":
         raise NotImplementedError
diff --git a/packages/syft/src/syft/assets/jinja/syft_exception.jinja2 b/packages/syft/src/syft/assets/jinja/syft_exception.jinja2
index bd4bc01635a..eab1977edb4 100644
--- a/packages/syft/src/syft/assets/jinja/syft_exception.jinja2
+++ b/packages/syft/src/syft/assets/jinja/syft_exception.jinja2
@@ -6,35 +6,47 @@
- {% if server_trace%} -
Server Trace:
-
{{server_trace | escape}}
-
-
+ {% if server_trace %} +
Server Trace:
+
+      {% if dev_mode %}
+        {{ server_trace | make_links | safe }}
+      {% else %}
+        {{ server_trace | escape }}
       {% endif %}
-      
Client Trace:
-
{{traceback_str | escape}}
+
+
+
+ {% endif %} +
Client Trace:
+
+      {% if dev_mode %}
+        {{ traceback_str | make_links | safe }}
+      {% else %}
+        {{ traceback_str | escape }}
+      {% endif %}
+    
+ .syft-exception-trace {
+   display: inline;
+ }
+
\ No newline at end of file
diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py
index 093dee625db..692bd017565 100644
--- a/packages/syft/src/syft/client/client.py
+++ b/packages/syft/src/syft/client/client.py
@@ -615,8 +615,9 @@ def register(self, new_user: UserCreate) -> SyftSigningKey | None:
             )
         else:
             service_context = ServerServiceContext(server=self.server)
-            method = self.server.get_service_method(UserService.register)
-            response = method(context=service_context, new_user=new_user)
+            response = self.server.services.user.register(
+                context=service_context, new_user=new_user
+            )
         response = post_process_result(response, unwrap_on_success=False)
         return response
diff --git a/packages/syft/src/syft/orchestra.py b/packages/syft/src/syft/orchestra.py
index 5921b53b434..0d295b81982 100644
--- a/packages/syft/src/syft/orchestra.py
+++ b/packages/syft/src/syft/orchestra.py
@@ -28,6 +28,7 @@
 from .server.enclave import Enclave
 from .server.gateway import Gateway
 from .server.uvicorn import serve_server
+from .service.queue.queue import ConsumerType
 from .service.response import SyftInfo
 from .types.errors import SyftException
 from .util.util import get_random_available_port
@@ -182,6 +183,7 @@ def deploy_to_python(
     log_level: str | int | None = None,
     debug: bool = False,
     migrate: bool = False,
+    consumer_type: ConsumerType | None = None,
 ) -> ServerHandle:
     worker_classes = {
         ServerType.DATASITE: Datasite,
@@ -213,6 +215,7 @@
         "debug": debug,
         "migrate": migrate,
         "deployment_type": deployment_type_enum,
+        "consumer_type": consumer_type,
     }

     if port:
@@ -325,6 +328,7 @@ def launch(
     debug: bool = False,
     migrate: bool = False,
     from_state_folder: str | Path | None = None,
+    consumer_type: ConsumerType | None = None,
 ) -> ServerHandle:
     if from_state_folder is not None:
         with open(f"{from_state_folder}/config.json") as f:
@@ -373,6 +377,7 @@
         background_tasks=background_tasks,
         debug=debug,
         migrate=migrate,
+        consumer_type=consumer_type,
     )
     display(
         SyftInfo(
diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json
index c1eba4bfc63..5f9f6a8fab1 100644
--- a/packages/syft/src/syft/protocol/protocol_version.json
+++ b/packages/syft/src/syft/protocol/protocol_version.json
@@ -1,1178 +1,5 @@
 { - "dev": { - "object_versions": { - "SyftObjectVersioned": { - "1": { - "version": 1, - "hash": "7c842dcdbb57e2528ffa690ea18c19fff3c8a591811d40cad2b19be3100e2ff4", - "action": "add" - } - }, - "BaseDateTime": { - "1": { - "version": 1, - "hash": "614db484b1950be729902b1861bd3a7b33899176507c61cef11dc0d44611cfd3", - "action": "add" - } - }, - "SyftObject": { - "1": { - "version": 1, - "hash": "bb70d874355988908d3a92a3941d6613a6995a4850be3b6a0147f4d387724406", - "action": "add" - } - }, - "PartialSyftObject": { - "1": { - "version": 1, - "hash": "19a995fcc2833f4fab24584fd99b71a80c2ef1f13c06f83af79e4482846b1656", - "action": "add" - } - }, - "ServerMetadata": { - "1": { - "version": 1, - "hash": "1691c7667eca86b20c4189e90ce4e643dd41fd3682cdb69c6308878f2a6f135c", - "action": "add" - } - }, - "StoreConfig": { - "1": { - "version": 1, - "hash": "a9997fce6a8a0ed2884c58b8eb9382f8554bdd18fff61f8bf0451945bcff12c7", - "action": "add" - } - }, - "MongoDict": { - "1": { - "version": 1, - "hash": "57e36f57eed75e62b29e2bac1295035a9bf2c0e3c56719dac24cb6cc685be00b", - "action": "add" - } - }, - "MongoStoreConfig": { - "1": { - "version": 1, - "hash":
"53342b27d34165b7e2699f8e7ad70d13d125875e6a75e8fa18f5796428f41036", - "action": "add" - } - }, - "LinkedObject": { - "1": { - "version": 1, - "hash": "d80f5ac7f51a9383be1a3cb334d56ae50e49733ed3199f3b6b5d6febd9de410b", - "action": "add" - } - }, - "BaseConfig": { - "1": { - "version": 1, - "hash": "10bd7566041d0f0a3aa295367785fdcc2c5bbf0ded984ac9230754f37496a6a7", - "action": "add" - }, - "2": { - "version": 2, - "hash": "890d2879ac44611db9b88ba9334a721130d0ac3aa18a303fa9e4081f14b9b8c7", - "action": "add" - } - }, - "ServiceConfig": { - "1": { - "version": 1, - "hash": "28af8a296f5ff63de50438277eaa1f4380682e6aca9f2ca28320d7a444825e88", - "action": "add" - }, - "2": { - "version": 2, - "hash": "93dfab144e0b0884c602358b3a9ce889bb29ab96e3b4adcfe3cef47a31694a9a", - "action": "add" - } - }, - "LibConfig": { - "1": { - "version": 1, - "hash": "ee8f0e3f6aae81948d72e30226645e8eb5d312a6770411a1edca748168c467c0", - "action": "add" - }, - "2": { - "version": 2, - "hash": "a8a78a8d726ee9e79f95614f3d0fa5b85edc6fce7be7651715208669be93e0e3", - "action": "add" - } - }, - "APIEndpoint": { - "1": { - "version": 1, - "hash": "faa1cf9336a0d1233868c8c57745ff38c0be60399dc1acd0c0e8dd440e405dbd", - "action": "add" - } - }, - "LibEndpoint": { - "1": { - "version": 1, - "hash": "a585c83a33a019d363ae5a0c6d4197193654307c19a4829dfbf8a8cfd2c1842a", - "action": "add" - } - }, - "SignedSyftAPICall": { - "1": { - "version": 1, - "hash": "2f959455f7130f4e59360b8aa58f19785b76eaa0f8a5a9188a6cbf32b31311ca", - "action": "add" - } - }, - "SyftAPICall": { - "1": { - "version": 1, - "hash": "59e89e7b9ea30deaed64d1ffd9bc0769b999d3082b305428432c1f5be36c6343", - "action": "add" - } - }, - "SyftAPIData": { - "1": { - "version": 1, - "hash": "820b279c581cafd9bb5009702d4e3db22ec3a3156676426304b9038dad260a24", - "action": "add" - } - }, - "SyftAPI": { - "1": { - "version": 1, - "hash": "cc13ab058ee36748c14b0d4bd9b9e894c7566fff09cfa4170b3eece520169f15", - "action": "add" - } - }, - "User": { - "1": { - "version": 1, - "hash": "2df4b68182c558dba5485a8a6867acf2a5c341b249ad67373a504098aa8c4343", - "action": "add" - }, - "2": { - "version": 2, - "hash": "af6fb5b2e1606e97838f4a60f0536ad95db606d455e94acbd1977df866608a2c", - "action": "add" - } - }, - "UserUpdate": { - "1": { - "version": 1, - "hash": "1bf6707c69b809c804fb939c7c37d787c2f6889508a4bec37d24221af2eb777a", - "action": "add" - } - }, - "UserCreate": { - "1": { - "version": 1, - "hash": "49d6087e2309ba59987f3126e286e74b3a66492a08ad82fa507ea17d52ce78e3", - "action": "add" - } - }, - "UserSearch": { - "1": { - "version": 1, - "hash": "9ac946338cca68d00d1696a57943442f062628ec3daf53077d0bdd3f72cd9fa0", - "action": "add" - } - }, - "UserView": { - "1": { - "version": 1, - "hash": "0b52d758e31d5889c9cd88afb467aae4a74e34a5276924e07012243c34d300fe", - "action": "add" - } - }, - "UserViewPage": { - "1": { - "version": 1, - "hash": "1cd6528d02ec180f080d5c35f0da760d8a59af9da7baaa9c17c1c7cedcc858fa", - "action": "add" - } - }, - "UserPrivateKey": { - "1": { - "version": 1, - "hash": "4817d8147aba94373f320dcd90e65f097cf6e5a2ef353aa8520e23128d522b5d", - "action": "add" - } - }, - "DateTime": { - "1": { - "version": 1, - "hash": "394abb554114ead4d63c36e3fe83ac018dead4b21a8465174009577c46d54c58", - "action": "add" - } - }, - "ReplyNotification": { - "1": { - "version": 1, - "hash": "84102dfc59d711b03c2f3d3a6ecaca000b6835f1bbdd9af801057f7aacb5f1d0", - "action": "add" - } - }, - "Notification": { - "1": { - "version": 1, - "hash": "af4cb232bff390c431e399975f048b34da7e940ace8b23b940a3b398c91c5326", - 
"action": "add" - } - }, - "CreateNotification": { - "1": { - "version": 1, - "hash": "7e426c946b7d5db6f9427960ec16042f3018091d835ca5966f3568c324a2ab53", - "action": "add" - } - }, - "UserNotificationActivity": { - "1": { - "version": 1, - "hash": "422fd01c6d9af38688a9982abd34e80794a1f6ddd444cca225d77f49189847a9", - "action": "add" - } - }, - "NotificationPreferences": { - "1": { - "version": 1, - "hash": "a42f06b367e7c6cbabcbf3cfcc84d1ca0873e457d972ebd060e87c9d6185f62b", - "action": "add" - } - }, - "NotifierSettings": { - "1": { - "version": 1, - "hash": "65c8ab814d35fac32f68d3000756692592cc59940f30e3af3dcdfa2328755b9d", - "action": "add" - }, - "2": { - "version": 2, - "hash": "be8b52597fc628d1b7cd22b776ee81416e1adbb04a45188778eb0e32ed1416b4", - "action": "add" - } - }, - "SyftImageRegistry": { - "1": { - "version": 1, - "hash": "67e18903e41cba1afe136adf29d404b63ec04fea6e928abb2533ec4fa52b246b", - "action": "add" - } - }, - "SyftWorkerImage": { - "1": { - "version": 1, - "hash": "44da7badfbe573d5403d3ab78c077f17dbefc560b81fdf927b671815be047441", - "action": "add" - } - }, - "SyftWorker": { - "1": { - "version": 1, - "hash": "9d897f6039eabe48dfa8e8d5c5cdcb283b0375b4c64571b457777eaaf3fb1920", - "action": "add" - } - }, - "WorkerPool": { - "1": { - "version": 1, - "hash": "16efc5dd2596ae744fd611c8f46af9eaec1bd5729eb20e85e9fd2f31df402564", - "action": "add" - } - }, - "MarkdownDescription": { - "1": { - "version": 1, - "hash": "31a73f8824cad1636a55d14b6a1074cdb071d0d4e16e86baaa3d4f63a7e80134", - "action": "add" - } - }, - "HTMLObject": { - "1": { - "version": 1, - "hash": "97f2e93f5ceaa88015047186f66a17ff13df2a6b7925b41331f9e19d5a515a9f", - "action": "add" - } - }, - "PwdTokenResetConfig": { - "1": { - "version": 1, - "hash": "0415a272428f22add4896c64aa9f29c8c1d35619e2433da6564eb5f1faff39ac", - "action": "add" - } - }, - "ServerSettingsUpdate": { - "1": { - "version": 1, - "hash": "1e4260ad879ae80728c3ffae2cd1d48759abd51f9d0960d4b25855cdbb4c506b", - "action": "add" - }, - "2": { - "version": 2, - "hash": "23b2716e9dceca667e228408e2416c82f11821e322e5bccf1f83406f3d09abdc", - "action": "add" - }, - "3": { - "version": 3, - "hash": "335c7946f2e52d09c7b26f511120cd340717c74c5cca9107e84f839da993c55c", - "action": "add" - }, - "4": { - "version": 4, - "hash": "8d7a41992c39c287fcb46383bed429ce75d3c9524ced8c86b88c26dd0232e2fe", - "action": "add" - } - }, - "ServerSettings": { - "1": { - "version": 1, - "hash": "5a1e7470cbeaaae5b80ac9beecb743734f7e4e42d429a09ea8defa569a5ddff1", - "action": "add" - }, - "2": { - "version": 2, - "hash": "7727ea54e494dc9deaa0d1bd38ac8a6180bc192b74eec5659adbc338a19e21f5", - "action": "add" - }, - "3": { - "version": 3, - "hash": "997667e1cba22d151857aacc2caba6b1ca73c1648adbd03461dc74a0c0c372b3", - "action": "add" - }, - "4": { - "version": 4, - "hash": "b8067777967a0e06733433e179e549caaf501419d62f7e8474ee33b839e3890d", - "action": "add" - } - }, - "HTTPConnection": { - "1": { - "version": 1, - "hash": "bf10f81646c71069c76292b1237b4a3de1e507264392c5c591d067636ce6fb46", - "action": "add" - } - }, - "PythonConnection": { - "1": { - "version": 1, - "hash": "28010778b5e3463ff6960a0e2224818de00bc7b5e6f892192e02e399ccbe18b5", - "action": "add" - } - }, - "ActionDataEmpty": { - "1": { - "version": 1, - "hash": "e0e4a5cf18d05b6b747addc048515c6f2a5f35f0766ebaee96d898cb971e1c5b", - "action": "add" - } - }, - "ObjectNotReady": { - "1": { - "version": 1, - "hash": "8cf471e205cd0893d6aae5f0227d14db7df1c9698da08a3ab991f59132d17fe9", - "action": "add" - } - }, - "ActionDataLink": { - 
"1": { - "version": 1, - "hash": "3469478343439e411b761c270eec63eb3d533e459ad72d0965158c3a6cdf3b9a", - "action": "add" - } - }, - "Action": { - "1": { - "version": 1, - "hash": "021826d7c6f69bd0283d025d40661f3ffbeba8810ca94de01344f6afbdae62cd", - "action": "add" - } - }, - "ActionObject": { - "1": { - "version": 1, - "hash": "0a5f4bc343cb114a251f06686ecdbb59d74bfb3d29a098b176699deb35a1e683", - "action": "add" - } - }, - "AnyActionObject": { - "1": { - "version": 1, - "hash": "b3c44c7788c59c03fa1baeec656c2ca6e633f4cbd4b23ff7ece6ee94c38449f0", - "action": "add" - } - }, - "CustomEndpointActionObject": { - "1": { - "version": 1, - "hash": "c7addbaf2777707f3e91e5c1e092343476cd22efc4ec8617f39ccf76e61a5a14", - "action": "add" - }, - "2": { - "version": 2, - "hash": "846ba36e8737a1bec16853c9de54c4948450009278e0b76fe7e3355ef9e70089", - "action": "add" - } - }, - "DataSubject": { - "1": { - "version": 1, - "hash": "582cdf9e82b5d6915b7f09f7c0d5f08328b11a2ce9b0198e5083f1672c2e2bf5", - "action": "add" - } - }, - "DataSubjectCreate": { - "1": { - "version": 1, - "hash": "5a8423c2690d55f425bfeecc87cd4a797a75d88ebb5fbda754d4f269b62d2ceb", - "action": "add" - } - }, - "DataSubjectMemberRelationship": { - "1": { - "version": 1, - "hash": "0810483ea76ea10c8f286c6035dc0b2085291f345183be50c179f3a05a577110", - "action": "add" - } - }, - "Contributor": { - "1": { - "version": 1, - "hash": "30c32bd44098f00e0b15496be441763b6e50af8b12d3d2bef33aca6287193876", - "action": "add" - } - }, - "Asset": { - "1": { - "version": 1, - "hash": "000abc78719611c106295cf12b1690b7e5411dc1bb9db9d4afd22956da90d1f4", - "action": "add" - } - }, - "CreateAsset": { - "1": { - "version": 1, - "hash": "357d52576cb12b24fb3980342bb49a562b065c0e4419e87d34176340628c7309", - "action": "add" - } - }, - "Dataset": { - "1": { - "version": 1, - "hash": "0ca6b0b4a3aebb2c8f351668075b44951bb20d1e23a779b82109124f334ce3a4", - "action": "add" - } - }, - "DatasetPageView": { - "1": { - "version": 1, - "hash": "aa0dd69637281b80d5523b4409a2c7e89db114c9fe79c858063c6dadff8977d1", - "action": "add" - }, - "2": { - "version": 2, - "hash": "be1ca6dcd0b3aa0481ce5dce737e78432d06a78ad0c701aaf136be407c798352", - "action": "add" - } - }, - "CreateDataset": { - "1": { - "version": 1, - "hash": "7e02dfa89540c3dbebacbb13810d95cdc4e36db31d56cffed7ab54abe25716c9", - "action": "add" - } - }, - "SyftLog": { - "1": { - "version": 1, - "hash": "1bcd71e5bf3f0db3bba0996f33b6b2bde3489b9c71f11e6b30c3495c76a8f53f", - "action": "add" - } - }, - "JobItem": { - "1": { - "version": 1, - "hash": "0b32277b7d3b9bdc14a2a51cc9005f8254e7f7b6ec059ddcccbcd681a807afb6", - "action": "add" - }, - "2": { - "version": 2, - "hash": "b087d0c62b7d304c6ca80e4fb0e8a7f2a444be8f8cba57490dc09aeb98033105", - "action": "add" - } - }, - "ExecutionOutput": { - "1": { - "version": 1, - "hash": "e36c71685edf5276a3427cb6749550486d3a177c1dcf73dd337ab2a73c0ce6b5", - "action": "add" - } - }, - "TwinObject": { - "1": { - "version": 1, - "hash": "4f31243fb348dbb083579afd6f638d75af010cb53d19bfba59b74afff41ccbbb", - "action": "add" - } - }, - "PolicyRule": { - "1": { - "version": 1, - "hash": "44d1ca1db97be46f66558aa1a729ff31bf8e113c6a913b11aedf9d6b6ad5b7b5", - "action": "add" - } - }, - "CreatePolicyRule": { - "1": { - "version": 1, - "hash": "342bb723526d445151a0435f57d251f4c1219f8ae7cca3e8e9fce52e2ee1b8b1", - "action": "add" - } - }, - "CreatePolicyRuleConstant": { - "1": { - "version": 1, - "hash": "78b54832cb0468a87013bc36bc11d4759874ca1b5065a1b711f1e5ef5d94c2df", - "action": "add" - } - }, - "Matches": { - "1": 
{ - "version": 1, - "hash": "dd6d91ddb2ec5eaf60be2b0899ecfdb9a15f7904aa39d2f4d9bb2d7b793040e6", - "action": "add" - } - }, - "PreFill": { - "1": { - "version": 1, - "hash": "c7aefb11dc4c4569dcd1e6988371047a32a8be1b32ad46d12adba419a19769ad", - "action": "add" - } - }, - "UserOwned": { - "1": { - "version": 1, - "hash": "c8738dc3d8c2a5ef461b85a0467c3dff53dab16b54a4d12b44b1477906aef51d", - "action": "add" - } - }, - "MixedInputPolicy": { - "1": { - "version": 1, - "hash": "37bb12d950518d9579c8ec7c4cc22ac731ea82caf8c1370dd0b0a82b46462dde", - "action": "add" - } - }, - "ExactMatch": { - "1": { - "version": 1, - "hash": "5eb37edbf5e451d942e599247f3eaed923c1fe9d91eefdba02bf06503f6cc08d", - "action": "add" - } - }, - "OutputHistory": { - "1": { - "version": 1, - "hash": "9366db79d131f8c65e5a4ff12c90e2aa0c11e302debe06e46eeb93b26e2aaf61", - "action": "add" - } - }, - "OutputPolicyExecuteCount": { - "1": { - "version": 1, - "hash": "2a77e5ed5c7b0391147562651ad4061e20b11745c191fbc34cb549da37ba72dd", - "action": "add" - } - }, - "OutputPolicyExecuteOnce": { - "1": { - "version": 1, - "hash": "5589c00d127d9eb1f5ccf3a16def8219737784d57bb3bf9be5cb6d83325ef436", - "action": "add" - } - }, - "EmptyInputPolicy": { - "1": { - "version": 1, - "hash": "7ef81cfd223be0064600e1503f8b04bafc16385e27730e9319466e68a077c68b", - "action": "add" - } - }, - "UserPolicy": { - "1": { - "version": 1, - "hash": "74373bb71a334f4dcf77623ae10ff5b1c7e5b3006f65f2051ffb1e01f422f982", - "action": "add" - } - }, - "SubmitUserPolicy": { - "1": { - "version": 1, - "hash": "ec4e808eb39613bcdbbbf9ffb3267612084a9d99880a2f3bee3ef32d46329c02", - "action": "add" - } - }, - "UserCodeStatusCollection": { - "1": { - "version": 1, - "hash": "735ecf2d4abb1e7d19b2e751d880f32b01ce267ba10e417ef1b440be3d94d8f1", - "action": "add" - } - }, - "UserCode": { - "1": { - "version": 1, - "hash": "3bcd14413b9c4fbde7c5612c2ed713518340280b5cff89cf2aaaf1c77c4037a8", - "action": "add" - } - }, - "SubmitUserCode": { - "1": { - "version": 1, - "hash": "d2bb8cfe12f070b4adafded78ce01900c5409bd83f055f94b1e285745ef65a76", - "action": "add" - } - }, - "UserCodeExecutionResult": { - "1": { - "version": 1, - "hash": "1f4cbc62caac4dd193f427306405dc7a099ae744bea5830cf57149ce71c1e589", - "action": "add" - } - }, - "UserCodeExecutionOutput": { - "1": { - "version": 1, - "hash": "c1d53300a39dbbb437d7d5a1257bd175a067b1065f4099a0938fac7540035258", - "action": "add" - }, - "2": { - "version": 2, - "hash": "3e104e39b4ab53c950e61e4f7e92ce935cf96a5100de301de9bf297eb7e5787e", - "action": "add" - } - }, - "CodeHistory": { - "1": { - "version": 1, - "hash": "e3ef5346f108257828f364d22b12d9311812c9cf843200afef5dc4d9302f9b21", - "action": "add" - } - }, - "CodeHistoryView": { - "1": { - "version": 1, - "hash": "8b8b97d334b51d1ce0a9efab722411ff25caa3f12be319105954497e0a306eb2", - "action": "add" - } - }, - "CodeHistoriesDict": { - "1": { - "version": 1, - "hash": "01d7dcd4b21525a06e4484d8699a4a34a5c84f1f6026ec55e32eb30412742601", - "action": "add" - } - }, - "UsersCodeHistoriesDict": { - "1": { - "version": 1, - "hash": "4ed8b83973258ea19a1f91feb2590ff73b801be86f4296cc3db48f6929ff784c", - "action": "add" - } - }, - "BlobFile": { - "1": { - "version": 1, - "hash": "d99239100f1cb0b73c69b2ad7cab01a06909cc3a4976ba2b3b67cf6fe5e2f516", - "action": "add" - } - }, - "BlobFileOBject": { - "1": { - "version": 1, - "hash": "6c40dab2c8d2220d4fff7cc653d76cc026a856db7e2b5713b6341e255adc7ea2", - "action": "add" - } - }, - "SecureFilePathLocation": { - "1": { - "version": 1, - "hash": 
"ea5978b98d7773d221665b450454c9130c103a5c850669a0acd620607cd614b7", - "action": "add" - } - }, - "SeaweedSecureFilePathLocation": { - "1": { - "version": 1, - "hash": "3fc9bfc8c1b1cf660c9747e8c1fe3eb2220e78d4e3b5d6b5c5f29a07a77ebf3e", - "action": "add" - } - }, - "AzureSecureFilePathLocation": { - "1": { - "version": 1, - "hash": "090a9e962eeb655586ee966c5651d8996363969818a38f9a486fd64d33047e05", - "action": "add" - } - }, - "BlobStorageEntry": { - "1": { - "version": 1, - "hash": "afdc6a1d8a24b1ee1ed9d3e79f5bac64b4f0d9d36800f07f10be0b896470345f", - "action": "add" - } - }, - "BlobStorageMetadata": { - "1": { - "version": 1, - "hash": "9d4b61ac4ea1910c2f7c767a50a6a52544a24663548f069e79bd906f11b538e4", - "action": "add" - } - }, - "CreateBlobStorageEntry": { - "1": { - "version": 1, - "hash": "ffc3cbfeade67d074dc5bf7d655a1eb8c83630076028a72b3cc4548f3b413e14", - "action": "add" - } - }, - "SyftObjectMigrationState": { - "1": { - "version": 1, - "hash": "ee83315828551f18904bab18e0cac48896493620561215b04cc448e6ce5834af", - "action": "add" - } - }, - "StoreMetadata": { - "1": { - "version": 1, - "hash": "8de9a22a2765ef976bc161cb0704347d30350c085da8c8ffa876065cfca3e5fd", - "action": "add" - } - }, - "MigrationData": { - "1": { - "version": 1, - "hash": "cb96b8c8413609e1224341d1b0dd1efb08387c0ff7b0ff65eba36c0b104c9ed1", - "action": "add" - }, - "2": { - "version": 2, - "hash": "1d1b14c196221ecf6d644d7dcaa32ac9e90361b2687fa83161ff399ebc6df1bd", - "action": "add" - } - }, - "BlobRetrieval": { - "1": { - "version": 1, - "hash": "c422c74b89a9349742acaa848566fe18bfef1a83333458b858c074baed37a859", - "action": "add" - } - }, - "SyftObjectRetrieval": { - "1": { - "version": 1, - "hash": "b2b62447445adc4cd0b77ab59d6fa56624dd316fb50281e570daad07556b6db2", - "action": "add" - } - }, - "BlobRetrievalByURL": { - "1": { - "version": 1, - "hash": "4db0e3b7a6334d3835356d8393866711e243e360af25a95f3cc4066f032404b5", - "action": "add" - } - }, - "BlobDeposit": { - "1": { - "version": 1, - "hash": "6eb5cc57dc763126bfc6ec5a2b79d02e77eadf9d9efb1888a5c366b7799c1c24", - "action": "add" - } - }, - "OnDiskBlobDeposit": { - "1": { - "version": 1, - "hash": "817bf1bee4a35bfa1cd25d6779a10d8d180b1b3f1e837952f81f48b9411d1970", - "action": "add" - } - }, - "RemoteConfig": { - "1": { - "version": 1, - "hash": "179d067099a178d748c6d9a0477e8de7c3b55577439669eca7150258f2409567", - "action": "add" - } - }, - "AzureRemoteConfig": { - "1": { - "version": 1, - "hash": "a143811fec0da5fd881e927643ef667c91c78a2c90519cf88da7da20738bd187", - "action": "add" - } - }, - "SeaweedFSBlobDeposit": { - "1": { - "version": 1, - "hash": "febeb2a2ce81aa2c512e4c6b611b582984042aafa0541403d4584662273a166c", - "action": "add" - } - }, - "DictStoreConfig": { - "1": { - "version": 1, - "hash": "2e1365c5535fa51c22eef79f67dd6444789bc829c27881367e3050e06e2ffbfe", - "action": "add" - } - }, - "NumpyArrayObject": { - "1": { - "version": 1, - "hash": "05dd2917b7692b3daf4e7ad083a46fa7ec7a2be8faac8d4a654809189c986443", - "action": "add" - } - }, - "NumpyScalarObject": { - "1": { - "version": 1, - "hash": "8753e5c78270a5cacbf0439447724772f4765351a4a8b58b0a5c416a6b2c8b6e", - "action": "add" - } - }, - "NumpyBoolObject": { - "1": { - "version": 1, - "hash": "331c44f8fa3d0a077f1aaad7313bae2c43b386d04def7b8bedae9fdf7690134d", - "action": "add" - } - }, - "PandasDataframeObject": { - "1": { - "version": 1, - "hash": "5e8018364cea31d5f185a901da4ab89846b02153ee7d041ee8a6d305ece31f90", - "action": "add" - } - }, - "PandasSeriesObject": { - "1": { - "version": 1, - "hash": 
"b8bd482bf16fc7177e9778292cd42f8835b6ced2ce8dc88908b4b8e6d7c7528f", - "action": "add" - } - }, - "Change": { - "1": { - "version": 1, - "hash": "75fb9a5cd4e76b189ebe130a421d3921a0c251947a48bbb92a2ef1c315dc3c16", - "action": "add" - } - }, - "ChangeStatus": { - "1": { - "version": 1, - "hash": "c914a6f7637b555a51b71e8e197e591f7a2e28121e29b5dd586f87e0383d179d", - "action": "add" - } - }, - "ActionStoreChange": { - "1": { - "version": 1, - "hash": "1a803bb08924b49f3114fd46e0e132f819d4d56be5e03a27e9fe90947ca26e85", - "action": "add" - } - }, - "CreateCustomImageChange": { - "1": { - "version": 1, - "hash": "c3dbea3f49979fdcc517c0d13cd02739ca2fe86b370c42496a224f142ae31562", - "action": "add" - } - }, - "CreateCustomWorkerPoolChange": { - "1": { - "version": 1, - "hash": "0355793dd58b364dcb84fff29714b6a26446bead3ba95c6d75e3200008e580f4", - "action": "add" - } - }, - "Request": { - "1": { - "version": 1, - "hash": "1d69f5f0074114f99aa29c5ee77cb20b9151e5b50e77b026f11c3632a12efadf", - "action": "add" - } - }, - "RequestInfo": { - "1": { - "version": 1, - "hash": "779562547744ebed64548f8021647292604fdf4256bf79685dfa14a1e56cc27b", - "action": "add" - } - }, - "RequestInfoFilter": { - "1": { - "version": 1, - "hash": "bb881a003032f4676321218d7cd09580f4d64fccaa1cf9e118fdcd5c73c3d3a8", - "action": "add" - } - }, - "SubmitRequest": { - "1": { - "version": 1, - "hash": "6c38b6ffd0a6f7442746e68b9ace7b21cb1dca7d2031929db5f9a302a280403f", - "action": "add" - } - }, - "ObjectMutation": { - "1": { - "version": 1, - "hash": "ce88096760ce9334599c8194ec97b0a1470651ad680d9d21b8826a0df0af2a36", - "action": "add" - } - }, - "EnumMutation": { - "1": { - "version": 1, - "hash": "5173fda73df17a344eb663b7692cca48bd46bf1773455439836b852cd165448c", - "action": "add" - } - }, - "UserCodeStatusChange": { - "1": { - "version": 1, - "hash": "89aaf7f1368c782e3a1b9e79988877f6eaa05ab84365f7d321b757fde7fe86e7", - "action": "add" - } - }, - "SyncedUserCodeStatusChange": { - "1": { - "version": 1, - "hash": "d9ad2d341eb645bd50d06330cd30fd4c266f93e37b9f5391d58b78365fc440e6", - "action": "add" - } - }, - "TwinAPIContextView": { - "1": { - "version": 1, - "hash": "e099eef32cb3a8a806cbdc54cc7fca96bed3d60344bd571163ec049db407938b", - "action": "add" - } - }, - "CustomAPIView": { - "1": { - "version": 1, - "hash": "769e96bebd05736ab860591670fb6da19406239b0104ddc71bd092a134335146", - "action": "add" - } - }, - "CustomApiEndpoint": { - "1": { - "version": 1, - "hash": "ec4a217585336d1b59c93c18570443a63f4fbb24d2c088fbacf80bcf389d23e8", - "action": "add" - } - }, - "PrivateAPIEndpoint": { - "1": { - "version": 1, - "hash": "6d7d143432c2811c520ab6dade005ba40173b590e5c676be04f5921b970ef938", - "action": "add" - } - }, - "PublicAPIEndpoint": { - "1": { - "version": 1, - "hash": "3bf51fc33aa8feb1abc9d0ef792e8889da31a57050430e0bd8e17f2065ff8734", - "action": "add" - } - }, - "UpdateTwinAPIEndpoint": { - "1": { - "version": 1, - "hash": "851e59412716e73c7f70a696619e0b375ce136b43f6fe2ea784747091caba5d8", - "action": "add" - } - }, - "CreateTwinAPIEndpoint": { - "1": { - "version": 1, - "hash": "3d0b84dae95ebcc6647b5aabe54e65b3c6bf957665fde57d8037806a4aac13be", - "action": "add" - } - }, - "TwinAPIEndpoint": { - "1": { - "version": 1, - "hash": "d1947b8f9c80d6c9b443e5a9f0758afa8849a5f12b9a511feefd7e4f82c374f4", - "action": "add" - } - }, - "SyncState": { - "1": { - "version": 1, - "hash": "9a3f0bb973858b55bc766c9770c4d9abcc817898f797d94a89938650c0c67868", - "action": "add" - } - }, - "WorkerSettings": { - "1": { - "version": 1, - "hash": 
"dca33003904a71688e5b07db65f8833eb4de8135aade7154076b8eafbb94d26b", - "action": "add" - } - }, - "HTTPServerRoute": { - "1": { - "version": 1, - "hash": "938245604a9c7e50001299afff5b669b2548364e356fed22a22780497831bf81", - "action": "add" - } - }, - "PythonServerRoute": { - "1": { - "version": 1, - "hash": "a068d8f942d55ecb6d45af88a27c6ebf208584275bf589cbc308df3f774ab9a9", - "action": "add" - } - }, - "VeilidServerRoute": { - "1": { - "version": 1, - "hash": "e676bc165601d2ede69707a4b6168ed4674f3f98887026d098a2dd4da4dfd097", - "action": "add" - } - }, - "ServerPeer": { - "1": { - "version": 1, - "hash": "0d5f252018e324ea0d2dcb5c2ad8bd15707220565fce4f14de7f63a8f9e4391b", - "action": "add" - } - }, - "ServerPeerUpdate": { - "1": { - "version": 1, - "hash": "0b854b57db7a18118c1fd8f31495b2ba4eeb9fbe4f24c631ff112418a94570d3", - "action": "add" - } - }, - "AssociationRequestChange": { - "1": { - "version": 1, - "hash": "0134ac0002879c85fc9ddb06bed6306a8905c8434b0a40d3a96ce24a7bd4da90", - "action": "add" - } - }, - "QueueItem": { - "1": { - "version": 1, - "hash": "1db212c46b6c56ccc5579cfe2141b693f0cd9286e2ede71210393e8455379bf1", - "action": "add" - } - }, - "ActionQueueItem": { - "1": { - "version": 1, - "hash": "396d579dfc2e2b36b9fbed2f204bffcca1bea7ee2db7175045dd3328ebf08718", - "action": "add" - } - }, - "APIEndpointQueueItem": { - "1": { - "version": 1, - "hash": "f04b3990a8d29c116d301e70df54d58f188895307a411dc13a666ff764ffd8dd", - "action": "add" - } - }, - "ZMQClientConfig": { - "1": { - "version": 1, - "hash": "36ee8f75067d5144f0ed062cdc79466caae16b7a128231d89b6b430174843bde", - "action": "add" - } - }, - "SQLiteStoreConfig": { - "1": { - "version": 1, - "hash": "ad062a5f863ae84683867d2a6a5e1d4420c010a64b88bc7b392106e33d71ac03", - "action": "add" - } - }, - "ProjectEvent": { - "1": { - "version": 1, - "hash": "dc0486c52daebd5e98c2b3b03ffd9a9a14bc3d86d8dc0c23e41ebf6c31fe2ffb", - "action": "add" - } - }, - "ProjectThreadMessage": { - "1": { - "version": 1, - "hash": "99256d7592577d1e37df94a06eabc0a287f2d79e144c51fd719315e278edb46d", - "action": "add" - } - }, - "ProjectMessage": { - "1": { - "version": 1, - "hash": "b5004b6354f71b19c81dd5f4b20bf446e0b959f5608a22707e96b944dd8175b0", - "action": "add" - } - }, - "ProjectRequestResponse": { - "1": { - "version": 1, - "hash": "52162a8a779a4a301d8755691bf4cf994c86b9f650f9e8c8a923b44e635b1bc0", - "action": "add" - } - }, - "ProjectRequest": { - "1": { - "version": 1, - "hash": "dc684135d5a5a48e5fc7988598c1e6e0de76cf1c5995f1c283fcf63d0eb4d24f", - "action": "add" - } - }, - "AnswerProjectPoll": { - "1": { - "version": 1, - "hash": "c83d83a5ba6cc034d5061df200b3f1d029aa770b1e13dbef959bb1790323dc6e", - "action": "add" - } - }, - "ProjectPoll": { - "1": { - "version": 1, - "hash": "ecf69b3b324e0bee9c82295796d44c4e8f796496cdc9db6d4302c2f160566466", - "action": "add" - } - }, - "Project": { - "1": { - "version": 1, - "hash": "de86a1163ddbcd1cc3cc2b1b5dfcb85a8ad9f9d4bbc759c2b1f92a0d0a2ff184", - "action": "add" - } - }, - "ProjectSubmit": { - "1": { - "version": 1, - "hash": "7555ba11ee5a814dcd9c45647300020f7359efc1081559940990cbd745936cac", - "action": "add" - } - }, - "Plan": { - "1": { - "version": 1, - "hash": "ed05cb87aec832098fc464ac36cd6bceaab705463d0d2fa1b2d8e1ccc510018c", - "action": "add" - } - }, - "EnclaveMetadata": { - "1": { - "version": 1, - "hash": "8d2dfafa01ec909c080a790cf15a8fc78e00382d3bfe6207098ceb25a60b9c53", - "action": "add" - } - } - } + "1": { + "release_name": "0.9.1.json" } } diff --git 
a/packages/syft/src/syft/protocol/releases/0.9.1.json b/packages/syft/src/syft/protocol/releases/0.9.1.json new file mode 100644 index 00000000000..9c33a5d3a88 --- /dev/null +++ b/packages/syft/src/syft/protocol/releases/0.9.1.json @@ -0,0 +1,1178 @@ +{ + "1": { + "object_versions": { + "SyftObjectVersioned": { + "1": { + "version": 1, + "hash": "7c842dcdbb57e2528ffa690ea18c19fff3c8a591811d40cad2b19be3100e2ff4", + "action": "add" + } + }, + "BaseDateTime": { + "1": { + "version": 1, + "hash": "614db484b1950be729902b1861bd3a7b33899176507c61cef11dc0d44611cfd3", + "action": "add" + } + }, + "SyftObject": { + "1": { + "version": 1, + "hash": "bb70d874355988908d3a92a3941d6613a6995a4850be3b6a0147f4d387724406", + "action": "add" + } + }, + "PartialSyftObject": { + "1": { + "version": 1, + "hash": "19a995fcc2833f4fab24584fd99b71a80c2ef1f13c06f83af79e4482846b1656", + "action": "add" + } + }, + "ServerMetadata": { + "1": { + "version": 1, + "hash": "1691c7667eca86b20c4189e90ce4e643dd41fd3682cdb69c6308878f2a6f135c", + "action": "add" + } + }, + "StoreConfig": { + "1": { + "version": 1, + "hash": "a9997fce6a8a0ed2884c58b8eb9382f8554bdd18fff61f8bf0451945bcff12c7", + "action": "add" + } + }, + "MongoDict": { + "1": { + "version": 1, + "hash": "57e36f57eed75e62b29e2bac1295035a9bf2c0e3c56719dac24cb6cc685be00b", + "action": "add" + } + }, + "MongoStoreConfig": { + "1": { + "version": 1, + "hash": "53342b27d34165b7e2699f8e7ad70d13d125875e6a75e8fa18f5796428f41036", + "action": "add" + } + }, + "LinkedObject": { + "1": { + "version": 1, + "hash": "d80f5ac7f51a9383be1a3cb334d56ae50e49733ed3199f3b6b5d6febd9de410b", + "action": "add" + } + }, + "BaseConfig": { + "1": { + "version": 1, + "hash": "10bd7566041d0f0a3aa295367785fdcc2c5bbf0ded984ac9230754f37496a6a7", + "action": "add" + }, + "2": { + "version": 2, + "hash": "890d2879ac44611db9b88ba9334a721130d0ac3aa18a303fa9e4081f14b9b8c7", + "action": "add" + } + }, + "ServiceConfig": { + "1": { + "version": 1, + "hash": "28af8a296f5ff63de50438277eaa1f4380682e6aca9f2ca28320d7a444825e88", + "action": "add" + }, + "2": { + "version": 2, + "hash": "93dfab144e0b0884c602358b3a9ce889bb29ab96e3b4adcfe3cef47a31694a9a", + "action": "add" + } + }, + "LibConfig": { + "1": { + "version": 1, + "hash": "ee8f0e3f6aae81948d72e30226645e8eb5d312a6770411a1edca748168c467c0", + "action": "add" + }, + "2": { + "version": 2, + "hash": "a8a78a8d726ee9e79f95614f3d0fa5b85edc6fce7be7651715208669be93e0e3", + "action": "add" + } + }, + "APIEndpoint": { + "1": { + "version": 1, + "hash": "faa1cf9336a0d1233868c8c57745ff38c0be60399dc1acd0c0e8dd440e405dbd", + "action": "add" + } + }, + "LibEndpoint": { + "1": { + "version": 1, + "hash": "a585c83a33a019d363ae5a0c6d4197193654307c19a4829dfbf8a8cfd2c1842a", + "action": "add" + } + }, + "SignedSyftAPICall": { + "1": { + "version": 1, + "hash": "2f959455f7130f4e59360b8aa58f19785b76eaa0f8a5a9188a6cbf32b31311ca", + "action": "add" + } + }, + "SyftAPICall": { + "1": { + "version": 1, + "hash": "59e89e7b9ea30deaed64d1ffd9bc0769b999d3082b305428432c1f5be36c6343", + "action": "add" + } + }, + "SyftAPIData": { + "1": { + "version": 1, + "hash": "820b279c581cafd9bb5009702d4e3db22ec3a3156676426304b9038dad260a24", + "action": "add" + } + }, + "SyftAPI": { + "1": { + "version": 1, + "hash": "cc13ab058ee36748c14b0d4bd9b9e894c7566fff09cfa4170b3eece520169f15", + "action": "add" + } + }, + "User": { + "1": { + "version": 1, + "hash": "2df4b68182c558dba5485a8a6867acf2a5c341b249ad67373a504098aa8c4343", + "action": "add" + }, + "2": { + "version": 2, + "hash": 
"af6fb5b2e1606e97838f4a60f0536ad95db606d455e94acbd1977df866608a2c", + "action": "add" + } + }, + "UserUpdate": { + "1": { + "version": 1, + "hash": "1bf6707c69b809c804fb939c7c37d787c2f6889508a4bec37d24221af2eb777a", + "action": "add" + } + }, + "UserCreate": { + "1": { + "version": 1, + "hash": "49d6087e2309ba59987f3126e286e74b3a66492a08ad82fa507ea17d52ce78e3", + "action": "add" + } + }, + "UserSearch": { + "1": { + "version": 1, + "hash": "9ac946338cca68d00d1696a57943442f062628ec3daf53077d0bdd3f72cd9fa0", + "action": "add" + } + }, + "UserView": { + "1": { + "version": 1, + "hash": "0b52d758e31d5889c9cd88afb467aae4a74e34a5276924e07012243c34d300fe", + "action": "add" + } + }, + "UserViewPage": { + "1": { + "version": 1, + "hash": "1cd6528d02ec180f080d5c35f0da760d8a59af9da7baaa9c17c1c7cedcc858fa", + "action": "add" + } + }, + "UserPrivateKey": { + "1": { + "version": 1, + "hash": "4817d8147aba94373f320dcd90e65f097cf6e5a2ef353aa8520e23128d522b5d", + "action": "add" + } + }, + "DateTime": { + "1": { + "version": 1, + "hash": "394abb554114ead4d63c36e3fe83ac018dead4b21a8465174009577c46d54c58", + "action": "add" + } + }, + "ReplyNotification": { + "1": { + "version": 1, + "hash": "84102dfc59d711b03c2f3d3a6ecaca000b6835f1bbdd9af801057f7aacb5f1d0", + "action": "add" + } + }, + "Notification": { + "1": { + "version": 1, + "hash": "af4cb232bff390c431e399975f048b34da7e940ace8b23b940a3b398c91c5326", + "action": "add" + } + }, + "CreateNotification": { + "1": { + "version": 1, + "hash": "7e426c946b7d5db6f9427960ec16042f3018091d835ca5966f3568c324a2ab53", + "action": "add" + } + }, + "UserNotificationActivity": { + "1": { + "version": 1, + "hash": "422fd01c6d9af38688a9982abd34e80794a1f6ddd444cca225d77f49189847a9", + "action": "add" + } + }, + "NotificationPreferences": { + "1": { + "version": 1, + "hash": "a42f06b367e7c6cbabcbf3cfcc84d1ca0873e457d972ebd060e87c9d6185f62b", + "action": "add" + } + }, + "NotifierSettings": { + "1": { + "version": 1, + "hash": "65c8ab814d35fac32f68d3000756692592cc59940f30e3af3dcdfa2328755b9d", + "action": "add" + }, + "2": { + "version": 2, + "hash": "be8b52597fc628d1b7cd22b776ee81416e1adbb04a45188778eb0e32ed1416b4", + "action": "add" + } + }, + "SyftImageRegistry": { + "1": { + "version": 1, + "hash": "67e18903e41cba1afe136adf29d404b63ec04fea6e928abb2533ec4fa52b246b", + "action": "add" + } + }, + "SyftWorkerImage": { + "1": { + "version": 1, + "hash": "44da7badfbe573d5403d3ab78c077f17dbefc560b81fdf927b671815be047441", + "action": "add" + } + }, + "SyftWorker": { + "1": { + "version": 1, + "hash": "9d897f6039eabe48dfa8e8d5c5cdcb283b0375b4c64571b457777eaaf3fb1920", + "action": "add" + } + }, + "WorkerPool": { + "1": { + "version": 1, + "hash": "16efc5dd2596ae744fd611c8f46af9eaec1bd5729eb20e85e9fd2f31df402564", + "action": "add" + } + }, + "MarkdownDescription": { + "1": { + "version": 1, + "hash": "31a73f8824cad1636a55d14b6a1074cdb071d0d4e16e86baaa3d4f63a7e80134", + "action": "add" + } + }, + "HTMLObject": { + "1": { + "version": 1, + "hash": "97f2e93f5ceaa88015047186f66a17ff13df2a6b7925b41331f9e19d5a515a9f", + "action": "add" + } + }, + "PwdTokenResetConfig": { + "1": { + "version": 1, + "hash": "0415a272428f22add4896c64aa9f29c8c1d35619e2433da6564eb5f1faff39ac", + "action": "add" + } + }, + "ServerSettingsUpdate": { + "1": { + "version": 1, + "hash": "1e4260ad879ae80728c3ffae2cd1d48759abd51f9d0960d4b25855cdbb4c506b", + "action": "add" + }, + "2": { + "version": 2, + "hash": "23b2716e9dceca667e228408e2416c82f11821e322e5bccf1f83406f3d09abdc", + "action": "add" + }, + "3": { + 
"version": 3, + "hash": "335c7946f2e52d09c7b26f511120cd340717c74c5cca9107e84f839da993c55c", + "action": "add" + }, + "4": { + "version": 4, + "hash": "8d7a41992c39c287fcb46383bed429ce75d3c9524ced8c86b88c26dd0232e2fe", + "action": "add" + } + }, + "ServerSettings": { + "1": { + "version": 1, + "hash": "5a1e7470cbeaaae5b80ac9beecb743734f7e4e42d429a09ea8defa569a5ddff1", + "action": "add" + }, + "2": { + "version": 2, + "hash": "7727ea54e494dc9deaa0d1bd38ac8a6180bc192b74eec5659adbc338a19e21f5", + "action": "add" + }, + "3": { + "version": 3, + "hash": "997667e1cba22d151857aacc2caba6b1ca73c1648adbd03461dc74a0c0c372b3", + "action": "add" + }, + "4": { + "version": 4, + "hash": "b8067777967a0e06733433e179e549caaf501419d62f7e8474ee33b839e3890d", + "action": "add" + } + }, + "HTTPConnection": { + "1": { + "version": 1, + "hash": "bf10f81646c71069c76292b1237b4a3de1e507264392c5c591d067636ce6fb46", + "action": "add" + } + }, + "PythonConnection": { + "1": { + "version": 1, + "hash": "28010778b5e3463ff6960a0e2224818de00bc7b5e6f892192e02e399ccbe18b5", + "action": "add" + } + }, + "ActionDataEmpty": { + "1": { + "version": 1, + "hash": "e0e4a5cf18d05b6b747addc048515c6f2a5f35f0766ebaee96d898cb971e1c5b", + "action": "add" + } + }, + "ObjectNotReady": { + "1": { + "version": 1, + "hash": "8cf471e205cd0893d6aae5f0227d14db7df1c9698da08a3ab991f59132d17fe9", + "action": "add" + } + }, + "ActionDataLink": { + "1": { + "version": 1, + "hash": "3469478343439e411b761c270eec63eb3d533e459ad72d0965158c3a6cdf3b9a", + "action": "add" + } + }, + "Action": { + "1": { + "version": 1, + "hash": "021826d7c6f69bd0283d025d40661f3ffbeba8810ca94de01344f6afbdae62cd", + "action": "add" + } + }, + "ActionObject": { + "1": { + "version": 1, + "hash": "0a5f4bc343cb114a251f06686ecdbb59d74bfb3d29a098b176699deb35a1e683", + "action": "add" + } + }, + "AnyActionObject": { + "1": { + "version": 1, + "hash": "b3c44c7788c59c03fa1baeec656c2ca6e633f4cbd4b23ff7ece6ee94c38449f0", + "action": "add" + } + }, + "CustomEndpointActionObject": { + "1": { + "version": 1, + "hash": "c7addbaf2777707f3e91e5c1e092343476cd22efc4ec8617f39ccf76e61a5a14", + "action": "add" + }, + "2": { + "version": 2, + "hash": "846ba36e8737a1bec16853c9de54c4948450009278e0b76fe7e3355ef9e70089", + "action": "add" + } + }, + "DataSubject": { + "1": { + "version": 1, + "hash": "582cdf9e82b5d6915b7f09f7c0d5f08328b11a2ce9b0198e5083f1672c2e2bf5", + "action": "add" + } + }, + "DataSubjectCreate": { + "1": { + "version": 1, + "hash": "5a8423c2690d55f425bfeecc87cd4a797a75d88ebb5fbda754d4f269b62d2ceb", + "action": "add" + } + }, + "DataSubjectMemberRelationship": { + "1": { + "version": 1, + "hash": "0810483ea76ea10c8f286c6035dc0b2085291f345183be50c179f3a05a577110", + "action": "add" + } + }, + "Contributor": { + "1": { + "version": 1, + "hash": "30c32bd44098f00e0b15496be441763b6e50af8b12d3d2bef33aca6287193876", + "action": "add" + } + }, + "Asset": { + "1": { + "version": 1, + "hash": "000abc78719611c106295cf12b1690b7e5411dc1bb9db9d4afd22956da90d1f4", + "action": "add" + } + }, + "CreateAsset": { + "1": { + "version": 1, + "hash": "357d52576cb12b24fb3980342bb49a562b065c0e4419e87d34176340628c7309", + "action": "add" + } + }, + "Dataset": { + "1": { + "version": 1, + "hash": "0ca6b0b4a3aebb2c8f351668075b44951bb20d1e23a779b82109124f334ce3a4", + "action": "add" + } + }, + "DatasetPageView": { + "1": { + "version": 1, + "hash": "aa0dd69637281b80d5523b4409a2c7e89db114c9fe79c858063c6dadff8977d1", + "action": "add" + }, + "2": { + "version": 2, + "hash": 
"be1ca6dcd0b3aa0481ce5dce737e78432d06a78ad0c701aaf136be407c798352", + "action": "add" + } + }, + "CreateDataset": { + "1": { + "version": 1, + "hash": "7e02dfa89540c3dbebacbb13810d95cdc4e36db31d56cffed7ab54abe25716c9", + "action": "add" + } + }, + "SyftLog": { + "1": { + "version": 1, + "hash": "1bcd71e5bf3f0db3bba0996f33b6b2bde3489b9c71f11e6b30c3495c76a8f53f", + "action": "add" + } + }, + "JobItem": { + "1": { + "version": 1, + "hash": "0b32277b7d3b9bdc14a2a51cc9005f8254e7f7b6ec059ddcccbcd681a807afb6", + "action": "add" + }, + "2": { + "version": 2, + "hash": "b087d0c62b7d304c6ca80e4fb0e8a7f2a444be8f8cba57490dc09aeb98033105", + "action": "add" + } + }, + "ExecutionOutput": { + "1": { + "version": 1, + "hash": "e36c71685edf5276a3427cb6749550486d3a177c1dcf73dd337ab2a73c0ce6b5", + "action": "add" + } + }, + "TwinObject": { + "1": { + "version": 1, + "hash": "4f31243fb348dbb083579afd6f638d75af010cb53d19bfba59b74afff41ccbbb", + "action": "add" + } + }, + "PolicyRule": { + "1": { + "version": 1, + "hash": "44d1ca1db97be46f66558aa1a729ff31bf8e113c6a913b11aedf9d6b6ad5b7b5", + "action": "add" + } + }, + "CreatePolicyRule": { + "1": { + "version": 1, + "hash": "342bb723526d445151a0435f57d251f4c1219f8ae7cca3e8e9fce52e2ee1b8b1", + "action": "add" + } + }, + "CreatePolicyRuleConstant": { + "1": { + "version": 1, + "hash": "78b54832cb0468a87013bc36bc11d4759874ca1b5065a1b711f1e5ef5d94c2df", + "action": "add" + } + }, + "Matches": { + "1": { + "version": 1, + "hash": "dd6d91ddb2ec5eaf60be2b0899ecfdb9a15f7904aa39d2f4d9bb2d7b793040e6", + "action": "add" + } + }, + "PreFill": { + "1": { + "version": 1, + "hash": "c7aefb11dc4c4569dcd1e6988371047a32a8be1b32ad46d12adba419a19769ad", + "action": "add" + } + }, + "UserOwned": { + "1": { + "version": 1, + "hash": "c8738dc3d8c2a5ef461b85a0467c3dff53dab16b54a4d12b44b1477906aef51d", + "action": "add" + } + }, + "MixedInputPolicy": { + "1": { + "version": 1, + "hash": "37bb12d950518d9579c8ec7c4cc22ac731ea82caf8c1370dd0b0a82b46462dde", + "action": "add" + } + }, + "ExactMatch": { + "1": { + "version": 1, + "hash": "5eb37edbf5e451d942e599247f3eaed923c1fe9d91eefdba02bf06503f6cc08d", + "action": "add" + } + }, + "OutputHistory": { + "1": { + "version": 1, + "hash": "9366db79d131f8c65e5a4ff12c90e2aa0c11e302debe06e46eeb93b26e2aaf61", + "action": "add" + } + }, + "OutputPolicyExecuteCount": { + "1": { + "version": 1, + "hash": "2a77e5ed5c7b0391147562651ad4061e20b11745c191fbc34cb549da37ba72dd", + "action": "add" + } + }, + "OutputPolicyExecuteOnce": { + "1": { + "version": 1, + "hash": "5589c00d127d9eb1f5ccf3a16def8219737784d57bb3bf9be5cb6d83325ef436", + "action": "add" + } + }, + "EmptyInputPolicy": { + "1": { + "version": 1, + "hash": "7ef81cfd223be0064600e1503f8b04bafc16385e27730e9319466e68a077c68b", + "action": "add" + } + }, + "UserPolicy": { + "1": { + "version": 1, + "hash": "74373bb71a334f4dcf77623ae10ff5b1c7e5b3006f65f2051ffb1e01f422f982", + "action": "add" + } + }, + "SubmitUserPolicy": { + "1": { + "version": 1, + "hash": "ec4e808eb39613bcdbbbf9ffb3267612084a9d99880a2f3bee3ef32d46329c02", + "action": "add" + } + }, + "UserCodeStatusCollection": { + "1": { + "version": 1, + "hash": "735ecf2d4abb1e7d19b2e751d880f32b01ce267ba10e417ef1b440be3d94d8f1", + "action": "add" + } + }, + "UserCode": { + "1": { + "version": 1, + "hash": "3bcd14413b9c4fbde7c5612c2ed713518340280b5cff89cf2aaaf1c77c4037a8", + "action": "add" + } + }, + "SubmitUserCode": { + "1": { + "version": 1, + "hash": "d2bb8cfe12f070b4adafded78ce01900c5409bd83f055f94b1e285745ef65a76", + "action": "add" + } + 
}, + "UserCodeExecutionResult": { + "1": { + "version": 1, + "hash": "1f4cbc62caac4dd193f427306405dc7a099ae744bea5830cf57149ce71c1e589", + "action": "add" + } + }, + "UserCodeExecutionOutput": { + "1": { + "version": 1, + "hash": "c1d53300a39dbbb437d7d5a1257bd175a067b1065f4099a0938fac7540035258", + "action": "add" + }, + "2": { + "version": 2, + "hash": "3e104e39b4ab53c950e61e4f7e92ce935cf96a5100de301de9bf297eb7e5787e", + "action": "add" + } + }, + "CodeHistory": { + "1": { + "version": 1, + "hash": "e3ef5346f108257828f364d22b12d9311812c9cf843200afef5dc4d9302f9b21", + "action": "add" + } + }, + "CodeHistoryView": { + "1": { + "version": 1, + "hash": "8b8b97d334b51d1ce0a9efab722411ff25caa3f12be319105954497e0a306eb2", + "action": "add" + } + }, + "CodeHistoriesDict": { + "1": { + "version": 1, + "hash": "01d7dcd4b21525a06e4484d8699a4a34a5c84f1f6026ec55e32eb30412742601", + "action": "add" + } + }, + "UsersCodeHistoriesDict": { + "1": { + "version": 1, + "hash": "4ed8b83973258ea19a1f91feb2590ff73b801be86f4296cc3db48f6929ff784c", + "action": "add" + } + }, + "BlobFile": { + "1": { + "version": 1, + "hash": "d99239100f1cb0b73c69b2ad7cab01a06909cc3a4976ba2b3b67cf6fe5e2f516", + "action": "add" + } + }, + "BlobFileOBject": { + "1": { + "version": 1, + "hash": "6c40dab2c8d2220d4fff7cc653d76cc026a856db7e2b5713b6341e255adc7ea2", + "action": "add" + } + }, + "SecureFilePathLocation": { + "1": { + "version": 1, + "hash": "ea5978b98d7773d221665b450454c9130c103a5c850669a0acd620607cd614b7", + "action": "add" + } + }, + "SeaweedSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "3fc9bfc8c1b1cf660c9747e8c1fe3eb2220e78d4e3b5d6b5c5f29a07a77ebf3e", + "action": "add" + } + }, + "AzureSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "090a9e962eeb655586ee966c5651d8996363969818a38f9a486fd64d33047e05", + "action": "add" + } + }, + "BlobStorageEntry": { + "1": { + "version": 1, + "hash": "afdc6a1d8a24b1ee1ed9d3e79f5bac64b4f0d9d36800f07f10be0b896470345f", + "action": "add" + } + }, + "BlobStorageMetadata": { + "1": { + "version": 1, + "hash": "9d4b61ac4ea1910c2f7c767a50a6a52544a24663548f069e79bd906f11b538e4", + "action": "add" + } + }, + "CreateBlobStorageEntry": { + "1": { + "version": 1, + "hash": "ffc3cbfeade67d074dc5bf7d655a1eb8c83630076028a72b3cc4548f3b413e14", + "action": "add" + } + }, + "SyftObjectMigrationState": { + "1": { + "version": 1, + "hash": "ee83315828551f18904bab18e0cac48896493620561215b04cc448e6ce5834af", + "action": "add" + } + }, + "StoreMetadata": { + "1": { + "version": 1, + "hash": "8de9a22a2765ef976bc161cb0704347d30350c085da8c8ffa876065cfca3e5fd", + "action": "add" + } + }, + "MigrationData": { + "1": { + "version": 1, + "hash": "cb96b8c8413609e1224341d1b0dd1efb08387c0ff7b0ff65eba36c0b104c9ed1", + "action": "add" + }, + "2": { + "version": 2, + "hash": "1d1b14c196221ecf6d644d7dcaa32ac9e90361b2687fa83161ff399ebc6df1bd", + "action": "add" + } + }, + "BlobRetrieval": { + "1": { + "version": 1, + "hash": "c422c74b89a9349742acaa848566fe18bfef1a83333458b858c074baed37a859", + "action": "add" + } + }, + "SyftObjectRetrieval": { + "1": { + "version": 1, + "hash": "b2b62447445adc4cd0b77ab59d6fa56624dd316fb50281e570daad07556b6db2", + "action": "add" + } + }, + "BlobRetrievalByURL": { + "1": { + "version": 1, + "hash": "4db0e3b7a6334d3835356d8393866711e243e360af25a95f3cc4066f032404b5", + "action": "add" + } + }, + "BlobDeposit": { + "1": { + "version": 1, + "hash": "6eb5cc57dc763126bfc6ec5a2b79d02e77eadf9d9efb1888a5c366b7799c1c24", + "action": "add" + } + }, + "OnDiskBlobDeposit": 
{ + "1": { + "version": 1, + "hash": "817bf1bee4a35bfa1cd25d6779a10d8d180b1b3f1e837952f81f48b9411d1970", + "action": "add" + } + }, + "RemoteConfig": { + "1": { + "version": 1, + "hash": "179d067099a178d748c6d9a0477e8de7c3b55577439669eca7150258f2409567", + "action": "add" + } + }, + "AzureRemoteConfig": { + "1": { + "version": 1, + "hash": "a143811fec0da5fd881e927643ef667c91c78a2c90519cf88da7da20738bd187", + "action": "add" + } + }, + "SeaweedFSBlobDeposit": { + "1": { + "version": 1, + "hash": "febeb2a2ce81aa2c512e4c6b611b582984042aafa0541403d4584662273a166c", + "action": "add" + } + }, + "DictStoreConfig": { + "1": { + "version": 1, + "hash": "2e1365c5535fa51c22eef79f67dd6444789bc829c27881367e3050e06e2ffbfe", + "action": "add" + } + }, + "NumpyArrayObject": { + "1": { + "version": 1, + "hash": "05dd2917b7692b3daf4e7ad083a46fa7ec7a2be8faac8d4a654809189c986443", + "action": "add" + } + }, + "NumpyScalarObject": { + "1": { + "version": 1, + "hash": "8753e5c78270a5cacbf0439447724772f4765351a4a8b58b0a5c416a6b2c8b6e", + "action": "add" + } + }, + "NumpyBoolObject": { + "1": { + "version": 1, + "hash": "331c44f8fa3d0a077f1aaad7313bae2c43b386d04def7b8bedae9fdf7690134d", + "action": "add" + } + }, + "PandasDataframeObject": { + "1": { + "version": 1, + "hash": "5e8018364cea31d5f185a901da4ab89846b02153ee7d041ee8a6d305ece31f90", + "action": "add" + } + }, + "PandasSeriesObject": { + "1": { + "version": 1, + "hash": "b8bd482bf16fc7177e9778292cd42f8835b6ced2ce8dc88908b4b8e6d7c7528f", + "action": "add" + } + }, + "Change": { + "1": { + "version": 1, + "hash": "75fb9a5cd4e76b189ebe130a421d3921a0c251947a48bbb92a2ef1c315dc3c16", + "action": "add" + } + }, + "ChangeStatus": { + "1": { + "version": 1, + "hash": "c914a6f7637b555a51b71e8e197e591f7a2e28121e29b5dd586f87e0383d179d", + "action": "add" + } + }, + "ActionStoreChange": { + "1": { + "version": 1, + "hash": "1a803bb08924b49f3114fd46e0e132f819d4d56be5e03a27e9fe90947ca26e85", + "action": "add" + } + }, + "CreateCustomImageChange": { + "1": { + "version": 1, + "hash": "c3dbea3f49979fdcc517c0d13cd02739ca2fe86b370c42496a224f142ae31562", + "action": "add" + } + }, + "CreateCustomWorkerPoolChange": { + "1": { + "version": 1, + "hash": "0355793dd58b364dcb84fff29714b6a26446bead3ba95c6d75e3200008e580f4", + "action": "add" + } + }, + "Request": { + "1": { + "version": 1, + "hash": "1d69f5f0074114f99aa29c5ee77cb20b9151e5b50e77b026f11c3632a12efadf", + "action": "add" + } + }, + "RequestInfo": { + "1": { + "version": 1, + "hash": "779562547744ebed64548f8021647292604fdf4256bf79685dfa14a1e56cc27b", + "action": "add" + } + }, + "RequestInfoFilter": { + "1": { + "version": 1, + "hash": "bb881a003032f4676321218d7cd09580f4d64fccaa1cf9e118fdcd5c73c3d3a8", + "action": "add" + } + }, + "SubmitRequest": { + "1": { + "version": 1, + "hash": "6c38b6ffd0a6f7442746e68b9ace7b21cb1dca7d2031929db5f9a302a280403f", + "action": "add" + } + }, + "ObjectMutation": { + "1": { + "version": 1, + "hash": "ce88096760ce9334599c8194ec97b0a1470651ad680d9d21b8826a0df0af2a36", + "action": "add" + } + }, + "EnumMutation": { + "1": { + "version": 1, + "hash": "5173fda73df17a344eb663b7692cca48bd46bf1773455439836b852cd165448c", + "action": "add" + } + }, + "UserCodeStatusChange": { + "1": { + "version": 1, + "hash": "89aaf7f1368c782e3a1b9e79988877f6eaa05ab84365f7d321b757fde7fe86e7", + "action": "add" + } + }, + "SyncedUserCodeStatusChange": { + "1": { + "version": 1, + "hash": "d9ad2d341eb645bd50d06330cd30fd4c266f93e37b9f5391d58b78365fc440e6", + "action": "add" + } + }, + "TwinAPIContextView": { + 
"1": { + "version": 1, + "hash": "e099eef32cb3a8a806cbdc54cc7fca96bed3d60344bd571163ec049db407938b", + "action": "add" + } + }, + "CustomAPIView": { + "1": { + "version": 1, + "hash": "769e96bebd05736ab860591670fb6da19406239b0104ddc71bd092a134335146", + "action": "add" + } + }, + "CustomApiEndpoint": { + "1": { + "version": 1, + "hash": "ec4a217585336d1b59c93c18570443a63f4fbb24d2c088fbacf80bcf389d23e8", + "action": "add" + } + }, + "PrivateAPIEndpoint": { + "1": { + "version": 1, + "hash": "6d7d143432c2811c520ab6dade005ba40173b590e5c676be04f5921b970ef938", + "action": "add" + } + }, + "PublicAPIEndpoint": { + "1": { + "version": 1, + "hash": "3bf51fc33aa8feb1abc9d0ef792e8889da31a57050430e0bd8e17f2065ff8734", + "action": "add" + } + }, + "UpdateTwinAPIEndpoint": { + "1": { + "version": 1, + "hash": "851e59412716e73c7f70a696619e0b375ce136b43f6fe2ea784747091caba5d8", + "action": "add" + } + }, + "CreateTwinAPIEndpoint": { + "1": { + "version": 1, + "hash": "3d0b84dae95ebcc6647b5aabe54e65b3c6bf957665fde57d8037806a4aac13be", + "action": "add" + } + }, + "TwinAPIEndpoint": { + "1": { + "version": 1, + "hash": "d1947b8f9c80d6c9b443e5a9f0758afa8849a5f12b9a511feefd7e4f82c374f4", + "action": "add" + } + }, + "SyncState": { + "1": { + "version": 1, + "hash": "9a3f0bb973858b55bc766c9770c4d9abcc817898f797d94a89938650c0c67868", + "action": "add" + } + }, + "WorkerSettings": { + "1": { + "version": 1, + "hash": "dca33003904a71688e5b07db65f8833eb4de8135aade7154076b8eafbb94d26b", + "action": "add" + } + }, + "HTTPServerRoute": { + "1": { + "version": 1, + "hash": "938245604a9c7e50001299afff5b669b2548364e356fed22a22780497831bf81", + "action": "add" + } + }, + "PythonServerRoute": { + "1": { + "version": 1, + "hash": "a068d8f942d55ecb6d45af88a27c6ebf208584275bf589cbc308df3f774ab9a9", + "action": "add" + } + }, + "VeilidServerRoute": { + "1": { + "version": 1, + "hash": "e676bc165601d2ede69707a4b6168ed4674f3f98887026d098a2dd4da4dfd097", + "action": "add" + } + }, + "ServerPeer": { + "1": { + "version": 1, + "hash": "0d5f252018e324ea0d2dcb5c2ad8bd15707220565fce4f14de7f63a8f9e4391b", + "action": "add" + } + }, + "ServerPeerUpdate": { + "1": { + "version": 1, + "hash": "0b854b57db7a18118c1fd8f31495b2ba4eeb9fbe4f24c631ff112418a94570d3", + "action": "add" + } + }, + "AssociationRequestChange": { + "1": { + "version": 1, + "hash": "0134ac0002879c85fc9ddb06bed6306a8905c8434b0a40d3a96ce24a7bd4da90", + "action": "add" + } + }, + "QueueItem": { + "1": { + "version": 1, + "hash": "1db212c46b6c56ccc5579cfe2141b693f0cd9286e2ede71210393e8455379bf1", + "action": "add" + } + }, + "ActionQueueItem": { + "1": { + "version": 1, + "hash": "396d579dfc2e2b36b9fbed2f204bffcca1bea7ee2db7175045dd3328ebf08718", + "action": "add" + } + }, + "APIEndpointQueueItem": { + "1": { + "version": 1, + "hash": "f04b3990a8d29c116d301e70df54d58f188895307a411dc13a666ff764ffd8dd", + "action": "add" + } + }, + "ZMQClientConfig": { + "1": { + "version": 1, + "hash": "36ee8f75067d5144f0ed062cdc79466caae16b7a128231d89b6b430174843bde", + "action": "add" + } + }, + "SQLiteStoreConfig": { + "1": { + "version": 1, + "hash": "ad062a5f863ae84683867d2a6a5e1d4420c010a64b88bc7b392106e33d71ac03", + "action": "add" + } + }, + "ProjectEvent": { + "1": { + "version": 1, + "hash": "dc0486c52daebd5e98c2b3b03ffd9a9a14bc3d86d8dc0c23e41ebf6c31fe2ffb", + "action": "add" + } + }, + "ProjectThreadMessage": { + "1": { + "version": 1, + "hash": "99256d7592577d1e37df94a06eabc0a287f2d79e144c51fd719315e278edb46d", + "action": "add" + } + }, + "ProjectMessage": { + "1": { + 
"version": 1, + "hash": "b5004b6354f71b19c81dd5f4b20bf446e0b959f5608a22707e96b944dd8175b0", + "action": "add" + } + }, + "ProjectRequestResponse": { + "1": { + "version": 1, + "hash": "52162a8a779a4a301d8755691bf4cf994c86b9f650f9e8c8a923b44e635b1bc0", + "action": "add" + } + }, + "ProjectRequest": { + "1": { + "version": 1, + "hash": "dc684135d5a5a48e5fc7988598c1e6e0de76cf1c5995f1c283fcf63d0eb4d24f", + "action": "add" + } + }, + "AnswerProjectPoll": { + "1": { + "version": 1, + "hash": "c83d83a5ba6cc034d5061df200b3f1d029aa770b1e13dbef959bb1790323dc6e", + "action": "add" + } + }, + "ProjectPoll": { + "1": { + "version": 1, + "hash": "ecf69b3b324e0bee9c82295796d44c4e8f796496cdc9db6d4302c2f160566466", + "action": "add" + } + }, + "Project": { + "1": { + "version": 1, + "hash": "de86a1163ddbcd1cc3cc2b1b5dfcb85a8ad9f9d4bbc759c2b1f92a0d0a2ff184", + "action": "add" + } + }, + "ProjectSubmit": { + "1": { + "version": 1, + "hash": "7555ba11ee5a814dcd9c45647300020f7359efc1081559940990cbd745936cac", + "action": "add" + } + }, + "Plan": { + "1": { + "version": 1, + "hash": "ed05cb87aec832098fc464ac36cd6bceaab705463d0d2fa1b2d8e1ccc510018c", + "action": "add" + } + }, + "EnclaveMetadata": { + "1": { + "version": 1, + "hash": "8d2dfafa01ec909c080a790cf15a8fc78e00382d3bfe6207098ceb25a60b9c53", + "action": "add" + } + } + } + } +} diff --git a/packages/syft/src/syft/server/routes.py b/packages/syft/src/syft/server/routes.py index ec5bedd6d01..a492e999f8e 100644 --- a/packages/syft/src/syft/server/routes.py +++ b/packages/syft/src/syft/server/routes.py @@ -49,8 +49,7 @@ def _get_server_connection(peer_uid: UID) -> ServerConnection: # relative from ..service.network.server_peer import route_to_connection - network_service = worker.get_service("NetworkService") - peer = network_service.stash.get_by_uid(worker.verify_key, peer_uid).unwrap() + peer = worker.network.stash.get_by_uid(worker.verify_key, peer_uid).unwrap() peer_server_route = peer.pick_highest_priority_route() connection = route_to_connection(route=peer_server_route) return connection @@ -168,9 +167,8 @@ def syft_new_api_call( def handle_forgot_password(email: str, server: AbstractServer) -> Response: try: - method = server.get_service_method(UserService.forgot_password) context = UnauthedServiceContext(server=server) - result = method(context=context, email=email) + result = server.services.user.forgot_password(context=context, email=email) except SyftException as e: result = SyftError.from_public_exception(e) @@ -186,9 +184,10 @@ def handle_reset_password( token: str, new_password: str, server: AbstractServer ) -> Response: try: - method = server.get_service_method(UserService.reset_password) context = UnauthedServiceContext(server=server) - result = method(context=context, token=token, new_password=new_password) + result = server.services.user.reset_password( + context=context, token=token, new_password=new_password + ) except SyftException as e: result = SyftError.from_public_exception(e) @@ -206,12 +205,11 @@ def handle_login(email: str, password: str, server: AbstractServer) -> Response: except ValidationError as e: return {"Error": e.json()} - method = server.get_service_method(UserService.exchange_credentials) context = UnauthedServiceContext( server=server, login_credentials=login_credentials ) try: - result = method(context=context).value + result = server.services.user.exchange_credentials(context=context).value if not isinstance(result, UserPrivateKey): response = SyftError(message=f"Incorrect return type: {type(result)}") else: diff 
--git a/packages/syft/src/syft/server/server.py b/packages/syft/src/syft/server/server.py
index 102b34ad99a..b9737bd873c 100644
--- a/packages/syft/src/syft/server/server.py
+++ b/packages/syft/src/syft/server/server.py
@@ -42,8 +42,6 @@
 from ..service.action.action_store import DictActionStore
 from ..service.action.action_store import MongoActionStore
 from ..service.action.action_store import SQLiteActionStore
-from ..service.blob_storage.service import BlobStorageService
-from ..service.code.user_code_service import UserCodeService
 from ..service.code.user_code_stash import UserCodeStash
 from ..service.context import AuthedServiceContext
 from ..service.context import ServerServiceContext
@@ -54,13 +52,13 @@
 from ..service.job.job_stash import JobStatus
 from ..service.job.job_stash import JobType
 from ..service.metadata.server_metadata import ServerMetadata
-from ..service.network.network_service import NetworkService
 from ..service.network.utils import PeerHealthCheckTask
 from ..service.notifier.notifier_service import NotifierService
 from ..service.queue.base_queue import AbstractMessageHandler
 from ..service.queue.base_queue import QueueConsumer
 from ..service.queue.base_queue import QueueProducer
 from ..service.queue.queue import APICallMessageHandler
+from ..service.queue.queue import ConsumerType
 from ..service.queue.queue import QueueManager
 from ..service.queue.queue_stash import APIEndpointQueueItem
 from ..service.queue.queue_stash import ActionQueueItem
@@ -81,12 +79,10 @@
 from ..service.user.user import UserCreate
 from ..service.user.user import UserView
 from ..service.user.user_roles import ServiceRole
-from ..service.user.user_service import UserService
 from ..service.user.user_stash import UserStash
 from ..service.worker.utils import DEFAULT_WORKER_IMAGE_TAG
 from ..service.worker.utils import DEFAULT_WORKER_POOL_NAME
 from ..service.worker.utils import create_default_image
-from ..service.worker.worker_image_service import SyftWorkerImageService
 from ..service.worker.worker_pool import WorkerPool
 from ..service.worker.worker_pool_service import SyftWorkerPoolService
 from ..service.worker.worker_pool_stash import SyftWorkerPoolStash
@@ -338,6 +334,7 @@ def __init__(
         smtp_host: str | None = None,
         association_request_auto_approval: bool = False,
         background_tasks: bool = False,
+        consumer_type: ConsumerType | None = None,
     ):
         # 🟡 TODO 22: change our ENV variable format and default init args to make this
         # less horrible or add some convenience functions
@@ -381,10 +378,15 @@ def __init__(
 
         self.association_request_auto_approval = association_request_auto_approval
 
+        consumer_type = consumer_type or (
+            ConsumerType.Thread
+            if thread_workers
+            else ConsumerType.Process
+        )
         self.queue_config = self.create_queue_config(
             n_consumers=n_consumers,
             create_producer=create_producer,
-            thread_workers=thread_workers,
+            consumer_type=consumer_type,
             queue_port=queue_port,
             queue_config=queue_config,
         )
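The `consumer_type` default added in the hunk above is easy to get wrong: Python's conditional expression binds more loosely than `or`, so the unparenthesized form `consumer_type or ConsumerType.Thread if thread_workers else ConsumerType.Process` parses as `(consumer_type or ConsumerType.Thread) if thread_workers else ConsumerType.Process`, and a caller-supplied `consumer_type` is silently dropped whenever `thread_workers` is false. A two-variable demonstration, with plain strings standing in for the enum:

explicit = "Thread"  # stands in for a caller-supplied ConsumerType
thread_workers = False

# Unparenthesized: parses as (explicit or "Thread") if thread_workers else "Process"
buggy = explicit or "Thread" if thread_workers else "Process"

# Parenthesized: the conditional only fills in a default when explicit is falsy
fixed = explicit or ("Thread" if thread_workers else "Process")

print(buggy)  # Process  (the explicit value was ignored)
print(fixed)  # Thread

With the parentheses, the conditional only supplies a default when no explicit value was passed, which is the behavior the new `consumer_type` parameter needs.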
@@ -528,8 +530,7 @@ def init_blob_storage(self, config: BlobStorageConfig | None = None) -> None:
         from ..store.blob_storage.seaweedfs import SeaweedFSConfig
 
         if isinstance(config, SeaweedFSConfig) and self.signing_key:
-            blob_storage_service = self.get_service(BlobStorageService)
-            remote_profiles = blob_storage_service.remote_profile_stash.get_all(
+            remote_profiles = self.services.blob_storage.remote_profile_stash.get_all(
                 credentials=self.signing_key.verify_key, has_permission=True
             ).unwrap()
             for remote_profile in remote_profiles:
@@ -578,7 +579,7 @@ def create_queue_config(
         self,
         n_consumers: int,
         create_producer: bool,
-        thread_workers: bool,
+        consumer_type: ConsumerType,
         queue_port: int | None,
         queue_config: QueueConfig | None,
     ) -> QueueConfig:
@@ -587,13 +588,14 @@ def create_queue_config(
         elif queue_port is not None or n_consumers > 0 or create_producer:
             if not create_producer and queue_port is None:
                 logger.warn("No queue port defined to bind consumers.")
+
             queue_config_ = ZMQQueueConfig(
                 client_config=ZMQClientConfig(
                     create_producer=create_producer,
                     queue_port=queue_port,
                     n_consumers=n_consumers,
                 ),
-                thread_workers=thread_workers,
+                consumer_type=consumer_type,
             )
         else:
             queue_config_ = ZMQQueueConfig()
@@ -727,6 +729,7 @@ def named(
         in_memory_workers: bool = True,
         association_request_auto_approval: bool = False,
         background_tasks: bool = False,
+        consumer_type: ConsumerType | None = None,
     ) -> Server:
         uid = get_named_server_uid(name)
         name_hash = hashlib.sha256(name.encode("utf8")).digest()
@@ -757,6 +760,7 @@ def named(
             reset=reset,
             association_request_auto_approval=association_request_auto_approval,
             background_tasks=background_tasks,
+            consumer_type=consumer_type,
         )
 
     def is_root(self, credentials: SyftVerifyKey) -> bool:
@@ -815,8 +819,9 @@ def find_and_migrate_data(
             credentials=self.verify_key,
             role=ServiceRole.ADMIN,
         )
-        migration_service = self.get_service("migrationservice")
-        return migration_service.migrate_data(context, document_store_object_types)
+        return self.services.migration.migrate_data(
+            context, document_store_object_types
+        )
 
     @property
     def guest_client(self) -> SyftClient:
@@ -868,11 +873,10 @@ def post_init(self) -> None:
         )
 
         if "usercodeservice" in self.service_path_map:
-            user_code_service = self.get_service(UserCodeService)
-            user_code_service.load_user_code(context=context)
+            self.services.user_code.load_user_code(context=context)
 
         def reload_user_code() -> None:
-            user_code_service.load_user_code(context=context)
+            self.services.user_code.load_user_code(context=context)
 
         ti = thread_ident()
         if ti is not None:
@@ -929,11 +933,11 @@ def init_stores(
 
     @property
     def job_stash(self) -> JobStash:
-        return self.get_service("jobservice").stash
+        return self.services.job.stash
 
     @property
     def worker_stash(self) -> WorkerStash:
-        return self.get_service("workerservice").stash
+        return self.services.worker.stash
 
     @property
     def service_path_map(self) -> dict[str, AbstractService]:
@@ -1116,9 +1120,9 @@ def forward_message(
         )
         client = None
-
-        network_service = self.get_service(NetworkService)
-        peer = network_service.stash.get_by_uid(self.verify_key, server_uid).unwrap()
+        peer = self.services.network.stash.get_by_uid(
+            self.verify_key, server_uid
+        ).unwrap()
 
         # Since we have several routes to a peer
         # we need to cache the client for a given server_uid along with the route
@@ -1162,11 +1166,9 @@ def forward_message(
         raise SyftException(public_message=(f"Server has no route to {server_uid}"))
 
     def get_role_for_credentials(self, credentials: SyftVerifyKey) -> ServiceRole:
-        return (
-            self.get_service("userservice")
-            .get_role_for_credentials(credentials=credentials)
-            .unwrap()
-        )
+        return self.services.user.get_role_for_credentials(
+            credentials=credentials
+        ).unwrap()
 
     @instrument
     def handle_api_call(
@@ -1413,10 +1415,7 @@ def add_action_to_queue(
             has_execute_permissions=has_execute_permissions,
             worker_pool=worker_pool_ref,  # set worker pool reference as part of queue item
         )
-
-        user_service = self.get_service("UserService")
-        user_service = cast(UserService, user_service)
-        user_id = user_service.get_user_id_for_credentials(credentials).unwrap()
+        user_id = self.services.user.get_user_id_for_credentials(credentials).unwrap()
 
         return self.add_queueitem_to_queue(
             queue_item=queue_item,
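Nearly every stash and service call in these hunks ends in `.unwrap()`, matching the `as_result` helper imported from `types.result` elsewhere in the patch. A rough sketch of that convention, assuming Rust-style result wrappers; `Ok` and `Err` below are illustrative stand-ins, not Syft's actual result types:

from dataclasses import dataclass
from typing import Generic, NoReturn, TypeVar

T = TypeVar("T")


@dataclass
class Ok(Generic[T]):
    value: T

    def unwrap(self) -> T:
        return self.value


@dataclass
class Err:
    message: str

    def unwrap(self) -> NoReturn:
        raise RuntimeError(self.message)


def get_by_uid(store: dict[str, str], uid: str) -> Ok[str] | Err:
    # Errors travel as ordinary values until a call site opts into raising.
    return Ok(store[uid]) if uid in store else Err(f"{uid} not found")


peers = {"a1b2": "gateway-peer"}
print(get_by_uid(peers, "a1b2").unwrap())  # gateway-peer
# get_by_uid(peers, "zzzz").unwrap()  would raise RuntimeError: zzzz not found

The payoff at call sites like `stash.get_by_uid(...).unwrap()` is linear code: the error is raised exactly where the value is needed instead of being threaded through return values by hand.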
@@ -1444,9 +1443,6 @@ def add_queueitem_to_queue(
         role = self.get_role_for_credentials(credentials=credentials)
         context = AuthedServiceContext(server=self, credentials=credentials, role=role)
 
-        action_service = self.get_service("actionservice")
-        log_service = self.get_service("logservice")
-
         result_obj = ActionObject.empty()
         if action is not None:
             result_obj = ActionObject.obj_not_ready(
@@ -1459,10 +1455,8 @@ def add_queueitem_to_queue(
             result_obj.syft_server_location = self.id
             result_obj.syft_client_verify_key = credentials
 
-            action_service = self.get_service("actionservice")
-
-            if not action_service.store.exists(uid=action.result_id):
-                action_service.set_result_to_store(
+            if not self.services.action.store.exists(uid=action.result_id):
+                self.services.action.set_result_to_store(
                     result_action_object=result_obj,
                     context=context,
                 ).unwrap()
@@ -1485,7 +1479,7 @@ def add_queueitem_to_queue(
         self.job_stash.set(credentials, job).unwrap()
         self.queue_stash.set_placeholder(credentials, queue_item).unwrap()
 
-        log_service.add(context, log_id, queue_item.job_id)
+        self.services.log.add(context, log_id, queue_item.job_id)
 
         return job
 
@@ -1509,8 +1503,7 @@ def _sort_jobs(self, jobs: list[Job]) -> list[Job]:
     def _get_existing_user_code_jobs(
         self, context: AuthedServiceContext, user_code_id: UID
     ) -> list[Job]:
-        job_service = self.get_service("jobservice")
-        jobs = job_service.get_by_user_code_id(
+        jobs = self.services.job.get_by_user_code_id(
             context=context, user_code_id=user_code_id
         )
         return self._sort_jobs(jobs)
@@ -1523,8 +1516,7 @@ def _is_usercode_call_on_owned_kwargs(
     ) -> bool:
         if api_call.path != "code.call":
             return False
-        user_code_service = self.get_service("usercodeservice")
-        return user_code_service.is_execution_on_owned_args(
+        return self.services.user_code.is_execution_on_owned_args(
             context, user_code_id, api_call.kwargs
         )
 
@@ -1552,7 +1544,7 @@ def add_api_call_to_queue(
             action = Action.from_api_call(unsigned_call)
             user_code_id = action.user_code_id
 
-            user = self.get_service(UserService).get_current_user(context)
+            user = self.services.user.get_current_user(context)
             user = cast(UserView, user)
 
             is_execution_on_owned_kwargs_allowed = (
@@ -1627,11 +1619,11 @@
     @property
     def pool_stash(self) -> SyftWorkerPoolStash:
-        return self.get_service(SyftWorkerPoolService).stash
+        return self.services.syft_worker_pool.stash
 
     @property
     def user_code_stash(self) -> UserCodeStash:
-        return self.get_service(UserCodeService).stash
+        return self.services.user_code.stash
 
     @as_result(NotFoundException)
     def get_default_worker_pool(self) -> WorkerPool | None:
@@ -1803,7 +1795,7 @@ def get_default_worker_tag_by_env(dev_mode: bool = False) -> str | None:
 def create_default_worker_pool(server: Server) -> None:
     credentials = server.verify_key
     pull_image = not server.dev_mode
-    image_stash = server.get_service(SyftWorkerImageService).stash
+    image_stash = server.services.syft_worker_image.stash
     default_pool_name = server.settings.default_worker_pool
 
     try:
@@ -1835,9 +1827,8 @@ def create_default_worker_pool(server: Server) -> None:
     if not default_image.is_built:
         logger.info(f"Building default worker image with tag={default_worker_tag}.")
-        image_build_method = server.get_service_method(SyftWorkerImageService.build)
         # Build the Image for given tag
-        result = image_build_method(
+        result = server.services.syft_worker_image.build(
             context,
             image_uid=default_image.id,
             tag=DEFAULT_WORKER_IMAGE_TAG,
@@ -1854,8 +1845,7 @@ def create_default_worker_pool(server: Server) -> None:
         )
     if default_worker_pool is None:
         worker_to_add_ = worker_count
-        create_pool_method = server.get_service_method(SyftWorkerPoolService.launch)
-        result = create_pool_method(
+        result = server.services.syft_worker_pool.launch(
             context,
             pool_name=default_pool_name,
             image_uid=default_image.id,
@@ -1869,10 +1859,7 @@ def create_default_worker_pool(server: Server) -> None:
             default_worker_pool.worker_list
         )
         if worker_to_add_ > 0:
-            add_worker_method = server.get_service_method(
-                SyftWorkerPoolService.add_workers
-            )
-            result = add_worker_method(
+            result = server.services.syft_worker_pool.add_workers(
                 context=context,
                 number=worker_to_add_,
                 pool_name=default_pool_name,
diff --git a/packages/syft/src/syft/service/action/action_endpoint.py b/packages/syft/src/syft/service/action/action_endpoint.py
index 32cc2128541..2237b3963a6 100644
--- a/packages/syft/src/syft/service/action/action_endpoint.py
+++ b/packages/syft/src/syft/service/action/action_endpoint.py
@@ -75,18 +75,18 @@ def __call_function(
         self, call_mode: EXECUTION_MODE, *args: Any, **kwargs: Any
     ) -> Any:
         self.context = self.__check_context()
-        endpoint_service = self.context.server.get_service("apiservice")
-
         if call_mode == EXECUTION_MODE.MOCK:
-            __endpoint_mode = endpoint_service.execute_server_side_endpoint_mock_by_id
-        elif call_mode == EXECUTION_MODE.PRIVATE:
             __endpoint_mode = (
-                endpoint_service.execute_service_side_endpoint_private_by_id
+                self.context.server.services.api.execute_server_side_endpoint_mock_by_id
             )
+        elif call_mode == EXECUTION_MODE.PRIVATE:
+            __endpoint_mode = self.context.server.services.api.execute_service_side_endpoint_private_by_id
         else:
-            __endpoint_mode = endpoint_service.execute_server_side_endpoint_by_id
+            __endpoint_mode = (
+                self.context.server.services.api.execute_server_side_endpoint_by_id
+            )
 
-        return __endpoint_mode(
+        return __endpoint_mode(  # type: ignore[misc]
             *args,
             context=self.context,
             endpoint_uid=self.endpoint_id,
diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index 43054d01974..d7e566b733a 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -1175,8 +1175,9 @@ def get_sync_dependencies(
         # relative
         from ..job.job_stash import Job
 
-        job_service = context.server.get_service("jobservice")  # type: ignore
-        job: Job | None = job_service.get_by_result_id(context, self.id.id)  # type: ignore
+        job: Job | None = context.server.services.job.get_by_result_id(
+            context, self.id.id
+        )  # type: ignore
         if job is not None:
             return [job.id]
         else:
diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py
index 843880a1db6..94935631f01 100644
--- a/packages/syft/src/syft/service/action/action_service.py
+++ b/packages/syft/src/syft/service/action/action_service.py
@@ -2,7 +2,6 @@
 import importlib
 import logging
 from typing import Any
-from typing import cast
 
 # third party
 import numpy as np
@@ -18,7 +17,6 @@
 from ...types.syft_object import SyftObject
 from ...types.twin_object import TwinObject
 from ...types.uid import UID
-from ..blob_storage.service import BlobStorageService
 from ..code.user_code import UserCode
 from ..code.user_code import execute_byte_code
 from ..context import AuthedServiceContext
@@ -194,9 +192,8 @@ def _set(
         if action_object.mock_obj.syft_action_saved_to_blob_store:
             blob_id = action_object.mock_obj.syft_blob_storage_entry_id
             permission = ActionObjectPermission(blob_id, ActionPermission.ALL_READ)
-            blob_storage_service = context.server.get_service(BlobStorageService)
             # add_permission is not resultified.
-            blob_storage_service.stash.add_permission(permission)
+            context.server.services.blob_storage.stash.add_permission(permission)
 
         if has_result_read_permission:
             action_object = action_object.private
@@ -358,9 +355,6 @@ def _user_code_execute(
         override_execution_permission = (
             context.has_execute_permissions or context.role == ServiceRole.ADMIN
         )
-        if context.server:
-            user_code_service = context.server.get_service("usercodeservice")
-
         input_policy = code_item.get_input_policy(context)
         output_policy = code_item.get_output_policy(context)
@@ -427,7 +421,7 @@ def _user_code_execute(
                     update_policy=not override_execution_permission,
                 )
                 code_item.output_policy = output_policy  # type: ignore
-                user_code_service.update_code_state(context, code_item)
+                context.server.services.user_code.update_code_state(context, code_item)
                 if isinstance(exec_result.result, ActionObject):
                     result_action_object = ActionObject.link(
                         result_id=result_id, pointer_id=exec_result.result.id
@@ -454,7 +448,7 @@ def _user_code_execute(
                     update_policy=not override_execution_permission,
                 )
                 code_item.output_policy = output_policy  # type: ignore
-                user_code_service.update_code_state(context, code_item)
+                context.server.services.user_code.update_code_state(context, code_item)
                 result_action_object_private = wrap_result(
                     result_id, private_exec_result.result
                 )
@@ -552,10 +546,6 @@ def set_result_to_store(
             has_result_read_permission=True,
         ).unwrap()
 
-        blob_storage_service: AbstractService = context.server.get_service(
-            BlobStorageService
-        )
-
         def store_permission(
             x: SyftVerifyKey | None = None,
         ) -> ActionObjectPermission:
@@ -572,7 +562,7 @@ def blob_permission(
 
         if result_blob_id is not None:
             blob_permissions = [blob_permission(x) for x in output_readers]
-            blob_storage_service.stash.add_permissions(blob_permissions)
+            context.server.services.blob_storage.stash.add_permissions(blob_permissions)
 
         return set_result
 
@@ -816,12 +806,11 @@ def execute(self, context: AuthedServiceContext, action: Action) -> ActionObject
         if action.action_type == ActionType.CREATEOBJECT:
             result_action_object = action.create_object
         elif action.action_type == ActionType.SYFTFUNCTION:
-            usercode_service = context.server.get_service("usercodeservice")
             kwarg_ids = {}
             for k, v in action.kwargs.items():
                 # transform lineage ids into ids
                 kwarg_ids[k] = v.id
-            return usercode_service._call(  # type: ignore[union-attr]
+            return context.server.services.user_code._call(  # type: ignore[union-attr]
                 context, action.user_code_id, action.result_id, **kwarg_ids
             ).unwrap()
         elif action.action_type == ActionType.FUNCTION:
@@ -933,25 +922,21 @@ def _delete_blob_storage_entry(
     ) -> SyftSuccess:
         deleted_blob_ids = []
 
-        blob_store_service = cast(
-            BlobStorageService, context.server.get_service(BlobStorageService)
-        )
-
         if isinstance(obj, ActionObject) and obj.syft_blob_storage_entry_id:
-            blob_store_service.delete(
+            context.server.services.blob_storage.delete(
                 context=context, uid=obj.syft_blob_storage_entry_id
             )
             deleted_blob_ids.append(obj.syft_blob_storage_entry_id)
 
         if isinstance(obj, TwinObject):
             if
obj.private.syft_blob_storage_entry_id: - blob_store_service.delete( + context.server.services.blob_storage.delete( context=context, uid=obj.private.syft_blob_storage_entry_id ) deleted_blob_ids.append(obj.private.syft_blob_storage_entry_id) if obj.mock.syft_blob_storage_entry_id: - blob_store_service.delete( + context.server.services.blob_storage.delete( context=context, uid=obj.mock.syft_blob_storage_entry_id ) deleted_blob_ids.append(obj.mock.syft_blob_storage_entry_id) diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index 82881ef99d3..44567e8d8ef 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -37,7 +37,6 @@ from ..context import AuthedServiceContext from ..response import SyftError from ..user.user import UserView -from ..user.user_service import UserService from .utils import print as log_print NOT_ACCESSIBLE_STRING = "N / A" @@ -224,8 +223,7 @@ def build_internal_context( helper_function_set = HelperFunctionSet(helper_function_dict) - user_service = context.server.get_service("userservice") - user = user_service.get_current_user(context) + user = context.server.services.user.get_current_user(context) return TwinAPIAuthedContext( credentials=context.credentials, @@ -506,10 +504,9 @@ def get_user_client_from_server(self, context: AuthedServiceContext) -> SyftClie # get a user client guest_client = context.server.get_guest_client() user_client = guest_client - signing_key_for_verify_key = context.server.get_service_method( - UserService.signing_key_for_verify_key + private_key = context.server.services.user.signing_key_for_verify_key( + context.credentials ) - private_key = signing_key_for_verify_key(context.credentials) signing_key = private_key.signing_key user_client.credentials = signing_key return user_client @@ -578,7 +575,7 @@ def exec_code( api_service = context.server.get_service("apiservice") api_service.stash.upsert( - context.server.get_service("userservice").admin_verify_key(), self + context.server.services.user.admin_verify_key(), self ).unwrap() print = original_print # type: ignore @@ -653,7 +650,7 @@ def code_string(context: TransformContext) -> TransformContext: ) context.server = cast(AbstractServer, context.server) - admin_key = context.server.get_service("userservice").admin_verify_key() + admin_key = context.server.services.user.admin_verify_key() # If endpoint exists **AND** (has visible access **OR** the user is admin) if endpoint_type is not None and ( diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 2f530b207bd..a8c443a6271 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -16,7 +16,6 @@ from ...types.errors import SyftException from ...types.result import as_result from ...types.uid import UID -from ..action.action_service import ActionService from ..context import AuthedServiceContext from ..response import SyftSuccess from ..service import AbstractService @@ -82,8 +81,7 @@ def set( syft_server_location=context.server.id, syft_client_verify_key=context.credentials, ) - action_service = context.server.get_service("actionservice") - action_service.set_result_to_store( + context.server.services.action.set_result_to_store( context=context, result_action_object=action_obj, has_result_read_permission=True, @@ -265,7 +263,7 @@ def api_endpoints( context: AuthedServiceContext, ) -> list[TwinAPIEndpointView]: 
"""Retrieves a list of available API endpoints view available to the user.""" - admin_key = context.server.get_service("userservice").admin_verify_key() + admin_key = context.server.services.user.admin_verify_key() all_api_endpoints = self.stash.get_all(admin_key).unwrap() api_endpoint_view = [ @@ -350,7 +348,6 @@ def _call_in_jobs( from ..job.job_stash import JobStatus # So result is a Job object - job_service = context.server.get_service("jobservice") job_id = job.id # Question: For a small moment, when job status is updated, it doesn't return the job during the .get() as if # it's not in the stash. Then afterwards if appears again. Is this a bug? @@ -363,7 +360,7 @@ def _call_in_jobs( or job.status == JobStatus.PROCESSING or job.status == JobStatus.CREATED ): - job = job_service.get(context, job_id) + job = context.server.services.job.get(context, job_id) time.sleep(0.1) if (time.time() - custom_endpoint.endpoint_timeout) > start: raise SyftException( @@ -447,9 +444,8 @@ def call( context, *args, log_id=log_id, **kwargs ).unwrap() action_obj = ActionObject.from_obj(exec_result) - action_service = cast(ActionService, context.server.get_service(ActionService)) try: - return action_service.set_result_to_store( + return context.server.services.action.set_result_to_store( context=context, result_action_object=action_obj, has_result_read_permission=True, @@ -481,9 +477,8 @@ def call_public( ).unwrap() action_obj = ActionObject.from_obj(exec_result) - action_service = cast(ActionService, context.server.get_service(ActionService)) try: - return action_service.set_result_to_store( + return context.server.services.action.set_result_to_store( context=context, result_action_object=action_obj, has_result_read_permission=True, @@ -518,10 +513,8 @@ def call_private( ).unwrap() action_obj = ActionObject.from_obj(exec_result) - - action_service = cast(ActionService, context.server.get_service(ActionService)) try: - return action_service.set_result_to_store( + return context.server.services.action.set_result_to_store( context=context, result_action_object=action_obj ).unwrap() except Exception as e: @@ -594,7 +587,7 @@ def execute_server_side_endpoint_mock_by_id( def get_endpoint_by_uid( self, context: AuthedServiceContext, uid: UID ) -> TwinAPIEndpoint: - admin_key = context.server.get_service("userservice").admin_verify_key() + admin_key = context.server.services.user.admin_verify_key() return self.stash.get_by_uid(admin_key, uid).unwrap() @as_result(StashException) diff --git a/packages/syft/src/syft/service/api/utils.py b/packages/syft/src/syft/service/api/utils.py index 5194d7de9cf..8680c4512ee 100644 --- a/packages/syft/src/syft/service/api/utils.py +++ b/packages/syft/src/syft/service/api/utils.py @@ -33,8 +33,7 @@ def to_str(arg: Any) -> str: new_args = [to_str(arg) for arg in args] new_str = sep.join(new_args) + end if context.server is not None: - log_service = context.server.get_service("LogService") - log_service.append(context=context, uid=log_id, new_str=new_str) + context.server.services.log.append(context=context, uid=log_id, new_str=new_str) time = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S") return __builtin__.print( f"{time} FUNCTION LOG :", diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 3ba1b20a00f..0921ea9e704 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -74,7 +74,6 @@ from ..dataset.dataset import Asset from ..job.job_stash 
import Job from ..output.output_service import ExecutionOutput -from ..output.output_service import OutputService from ..policy.policy import Constant from ..policy.policy import CustomInputPolicy from ..policy.policy import CustomOutputPolicy @@ -89,7 +88,6 @@ from ..policy.policy import init_policy from ..policy.policy import load_policy_code from ..policy.policy import partition_by_server -from ..policy.policy_service import PolicyService from ..response import SyftError from ..response import SyftInfo from ..response import SyftSuccess @@ -415,8 +413,7 @@ def _compute_status_l0( else: # Serverside server_identity = ServerIdentity.from_server(context.server) - output_service = context.server.get_service("outputservice") - is_approved = output_service.has_output_read_permissions( + is_approved = context.server.services.output.has_output_read_permissions( context, self.id, self.user_verify_key ) is_denied = self.l0_deny_reason is not None @@ -666,10 +663,7 @@ def output_history(self) -> list[ExecutionOutput]: def get_output_history( self, context: AuthedServiceContext ) -> list[ExecutionOutput]: - output_service = cast( - OutputService, context.server.get_service("outputservice") - ) - return output_service.get_by_user_code_id(context, self.id) + return context.server.services.output.get_by_user_code_id(context, self.id) @as_result(SyftException) def store_execution_output( @@ -689,10 +683,7 @@ def store_execution_output( ) output_ids = filter_only_uids(outputs) - - output_service = context.server.get_service("outputservice") - output_service = cast(OutputService, output_service) - return output_service.create( + return context.server.services.output.create( context, user_code_id=self.id, output_ids=output_ids, @@ -1460,9 +1451,10 @@ def locate_launch_jobs(context: TransformContext) -> TransformContext: v = LaunchJobVisitor() v.visit(tree) nested_calls = v.nested_calls - user_code_service = context.server.get_service("usercodeService") for call in nested_calls: - user_codes = user_code_service.get_by_service_name(context, call) + user_codes = context.server.services.user_code.get_by_service_name( + context, call + ) # TODO: Not great user_code = user_codes[-1] user_code_link = LinkedObject.from_obj( @@ -1511,11 +1503,10 @@ def add_credentials(context: TransformContext) -> TransformContext: def check_policy(policy: Any, context: TransformContext) -> TransformContext: if context.server is not None: - policy_service = context.server.get_service(PolicyService) if isinstance(policy, SubmitUserPolicy): policy = policy.to(UserPolicy, context=context) elif isinstance(policy, UID): - policy = policy_service.get_policy_by_uid(context, policy) + policy = context.server.services.policy.get_policy_by_uid(context, policy) return policy @@ -1581,7 +1572,7 @@ def create_code_status(context: TransformContext) -> TransformContext: f"Invalid server type:{context.server.server_type} for code submission" ) - res = context.server.get_service("usercodestatusservice").create(context, status) + res = context.server.services.user_code_status.create(context, status) # relative from .status_service import UserCodeStatusService @@ -1685,43 +1676,20 @@ def __init__(self, context: AuthedServiceContext) -> None: if server is None: raise ValueError(f"{context}'s server is None") - job_service = server.get_service("jobservice") - action_service = server.get_service("actionservice") - # user_service = server.get_service("userservice") - def job_set_n_iters(n_iters: int) -> None: job = context.job job.n_iters = n_iters - 
job_service.update(context, job) + server.services.job.update(context, job) def job_set_current_iter(current_iter: int) -> None: job = context.job job.current_iter = current_iter - job_service.update(context, job) + server.services.job.update(context, job) def job_increase_current_iter(current_iter: int) -> None: job = context.job job.current_iter += current_iter - job_service.update(context, job) - - # def set_api_registry(): - # user_signing_key = [ - # x.signing_key - # for x in user_service.stash.partition.data.values() - # if x.verify_key == context.credentials - # ][0] - # data_protcol = get_data_protocol() - # user_api = server.get_api(context.credentials, data_protcol.latest_version) - # user_api.signing_key = user_signing_key - # # We hardcode a python connection here since we have access to the server - # # TODO: this is not secure - # user_api.connection = PythonConnection(server=server) - - # APIRegistry.set_api_for( - # server_uid=server.id, - # user_verify_key=context.credentials, - # api=user_api, - # ) + server.services.job.update(context, job) def launch_job(func: UserCode, **kwargs: Any) -> Job | None: # relative @@ -1729,7 +1697,7 @@ def launch_job(func: UserCode, **kwargs: Any) -> Job | None: kw2id = {} for k, v in kwargs.items(): value = ActionObject.from_obj(v) - ptr = action_service.set_result_to_store( + ptr = server.services.action.set_result_to_store( value, context, has_result_read_permission=False ).unwrap() kw2id[k] = ptr.id @@ -1820,8 +1788,9 @@ def to_str(arg: Any) -> str: new_args = [to_str(arg) for arg in args] new_str = sep.join(new_args) + end if context.server is not None: - log_service = context.server.get_service("LogService") - log_service.append(context=context, uid=log_id, new_str=new_str) + context.server.services.log.append( + context=context, uid=log_id, new_str=new_str + ) time = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S") return __builtin__.print( f"{time} FUNCTION LOG ({job_id}):", @@ -1886,8 +1855,9 @@ def to_str(arg: Any) -> str: and context.job.log_id is not None ): log_id = context.job.log_id - log_service = context.server.get_service("LogService") - log_service.append(context=context, uid=log_id, new_err=error_msg) + context.server.services.log.append( + context=context, uid=log_id, new_err=error_msg + ) result_message = ( f"Exception encountered while running {code_item.service_func_name}" diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 696f961932c..32863990a6a 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -26,7 +26,6 @@ from ..request.request import SubmitRequest from ..request.request import SyncedUserCodeStatusChange from ..request.request import UserCodeStatusChange -from ..request.request_service import RequestService from ..response import SyftSuccess from ..service import AbstractService from ..service import SERVICE_TO_TYPES @@ -129,7 +128,7 @@ def _submit( ) if code.status_link is not None: - _ = context.server.get_service("usercodestatusservice").remove( + _ = context.server.services.user_code_status.remove( root_context, code.status_link.object_uid ) @@ -204,16 +203,15 @@ def _post_user_code_transform_ops( raise SyftException( public_message="outputs can only be distributed to input owners" ) - - worker_pool_service = context.server.get_service("SyftWorkerPoolService") - worker_pool_service._get_worker_pool( + 
context.server.services.syft_worker_pool._get_worker_pool( context, pool_name=user_code.worker_pool_name, ) # Create a code history - code_history_service = context.server.get_service("codehistoryservice") - code_history_service.submit_version(context=context, code=user_code) + context.server.services.code_history.submit_version( + context=context, code=user_code + ) return user_code @@ -225,11 +223,10 @@ def _request_code_execution( reason: str | None = "", ) -> Request: # Cannot make multiple requests for the same code - get_by_usercode_id = context.server.get_service_method( - RequestService.get_by_usercode_id - ) # FIX: Change requestservice result type - existing_requests = get_by_usercode_id(context, user_code.id) + existing_requests = context.server.services.request.get_by_usercode_id( + context, user_code.id + ) if len(existing_requests) > 0: raise SyftException( @@ -266,8 +263,9 @@ def _request_code_execution( changes = [status_change] request = SubmitRequest(changes=changes) - method = context.server.get_service_method(RequestService.submit) - result = method(context=context, request=request, reason=reason) + result = context.server.services.request.submit( + context=context, request=request, reason=reason + ) return result @@ -383,22 +381,19 @@ def is_execution_allowed( def is_execution_on_owned_args_allowed(self, context: AuthedServiceContext) -> bool: if context.role == ServiceRole.ADMIN: return True - user_service = context.server.get_service("userservice") - current_user = user_service.get_current_user(context=context) + current_user = context.server.services.user.get_current_user(context=context) return current_user.mock_execution_permission def keep_owned_kwargs( self, kwargs: dict[str, Any], context: AuthedServiceContext ) -> dict[str, Any]: """Return only the kwargs that are owned by the user""" - action_service = context.server.get_service("actionservice") - mock_kwargs = {} for k, v in kwargs.items(): if isinstance(v, UID): # Jobs have UID kwargs instead of ActionObject try: - v = action_service.get(context, uid=v) + v = context.server.services.action.get(context, uid=v) except Exception: # nosec: we are skipping when dont find it pass if ( @@ -576,14 +571,11 @@ def _call( "which is currently not supported. Run your function with `blocking=False` to run" " as a job on your worker pool." 
) - - action_service = context.server.get_service("actionservice") - - action_obj = action_service._user_code_execute( + action_obj = context.server.services.action._user_code_execute( context, code, kwarg2id, result_id ).unwrap() - result = action_service.set_result_to_store( + result = context.server.services.action.set_result_to_store( action_obj, context, code.get_output_policy(context) ).unwrap() @@ -681,8 +673,7 @@ def resolve_outputs( outputs = [] for output_id in output_ids: if context.server is not None: - action_service = context.server.get_service("actionservice") - output = action_service.get( + output = context.server.services.action.get( context, uid=output_id, twin_mode=TwinMode.PRIVATE ) outputs.append(output) diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py index 5383e8c9dcb..a3d06bdb4fa 100644 --- a/packages/syft/src/syft/service/code_history/code_history_service.py +++ b/packages/syft/src/syft/service/code_history/code_history_service.py @@ -8,7 +8,6 @@ from ...types.uid import UID from ..code.user_code import SubmitUserCode from ..code.user_code import UserCode -from ..code.user_code_service import UserCodeService from ..context import AuthedServiceContext from ..response import SyftSuccess from ..service import AbstractService @@ -44,9 +43,10 @@ def submit_version( code: SubmitUserCode | UserCode, comment: str | None = None, ) -> SyftSuccess: - user_code_service = context.server.get_service("usercodeservice") if isinstance(code, SubmitUserCode): - code = user_code_service._submit(context=context, code=code) + code = context.server.services.user_code._submit( + context=context, submit_code=code + ) try: code_history = self.stash.get_by_service_func_name_and_verify_key( @@ -100,12 +100,8 @@ def fetch_histories_for_user( credentials=context.credentials, user_verify_key=user_verify_key ).unwrap() - user_code_service: UserCodeService = context.server.get_service( - "usercodeservice" - ) # type: ignore - def get_code(uid: UID) -> UserCode: - return user_code_service.stash.get_by_uid( + return context.server.services.user_code.stash.get_by_uid( credentials=context.server.verify_key, uid=uid, ).unwrap() @@ -142,8 +138,7 @@ def get_histories_for_current_user( def get_history_for_user( self, context: AuthedServiceContext, email: str ) -> CodeHistoriesDict: - user_service = context.server.get_service("userservice") - user = user_service.stash.get_by_email( + user = context.server.services.user.stash.get_by_email( credentials=context.credentials, email=email ).unwrap() return self.fetch_histories_for_user( @@ -165,8 +160,7 @@ def get_histories_group_by_user( else: code_histories = self.stash.get_all(context.credentials).unwrap() - user_service = context.server.get_service("userservice") - users = user_service.stash.get_all(context.credentials).unwrap() + users = context.server.services.user.stash.get_all(context.credentials).unwrap() user_code_histories = UsersCodeHistoriesDict(server_uid=context.server.id) verify_key_2_user_email = {} @@ -193,8 +187,7 @@ def get_by_func_name_and_user_email( user_email: str, user_id: UID, ) -> list[CodeHistory]: - user_service = context.server.get_service("userservice") - user_verify_key = user_service.user_verify_key(user_email) + user_verify_key = context.server.services.user.user_verify_key(user_email) kwargs = { "id": user_id, diff --git a/packages/syft/src/syft/service/data_subject/data_subject_service.py 
b/packages/syft/src/syft/service/data_subject/data_subject_service.py
index b386fd1ec8d..b8d5e6b8528 100644
--- a/packages/syft/src/syft/service/data_subject/data_subject_service.py
+++ b/packages/syft/src/syft/service/data_subject/data_subject_service.py
@@ -20,7 +20,6 @@
 from .data_subject import DataSubject
 from .data_subject import DataSubjectCreate
 from .data_subject import NamePartitionKey
-from .data_subject_member_service import DataSubjectMemberService
 
 
 @serializable(canonical_name="DataSubjectStash", version=1)
@@ -63,11 +62,7 @@ def __init__(self, store: DocumentStore) -> None:
     def add(
         self, context: AuthedServiceContext, data_subject: DataSubjectCreate
     ) -> SyftSuccess:
-        """Register a data subject."""
-
-        member_relationship_add = context.server.get_service_method(
-            DataSubjectMemberService.add
-        )
+        """Register a data subject."""
 
         member_relationships: set[tuple[str, str]] = data_subject.member_relationships
         if len(member_relationships) == 0:
@@ -84,7 +79,9 @@ def add(
                 ds.to(DataSubject, context=context),
                 ignore_duplicates=True,
             ).unwrap()
-            member_relationship_add(context, parent_ds.name, child_ds.name)
+            context.server.services.data_subject_member.add(
+                context, parent_ds.name, child_ds.name
+            )
 
         return SyftSuccess(
             message=f"{len(member_relationships)+1} Data Subjects Registered",
@@ -100,12 +97,10 @@ def get_all(self, context: AuthedServiceContext) -> list[DataSubject]:
     def get_members(
         self, context: AuthedServiceContext, data_subject_name: str
     ) -> list[DataSubject]:
-        get_relatives = context.server.get_service_method(
-            DataSubjectMemberService.get_relatives
+        relatives = context.server.services.data_subject_member.get_relatives(
+            context, data_subject_name
         )
-
-        relatives = get_relatives(context, data_subject_name)
-
         members = []
         for relative in relatives:
             result = self.get_by_name(context=context, name=relative.child)
diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py
index 22a8b09c462..6c4a91e06c2 100644
--- a/packages/syft/src/syft/service/dataset/dataset.py
+++ b/packages/syft/src/syft/service/dataset/dataset.py
@@ -40,7 +40,6 @@
 from ..action.action_object import ActionObject
 from ..data_subject.data_subject import DataSubject
 from ..data_subject.data_subject import DataSubjectCreate
-from ..data_subject.data_subject_service import DataSubjectService
 from ..response import SyftError
 from ..response import SyftSuccess
 from ..response import SyftWarning
@@ -765,8 +764,7 @@ def create_and_store_twin(context: TransformContext) -> TransformContext:
         raise ValueError(
             "f{context}'s server is None, please log in. No trasformation happened"
         )
-    action_service = context.server.get_service("actionservice")
-    action_service._set(
+    context.server.services.action._set(
         context=context.to_server_context(),
         action_object=twin,
     ).unwrap(public_message="Failed to create and store twin")
@@ -795,10 +793,11 @@ def set_data_subjects(context: TransformContext) -> TransformContext:
         raise SyftException(
             public_message="f{context}'s server is None, please log in.
No trasformation happened" ) data_subjects = context.output["data_subjects"] - get_data_subject = context.server.get_service_method(DataSubjectService.get_by_name) resultant_data_subjects = [] for data_subject in data_subjects: - result = get_data_subject(context=context, name=data_subject.name) + result = context.server.services.data_subject.get_by_name( + context=context, name=data_subject.name + ) resultant_data_subjects.append(result) context.output["data_subjects"] = resultant_data_subjects return context diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py index cd347c11b35..43cbfacb117 100644 --- a/packages/syft/src/syft/service/dataset/dataset_service.py +++ b/packages/syft/src/syft/service/dataset/dataset_service.py @@ -2,7 +2,6 @@ from collections.abc import Collection from collections.abc import Sequence import logging -from typing import cast # relative from ...serde.serializable import serializable @@ -11,7 +10,6 @@ from ...types.uid import UID from ..action.action_permissions import ActionObjectPermission from ..action.action_permissions import ActionPermission -from ..action.action_service import ActionService from ..context import AuthedServiceContext from ..response import SyftSuccess from ..service import AbstractService @@ -228,10 +226,7 @@ def delete( f"in Dataset {uid}" ) - action_service = cast( - ActionService, context.server.get_service(ActionService) - ) - action_service.delete( + context.server.services.action.delete( context=context, uid=asset.action_id, soft_delete=True ) diff --git a/packages/syft/src/syft/service/job/job_service.py b/packages/syft/src/syft/service/job/job_service.py index 0ad153e4bda..fbbdfd7d856 100644 --- a/packages/syft/src/syft/service/job/job_service.py +++ b/packages/syft/src/syft/service/job/job_service.py @@ -2,7 +2,6 @@ from collections.abc import Callable import inspect import time -from typing import cast # relative from ...serde.serializable import serializable @@ -15,7 +14,6 @@ from ..action.action_permissions import ActionPermission from ..code.user_code import UserCode from ..context import AuthedServiceContext -from ..log.log_service import LogService from ..queue.queue_stash import ActionQueueItem from ..response import SyftSuccess from ..service import AbstractService @@ -135,9 +133,7 @@ def restart(self, context: AuthedServiceContext, uid: UID) -> SyftSuccess: ).unwrap() context.server.job_stash.set(context.credentials, job).unwrap() - - log_service = context.server.get_service("logservice") - log_service.restart(context, job.log_id) + context.server.services.log.restart(context, job.log_id) return SyftSuccess(message="Great Success!") @@ -232,9 +228,7 @@ def add_read_permission_job_for_code_owner( def add_read_permission_log_for_code_owner( self, context: AuthedServiceContext, log_id: UID, user_code: UserCode ) -> None: - log_service = context.server.get_service("logservice") - log_service = cast(LogService, log_service) - return log_service.stash.add_permission( + return context.server.services.log.stash.add_permission( ActionObjectPermission( log_id, ActionPermission.READ, user_code.user_verify_key ) @@ -268,14 +262,13 @@ def create_job_for_user_code_id( user_code_id=user_code_id, resolved=is_resolved, ) - user_code_service = context.server.get_service("usercodeservice") - user_code = user_code_service.get_by_uid(context=context, uid=user_code_id) + user_code = context.server.services.user_code.get_by_uid( + context=context, uid=user_code_id + ) 
         # The owner of the code should be able to read the job
         self.stash.set(context.credentials, job).unwrap()
-
-        log_service = context.server.get_service("logservice")
-        log_service.add(
+        context.server.services.log.add(
             context,
             job.log_id,
             job.id,
diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 413d36ba753..fc83b675503 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -313,8 +313,7 @@ def subjobs(self) -> list["Job"]:
         return api.services.job.get_subjobs(self.id)
 
     def get_subjobs(self, context: AuthedServiceContext) -> list["Job"]:
-        job_service = context.server.get_service("jobservice")
-        return job_service.get_subjobs(context, self.id)
+        return context.server.services.job.get_subjobs(context, self.id)
 
     @property
     def owner(self) -> UserView:
@@ -647,7 +646,7 @@ def get_sync_dependencies(self, context: AuthedServiceContext) -> list[UID]:  #
             dependencies.append(self.user_code_id)
 
         try:
-            output = context.server.get_service("outputservice").get_by_job_id(  # type: ignore
+            output = context.server.services.output.get_by_job_id(  # type: ignore
                 context, self.id
             )
             if output is not None:
diff --git a/packages/syft/src/syft/service/metadata/metadata_service.py b/packages/syft/src/syft/service/metadata/metadata_service.py
index 4e4e84d1364..70453d9b084 100644
--- a/packages/syft/src/syft/service/metadata/metadata_service.py
+++ b/packages/syft/src/syft/service/metadata/metadata_service.py
@@ -21,12 +21,6 @@ def __init__(self, store: DocumentStore) -> None:
     def get_metadata(self, context: AuthedServiceContext) -> ServerMetadata:
         return context.server.metadata  # type: ignore
 
-    # @service_method(path="metadata.get_admin", name="get_admin", roles=GUEST_ROLE_LEVEL)
-    # def get_admin(self, context: AuthedServiceContext):
-    #     user_service = context.server.get_service("userservice")
-    #     admin_user = user_service.get_all(context=context)[0]
-    #     return admin_user
-
     @service_method(path="metadata.get_env", name="get_env", roles=GUEST_ROLE_LEVEL)
     def get_env(self, context: AuthedServiceContext) -> str:
         return context.server.packages
diff --git a/packages/syft/src/syft/service/network/association_request.py b/packages/syft/src/syft/service/network/association_request.py
index a9ed93b1adf..49071512ff1 100644
--- a/packages/syft/src/syft/service/network/association_request.py
+++ b/packages/syft/src/syft/service/network/association_request.py
@@ -1,6 +1,5 @@
 # stdlib
 import secrets
-from typing import cast
 
 # relative
 from ...client.client import SyftClient
@@ -41,9 +40,6 @@ def _run(
             tuple[bytes, ServerPeer]: The result of the association request.
             Raises on errors.
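+            Undo is not supported: calling this change with apply=False
+            raises a SyftException.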
""" - # relative - from .network_service import NetworkService - if not apply: # TODO: implement undo for AssociationRequestChange raise SyftException( @@ -52,10 +48,6 @@ def _run( # Get the network service service_ctx = context.to_service_ctx() - network_service = cast( - NetworkService, service_ctx.server.get_service(NetworkService) - ) - network_stash = network_service.stash # Check if remote peer to be added is via reverse tunnel rtunnel_route = self.remote_peer.get_rtunnel_route() @@ -66,7 +58,7 @@ def _run( # If the remote peer is added via reverse tunnel, we skip ping to peer if add_rtunnel_route: - network_service.set_reverse_tunnel_config( + service_ctx.server.services.network.set_reverse_tunnel_config( context=context, remote_server_peer=self.remote_peer, ) @@ -99,7 +91,7 @@ def _run( raise SyftException(public_message=str(e)) # Adding the remote peer to the network stash - network_stash.create_or_update_peer( + service_ctx.server.services.network.stash.create_or_update_peer( service_ctx.server.verify_key, self.remote_peer ) # this way they can match up who we are with who they think we are diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py index 5e4dfdbadf4..428501fb92d 100644 --- a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -41,7 +41,6 @@ from ..request.request import Request from ..request.request import RequestStatus from ..request.request import SubmitRequest -from ..request.request_service import RequestService from ..response import SyftInfo from ..response import SyftSuccess from ..service import AbstractService @@ -347,16 +346,12 @@ def add_peer( changes=[association_request_change], requesting_user_verify_key=context.credentials, ) - request_submit_method = context.server.get_service_method(RequestService.submit) - request = request_submit_method(context, submit_request) + request = context.server.services.request.submit(context, submit_request) if ( isinstance(request, Request) and context.server.settings.association_request_auto_approval ): - request_apply_method = context.server.get_service_method( - RequestService.apply - ) - return request_apply_method(context, uid=request.id) + return context.server.services.request.apply(context, uid=request.id) return request @@ -519,10 +514,7 @@ def delete_peer_by_id(self, context: AuthedServiceContext, uid: UID) -> SyftSucc context=context, peer_id=uid ) for request in association_requests: - request_delete_method = context.server.get_service_method( - RequestService.delete_by_uid - ) - request_delete_method(context, request.id) + context.server.services.request.delete_by_uid(context, request.id) # TODO: Notify the peer (either by email or by other form of notifications) # that it has been deleted from the network return SyftSuccess(message=f"Server Peer with id {uid} deleted.") @@ -860,10 +852,7 @@ def _get_association_requests_by_peer_id( """ Get all the association requests from a peer. The association requests are sorted by request_time. 
""" - request_get_all_method: Callable = context.server.get_service_method( - RequestService.get_all - ) - all_requests: list[Request] = request_get_all_method(context) + all_requests: list[Request] = context.server.services.request.get_all(context) association_requests: list[Request] = [ request for request in all_requests diff --git a/packages/syft/src/syft/service/network/utils.py b/packages/syft/src/syft/service/network/utils.py index 62fff066e35..280e836f17b 100644 --- a/packages/syft/src/syft/service/network/utils.py +++ b/packages/syft/src/syft/service/network/utils.py @@ -2,7 +2,6 @@ import logging import threading import time -from typing import cast # relative from ...serde.serializable import serializable @@ -10,7 +9,6 @@ from ...types.errors import SyftException from ..context import AuthedServiceContext from ..response import SyftError -from .network_service import NetworkService from .network_service import ServerPeerAssociationStatus from .server_peer import ServerPeer from .server_peer import ServerPeerConnectionStatus @@ -40,11 +38,7 @@ def peer_route_heathcheck(self, context: AuthedServiceContext) -> None: Returns: None """ - - network_service = cast( - NetworkService, context.server.get_service(NetworkService) - ) - network_stash = network_service.stash + network_stash = context.server.services.network.stash try: all_peers: list[ServerPeer] = network_stash.get_all( diff --git a/packages/syft/src/syft/service/notification/email_templates.py b/packages/syft/src/syft/service/notification/email_templates.py index 2a17a662086..2ebc0908a88 100644 --- a/packages/syft/src/syft/service/notification/email_templates.py +++ b/packages/syft/src/syft/service/notification/email_templates.py @@ -23,6 +23,107 @@ def email_body(notification: "Notification", context: AuthedServiceContext) -> s return "" +@serializable(canonical_name="FailedJobTemplate", version=1) +class FailedJobTemplate(EmailTemplate): + @staticmethod + def email_title(notification: "Notification", context: AuthedServiceContext) -> str: + return "Job Failed Notification" + + @staticmethod + def email_body(notification: "Notification", context: AuthedServiceContext) -> str: + notification.linked_obj = cast(LinkedObject, notification.linked_obj) + queueitem_obj = notification.linked_obj.resolve_with_context( + context=context + ).unwrap() + + worker_pool_obj = queueitem_obj.worker_pool.resolve_with_context( + context=context + ).unwrap() + method = queueitem_obj.method + if queueitem_obj.service == "apiservice": + method = queueitem_obj.kwargs.pop("path", "") + queueitem_obj.kwargs.pop("log_id") + + head = """ + + + + Job Failed Notification + + + """ + body = f""" + +
+            <h1>Job Failed Notification</h1>
+            <p>Hello,</p>
+            <p>We regret to inform you that your function job has encountered an
+            unexpected error and could not be completed successfully.</p>
+
+            <div>
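+                <!-- Metadata of the failed queue item -->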
+                <h2>Job Details</h2>
+                <p>Job ID: {queueitem_obj.job_id}</p>
+                <p>Worker Pool: {worker_pool_obj.name}</p>
+                <p>Method: {method}</p>
+                <p>Service: {queueitem_obj.service}</p>
+                <p>Arguments (args): {queueitem_obj.args}</p>
+                <p>Keyword Arguments (kwargs): {queueitem_obj.kwargs}</p>
+            </div>
+        </body>
+        """
+        return f"""{head} {body}"""
+
+
 @serializable(canonical_name="PasswordResetTemplate", version=1)
 class PasswordResetTemplate(EmailTemplate):
     @staticmethod
@@ -31,7 +132,7 @@ def email_title(notification: "Notification", context: AuthedServiceContext) ->
 
     @staticmethod
     def email_body(notification: "Notification", context: AuthedServiceContext) -> str:
-        user_service = context.server.get_service("userservice")
+        user_service = context.server.services.user
         admin_verify_key = user_service.admin_verify_key()
         user = user_service.stash.get_by_verify_key(
             credentials=admin_verify_key, verify_key=notification.to_user_verify_key
         )
@@ -50,6 +151,10 @@ def email_body(notification: "Notification", context: AuthedServiceContext) -> s
         if result.is_err():
             raise Exception("Couldn't update the user password")
 
+        expiry_time = context.server.services.settings.get(
+            context=context
+        ).pwd_token_config.token_exp_min
+
         head = """