diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ad67b288cb3..8a20081be56 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.4-beta.22 +current_version = 0.8.4-beta.30 tag = False tag_name = {new_version} commit = True @@ -26,9 +26,11 @@ values = [bumpversion:part:prenum] first_value = 1 +# Root + [bumpversion:file:VERSION] -[bumpversion:file:packages/grid/devspace.yaml] +# Syft [bumpversion:file:packages/syft/setup.cfg] @@ -36,6 +38,10 @@ first_value = 1 [bumpversion:file:packages/syft/src/syft/VERSION] +# Grid + +[bumpversion:file:packages/grid/devspace.yaml] + [bumpversion:file:packages/grid/VERSION] [bumpversion:file:packages/grid/backend/worker_cpu.dockerfile] @@ -44,14 +50,20 @@ first_value = 1 [bumpversion:file:packages/grid/helm/syft/Chart.yaml] +[bumpversion:file:packages/grid/podman/podman-kube/podman-syft-kube.yaml] + +[bumpversion:file:packages/grid/podman/podman-kube/podman-syft-kube-config.yaml] + +[bumpversion:file:packages/grid/helm/syft/values.yaml] + +# Hagrid + [bumpversion:file:packages/hagrid/hagrid/manifest_template.yml] [bumpversion:file:packages/hagrid/hagrid/deps.py] -[bumpversion:file:packages/grid/podman/podman-kube/podman-syft-kube.yaml] - -[bumpversion:file:packages/grid/podman/podman-kube/podman-syft-kube-config.yaml] +# Syft CLI [bumpversion:file:packages/syftcli/manifest.yml] -[bumpversion:file:packages/grid/helm/syft/values.yaml] + diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg new file mode 100644 index 00000000000..73284247c2d --- /dev/null +++ b/.bumpversion_stable.cfg @@ -0,0 +1,25 @@ +[bumpversion] +current_version = 0.8.3 +tag = False +tag_name = {new_version} +commit = True +parse = + (?P\d+) + \. + (?P\d+) + \. 
+ (?P\d+) +serialize = + {major}.{minor}.{patch} + +# Syft + +[bumpversion:file:packages/syft/src/syft/stable_version.py] + +# Hagrid + +[bumpversion:file:packages/hagrid/hagrid/stable_version.py] + +[bumpversion:file:packages/hagrid/hagrid/cache.py] + + diff --git a/.github/workflows/cd-hagrid.yml b/.github/workflows/cd-hagrid.yml index 817e2c43702..a349e12b94a 100644 --- a/.github/workflows/cd-hagrid.yml +++ b/.github/workflows/cd-hagrid.yml @@ -11,6 +11,12 @@ on: required: false default: "false" +# Prevents concurrent runs of the same workflow +# while the previous run is still in progress +concurrency: + group: "CD - Hagrid" + cancel-in-progress: false + jobs: call-pr-tests-linting: if: github.repository == 'OpenMined/PySyft' && (github.event.inputs.skip_tests == 'false' || github.event_name == 'schedule') # don't run on forks diff --git a/.github/workflows/cd-post-release-tests.yml b/.github/workflows/cd-post-release-tests.yml new file mode 100644 index 00000000000..c817f1d32f2 --- /dev/null +++ b/.github/workflows/cd-post-release-tests.yml @@ -0,0 +1,189 @@ +name: CD - Post Release Tests + +on: + workflow_dispatch: + inputs: + syft_version: + description: "Syft version to test" + required: true + type: string + + workflow_call: + inputs: + syft_version: + description: "Syft version to test" + required: true + type: string + +jobs: + notebook-test-hagrid: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest] + python-version: ["3.11"] + + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + # free 10GB of space + - name: Remove unnecessary files + if: matrix.os == 'ubuntu-latest' + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + docker image prune --all --force + docker builder prune --all --force + docker system prune --all --force + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + + with: + python-version: ${{ matrix.python-version }} + + - name: Upgrade pip + run: 
| + python -m pip install --upgrade --user pip + + - name: Get pip cache dir + id: pip-cache + shell: bash + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + + - name: pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + restore-keys: | + ${{ runner.os }}-pip-py${{ matrix.python-version }}- + + - name: Install Hagrid and tox + run: | + pip install -U hagrid + pip install tox + + - name: Hagrid Version + run: | + hagrid version + + - name: Launch Domain + run: | + hagrid launch test-domain-1 to docker:8081 --tag=${{ inputs.syft_version }} --low-side + + - name: Run tests + env: + NODE_PORT: "8081" + SYFT_VERSION: ${{ inputs.syft_version }} + EXCLUDE_NOTEBOOKS: "not 11-container-images-k8s.ipynb" + run: | + tox -e e2e.test.notebook + + syft-install-check: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + # Re-enable macos-14 when it's available when we remove python 3.9 from syft + # os: [ubuntu-latest, macos-latest, windows-latest, macos-14] + python-version: ["3.11", "3.10", "3.9"] + + runs-on: ${{ matrix.os }} + steps: + - name: System Architecture + run: | + echo "System Architecture: $(uname -m)" + echo "System Version: $(uname -a)" + + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + + with: + python-version: ${{ matrix.python-version }} + + - name: Upgrade pip + run: | + python -m pip install --upgrade --user pip + + - name: Get pip cache dir + id: pip-cache + shell: bash + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + + - name: pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + restore-keys: | + ${{ runner.os }}-pip-py${{ matrix.python-version }}- + + - name: Install Syft + run: | + 
pip install syft==${{ inputs.syft_version }} + + - name: Check Syft version + run: | + python -c "import syft; print(syft.__version__)" + + notebook-test-k8s-k3d: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest] + python-version: ["3.11"] + + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + # free 10GB of space + - name: Remove unnecessary files + if: matrix.os == 'ubuntu-latest' + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + docker image prune --all --force + docker builder prune --all --force + docker system prune --all --force + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + + with: + python-version: ${{ matrix.python-version }} + + - name: Install K3d + run: | + K3D_VERSION=v5.6.0 + wget -q -O - https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | TAG=$K3D_VERSION bash + + - name: Get pip cache dir + id: pip-cache + shell: bash + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + + - name: pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + restore-keys: | + ${{ runner.os }}-pip-py${{ matrix.python-version }}- + + - name: Install tox + run: | + pip install tox + + - name: Run K8s tests + env: + SYFT_VERSION: ${{ inputs.syft_version }} + run: | + tox -e syft.test.helm diff --git a/.github/workflows/cd-syft-dev.yml b/.github/workflows/cd-syft-dev.yml index 657afc0e8d1..b49e457743a 100644 --- a/.github/workflows/cd-syft-dev.yml +++ b/.github/workflows/cd-syft-dev.yml @@ -70,20 +70,10 @@ jobs: id: buildx uses: docker/setup-buildx-action@v3 - - name: Install Azure CLI - run: | - curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash - az version - - - name: Login to Azure CLI - uses: azure/login@v1 - with: - creds: ${{ secrets.AZURE_CREDENTIALS_GITHUB_CI }} - - name: Login to Azure Container Registry - uses: 
azure/docker-login@v1 + uses: docker/login-action@v3 with: - login-server: ${{ secrets.ACR_SERVER }} + registry: ${{ secrets.ACR_SERVER }} username: ${{ secrets.ACR_USERNAME }} password: ${{ secrets.ACR_PASSWORD }} @@ -178,29 +168,3 @@ jobs: add: "." push: "origin main" cwd: "./infrastructure/" - - - name: Cleanup Azure Container Registry - run: | - ACR_REGISTRY_NAME=${{ secrets.ACR_REGISTRY_NAME }} - - echo ">> Fetching repo list.." - REPO_LIST=$(az acr repository list -n $ACR_REGISTRY_NAME -o tsv) - - KEEP_PREV_VERSIONS=5 - TAIL_FROM_LINE=$(($KEEP_PREV_VERSIONS + 1)) - - for repo in $REPO_LIST - do - echo "Cleaning up '$repo'" - az acr repository show-tags --name $ACR_REGISTRY_NAME --repository $repo --orderby time_desc --output tsv \ - | grep dev- \ - | tail -n +$TAIL_FROM_LINE \ - | xargs -r -I% az acr repository untag --name $ACR_REGISTRY_NAME --image $repo:% - done - - - name: Logout and cleanup Azure account - if: always() - run: | - az logout - az cache purge - az account clear diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index d815fa0f981..d1c63cc843b 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -312,6 +312,10 @@ jobs: runs-on: ubuntu-latest permissions: contents: write # For tag and release notes. 
+ + outputs: + syft_version: ${{ steps.release_checks.outputs.syft_version }} + steps: - name: Permission to home directory run: | @@ -354,6 +358,7 @@ jobs: run: | echo "github_release_version=$(python packages/grid/VERSION | sed 's/-beta./b/')" >> $GITHUB_OUTPUT echo "future_stable_version=$(python packages/grid/VERSION | sed 's/-beta.*//')" >> $GITHUB_OUTPUT + echo "syft_version=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT if [[ ${{ needs.merge-docker-images.outputs.release_tag}} == "beta" ]]; then echo "is_beta_release=true" >> $GITHUB_OUTPUT else @@ -514,3 +519,17 @@ jobs: add: "helm/" push: "origin gh-pages" cwd: "./ghpages/" + + # Since the post-release tests are dependent on the release being available on PyPI + # we need to wait for PyPI to update before running the post-release tests + - name: Wait for PyPI to update + run: | + sleep 60 + + # Can we remove the always flag from the below job? + call-cd-post-release-tests: + needs: [deploy-syft] + if: always() && github.repository == 'OpenMined/PySyft' && needs.deploy-syft.result == 'success' # don't run on forks + uses: OpenMined/PySyft/.github/workflows/cd-post-release-tests.yml@dev + with: + syft_version: ${{ needs.deploy-syft.outputs.syft_version }} diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml new file mode 100644 index 00000000000..ab3ec2042cf --- /dev/null +++ b/.github/workflows/e2e-tests-notebook.yml @@ -0,0 +1,89 @@ +name: E2E - Notebook Tests + +on: + workflow_dispatch: + inputs: + syft_version: + description: "Syft version to test" + required: true + type: string + node_url: + description: "Node URL to use" + required: true + type: string + node_port: + description: "Node port" + required: true + type: number + exclude_notebooks: + description: "Notebooks to exclude ex: not 11-container-images-k8s.ipynb" + required: false + type: string + + workflow_call: + inputs: + syft_version: + description: "Syft version to test" + required: true + 
type: string + node_url: + description: "Node URL to use" + required: true + type: string + node_port: + description: "Node port" + required: true + type: number + exclude_notebooks: + description: "Notebooks to exclude ex: not 11-container-images-k8s.ipynb" + required: false + type: string + +jobs: + notebook-test-hagrid: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest] + python-version: ["3.11"] + + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + + with: + python-version: ${{ matrix.python-version }} + + - name: Upgrade pip + run: | + python -m pip install --upgrade --user pip + + - name: Get pip cache dir + id: pip-cache + shell: bash + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + + - name: pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + restore-keys: | + ${{ runner.os }}-pip-py${{ matrix.python-version }}- + + - name: Install tox + run: | + pip install tox + + - name: Run Notebook tests + env: + SYFT_VERSION: ${{ inputs.syft_version }} + NODE_URL: ${{ inputs.node_url }} + NODE_PORT: ${{ inputs.node_port }} + EXCLUDE_NOTEBOOKS: ${{ inputs.exclude_notebooks }} + run: | + tox -e e2e.test.notebook diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index bb443c081cc..4225c635424 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -172,7 +172,7 @@ jobs: docker volume prune -f || true docker buildx use default || true - - uses: pnpm/action-setup@v2 + - uses: pnpm/action-setup@v3 with: version: 8 run_install: false diff --git a/.github/workflows/pr-tests-hagrid.yml b/.github/workflows/pr-tests-hagrid.yml index ef70aa042df..b8dd7e1da2a 100644 --- a/.github/workflows/pr-tests-hagrid.yml +++ 
b/.github/workflows/pr-tests-hagrid.yml @@ -106,3 +106,65 @@ jobs: if: steps.changes.outputs.hagrid == 'true' run: | twine check dist/*.whl + + pr-tests-syft-hagrid-comptability: + strategy: + max-parallel: 99 + matrix: + os: [ubuntu-latest] + python-version: ["3.11"] + syft-version: ["0.8.2", "0.8.2b6", "0.8.3"] + + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + - name: Check for file changes + uses: dorny/paths-filter@v3 + id: changes + with: + base: ${{ github.ref }} + token: ${{ github.token }} + filters: .github/file-filters.yml + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + if: steps.changes.outputs.hagrid == 'true' + with: + python-version: ${{ matrix.python-version }} + + - name: Upgrade pip + if: steps.changes.outputs.hagrid == 'true' + run: | + python -m pip install --upgrade --user pip + + - name: Get pip cache dir + id: pip-cache + if: steps.changes.outputs.hagrid == 'true' + shell: bash + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + + - name: pip cache + uses: actions/cache@v4 + if: steps.changes.outputs.hagrid == 'true' + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/syft/setup.cfg') }} + restore-keys: | + ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/syft/setup.cfg') }} + + # https://github.com/google/jax/issues/17693 + # pinning ml-dtypes due to jax version==0.4.10 + - name: Install Syft ${{ matrix.syft-version }} + if: steps.changes.outputs.hagrid == 'true' + run: | + pip install ml-dtypes==0.2.0 + pip install syft==${{ matrix.syft-version }} + pip install . 
+ + - name: Run Orchestra Command + if: steps.changes.outputs.hagrid == 'true' + run: | + python -c "import syft as sy; domain1 = sy.orchestra.launch(name='test-domain-1', dev_mode=True, reset=True)" + python -c "import syft as sy; domain2 = sy.orchestra.launch(name='test-domain-2',dev_mode=False, reset=True)" diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index b051565469f..4caaabab56b 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -46,7 +46,7 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }}- - - uses: pre-commit/action@v3.0.0 + - uses: pre-commit/action@v3.0.1 - name: Check Protocol Version run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 81b29307e8a..f1a21cb76ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -172,7 +172,7 @@ repos: - id: mypy name: "mypy: syft" always_run: true - files: "^packages/syft/src/syft/serde|^packages/syft/src/syft/util/env.py|^packages/syft/src/syft/util/logger.py|^packages/syft/src/syft/util/markdown.py|^packages/syft/src/syft/util/notebook_ui/notebook_addons.py" + files: "^packages/syft/src/syft/serde|^packages/syft/src/syft/util/env.py|^packages/syft/src/syft/util/logger.py|^packages/syft/src/syft/util/markdown.py|^packages/syft/src/syft/util/notebook_ui/notebook_addons.py|^packages/syft/src/syft/service/warnings.py|^packages/syft/src/syft/service/dataset|^packages/syft/src/syft/service/worker|^packages/syft/src/syft/service/user" #files: "^packages/syft/src/syft/serde" args: [ "--follow-imports=skip", diff --git a/VERSION b/VERSION index 8aabcce8104..8023d707449 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.4-beta.22" +__version__ = "0.8.4-beta.30" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/notebooks/api/0.8/10-container-images.ipynb 
b/notebooks/api/0.8/10-container-images.ipynb index 35f26e791ff..09014159318 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -51,6 +51,7 @@ "# Disable inmemory worker for container stack\n", "running_as_container = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\") in (\n", " \"container_stack\",\n", + " \"k8s\",\n", ")\n", "in_memory_workers = not running_as_container" ] @@ -115,8 +116,8 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str = \"\"\"\n", - "FROM openmined/grid-backend:0.8.4-beta.12\n", + "custom_dockerfile_str = f\"\"\"\n", + "FROM openmined/grid-backend:{sy.__version__}\n", "\n", "RUN pip install pydicom\n", "\n", @@ -941,7 +942,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert job.job_worker_id is not None" + "# Disabling it due to Race Condition Error\n", + "# assert job.job_worker_id is not None" ] }, { @@ -1069,8 +1071,8 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str_2 = \"\"\"\n", - "FROM openmined/grid-backend:0.8.4-beta.12\n", + "custom_dockerfile_str_2 = f\"\"\"\n", + "FROM openmined/grid-backend:{sy.__version__}\n", "\n", "RUN pip install opendp\n", "\"\"\".strip()\n", @@ -1220,8 +1222,8 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str_3 = \"\"\"\n", - "FROM openmined/grid-backend:0.8.4-beta.12\n", + "custom_dockerfile_str_3 = f\"\"\"\n", + "FROM openmined/grid-backend:{sy.__version__}\n", "\n", "RUN pip install recordlinkage\n", "\"\"\".strip()\n", @@ -1428,7 +1430,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index 68bc6982f8c..64dd231ff31 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -28,6 +28,9 @@ "# syft absolute\n", "import 
syft as sy\n", "\n", + "from getpass import getpass # noqa\n", + "\n", + "\n", "sy.requires(SYFT_VERSION)\n", "\n", "# syft absolute\n", @@ -210,11 +213,14 @@ "metadata": {}, "outputs": [], "source": [ - "registry = \"k3d-registry.localhost:5000\"\n", + "registry = os.getenv(\"SYFT_BASE_IMAGE_REGISTRY\", \"docker.io\")\n", "repo = \"openmined/grid-backend\"\n", "\n", - "res = requests.get(url=f\"http://{registry}/v2/{repo}/tags/list\")\n", - "tag = res.json()[\"tags\"][0]" + "if \"k3d\" in registry:\n", + " res = requests.get(url=f\"http://{registry}/v2/{repo}/tags/list\")\n", + " tag = res.json()[\"tags\"][0]\n", + "else:\n", + " tag = sy.__version__" ] }, { @@ -322,7 +328,23 @@ "id": "91a66871", "metadata": {}, "source": [ - "#### Add k3d Registry in Syft" + "#### Add External Registry in Syft" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32a323ca-8293-408a-a878-a954df55d787", + "metadata": {}, + "outputs": [], + "source": [ + "external_registry = os.getenv(\"EXTERNAL_REGISTRY\", registry)\n", + "external_registry_username = os.getenv(\"EXTERNAL_REGISTRY_USERNAME\", None)\n", + "external_registry_password = os.getenv(\"EXTERNAL_REGISTRY_PASSWORD\", None)\n", + "\n", + "# external_registry = input()\n", + "# external_registry_username = getpass(\"Enter Registry Username\")\n", + "# external_registry_password = getpass(\"Enter Registry Password\")" ] }, { @@ -332,7 +354,7 @@ "metadata": {}, "outputs": [], "source": [ - "registry_add_result = domain_client.api.services.image_registry.add(registry)\n", + "registry_add_result = domain_client.api.services.image_registry.add(external_registry)\n", "registry_add_result" ] }, @@ -488,7 +510,11 @@ "outputs": [], "source": [ "push_result = None\n", - "push_result = domain_client.api.services.worker_image.push(workerimage.id)\n", + "push_result = domain_client.api.services.worker_image.push(\n", + " workerimage.id,\n", + " username=external_registry_username,\n", + " 
password=external_registry_password,\n", + ")\n", "push_result" ] }, @@ -544,6 +570,8 @@ " name=worker_pool_name,\n", " image_uid=workerimage.id,\n", " num_workers=3,\n", + " reg_username=external_registry_username,\n", + " reg_password=external_registry_password,\n", ")" ] }, @@ -1027,7 +1055,11 @@ "source": [ "# Push OpenDP Image to registry\n", "push_result = None\n", - "push_result = domain_client.api.services.worker_image.push(workerimage_opendp.id)\n", + "push_result = domain_client.api.services.worker_image.push(\n", + " workerimage_opendp.id,\n", + " username=external_registry_username,\n", + " password=external_registry_password,\n", + ")\n", "assert isinstance(push_result, sy.SyftSuccess), str(push_result)" ] }, @@ -1040,9 +1072,7 @@ "source": [ "pool_name_opendp = \"opendp-pool\"\n", "pool_create_request = domain_client.api.services.worker_pool.pool_creation_request(\n", - " pool_name=pool_name_opendp,\n", - " num_workers=3,\n", - " image_uid=workerimage_opendp.id,\n", + " pool_name=pool_name_opendp, num_workers=3, image_uid=workerimage_opendp.id\n", ")\n", "pool_create_request" ] @@ -1066,7 +1096,9 @@ "outputs": [], "source": [ "# get the pending request and approve it\n", - "req_result = pool_create_request.approve()\n", + "req_result = pool_create_request.approve(\n", + " reg_username=external_registry_username, reg_password=external_registry_password\n", + ")\n", "req_result" ] }, @@ -1210,7 +1242,9 @@ "metadata": {}, "outputs": [], "source": [ - "req_result = pool_image_create_request.approve()\n", + "req_result = pool_image_create_request.approve(\n", + " reg_username=external_registry_username, reg_password=external_registry_password\n", + ")\n", "req_result" ] }, diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 8aabcce8104..8023d707449 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.4-beta.22" +__version__ = "0.8.4-beta.30" # elsewhere we can call this 
file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 88acd441e45..5eccce071e2 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.4-beta.22" +ARG SYFT_VERSION_TAG="0.8.4-beta.30" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.11" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 8f698707f2d..a14423c3e1a 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.4-beta.22" + VERSION: "0.8.4-beta.30" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 39a05242ac3..aaa0fe4ab8e 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.4-beta.22", + "version": "0.8.4-beta.30", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index f6e676e2bac..8d2ddd55959 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,105 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.4-beta.30 + created: "2024-02-19T03:16:26.697231931Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad + icon: 
https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.30.tgz + version: 0.8.4-beta.30 + - apiVersion: v2 + appVersion: 0.8.4-beta.29 + created: "2024-02-19T03:16:26.696464782Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.29.tgz + version: 0.8.4-beta.29 + - apiVersion: v2 + appVersion: 0.8.4-beta.28 + created: "2024-02-19T03:16:26.696043382Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.28.tgz + version: 0.8.4-beta.28 + - apiVersion: v2 + appVersion: 0.8.4-beta.27 + created: "2024-02-19T03:16:26.695633303Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.27.tgz + version: 0.8.4-beta.27 + - apiVersion: v2 + appVersion: 0.8.4-beta.26 + created: "2024-02-19T03:16:26.695223515Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a + icon: 
https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.26.tgz + version: 0.8.4-beta.26 + - apiVersion: v2 + appVersion: 0.8.4-beta.25 + created: "2024-02-19T03:16:26.694762681Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.25.tgz + version: 0.8.4-beta.25 + - apiVersion: v2 + appVersion: 0.8.4-beta.24 + created: "2024-02-19T03:16:26.694345519Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.24.tgz + version: 0.8.4-beta.24 + - apiVersion: v2 + appVersion: 0.8.4-beta.23 + created: "2024-02-19T03:16:26.693932264Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.23.tgz + version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-02-12T14:01:21.70441132Z" + created: "2024-02-19T03:16:26.693506656Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -15,7 +111,7 @@ entries: version: 
0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-02-12T14:01:21.703986689Z" + created: "2024-02-19T03:16:26.693071831Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -27,7 +123,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-02-12T14:01:21.703578548Z" + created: "2024-02-19T03:16:26.692604345Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -39,7 +135,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-02-12T14:01:21.702568445Z" + created: "2024-02-19T03:16:26.690894774Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -51,7 +147,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-02-12T14:01:21.702159743Z" + created: "2024-02-19T03:16:26.690458306Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -63,7 +159,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-02-12T14:01:21.701761641Z" + created: "2024-02-19T03:16:26.690063796Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -75,7 +171,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-02-12T14:01:21.701356416Z" + created: "2024-02-19T03:16:26.68966597Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -87,7 +183,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-02-12T14:01:21.700948616Z" + created: "2024-02-19T03:16:26.689261472Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -99,7 +195,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-02-12T14:01:21.700540986Z" + created: "2024-02-19T03:16:26.688855691Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -111,7 +207,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-02-12T14:01:21.700172469Z" + created: "2024-02-19T03:16:26.688507559Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -123,7 +219,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-02-12T14:01:21.699802279Z" + created: "2024-02-19T03:16:26.688157773Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -135,7 +231,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-02-12T14:01:21.699136629Z" + created: "2024-02-19T03:16:26.687810472Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -147,7 +243,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-02-12T14:01:21.698420975Z" + created: "2024-02-19T03:16:26.687464073Z" description: Perform numpy-like analysis on 
data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -159,7 +255,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-02-12T14:01:21.707569648Z" + created: "2024-02-19T03:16:26.700095403Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -171,7 +267,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-02-12T14:01:21.707223523Z" + created: "2024-02-19T03:16:26.699745948Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -183,7 +279,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-02-12T14:01:21.706849435Z" + created: "2024-02-19T03:16:26.699369492Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -195,7 +291,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-02-12T14:01:21.706501787Z" + created: "2024-02-19T03:16:26.698541656Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -207,7 +303,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-02-12T14:01:21.706077356Z" + created: "2024-02-19T03:16:26.697908499Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -219,7 +315,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-02-12T14:01:21.70509791Z" + created: "2024-02-19T03:16:26.69756762Z" 
description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -231,7 +327,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-02-12T14:01:21.70475964Z" + created: "2024-02-19T03:16:26.696816351Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -243,7 +339,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-02-12T14:01:21.703130914Z" + created: "2024-02-19T03:16:26.69173006Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -259,7 +355,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-02-12T14:01:21.698057787Z" + created: "2024-02-19T03:16:26.687097566Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -275,7 +371,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-02-12T14:01:21.697334159Z" + created: "2024-02-19T03:16:26.686457195Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -291,7 +387,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-02-12T14:01:21.69614369Z" + created: "2024-02-19T03:16:26.68512368Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -307,7 +403,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-02-12T14:01:21.695037849Z" + created: "2024-02-19T03:16:26.684523335Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -323,7 +419,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-02-12T14:01:21.693978484Z" + created: "2024-02-19T03:16:26.683951413Z" dependencies: - name: component-chart 
repository: https://charts.devspace.sh @@ -339,7 +435,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-02-12T14:01:21.692778116Z" + created: "2024-02-19T03:16:26.683284432Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -355,7 +451,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-02-12T14:01:21.691765218Z" + created: "2024-02-19T03:16:26.68269098Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -371,7 +467,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-02-12T14:01:21.690718036Z" + created: "2024-02-19T03:16:26.682126521Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -387,7 +483,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-02-12T14:01:21.689752196Z" + created: "2024-02-19T03:16:26.681532017Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -403,7 +499,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-02-12T14:01:21.688057746Z" + created: "2024-02-19T03:16:26.680843717Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -419,7 +515,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-02-12T14:01:21.686769555Z" + created: "2024-02-19T03:16:26.679510886Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -435,7 +531,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-02-12T14:01:21.686030387Z" + created: "2024-02-19T03:16:26.678861388Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -451,7 +547,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-02-12T14:01:21.685380025Z" + created: 
"2024-02-19T03:16:26.678183778Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -467,7 +563,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-02-12T14:01:21.684634716Z" + created: "2024-02-19T03:16:26.677521696Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -483,7 +579,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-02-12T14:01:21.683397299Z" + created: "2024-02-19T03:16:26.676836592Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -499,7 +595,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-02-12T14:01:21.682621664Z" + created: "2024-02-19T03:16:26.676195931Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -515,7 +611,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-02-12T14:01:21.681949961Z" + created: "2024-02-19T03:16:26.675526275Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -531,7 +627,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-02-12T14:01:21.680888994Z" + created: "2024-02-19T03:16:26.674573098Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -547,7 +643,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-02-12T14:01:21.67909002Z" + created: "2024-02-19T03:16:26.673232355Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -563,7 +659,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-02-12T14:01:21.677827046Z" + created: "2024-02-19T03:16:26.672553041Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -579,7 +675,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 
appVersion: 0.8.2-beta.47 - created: "2024-02-12T14:01:21.676839225Z" + created: "2024-02-19T03:16:26.671873798Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -595,7 +691,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-02-12T14:01:21.675827158Z" + created: "2024-02-19T03:16:26.67125645Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -611,7 +707,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-02-12T14:01:21.674785066Z" + created: "2024-02-19T03:16:26.67064822Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -627,7 +723,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-02-12T14:01:21.673779071Z" + created: "2024-02-19T03:16:26.670088441Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -643,7 +739,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-02-12T14:01:21.672772545Z" + created: "2024-02-19T03:16:26.669510066Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -659,7 +755,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-02-12T14:01:21.671533976Z" + created: "2024-02-19T03:16:26.668838527Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -675,7 +771,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-02-12T14:01:21.654623586Z" + created: "2024-02-19T03:16:26.667492113Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -691,7 +787,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-02-12T14:01:21.654071126Z" + created: "2024-02-19T03:16:26.666913629Z" dependencies: - name: component-chart repository: 
https://charts.devspace.sh @@ -707,7 +803,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-02-12T14:01:21.653516172Z" + created: "2024-02-19T03:16:26.66632752Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -723,7 +819,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-02-12T14:01:21.652911454Z" + created: "2024-02-19T03:16:26.665736582Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -739,7 +835,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-02-12T14:01:21.652177857Z" + created: "2024-02-19T03:16:26.665116349Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -753,4 +849,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-02-12T14:01:21.651428851Z" +generated: "2024-02-19T03:16:26.664292905Z" diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.23.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.23.tgz new file mode 100644 index 00000000000..c63b83324ea Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.23.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.24.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.24.tgz new file mode 100644 index 00000000000..2772af5c171 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.24.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.25.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.25.tgz new file mode 100644 index 00000000000..c68bd073259 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.25.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.26.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.26.tgz new file mode 100644 index 00000000000..714bd52c19d Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.26.tgz differ diff --git 
a/packages/grid/helm/repo/syft-0.8.4-beta.27.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.27.tgz new file mode 100644 index 00000000000..1fe808a5a5f Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.27.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.28.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.28.tgz new file mode 100644 index 00000000000..5a87f7abe1e Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.28.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.29.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.29.tgz new file mode 100644 index 00000000000..500c3c12eb6 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.29.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.4-beta.30.tgz b/packages/grid/helm/repo/syft-0.8.4-beta.30.tgz new file mode 100644 index 00000000000..064680aa536 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.4-beta.30.tgz differ diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index 3d4bbe02279..d3aad119920 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,6 +2,6 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.8.4-beta.22" -appVersion: "0.8.4-beta.22" +version: "0.8.4-beta.30" +appVersion: "0.8.4-beta.30" icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png \ No newline at end of file diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 6203ee16e29..2f918cf9eef 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -29,7 +29,7 @@ registry: syft: registry: "docker.io" - version: 0.8.4-beta.22 + version: 0.8.4-beta.30 node: settings: diff --git a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml 
b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml index d2e7625ad86..f6c4480374f 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml @@ -31,7 +31,7 @@ data: RABBITMQ_VERSION: 3 SEAWEEDFS_VERSION: 3.59 DOCKER_IMAGE_SEAWEEDFS: chrislusf/seaweedfs:3.55 - VERSION: 0.8.4-beta.22 + VERSION: 0.8.4-beta.30 VERSION_HASH: unknown STACK_API_KEY: "" diff --git a/packages/grid/podman/podman-kube/podman-syft-kube.yaml b/packages/grid/podman/podman-kube/podman-syft-kube.yaml index 232fc55a132..6e8504bfff7 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube.yaml @@ -41,7 +41,7 @@ spec: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-backend:0.8.4-beta.22 + image: docker.io/openmined/grid-backend:0.8.4-beta.30 imagePullPolicy: IfNotPresent resources: {} tty: true @@ -57,7 +57,7 @@ spec: envFrom: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-frontend:0.8.4-beta.22 + image: docker.io/openmined/grid-frontend:0.8.4-beta.30 imagePullPolicy: IfNotPresent resources: {} tty: true diff --git a/packages/hagrid/.bumpversion.cfg b/packages/hagrid/.bumpversion.cfg index 7c93af26e37..e603df6ab55 100644 --- a/packages/hagrid/.bumpversion.cfg +++ b/packages/hagrid/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.3.108 +current_version = 0.3.110 tag = False tag_name = {new_version} commit = True diff --git a/packages/hagrid/hagrid/cli.py b/packages/hagrid/hagrid/cli.py index 133b8837071..df1428930b7 100644 --- a/packages/hagrid/hagrid/cli.py +++ b/packages/hagrid/hagrid/cli.py @@ -50,7 +50,6 @@ from .cache import arg_cache from .deps import DEPENDENCIES from .deps import LATEST_BETA_SYFT -from .deps import LATEST_STABLE_SYFT from .deps import allowed_hosts from .deps import check_docker_service_status from .deps import check_docker_version @@ -93,6 
+92,7 @@ from .quickstart_ui import fetch_notebooks_from_zipfile from .quickstart_ui import quickstart_download_notebook from .rand_sec import generate_sec_random_password +from .stable_version import LATEST_STABLE_SYFT from .style import RichGroup from .util import fix_windows_virtualenv_api from .util import from_url diff --git a/packages/hagrid/hagrid/deps.py b/packages/hagrid/hagrid/deps.py index 55460e7e073..36e8ff6b8ae 100644 --- a/packages/hagrid/hagrid/deps.py +++ b/packages/hagrid/hagrid/deps.py @@ -41,8 +41,7 @@ from .nb_output import NBOutput from .version import __version__ -LATEST_STABLE_SYFT = "0.8.3" -LATEST_BETA_SYFT = "0.8.4-beta.22" +LATEST_BETA_SYFT = "0.8.4-beta.30" DOCKER_ERROR = """ You are running an old version of docker, possibly on Linux. You need to install v2. diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml index f67bb5d5bbd..9252cf2e229 100644 --- a/packages/hagrid/hagrid/manifest_template.yml +++ b/packages/hagrid/hagrid/manifest_template.yml @@ -1,9 +1,9 @@ manifestVersion: 0.1 -hagrid_version: 0.3.108 -syft_version: 0.8.4-beta.22 -dockerTag: 0.8.4-beta.22 +hagrid_version: 0.3.110 +syft_version: 0.8.4-beta.30 +dockerTag: 0.8.4-beta.30 baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/ -hash: 799965e620b01f9cd1d8e7f23cc4cdc0fbc0410b +hash: 9184f5678a91764d86e2556c2f0d859080ea3b31 target_dir: ~/.hagrid/PySyft/ files: grid: diff --git a/packages/hagrid/hagrid/orchestra.py b/packages/hagrid/hagrid/orchestra.py index 8576b8fef7c..8a0e74c06b6 100644 --- a/packages/hagrid/hagrid/orchestra.py +++ b/packages/hagrid/hagrid/orchestra.py @@ -13,6 +13,7 @@ from typing import Any from typing import Callable from typing import Optional +from typing import TYPE_CHECKING from typing import Union # relative @@ -21,7 +22,6 @@ from .names import random_name from .util import ImportFromSyft from .util import NodeSideType -from .util import NodeType from .util import shell DEFAULT_PORT = 
8080 @@ -31,6 +31,9 @@ ClientAlias = Any # we don't want to import Client in case it changes +if TYPE_CHECKING: + NodeType = ImportFromSyft.import_node_type() + # Define a function to read and print a stream def read_stream(stream: subprocess.PIPE) -> None: @@ -95,6 +98,7 @@ def container_exists_with(name: str, port: int) -> bool: def get_node_type(node_type: Optional[Union[str, NodeType]]) -> Optional[NodeType]: + NodeType = ImportFromSyft.import_node_type() if node_type is None: node_type = os.environ.get("ORCHESTRA_NODE_TYPE", NodeType.DOMAIN) try: @@ -236,6 +240,7 @@ def deploy_to_python( queue_port: Optional[int] = None, ) -> Optional[NodeHandle]: stage_protocol_changes = ImportFromSyft.import_stage_protocol_changes() + NodeType = ImportFromSyft.import_node_type() sy = get_syft_client() if sy is None: return sy @@ -259,7 +264,7 @@ def deploy_to_python( "processes": processes, "dev_mode": dev_mode, "tail": tail, - "node_type": str(node_type_enum), + "node_type": node_type_enum, "node_side_type": node_side_type, "enable_warnings": enable_warnings, # new kwargs @@ -296,7 +301,7 @@ def deploy_to_python( worker_class = worker_classes[node_type_enum] sig = inspect.signature(worker_class.named) supported_kwargs = {k: v for k, v in kwargs.items() if k in sig.parameters} - if "node_type" in sig.parameters.keys(): + if "node_type" in sig.parameters.keys() and "migrate" in sig.parameters: supported_kwargs["migrate"] = True worker = worker_class.named(**supported_kwargs) else: @@ -482,6 +487,7 @@ def launch( queue_port: Optional[int] = None, in_memory_workers: bool = True, ) -> Optional[NodeHandle]: + NodeType = ImportFromSyft.import_node_type() if dev_mode is True: os.environ["DEV_MODE"] = "True" thread_workers = True @@ -495,8 +501,6 @@ def launch( dev_mode = str_to_bool(os.environ.get("DEV_MODE", f"{dev_mode}")) node_type_enum: Optional[NodeType] = get_node_type(node_type=node_type) - if not node_type_enum: - return None node_side_type_enum = ( NodeSideType.HIGH_SIDE 
diff --git a/packages/hagrid/hagrid/stable_version.py b/packages/hagrid/hagrid/stable_version.py new file mode 100644 index 00000000000..0a95cf11317 --- /dev/null +++ b/packages/hagrid/hagrid/stable_version.py @@ -0,0 +1 @@ +LATEST_STABLE_SYFT = "0.8.3" diff --git a/packages/hagrid/hagrid/util.py b/packages/hagrid/hagrid/util.py index a43a25e9689..41c5dcb39a5 100644 --- a/packages/hagrid/hagrid/util.py +++ b/packages/hagrid/hagrid/util.py @@ -22,17 +22,6 @@ def __str__(self) -> str: return self.value -class NodeType(str, Enum): - DOMAIN = "domain" - NETWORK = "network" - ENCLAVE = "enclave" - GATEWAY = "gateway" - - def __str__(self) -> str: - # Use values when transforming NodeType to str - return self.value - - class ImportFromSyft: @staticmethod def import_syft_error() -> Callable: @@ -56,6 +45,16 @@ def stage_protocol_changes(*args: Any, **kwargs: Any) -> None: return stage_protocol_changes + @staticmethod + def import_node_type() -> Callable: + try: + # syft absolute + from syft.abstract_node import NodeType + except Exception: + NodeType = DummyNum + + return NodeType + def from_url(url: str) -> Tuple[str, str, int, str, Union[Any, str]]: try: diff --git a/packages/hagrid/hagrid/version.py b/packages/hagrid/hagrid/version.py index 19175a1670c..84581fc53e7 100644 --- a/packages/hagrid/hagrid/version.py +++ b/packages/hagrid/hagrid/version.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # HAGrid Version -__version__ = "0.3.108" +__version__ = "0.3.110" if __name__ == "__main__": print(__version__) diff --git a/packages/hagrid/setup.py b/packages/hagrid/setup.py index 94bc8d4c77b..a3e560ce20a 100644 --- a/packages/hagrid/setup.py +++ b/packages/hagrid/setup.py @@ -5,7 +5,7 @@ from setuptools import find_packages from setuptools import setup -__version__ = "0.3.108" +__version__ = "0.3.110" DATA_FILES = {"img": ["hagrid/img/*.png"], "hagrid": ["*.yml"]} diff --git a/packages/syft/MANIFEST.in b/packages/syft/MANIFEST.in index 8cdc899ed4f..0458665c8df 100644 --- 
a/packages/syft/MANIFEST.in +++ b/packages/syft/MANIFEST.in @@ -3,3 +3,5 @@ include src/syft/capnp/* include src/syft/cache/* include src/syft/img/* include src/syft/protocol/protocol_version.json +include src/syft/protocol/releases/* + diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 1240a671df8..a6d6b6e5254 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.4-beta.22" +version = attr: "0.8.4-beta.30" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index 8aabcce8104..8023d707449 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.4-beta.22" +__version__ = "0.8.4-beta.30" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 91d53ecaa5e..e11bef3db79 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.8.4-beta.22" +__version__ = "0.8.4-beta.30" # stdlib import pathlib @@ -68,6 +68,7 @@ from .service.response import SyftSuccess # noqa: F401 from .service.user.roles import Roles as roles # noqa: F401 from .service.user.user_service import UserService # noqa: F401 +from .stable_version import LATEST_STABLE_SYFT from .types.twin_object import TwinObject # noqa: F401 from .types.uid import UID # noqa: F401 from .util import filterwarnings # noqa: F401 @@ -81,7 +82,6 @@ from .util.util import get_root_data_path # noqa: F401 from .util.version_compare import make_requires -LATEST_STABLE_SYFT = "0.8.3" requires = make_requires(LATEST_STABLE_SYFT, __version__) diff --git a/packages/syft/src/syft/serde/third_party.py 
b/packages/syft/src/syft/serde/third_party.py index 1abfe2d9cdc..2d70250cbe6 100644 --- a/packages/syft/src/syft/serde/third_party.py +++ b/packages/syft/src/syft/serde/third_party.py @@ -96,8 +96,8 @@ def deserialize_dataframe(buf: bytes) -> DataFrame: def deserialize_series(blob: bytes) -> Series: - df = DataFrame.from_dict(deserialize(blob, from_bytes=True)) - return df[df.columns[0]] + df: DataFrame = DataFrame.from_dict(deserialize(blob, from_bytes=True)) + return Series(df[df.columns[0]]) recursive_serde_register( diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index 87fed2a65b2..a5d56e8a605 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -101,7 +101,7 @@ class MarkdownDescription(SyftObject): text: str - def _repr_markdown_(self): + def _repr_markdown_(self) -> str: style = """

{self.name}

-

{self.description.text}

+

{description_text}

{uploaded_by_line}

Created on: {self.created_at}

URL: @@ -535,7 +539,10 @@ def _old_repr_markdown_(self) -> str: _repr_str = f"Syft Dataset: {self.name}\n" _repr_str += "Assets:\n" for asset in self.asset_list: - _repr_str += f"\t{asset.name}: {asset.description.text}\n" + if asset.description is not None: + _repr_str += f"\t{asset.name}: {asset.description.text}\n\n" + else: + _repr_str += f"\t{asset.name}\n\n" if self.citation: _repr_str += f"Citation: {self.citation}\n" if self.url: @@ -552,7 +559,10 @@ def _markdown_(self) -> str: _repr_str = f"Syft Dataset: {self.name}\n\n" _repr_str += "Assets:\n\n" for asset in self.asset_list: - _repr_str += f"\t{asset.name}: {asset.description.text}\n\n" + if asset.description is not None: + _repr_str += f"\t{asset.name}: {asset.description.text}\n\n" + else: + _repr_str += f"\t{asset.name}\n\n" if self.citation: _repr_str += f"Citation: {self.citation}\n\n" if self.url: @@ -621,7 +631,7 @@ class CreateDataset(Dataset): id: Optional[UID] = None created_at: Optional[DateTime] - uploader: Optional[Contributor] + uploader: Optional[Contributor] # type: ignore[assignment] class Config: validate_assignment = True @@ -670,7 +680,7 @@ def add_contributor( return SyftError(message=f"Failed to add contributor. Error: {e}") def add_asset( - self, asset: CreateAsset, force_replace=False + self, asset: CreateAsset, force_replace: bool = False ) -> Union[SyftSuccess, SyftError]: if asset.mock is None: raise ValueError(_ASSET_WITH_NONE_MOCK_ERROR_MESSAGE) @@ -694,7 +704,7 @@ def add_asset( message=f"Asset '{asset.name}' added to '{self.name}' Dataset." 
) - def replace_asset(self, asset: CreateAsset): + def replace_asset(self, asset: CreateAsset) -> Union[SyftSuccess, SyftError]: return self.add_asset(asset=asset, force_replace=True) def remove_asset(self, name: str) -> None: diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py index 2558e1e560b..a06a3c72f74 100644 --- a/packages/syft/src/syft/service/dataset/dataset_service.py +++ b/packages/syft/src/syft/service/dataset/dataset_service.py @@ -2,6 +2,7 @@ from collections.abc import Collection from typing import List from typing import Optional +from typing import Sequence from typing import Union # relative @@ -52,7 +53,7 @@ def _paginate_collection( def _paginate_dataset_collection( - datasets: Collection[Dataset], + datasets: Sequence[Dataset], page_size: Optional[int] = 0, page_index: Optional[int] = 0, ) -> Union[DictTuple[str, Dataset], DatasetPageView]: @@ -204,7 +205,9 @@ def get_assets_by_action_id( name="dataset_delete_by_id", warning=HighSideCRUDWarning(confirmation=True), ) - def delete_dataset(self, context: AuthedServiceContext, uid: UID): + def delete_dataset( + self, context: AuthedServiceContext, uid: UID + ) -> Union[SyftSuccess, SyftError]: result = self.stash.delete_by_uid(context.credentials, uid) if result.is_ok(): return result.ok() diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index efa03deac94..49e2ff8cf25 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -163,7 +163,7 @@ def time_remaining_string(self): @property def worker(self): api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) return api.services.worker.get(self.job_worker_id) @@ -253,7 +253,7 @@ def restart(self, kill=False) -> None: self.status != JobStatus.PROCESSING and self.status != 
JobStatus.CREATED ): api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) call = SyftAPICall( @@ -273,7 +273,7 @@ def restart(self, kill=False) -> None: def kill(self) -> Union[None, SyftError]: if self.job_pid is not None: api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) @@ -292,7 +292,7 @@ def kill(self) -> Union[None, SyftError]: def fetch(self) -> None: api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) call = SyftAPICall( @@ -314,7 +314,7 @@ def fetch(self) -> None: @property def subjobs(self): api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) return api.services.job.get_subjobs(self.id) @@ -322,14 +322,14 @@ def subjobs(self): @property def owner(self): api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) return api.services.user.get_current_user(self.id) def logs(self, stdout=True, stderr=True, _print=True): api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) results = [] @@ -414,7 +414,7 @@ def wait(self, job_only=False): from time import sleep api = APIRegistry.api_for( - node_uid=self.node_uid, + node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index b550193b59d..57fc514c5d3 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -358,7 +358,7 @@ def poll_creation_wizard() -> List[Any]: Participants can then select the answer that best represents 
their opinion or preference""" description3 = """Since you didn't pass in questions, choices into .create_poll() (or you did so incorrectly), -this wizard is going to guide you through the process of creating a poll.""" +this wizard is going to guide you through the process of creating a poll""" description4 = """In this wizard, we're going to ask you for a question and list of choices to create the poll. The Questions and choices are converted to strings""" diff --git a/packages/syft/src/syft/service/user/roles.py b/packages/syft/src/syft/service/user/roles.py index d9f548272e1..6d7fd256217 100644 --- a/packages/syft/src/syft/service/user/roles.py +++ b/packages/syft/src/syft/service/user/roles.py @@ -7,5 +7,5 @@ class Roles(Enum): UPLOADER = "Uploader" EDITOR = "Editor" - def __str__(self): + def __str__(self) -> str: return f"{self.value}" diff --git a/packages/syft/src/syft/service/user/user.py b/packages/syft/src/syft/service/user/user.py index c6211f9d0d3..98e087aaeb5 100644 --- a/packages/syft/src/syft/service/user/user.py +++ b/packages/syft/src/syft/service/user/user.py @@ -172,12 +172,12 @@ class UserCreateV1(UserUpdateV1): email: EmailStr name: str - role: Optional[ServiceRole] = None + role: Optional[ServiceRole] = None # type: ignore[assignment] password: str - password_verify: Optional[str] = None + password_verify: Optional[str] = None # type: ignore[assignment] verify_key: Optional[SyftVerifyKey] - institution: Optional[str] - website: Optional[str] + institution: Optional[str] # type: ignore[assignment] + website: Optional[str] # type: ignore[assignment] created_by: Optional[SyftSigningKey] @@ -188,12 +188,12 @@ class UserCreate(UserUpdate): email: EmailStr name: str - role: Optional[ServiceRole] = None # make sure role cant be set without uid + role: Optional[ServiceRole] = None # type: ignore[assignment] password: str - password_verify: Optional[str] = None + password_verify: Optional[str] = None # type: ignore[assignment] verify_key: 
Optional[SyftVerifyKey] - institution: Optional[str] - website: Optional[str] + institution: Optional[str] # type: ignore[assignment] + website: Optional[str] # type: ignore[assignment] created_by: Optional[SyftSigningKey] mock_execution_permission: bool = False @@ -399,40 +399,40 @@ def user_to_user_verify() -> List[Callable]: @migrate(UserV1, User) -def upgrade_user_v1_to_v2(): +def upgrade_user_v1_to_v2() -> List[Callable]: return [make_set_default(key="mock_execution_permission", value=False)] @migrate(User, UserV1) -def downgrade_user_v2_to_v1(): +def downgrade_user_v2_to_v1() -> List[Callable]: return [drop(["mock_execution_permission"])] @migrate(UserUpdateV1, UserUpdate) -def upgrade_user_update_v1_to_v2(): +def upgrade_user_update_v1_to_v2() -> List[Callable]: return [make_set_default(key="mock_execution_permission", value=False)] @migrate(UserUpdate, UserUpdateV1) -def downgrade_user_update_v2_to_v1(): +def downgrade_user_update_v2_to_v1() -> List[Callable]: return [drop(["mock_execution_permission"])] @migrate(UserCreateV1, UserCreate) -def upgrade_user_create_v1_to_v2(): +def upgrade_user_create_v1_to_v2() -> List[Callable]: return [make_set_default(key="mock_execution_permission", value=False)] @migrate(UserCreate, UserCreateV1) -def downgrade_user_create_v2_to_v1(): +def downgrade_user_create_v2_to_v1() -> List[Callable]: return [drop(["mock_execution_permission"])] @migrate(UserViewV1, UserView) -def upgrade_user_view_v1_to_v2(): +def upgrade_user_view_v1_to_v2() -> List[Callable]: return [make_set_default(key="mock_execution_permission", value=False)] @migrate(UserView, UserViewV1) -def downgrade_user_view_v2_to_v1(): +def downgrade_user_view_v2_to_v1() -> List[Callable]: return [drop(["mock_execution_permission"])] diff --git a/packages/syft/src/syft/service/user/user_roles.py b/packages/syft/src/syft/service/user/user_roles.py index f4ee5bf03d4..970c75910f6 100644 --- a/packages/syft/src/syft/service/user/user_roles.py +++ 
b/packages/syft/src/syft/service/user/user_roles.py @@ -1,6 +1,7 @@ # stdlib from enum import Enum from typing import Any +from typing import Dict from typing import List from typing import Tuple from typing import Union @@ -42,13 +43,13 @@ def roles_descending(cls) -> List[Tuple[int, Self]]: tuples.append((x.value, x)) return sorted(tuples, reverse=True) - @staticmethod - def roles_for_level(level: Union[int, Self]) -> List[Self]: + @classmethod + def roles_for_level(cls, level: Union[int, Self]) -> List[Self]: if isinstance(level, ServiceRole): level = level.value roles = [] level_float = float(level) - service_roles = ServiceRole.roles_descending() + service_roles = cls.roles_descending() for role in service_roles: role_num = role[0] if role_num == 0: @@ -59,7 +60,7 @@ def roles_for_level(level: Union[int, Self]) -> List[Self]: level_float = level_float % role_num return roles - def capabilities(self): + def capabilities(self) -> List[ServiceRoleCapability]: return ROLE_TO_CAPABILITIES[self] def __add__(self, other: Any) -> int: @@ -90,19 +91,21 @@ def __lt__(self, other: Self) -> bool: + ServiceRole.ADMIN ) -DATA_SCIENTIST_ROLE_LEVEL = ServiceRole.roles_for_level( +DATA_SCIENTIST_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( ServiceRole.DATA_SCIENTIST + ServiceRole.DATA_OWNER + ServiceRole.ADMIN ) -ONLY_DATA_SCIENTIST_ROLE_LEVEL = ServiceRole.roles_for_level(ServiceRole.DATA_SCIENTIST) +ONLY_DATA_SCIENTIST_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( + ServiceRole.DATA_SCIENTIST +) -DATA_OWNER_ROLE_LEVEL = ServiceRole.roles_for_level( +DATA_OWNER_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( ServiceRole.DATA_OWNER + ServiceRole.ADMIN ) ADMIN_ROLE_LEVEL = ServiceRole.roles_for_level(ServiceRole.ADMIN) -ROLE_TO_CAPABILITIES = { +ROLE_TO_CAPABILITIES: Dict[ServiceRole, List[ServiceRoleCapability]] = { ServiceRole.NONE: [], ServiceRole.GUEST: [ ServiceRoleCapability.CAN_MAKE_DATA_REQUESTS, diff --git 
a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index 79067aa0e1f..9ad1b25da68 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -122,8 +122,10 @@ def get_all( for i in range(0, len(results), page_size) ] # Return the proper slice using chunk_index - results = results[page_index] - results = UserViewPage(users=results, total=total) + if page_index is not None: + results = results[page_index] + results = UserViewPage(users=results, total=total) + return results # 🟡 TODO: No user exists will happen when result.ok() is empty list @@ -180,9 +182,9 @@ def search( ] # Return the proper slice using page_index - results = results[page_index] - - results = UserViewPage(users=results, total=total) + if page_index is not None: + results = results[page_index] + results = UserViewPage(users=results, total=total) return results @@ -317,7 +319,9 @@ def update( return user.to(UserView) - def get_target_object(self, credentials: SyftVerifyKey, uid: UID): + def get_target_object( + self, credentials: SyftVerifyKey, uid: UID + ) -> Union[User, SyftError]: user_result = self.stash.get_by_uid(credentials=credentials, uid=uid) if user_result.is_err(): return SyftError(message=str(user_result.err())) diff --git a/packages/syft/src/syft/service/user/user_stash.py b/packages/syft/src/syft/service/user/user_stash.py index fe76cc8de9f..a130f83b3bb 100644 --- a/packages/syft/src/syft/service/user/user_stash.py +++ b/packages/syft/src/syft/service/user/user_stash.py @@ -60,10 +60,10 @@ def set( ignore_duplicates=ignore_duplicates, ) - def admin_verify_key(self): + def admin_verify_key(self) -> Result[Optional[SyftVerifyKey], str]: return Ok(self.partition.root_verify_key) - def admin_user(self): + def admin_user(self) -> Result[Optional[User], str]: return self.get_by_role( credentials=self.admin_verify_key().ok(), role=ServiceRole.ADMIN ) @@ -110,7 +110,7 @@ 
def get_by_verify_key( return self.query_one(credentials=credentials, qks=qks) def delete_by_uid( - self, credentials: SyftVerifyKey, uid: UID, has_permission=False + self, credentials: SyftVerifyKey, uid: UID, has_permission: bool = False ) -> Result[SyftSuccess, str]: qk = UIDPartitionKey.with_obj(uid) result = super().delete( @@ -121,7 +121,7 @@ def delete_by_uid( return result def update( - self, credentials: SyftVerifyKey, user: User, has_permission=False + self, credentials: SyftVerifyKey, user: User, has_permission: bool = False ) -> Result[User, str]: res = self.check_type(user, self.object_type) # we dont use and_then logic here as it is hard because of the order of the arguments diff --git a/packages/syft/src/syft/service/warnings.py b/packages/syft/src/syft/service/warnings.py index a213cb63cb9..553531046a8 100644 --- a/packages/syft/src/syft/service/warnings.py +++ b/packages/syft/src/syft/service/warnings.py @@ -1,4 +1,5 @@ # stdlib +from typing import Any from typing import Optional # third party @@ -30,7 +31,7 @@ class APIEndpointWarning(SyftBaseModel): message: Optional[str] = None enabled: bool = True - def __eq__(self, other) -> bool: + def __eq__(self, other: Any) -> bool: if isinstance(other, APIEndpointWarning): return self.message == other.message and self._bool == other._bool return self._bool == other @@ -54,7 +55,7 @@ def _repr_html_(self) -> str: def message_from(self, context: Optional[WarningContext]) -> Self: raise NotImplementedError - def show(self): + def show(self) -> bool: if not self.enabled or not self.message: return True display(self) @@ -68,24 +69,24 @@ def show(self): @serializable() class CRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None): + def message_from(self, context: Optional[WarningContext] = None) -> Self: message = None confirmation = self.confirmation if context is not None: node = context.node - node_side_type = node.node_side_type - node_type = node.node_type - 
_msg = ( - "which could host datasets with private information." - if node_side_type.value == NodeSideType.HIGH_SIDE.value - else "which only hosts mock or synthetic data." - ) - message = ( - "You're performing an operation on " - f"{node_side_type.value} side {node_type.value}, {_msg}" - ) - confirmation = node_side_type.value == NodeSideType.HIGH_SIDE.value - message = message + if node is not None: + node_side_type = node.node_side_type + node_type = node.node_type + _msg = ( + "which could host datasets with private information." + if node_side_type.value == NodeSideType.HIGH_SIDE.value + else "which only hosts mock or synthetic data." + ) + message = ( + "You're performing an operation on " + f"{node_side_type.value} side {node_type.value}, {_msg}" + ) + confirmation = node_side_type.value == NodeSideType.HIGH_SIDE.value return CRUDWarning(confirmation=confirmation, message=message) @@ -94,63 +95,62 @@ def message_from(self, context: Optional[WarningContext] = None): class CRUDReminder(CRUDWarning): confirmation: bool = False - def message_from(self, context: Optional[WarningContext] = None): + def message_from(self, context: Optional[WarningContext] = None) -> Self: message = None confirmation = self.confirmation if context is not None: node = context.node - node_side_type = node.node_side_type - node_type = node.node_type - _msg = ( - "which could host datasets with private information." - if node_side_type.value == NodeSideType.HIGH_SIDE.value - else "which only hosts mock or synthetic data." - ) - message = ( - "You're performing an operation on " - f"{node_side_type.value} side {node_type.value}, {_msg}" - ) - message = message + if node is not None: + node_side_type = node.node_side_type + node_type = node.node_type + _msg = ( + "which could host datasets with private information." + if node_side_type.value == NodeSideType.HIGH_SIDE.value + else "which only hosts mock or synthetic data." 
+ ) + message = ( + "You're performing an operation on " + f"{node_side_type.value} side {node_type.value}, {_msg}" + ) return CRUDReminder(confirmation=confirmation, message=message) @serializable() class LowSideCRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None): + def message_from(self, context: Optional[WarningContext] = None) -> Self: confirmation = self.confirmation message = None if context is not None: node = context.node - node_side_type = node.node_side_type - node_type = node.node_type - if node_side_type.value == NodeSideType.LOW_SIDE.value: - message = ( - "You're performing an operation on " - f"{node_side_type.value} side {node_type.value} " - "which only hosts mock or synthetic data." - ) - - message = message + if node is not None: + node_side_type = node.node_side_type + node_type = node.node_type + if node_side_type.value == NodeSideType.LOW_SIDE.value: + message = ( + "You're performing an operation on " + f"{node_side_type.value} side {node_type.value} " + "which only hosts mock or synthetic data." + ) return LowSideCRUDWarning(confirmation=confirmation, message=message) @serializable() class HighSideCRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None): + def message_from(self, context: Optional[WarningContext] = None) -> Self: confirmation = self.confirmation message = None if context is not None: node = context.node - node_side_type = node.node_side_type - node_type = node.node_type - if node_side_type.value == NodeSideType.HIGH_SIDE.value: - message = ( - "You're performing an operation on " - f"{node_side_type.value} side {node_type.value} " - "which could host datasets with private information." 
- ) - message = message + if node is not None: + node_side_type = node.node_side_type + node_type = node.node_type + if node_side_type.value == NodeSideType.HIGH_SIDE.value: + message = ( + "You're performing an operation on " + f"{node_side_type.value} side {node_type.value} " + "which could host datasets with private information." + ) return HighSideCRUDWarning(confirmation=confirmation, message=message) diff --git a/packages/syft/src/syft/service/worker/image_identifier.py b/packages/syft/src/syft/service/worker/image_identifier.py index 43623f44d36..4651c3f4f2e 100644 --- a/packages/syft/src/syft/service/worker/image_identifier.py +++ b/packages/syft/src/syft/service/worker/image_identifier.py @@ -53,7 +53,7 @@ def from_str(cls, tag: str) -> Self: return cls(repo=repo, registry=registry, tag=tag) @property - def repo_with_tag(self) -> str: + def repo_with_tag(self) -> Optional[str]: if self.repo or self.tag: return f"{self.repo}:{self.tag}" return None diff --git a/packages/syft/src/syft/service/worker/image_registry.py b/packages/syft/src/syft/service/worker/image_registry.py index 806a0946d2b..7292273c605 100644 --- a/packages/syft/src/syft/service/worker/image_registry.py +++ b/packages/syft/src/syft/service/worker/image_registry.py @@ -4,6 +4,7 @@ # third party from pydantic import validator +from typing_extensions import Self # relative from ...serde.serializable import serializable @@ -28,7 +29,7 @@ class SyftImageRegistry(SyftObject): url: str @validator("url") - def validate_url(cls, val: str): + def validate_url(cls, val: str) -> str: if not val: raise ValueError("Invalid Registry URL. 
Must not be empty") @@ -38,7 +39,7 @@ def validate_url(cls, val: str): return val @classmethod - def from_url(cls, full_str: str): + def from_url(cls, full_str: str) -> Self: # this is only for urlparse if "://" not in full_str: full_str = f"http://{full_str}" diff --git a/packages/syft/src/syft/service/worker/image_registry_service.py b/packages/syft/src/syft/service/worker/image_registry_service.py index 8c628f84486..8acbd9e6f4b 100644 --- a/packages/syft/src/syft/service/worker/image_registry_service.py +++ b/packages/syft/src/syft/service/worker/image_registry_service.py @@ -1,5 +1,6 @@ # stdlib from typing import List +from typing import Optional from typing import Union # relative @@ -62,7 +63,7 @@ def delete( self, context: AuthedServiceContext, uid: UID = None, - url: str = None, + url: Optional[str] = None, ) -> Union[SyftSuccess, SyftError]: # TODO - we need to make sure that there are no workers running an image bound to this registry diff --git a/packages/syft/src/syft/service/worker/utils.py b/packages/syft/src/syft/service/worker/utils.py index fa216e4e6f7..f7443139027 100644 --- a/packages/syft/src/syft/service/worker/utils.py +++ b/packages/syft/src/syft/service/worker/utils.py @@ -5,6 +5,8 @@ import socket import socketserver import sys +from typing import Any +from typing import Dict from typing import List from typing import Optional from typing import Tuple @@ -12,6 +14,8 @@ # third party import docker +from docker.models.containers import Container +from kr8s.objects import Pod # relative from ...abstract_node import AbstractNode @@ -47,7 +51,9 @@ def backend_container_name() -> str: return f"{hostname}-{service_name}-1" -def get_container(docker_client: docker.DockerClient, container_name: str): +def get_container( + docker_client: docker.DockerClient, container_name: str +) -> Optional[Container]: try: existing_container = docker_client.containers.get(container_name) except docker.errors.NotFound: @@ -56,14 +62,20 @@ def 
get_container(docker_client: docker.DockerClient, container_name: str): return existing_container -def extract_config_from_backend(worker_name: str, docker_client: docker.DockerClient): +def extract_config_from_backend( + worker_name: str, docker_client: docker.DockerClient +) -> Dict[str, Any]: # Existing main backend container backend_container = get_container( docker_client, container_name=backend_container_name() ) # Config with defaults - extracted_config = {"volume_binds": {}, "network_mode": None, "environment": {}} + extracted_config: Dict[str, Any] = { + "volume_binds": {}, + "network_mode": None, + "environment": {}, + } if backend_container is None: return extracted_config @@ -94,7 +106,7 @@ def extract_config_from_backend(worker_name: str, docker_client: docker.DockerCl return extracted_config -def get_free_tcp_port(): +def get_free_tcp_port() -> int: with socketserver.TCPServer(("localhost", 0), None) as s: free_port = s.server_address[1] return free_port @@ -113,7 +125,7 @@ def run_container_using_docker( registry_url: Optional[str] = None, ) -> ContainerSpawnStatus: if not worker_image.is_built: - raise Exception("Image must be built before running it.") + raise ValueError("Image must be built before running it.") # Get hostname hostname = socket.gethostname() @@ -165,8 +177,11 @@ def run_container_using_docker( environment["QUEUE_PORT"] = queue_port environment["CONTAINER_HOST"] = "docker" + if worker_image.image_identifier is None: + raise ValueError(f"Image {worker_image} does not have an identifier") + container = docker_client.containers.run( - worker_image.image_identifier.full_name_with_tag, + image=worker_image.image_identifier.full_name_with_tag, name=f"{hostname}-{worker_name}", detach=True, auto_remove=True, @@ -257,20 +272,22 @@ def run_workers_in_threads( return results -def prepare_kubernetes_pool_env(runner: KubernetesRunner, env_vars: dict): +def prepare_kubernetes_pool_env( + runner: KubernetesRunner, env_vars: dict +) -> Tuple[List, 
Dict]: # get current backend pod name backend_pod_name = os.getenv("K8S_POD_NAME") if not backend_pod_name: - raise ValueError(message="Pod name not provided in environment variable") + raise ValueError("Pod name not provided in environment variable") # get current backend's credentials path - creds_path = os.getenv("CREDENTIALS_PATH") + creds_path: Optional[Union[str, Path]] = os.getenv("CREDENTIALS_PATH") if not creds_path: - raise ValueError(message="Credentials path not provided") + raise ValueError("Credentials path not provided") creds_path = Path(creds_path) - if not creds_path.exists(): - raise ValueError(message="Credentials file does not exist") + if creds_path is not None and not creds_path.exists(): + raise ValueError("Credentials file does not exist") # create a secret for the node credentials owned by the backend, not the pool. node_secret = KubeUtils.create_secret( @@ -283,7 +300,7 @@ def prepare_kubernetes_pool_env(runner: KubernetesRunner, env_vars: dict): # clone and patch backend environment variables backend_env = runner.get_pod_env_vars(backend_pod_name) or [] - env_vars = KubeUtils.patch_env_vars(backend_env, env_vars) + env_vars_: List = KubeUtils.patch_env_vars(backend_env, env_vars) mount_secrets = { node_secret.metadata.name: { "mountPath": str(creds_path), @@ -291,7 +308,7 @@ def prepare_kubernetes_pool_env(runner: KubernetesRunner, env_vars: dict): }, } - return env_vars, mount_secrets + return env_vars_, mount_secrets def create_kubernetes_pool( @@ -304,8 +321,8 @@ def create_kubernetes_pool( reg_username: Optional[str] = None, reg_password: Optional[str] = None, reg_url: Optional[str] = None, - **kwargs, -): + **kwargs: Any, +) -> Union[List[Pod], SyftError]: pool = None error = False @@ -355,7 +372,7 @@ def scale_kubernetes_pool( runner: KubernetesRunner, pool_name: str, replicas: int, -): +) -> Union[List[Pod], SyftError]: pool = runner.get_pool(pool_name) if not pool: return SyftError(message=f"Pool does not exist. 
name={pool_name}") @@ -374,28 +391,33 @@ def run_workers_in_kubernetes( worker_count: int, pool_name: str, queue_port: int, - start_idx=0, + start_idx: int = 0, debug: bool = False, reg_username: Optional[str] = None, reg_password: Optional[str] = None, reg_url: Optional[str] = None, - **kwargs, + **kwargs: Any, ) -> Union[List[ContainerSpawnStatus], SyftError]: spawn_status = [] runner = KubernetesRunner() if not runner.exists(pool_name=pool_name): - pool_pods = create_kubernetes_pool( - runner=runner, - tag=worker_image.image_identifier.full_name_with_tag, - pool_name=pool_name, - replicas=worker_count, - queue_port=queue_port, - debug=debug, - reg_username=reg_username, - reg_password=reg_password, - reg_url=reg_url, - ) + if worker_image.image_identifier is not None: + pool_pods = create_kubernetes_pool( + runner=runner, + tag=worker_image.image_identifier.full_name_with_tag, + pool_name=pool_name, + replicas=worker_count, + queue_port=queue_port, + debug=debug, + reg_username=reg_username, + reg_password=reg_password, + reg_url=reg_url, + ) + else: + return SyftError( + message=f"image with uid {worker_image.id} does not have an image identifier" + ) else: pool_pods = scale_kubernetes_pool(runner, pool_name, worker_count) @@ -584,28 +606,35 @@ def _get_healthcheck_based_on_status(status: WorkerStatus) -> WorkerHealth: return WorkerHealth.UNHEALTHY -def image_build(image: SyftWorkerImage, **kwargs) -> Union[ImageBuildResult, SyftError]: - full_tag = image.image_identifier.full_name_with_tag - try: - builder = CustomWorkerBuilder() - return builder.build_image( - config=image.config, - tag=full_tag, - rm=True, - forcerm=True, - **kwargs, - ) - except docker.errors.APIError as e: - return SyftError( - message=f"Docker API error when building '{full_tag}'. Reason - {e}" - ) - except docker.errors.DockerException as e: - return SyftError( - message=f"Docker exception when building '{full_tag}'. 
Reason - {e}" - ) - except Exception as e: +def image_build( + image: SyftWorkerImage, **kwargs: Dict[str, Any] +) -> Union[ImageBuildResult, SyftError]: + if image.image_identifier is not None: + full_tag = image.image_identifier.full_name_with_tag + try: + builder = CustomWorkerBuilder() + return builder.build_image( + config=image.config, + tag=full_tag, + rm=True, + forcerm=True, + **kwargs, + ) + except docker.errors.APIError as e: + return SyftError( + message=f"Docker API error when building '{full_tag}'. Reason - {e}" + ) + except docker.errors.DockerException as e: + return SyftError( + message=f"Docker exception when building '{full_tag}'. Reason - {e}" + ) + except Exception as e: + return SyftError( + message=f"Unknown exception when building '{full_tag}'. Reason - {e}" + ) + else: return SyftError( - message=f"Unknown exception when building '{full_tag}'. Reason - {e}" + message=f"image with uid {image.id} does not have an image identifier" ) @@ -614,34 +643,40 @@ def image_push( username: Optional[str] = None, password: Optional[str] = None, ) -> Union[ImagePushResult, SyftError]: - full_tag = image.image_identifier.full_name_with_tag - try: - builder = CustomWorkerBuilder() - result = builder.push_image( - # this should be consistent with docker build command - tag=image.image_identifier.full_name_with_tag, - registry_url=image.image_identifier.registry_host, - username=username, - password=password, - ) + if image.image_identifier is not None: + full_tag = image.image_identifier.full_name_with_tag + try: + builder = CustomWorkerBuilder() + result = builder.push_image( + # this should be consistent with docker build command + tag=image.image_identifier.full_name_with_tag, + registry_url=image.image_identifier.registry_host, + username=username, + password=password, + ) + + if "error" in result.logs.lower() or result.exit_code: + return SyftError( + message=f"Failed to push {full_tag}. " + f"Exit code: {result.exit_code}. 
" + f"Logs:\n{result.logs}" + ) - if "error" in result.logs.lower() or result.exit_code: + return result + except docker.errors.APIError as e: + return SyftError(message=f"Docker API error when pushing {full_tag}. {e}") + except docker.errors.DockerException as e: return SyftError( - message=f"Failed to push {full_tag}. " - f"Exit code: {result.exit_code}. " - f"Logs:\n{result.logs}" + message=f"Docker exception when pushing {full_tag}. Reason - {e}" ) - - return result - except docker.errors.APIError as e: - return SyftError(message=f"Docker API error when pushing {full_tag}. {e}") - except docker.errors.DockerException as e: - return SyftError( - message=f"Docker exception when pushing {full_tag}. Reason - {e}" - ) - except Exception as e: + except Exception as e: + return SyftError( + message=f"Unknown exception when pushing {image.image_identifier}. Reason - {e}" + ) + else: return SyftError( - message=f"Unknown exception when pushing {image.image_identifier}. Reason - {e}" + message=f"image with uid {image.id} does not have an " + "image identifier and tag, hence we can't push it." 
) diff --git a/packages/syft/src/syft/service/worker/worker.py b/packages/syft/src/syft/service/worker/worker.py index 0242dba177a..ef3fc4aec5d 100644 --- a/packages/syft/src/syft/service/worker/worker.py +++ b/packages/syft/src/syft/service/worker/worker.py @@ -1,4 +1,8 @@ # stdlib +from typing import Any +from typing import Callable +from typing import Dict +from typing import List # relative from ...serde.serializable import serializable @@ -39,7 +43,7 @@ class DockerWorker(SyftObject): container_id: str created_at: DateTime = DateTime.now() - def _coll_repr_(self): + def _coll_repr_(self) -> Dict[str, Any]: return { "container_name": self.container_name, "container_id": self.container_id, @@ -48,10 +52,10 @@ def _coll_repr_(self): @migrate(DockerWorker, DockerWorkerV1) -def downgrade_job_v2_to_v1(): +def downgrade_job_v2_to_v1() -> List[Callable]: return [drop(["container_name"])] @migrate(DockerWorkerV1, DockerWorker) -def upgrade_job_v2_to_v3(): +def upgrade_job_v2_to_v3() -> List[Callable]: return [make_set_default("job_consumer_id", None)] diff --git a/packages/syft/src/syft/service/worker/worker_image_service.py b/packages/syft/src/syft/service/worker/worker_image_service.py index 78b16a67395..8422341b009 100644 --- a/packages/syft/src/syft/service/worker/worker_image_service.py +++ b/packages/syft/src/syft/service/worker/worker_image_service.py @@ -75,7 +75,7 @@ def build( registry_uid: Optional[UID] = None, pull: bool = True, ) -> Union[SyftSuccess, SyftError]: - registry: SyftImageRegistry = None + registry: Optional[SyftImageRegistry] = None if IN_KUBERNETES and registry_uid is None: return SyftError(message="Registry UID is required in Kubernetes mode.") @@ -96,7 +96,7 @@ def build( registry_result = image_registry_service.get_by_id(context, registry_uid) if registry_result.is_err(): return registry_result - registry: SyftImageRegistry = registry_result.ok() + registry = registry_result.ok() try: if registry: @@ -204,12 +204,13 @@ def get_all( images: 
List[SyftWorkerImage] = result.ok() res = {} - # if image is built index by full_name_with_tag - res.update( - {im.image_identifier.full_name_with_tag: im for im in images if im.is_built} - ) + # if image is built, index it by full_name_with_tag + for im in images: + if im.is_built and im.image_identifier is not None: + res[im.image_identifier.full_name_with_tag] = im # and then index all images by id - # TODO: jupyter repr needs to be updated to show unique values (even if multiple keys point to same value) + # TODO: jupyter repr needs to be updated to show unique values + # (even if multiple keys point to same value) res.update({im.id.to_string(): im for im in images if not im.is_built}) return DictTuple(res) diff --git a/packages/syft/src/syft/service/worker/worker_image_stash.py b/packages/syft/src/syft/service/worker/worker_image_stash.py index 49eb6fa1802..a1580076104 100644 --- a/packages/syft/src/syft/service/worker/worker_image_stash.py +++ b/packages/syft/src/syft/service/worker/worker_image_stash.py @@ -1,5 +1,6 @@ # stdlib from typing import List +from typing import Optional from typing import Union # third party @@ -59,6 +60,6 @@ def set( def get_by_docker_config( self, credentials: SyftVerifyKey, config: DockerWorkerConfig - ): + ) -> Result[Optional[SyftWorkerImage], str]: qks = QueryKeys(qks=[WorkerConfigPK.with_obj(config)]) return self.query_one(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/worker/worker_pool.py b/packages/syft/src/syft/service/worker/worker_pool.py index 377ee118b55..151addcaac0 100644 --- a/packages/syft/src/syft/service/worker/worker_pool.py +++ b/packages/syft/src/syft/service/worker/worker_pool.py @@ -87,7 +87,7 @@ def logs(self) -> Union[str, SyftError]: return api.services.worker.logs(uid=self.id) - def get_job_repr(self): + def get_job_repr(self) -> str: if self.job_id is not None: api = APIRegistry.api_for( node_uid=self.syft_node_location, @@ -117,14 +117,18 @@ def refresh_status(self) -> 
None: def _coll_repr_(self) -> Dict[str, Any]: self.refresh_status() + if self.image and self.image.image_identifier: image_name_with_tag = self.image.image_identifier.full_name_with_tag else: image_name_with_tag = "In Memory Worker" + + healthcheck = self.healthcheck.value if self.healthcheck is not None else "" + return { "Name": self.name, "Image": image_name_with_tag, - "Healthcheck (health / unhealthy)": f"{self.healthcheck.value}", + "Healthcheck (health / unhealthy)": f"{healthcheck}", "Status": f"{self.status.value}", "Job": self.get_job_repr(), "Created at": str(self.created_at), @@ -166,6 +170,8 @@ def image(self) -> Optional[Union[SyftWorkerImage, SyftError]]: ) if api is not None: return api.services.worker_image.get_by_uid(uid=self.image_id) + else: + return None @property def running_workers(self) -> Union[List[UID], SyftError]: diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 11e83b01112..38f223332ea 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -443,7 +443,7 @@ def scale( number: int, pool_id: Optional[UID] = None, pool_name: Optional[str] = None, - ): + ) -> Union[SyftError, SyftSuccess]: """ Scale the worker pool to the given number of workers in Kubernetes. Allows both scaling up and down the worker pool. 
@@ -466,7 +466,7 @@ def scale( return SyftSuccess(message=f"Worker pool already has {number} workers") elif number > current_worker_count: workers_to_add = number - current_worker_count - return self.add_workers( + result = self.add_workers( context=context, number=workers_to_add, pool_id=pool_id, @@ -475,6 +475,8 @@ def scale( reg_username=None, reg_password=None, ) + if isinstance(result, SyftError): + return result else: # scale down at kubernetes control plane runner = KubernetesRunner() @@ -514,7 +516,7 @@ def scale( return SyftError( message=( f"Pool {worker_pool.name} was scaled down, " - f"but failed update the stash with err: {result.err()}" + f"but failed update the stash with err: {update_result.err()}" ) ) @@ -656,6 +658,11 @@ def _create_workers_in_pool( number=worker_cnt + existing_worker_cnt, ) else: + registry_host = ( + worker_image.image_identifier.registry_host + if worker_image.image_identifier is not None + else None + ) result = run_containers( pool_name=pool_name, worker_image=worker_image, @@ -666,7 +673,7 @@ def _create_workers_in_pool( dev_mode=context.node.dev_mode, reg_username=reg_username, reg_password=reg_password, - reg_url=worker_image.image_identifier.registry_host, + reg_url=registry_host, ) if isinstance(result, SyftError): return result diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py index 9871d7fa18a..af9b9ba7070 100644 --- a/packages/syft/src/syft/service/worker/worker_service.py +++ b/packages/syft/src/syft/service/worker/worker_service.py @@ -255,7 +255,7 @@ def refresh_worker_status( workers: List[SyftWorker], worker_stash: WorkerStash, credentials: SyftVerifyKey, -): +) -> List[SyftWorker]: if IN_KUBERNETES: result = refresh_status_kubernetes(workers) else: @@ -277,7 +277,7 @@ def refresh_worker_status( return result -def refresh_status_kubernetes(workers: List[SyftWorker]): +def refresh_status_kubernetes(workers: List[SyftWorker]) -> 
List[SyftWorker]: updated_workers = [] runner = KubernetesRunner() for worker in workers: @@ -292,7 +292,7 @@ def refresh_status_kubernetes(workers: List[SyftWorker]): return updated_workers -def refresh_status_docker(workers: List[SyftWorker]): +def refresh_status_docker(workers: List[SyftWorker]) -> List[SyftWorker]: updated_workers = [] with contextlib.closing(docker.from_env()) as client: @@ -317,6 +317,7 @@ def _stop_worker_container( container.stop() # Remove the container and its volumes _remove_worker_container(container, force=force, v=True) + return None except Exception as e: return SyftError( message=f"Failed to delete worker with id: {worker.id}. Error: {e}" diff --git a/packages/syft/src/syft/service/worker/worker_stash.py b/packages/syft/src/syft/service/worker/worker_stash.py index 1e82dcfccdf..cb7a914ed9b 100644 --- a/packages/syft/src/syft/service/worker/worker_stash.py +++ b/packages/syft/src/syft/service/worker/worker_stash.py @@ -59,13 +59,13 @@ def get_worker_by_name( def update_consumer_state( self, credentials: SyftVerifyKey, worker_uid: UID, consumer_state: ConsumerState - ): + ) -> Result[str, str]: res = self.get_by_uid(credentials=credentials, uid=worker_uid) if res.is_err(): return Err( f"Failed to retrieve Worker with id: {worker_uid}. 
Error: {res.err()}" ) - worker: SyftWorker = res.ok() + worker: Optional[SyftWorker] = res.ok() if worker is None: return Err(f"Worker with id: {worker_uid} not found") worker.consumer_state = consumer_state diff --git a/packages/syft/src/syft/stable_version.py b/packages/syft/src/syft/stable_version.py new file mode 100644 index 00000000000..0a95cf11317 --- /dev/null +++ b/packages/syft/src/syft/stable_version.py @@ -0,0 +1 @@ +LATEST_STABLE_SYFT = "0.8.3" diff --git a/packages/syft/tests/syft/custom_worker/config_test.py b/packages/syft/tests/syft/custom_worker/config_test.py index 2295c5f8a64..04d805b5990 100644 --- a/packages/syft/tests/syft/custom_worker/config_test.py +++ b/packages/syft/tests/syft/custom_worker/config_test.py @@ -15,6 +15,7 @@ import yaml # syft absolute +import syft as sy from syft.custom_worker.config import CustomBuildConfig from syft.custom_worker.config import CustomWorkerConfig from syft.custom_worker.config import DockerWorkerConfig @@ -167,8 +168,8 @@ def test_load_custom_worker_config( DOCKER_METHODS = ["from_str", "from_path"] -DOCKER_CONFIG_OPENDP = """ - FROM openmined/grid-backend:0.8.4-beta.12 +DOCKER_CONFIG_OPENDP = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install opendp """ @@ -201,6 +202,6 @@ def test_docker_worker_config(dockerfile_path: Path, method: str) -> None: assert docker_config.dockerfile == dockerfile_path.read_text().strip() assert docker_config.description == description - new_description = description + " (syft version is 0.8.4-beta.12)" + new_description = description + f" (syft version is {sy.__version__})" docker_config.set_description(description_text=new_description) assert docker_config.description == new_description diff --git a/packages/syft/tests/syft/worker_pool/worker_pool_service_test.py b/packages/syft/tests/syft/worker_pool/worker_pool_service_test.py index 09b1026b35e..8aad9f5a27e 100644 --- a/packages/syft/tests/syft/worker_pool/worker_pool_service_test.py +++ 
b/packages/syft/tests/syft/worker_pool/worker_pool_service_test.py @@ -2,6 +2,7 @@ from faker import Faker # syft absolute +import syft as sy from syft.custom_worker.config import DockerWorkerConfig from syft.node.worker import Worker from syft.service.request.request import CreateCustomWorkerPoolChange @@ -24,8 +25,8 @@ def test_create_image_and_pool_request_accept(faker: Faker, worker: Worker): assert root_client.credentials != ds_client.credentials # the DS makes a request to create an image and a pool based on the image - custom_dockerfile = """ - FROM openmined/grid-backend:0.8.4-beta.12 + custom_dockerfile = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install recordlinkage """ @@ -71,8 +72,8 @@ def test_create_pool_request_accept(faker: Faker, worker: Worker): assert root_client.credentials != ds_client.credentials # the DO submits the docker config to build an image - custom_dockerfile_str = """ - FROM openmined/grid-backend:0.8.4-beta.12 + custom_dockerfile_str = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install opendp """ diff --git a/packages/syft/tests/syft/worker_pool/worker_test.py b/packages/syft/tests/syft/worker_pool/worker_test.py index acc9f66c0dd..6503e51eb66 100644 --- a/packages/syft/tests/syft/worker_pool/worker_test.py +++ b/packages/syft/tests/syft/worker_pool/worker_test.py @@ -1,4 +1,5 @@ # syft absolute +import syft as sy from syft.custom_worker.config import DockerWorkerConfig from syft.node.worker import Worker from syft.service.response import SyftSuccess @@ -8,8 +9,8 @@ def get_docker_config(): # the DS makes a request to create an image and a pool based on the image - custom_dockerfile = """ - FROM openmined/grid-backend:0.8.4-beta.12 + custom_dockerfile = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install recordlinkage """ return DockerWorkerConfig(dockerfile=custom_dockerfile) diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index 28f220d3c28..65139bdd522 
100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.8.4-beta.22 -dockerTag: 0.8.4-beta.22 +syftVersion: 0.8.4-beta.30 +dockerTag: 0.8.4-beta.30 images: - - docker.io/openmined/grid-frontend:0.8.4-beta.22 - - docker.io/openmined/grid-backend:0.8.4-beta.22 + - docker.io/openmined/grid-frontend:0.8.4-beta.30 + - docker.io/openmined/grid-backend:0.8.4-beta.30 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.10 diff --git a/scripts/build_images.sh b/scripts/build_images.sh index 280ca544620..da58203e08b 100644 --- a/scripts/build_images.sh +++ b/scripts/build_images.sh @@ -1,6 +1,6 @@ #!/bin/bash -REGISTRY=${1:-"k3d-registry.localhost:5000"} +REGISTRY=${1:-"k3d-registry.localhost:5800"} TAG=${2:-"latest"} docker image build -f ./packages/grid/backend/backend.dockerfile --target backend -t $REGISTRY/openmined/grid-backend:$TAG ./packages diff --git a/scripts/bump_version.py b/scripts/bump_version.py new file mode 100644 index 00000000000..f6a443a022e --- /dev/null +++ b/scripts/bump_version.py @@ -0,0 +1,94 @@ +# The script mainly helps in bumping the version of the package +# during the release process. 
+ +# stdlib +# It covers mainly two scenarios: +# Bumping from beta to the stable version +# Bumping from stable to the next beta version +import argparse +import subprocess +import sys + +# Command line processing + + +def bump_to_stable(version: str): + print(f"Bumping to stable version {version}.") + + # Invoke bump2version by subprocess and pass the version + + # bumping beta to stable + subprocess.run( + [ + "bump2version", + "prenum", + "--new-version", + version, + "--no-commit", + "--allow-dirty", + ], + check=True, + ) + # bumping previous stable to current stable + subprocess.run( + [ + "bump2version", + "patch", + "--config-file", + ".bumpversion_stable.cfg", + "--new-version", + version, + "--no-commit", + "--allow-dirty", + ], + check=True, + ) + + +def bump_to_next_beta(version: str): + version = version + "-beta.0" + print(f"Bumping to next beta version {version}.") + + # bumping stable to beta + subprocess.run( + [ + "bump2version", + "prenum", + "--new-version", + version, + "--no-commit", + "--allow-dirty", + ], + check=True, + ) + + +def handle_command_line(): + parser = argparse.ArgumentParser(description="Bump the version of the package.") + parser.add_argument( + "--bump-to-stable", + type=str, + help="Bump the current beta version to a stable version.", + ) + parser.add_argument( + "--bump-to-next-beta", + type=str, + help="Bump the current stable version to the next beta version.", + ) + + if len(sys.argv) < 2: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + if args.bump_to_stable: + bump_to_stable(args.bump_to_stable) + elif args.bump_to_next_beta: + bump_to_next_beta(args.bump_to_next_beta) + else: + parser.print_help() + sys.exit(1) + + +if __name__ == "__main__": + handle_command_line() diff --git a/scripts/hagrid_hash b/scripts/hagrid_hash index 010a1364538..2791d55f62f 100644 --- a/scripts/hagrid_hash +++ b/scripts/hagrid_hash @@ -1 +1 @@ -7a5b3ec4541268b9b6d41ae76a771231 +8062e2de4754fbc983daabd67ca5d8cc diff 
--git a/scripts/k8s-coredns-custom.yml b/scripts/k8s-coredns-custom.yml new file mode 100644 index 00000000000..0c1085ae2ee --- /dev/null +++ b/scripts/k8s-coredns-custom.yml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: coredns-custom + namespace: kube-system +data: + k3d-registry.localhost.server: | + k3d-registry.localhost { + log + rewrite name k3d-registry.localhost host.k3d.internal + forward . 127.0.0.1 + } diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index a4486865cb2..d973dc01d7e 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -25,8 +25,8 @@ def test_image_build(domain_1_port) -> None: ) # Submit Docker Worker Config - docker_config_rl = """ - FROM openmined/grid-backend:0.8.4-beta.12 + docker_config_rl = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install recordlinkage """ docker_config = DockerWorkerConfig(dockerfile=docker_config_rl) @@ -78,8 +78,8 @@ def test_pool_launch(domain_1_port) -> None: assert len(domain_client.worker_pools.get_all()) == 1 # Submit Docker Worker Config - docker_config_opendp = """ - FROM openmined/grid-backend:0.8.4-beta.12 + docker_config_opendp = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install opendp """ docker_config = DockerWorkerConfig(dockerfile=docker_config_opendp) @@ -178,8 +178,8 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None: ds_client = sy.login(email=ds_email, password="secret_pw", port=domain_1_port) # the DS makes a request to create an image and a pool based on the image - docker_config_np = """ - FROM openmined/grid-backend:0.8.4-beta.12 + docker_config_np = f""" + FROM openmined/grid-backend:{sy.__version__} RUN pip install numpy """ docker_config = DockerWorkerConfig(dockerfile=docker_config_np) diff --git a/tox.ini b/tox.ini index cb254f79ffc..0a368808cac 100644 
--- a/tox.ini +++ b/tox.ini @@ -33,6 +33,7 @@ envlist = syftcli.publish syftcli.build backend.test.basecpu + e2e.test.notebook skipsdist = True @@ -669,11 +670,13 @@ allowlist_externals = kubectx k3d echo + tox setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s} NODE_PORT = {env:NODE_PORT:9082} GITHUB_CI = {env:GITHUB_CI:false} PYTEST_MODULES = {env:PYTEST_MODULES:frontend container_workload} + SYFT_BASE_IMAGE_REGISTRY = {env:SYFT_BASE_IMAGE_REGISTRY:k3d-registry.localhost:5800} commands = bash -c "echo Running with GITHUB_CI=$GITHUB_CI; date" python -c 'import syft as sy; sy.stage_protocol_changes()' @@ -692,8 +695,8 @@ commands = bash -c "docker volume rm k3d-testdomain1-images --force || true" - # Creating registory - bash -c 'k3d registry create registry.localhost --port 5000 -v `pwd`/k3d-registry:/var/lib/registry || true' + # Creating registry + bash -c 'k3d registry create registry.localhost --port 5800 -v `pwd`/k3d-registry:/var/lib/registry || true' # Creating testgateway1 cluster on port 9081 bash -c 'NODE_NAME=testgateway1 NODE_PORT=9081 && \ @@ -707,7 +710,7 @@ commands = -p gateway \ --var NODE_NAME=$NODE_NAME \ --var TEST_MODE=1 \ - --var CONTAINER_REGISTRY=k3d-registry.localhost:5000 \ + --var CONTAINER_REGISTRY=k3d-registry.localhost:5800 \ --var NODE_TYPE=gateway \ deploy -b; \ do ((--r))||exit;echo "retrying" && sleep 20;done)' @@ -717,13 +720,16 @@ commands = k3d cluster create $NODE_NAME -p "$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \ k3d cluster start $NODE_NAME' + # Patches CoreDNS + tox -e dev.k8s.patch.coredns + bash -c 'NODE_NAME=testdomain1 NODE_PORT=9082 && \ cd packages/grid && \ (r=5;while ! 
\ devspace --no-warn --kube-context "k3d-$NODE_NAME" --namespace $NODE_NAME \ --var NODE_NAME=$NODE_NAME \ --var TEST_MODE=1 \ - --var CONTAINER_REGISTRY=k3d-registry.localhost:5000 \ + --var CONTAINER_REGISTRY=k3d-registry.localhost:5800 \ deploy -b; \ do ((--r))||exit;echo "retrying" && sleep 20;done)' @@ -846,77 +852,67 @@ commands = [testenv:syft.test.helm] description = Test Helm Chart for Kubernetes -deps = - {[testenv:syft]deps} - {[testenv:hagrid]deps} - nbmake changedir = {toxinidir} -passenv=HOME, USER +passenv=HOME,USER,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD allowlist_externals = - devspace - kubectl grep sleep bash - kubectx - k3d - echo - rm - helm + tox setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s} NODE_PORT = {env:NODE_PORT:8080} + NODE_URL = {env:NODE_URL:http://localhost} + EXCLUDE_NOTEBOOKS = {env:EXCLUDE_NOTEBOOKS:not 10-container-images.ipynb} + SYFT_VERSION = {env:SYFT_VERSION:local} + EXTERNAL_REGISTRY = {env:EXTERNAL_REGISTRY:k3d-registry.localhost:5800} commands = - k3d version + bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE NODE_PORT=$NODE_PORT NODE_URL=$NODE_URL \ + Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \ + EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date" + + bash -c "k3d version" + + # Removing old clusters and volumes and registry ; bash -c "docker rm $(docker ps -aq) --force || true" bash -c "k3d cluster delete syft || true" bash -c "docker volume rm k3d-syft-images --force || true" - bash -c "k3d registry delete k3d-registry.localhost || true" - bash -c 'k3d registry create registry.localhost --port 5000 -v `pwd`/k3d-registry:/var/lib/registry || true' - - bash -c 'NODE_NAME=syft NODE_PORT=8080 && \ + # Creating registry and cluster + bash -c 'k3d registry create registry.localhost --port 5800 -v `pwd`/k3d-registry:/var/lib/registry || true' + bash -c 'NODE_NAME=syft NODE_PORT=${NODE_PORT} && \ k3d cluster create syft -p 
"$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \ k3d cluster start syft' + tox -e dev.k8s.patch.coredns + sleep 10 + bash -c "kubectl --context k3d-syft create namespace syft || true" + + # if syft version is local, then install local helm charts + # else install the helm charts from the openmined gh-pages branch + bash -c 'if [[ $SYFT_VERSION == "local" ]]; then \ + echo "Installing local helm charts"; \ + bash -c "cd packages/grid/helm && helm install --kube-context k3d-syft --namespace syft syft ./syft --set configuration.devmode=true"; \ + else \ + echo "Installing helm charts from repo for syft version: ${SYFT_VERSION}"; \ + bash -c "helm repo add openmined https://openmined.github.io/PySyft/helm && helm repo update openmined"; \ + bash -c "helm install --kube-context k3d-syft --namespace syft syft openmined/syft --version=${SYFT_VERSION} --set configuration.devmode=true"; \ + fi' - ; skopeo list-tags --tls-verify=false docker://k3d-registry.localhost:5000/openmined/grid-backend - ; skopeo inspect --tls-verify=false docker://k3d-registry.localhost:5000/openmined/grid-backend:f1725f - ; helm uninstall --kube-context k3d-syft --namespace syft syft - ; helm install --kube-context k3d-syft --namespace syft syft ./syft - ; k3d cluster create syft -p "8080:80@loadbalancer" && k3d cluster start syft - - sleep 50 - - bash -c 'cd packages/grid && \ - kubectl --context k3d-syft create namespace syft || true; \ - helm install --kube-context k3d-syft --namespace syft syft ./helm/syft --debug' - + ; wait for everything else to be loaded bash packages/grid/scripts/wait_for.sh service frontend --context k3d-syft --namespace syft bash -c '(kubectl logs service/frontend --context k3d-syft --namespace syft -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true' - - ; wait for everything else to be loaded bash packages/grid/scripts/wait_for.sh service mongo --context k3d-syft --namespace syft bash packages/grid/scripts/wait_for.sh 
service backend --context k3d-syft --namespace syft bash packages/grid/scripts/wait_for.sh service proxy --context k3d-syft --namespace syft - bash -c '(kubectl logs service/backend --context k3d-syft --namespace syft -f &) | grep -q "Application startup complete" || true' - ; frontend - bash -c 'if [[ "$PYTEST_MODULES" == *"frontend"* ]]; then \ - echo "Starting frontend"; date; \ - pytest tests/integration -m frontend -p no:randomly -k "test_serves_domain_frontend" --co; \ - pytest tests/integration -m frontend -vvvv -p no:randomly -p no:benchmark -o log_cli=True --capture=no -k "test_serves_domain_frontend"; \ - return=$?; \ - echo "Finished frontend"; date; \ - exit $return; \ - fi' - - ; ignore 06 because of opendp on arm64 - pytest --nbmake notebooks/api/0.8 -p no:randomly -k 'not 10-container-images.ipynb' -vvvv + # Run Notebook tests + tox -e e2e.test.notebook + # Cleanup bash -c "k3d cluster delete syft || true" bash -c "docker volume rm k3d-syft-images --force || true" @@ -938,13 +934,26 @@ allowlist_externals = sudo commands = ; create registry - bash -c 'k3d registry create registry.localhost --port 5000 -v $HOME/.k3d-registry:/var/lib/registry || true' + bash -c 'k3d registry create registry.localhost --port 5800 -v $HOME/.k3d-registry:/var/lib/registry || true' ; add patches to host bash -c 'if ! 
grep -q k3d-registry.localhost /etc/hosts; then sudo {envpython} scripts/patch_hosts.py --add-k3d-registry --fix-docker-hosts; fi' ; Fail this command if registry is not working - bash -c 'URL=http://k3d-registry.localhost:5000/v2/_catalog; curl -X GET $URL' + bash -c 'URL=http://k3d-registry.localhost:5800/v2/_catalog; curl -X GET $URL' + +[testenv:dev.k8s.patch.coredns] +description = Patch CoreDNS to resolve k3d-registry.localhost +changedir = {toxinidir} +passenv=HOME,USER +allowlist_externals = + bash +commands = + ; patch coredns so k3d-registry.localhost works in k3d + bash -c 'kubectl apply -f ./scripts/k8s-coredns-custom.yml' + + ; restarts coredns + bash -c 'kubectl delete pod -n kube-system -l k8s-app=kube-dns' [testenv:dev.k8s.start] description = Start local Kubernetes registry & cluster with k3d @@ -959,9 +968,12 @@ commands = tox -e dev.k8s.registry ; for NodePort to work add the following --> -p "NodePort:NodePort@loadbalancer" - bash -c 'k3d cluster create syft-dev -p "8080:80@loadbalancer" --registry-use k3d-registry.localhost:5000; \ + bash -c 'k3d cluster create syft-dev -p "8080:80@loadbalancer" --registry-use k3d-registry.localhost:5800; \ kubectl create namespace syft || true' + ; patch coredns + tox -e dev.k8s.patch.coredns + ; dump cluster info tox -e dev.k8s.info @@ -974,7 +986,7 @@ allowlist_externals = bash commands = ; deploy syft helm charts - bash -c 'devspace deploy -b --kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5000' + bash -c 'devspace deploy -b --kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5800' [testenv:dev.k8s.hotreload] description = Start development with hot-reload in Kubernetes @@ -985,7 +997,7 @@ allowlist_externals = tox commands = ; deploy syft helm charts with hot-reload - bash -c 'devspace dev --kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5000' + bash -c 'devspace dev 
--kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5800' [testenv:dev.k8s.info] description = Gather info about the localKubernetes cluster @@ -1009,7 +1021,7 @@ allowlist_externals = bash commands = bash -c 'devspace purge --force-purge --kube-context k3d-syft-dev --namespace syft; sleep 3' - bash -c 'devspace cleanup images --kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5000 || true' + bash -c 'devspace cleanup images --kube-context k3d-syft-dev --namespace syft --var CONTAINER_REGISTRY=k3d-registry.localhost:5800 || true' bash -c 'kubectl config use-context k3d-syft-dev' bash -c 'kubectl delete all --all --namespace syft || true' bash -c 'kubectl delete pvc --all --namespace syft || true' @@ -1085,3 +1097,45 @@ commands = bash -c 'for pkg in perl make curl wget; do docker run --rm cpu-worker:custom-cmd apk -e info "$pkg"; done' bash -c 'docker run --rm cpu-worker:custom-cmd bash -c "cd cowsay-3.7.0 && curl https://api.github.com/zen -s | ./cowsay"' bash -c 'docker rmi cpu-worker:custom-cmd' + + +# There are other adjacent notebook tests like +# stack.test.notebook, syft.test.notebook, etc. +# They could be modularized and reused. 
+# The below is the notebook test suite for pointing at external servers +[testenv:e2e.test.notebook] +description = E2E Notebook tests +changedir = {toxinidir} +allowlist_externals = + bash +passenv = EXTERNAL_REGISTRY,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD +setenv = + ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s} + NODE_PORT = {env:NODE_PORT:8080} + NODE_URL = {env:NODE_URL:http://localhost} + EXCLUDE_NOTEBOOKS = {env:EXCLUDE_NOTEBOOKS:} + SYFT_VERSION = {env:SYFT_VERSION:local} +commands = + bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE NODE_PORT=$NODE_PORT NODE_URL=$NODE_URL \ + Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \ + EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date" + + + # Schema for EXCLUDE_NOTEBOOKS is + # for excluding + # notebook1.ipynb, notebook2.ipynb + # EXCLUDE_NOTEBOOKS=not notebook1.ipynb and not notebook2.ipynb + bash -c "pip install pytest pytest-randomly nbmake" + + # If the syft version is local install the local version + # else install the version of syft specified + bash -c " if [[ $SYFT_VERSION == 'local' ]]; then \ + echo 'Building local syft'; \ + pip install packages/syft[data_science]; \ + else \ + echo 'Installing syft version: ${SYFT_VERSION}'; \ + pip install syft[data_science]==${SYFT_VERSION}; \ + fi" + + + pytest notebooks/api/0.8 --nbmake -p no:randomly -vvvv --nbmake-timeout=1000 -k '{env:EXCLUDE_NOTEBOOKS:}' \ No newline at end of file