diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index cd48a82318d..2ac552c2537 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.8.7-beta.13
+current_version = 0.8.8-beta.1
 tag = False
 tag_name = {new_version}
 commit = True
diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg
index 52b011ac7b1..4ed947e8dd6 100644
--- a/.bumpversion_stable.cfg
+++ b/.bumpversion_stable.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.8.6
+current_version = 0.8.7
 tag = False
 tag_name = {new_version}
 commit = True
diff --git a/.github/workflows/cd-feature-branch.yml b/.github/workflows/cd-feature-branch.yml
new file mode 100644
index 00000000000..0b67328c5c7
--- /dev/null
+++ b/.github/workflows/cd-feature-branch.yml
@@ -0,0 +1,378 @@
+# TODO: Due to lack of time, this could not be de-duplicated
+# from cd-syft.yml, which has a similar structure
+name: CD - Feature Branch
+
+on:
+  workflow_dispatch:
+    inputs:
+      release_version:
+        description: "Syft Version to Release"
+        required: true
+        type: string
+
+      release_branch:
+        description: "Branch to Release from"
+        required: true
+        type: string
+
+      release_id:
+        description: "A Unique Identifier for the Release, which would be appended as version+release_id. Can contain only [A-Z][a-z][0-9][.]"
+        required: true
+        type: string
+
+      release_platform:
+        description: "Release Platform"
+        required: true
+        default: "TEST_PYPI"
+        type: choice
+        options:
+          - TEST_PYPI
+          # - REAL_PYPI
+          # - REAL_AND_TEST_PYPI
+
+# Prevents concurrent runs of the same workflow
+# while the previous run is still in progress
+concurrency:
+  group: "CD - Feature Branch"
+  cancel-in-progress: false
+
+jobs:
+  build-and-push-docker-images:
+    strategy:
+      matrix:
+        runner: [sh-arc-linux-x64, sh-arc-linux-arm64]
+    runs-on: ${{ matrix.runner }}
+
+    outputs:
+      server_version: ${{ steps.release_metadata.outputs.server_version }}
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.inputs.release_branch }}
+
+      # actions/setup-python doesn't yet support ARM
+      - name: Setup Python on x64
+        if: ${{ !endsWith(matrix.runner, '-arm64') }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      # Currently the psutil package requires gcc to be installed on arm
+      # for building psutil from source,
+      # as linux/aarch64 wheels are not available for psutil.
+      # We could remove this once we have aarch64 wheels.
+      # https://github.com/giampaolo/psutil/issues/1972
+      - name: Install Metadata packages for arm64
+        if: ${{ endsWith(matrix.runner, '-arm64') }}
+        run: |
+          sudo apt update -y
+          sudo apt install software-properties-common -y
+          sudo apt install gcc curl -y
+          sudo apt-get install python3-dev -y
+
+      - name: Setup Python on arm64
+        if: ${{ endsWith(matrix.runner, '-arm64') }}
+        uses: deadsnakes/action@v3.1.0
+        with:
+          python-version: "3.12"
+
+      - name: Install Git
+        run: |
+          sudo apt-get update
+          sudo apt-get install git -y
+
+      - name: Check python version
+        run: |
+          python --version
+          python3 --version
+          which python
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install uv==0.2.17 tox tox-uv==1.9.0 bump2version==1.0.1
+          uv --version
+
+      - name: Generate Release Metadata
+        id: release_metadata
+        run: |
+          if [[ ${{matrix.runner}} == *"x64"* ]]; then
+            echo "release_platform=linux/amd64" >> $GITHUB_OUTPUT
+            echo "short_release_platform=amd64" >> $GITHUB_OUTPUT
+          else
+            echo "release_platform=linux/arm64" >> $GITHUB_OUTPUT
+            echo "short_release_platform=arm64" >> $GITHUB_OUTPUT
+          fi
+          echo "server_version=${{ github.event.inputs.release_version }}+${{ github.event.inputs.release_id }}.$(git rev-parse --short ${{ github.sha }})" >> $GITHUB_OUTPUT
+
+      - name: Bump to Final Release version
+        run: |
+          python scripts/bump_version.py --bump-to-stable ${{ steps.release_metadata.outputs.server_version }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to Docker
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_LOGIN }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Build and push `syft-backend` image to DockerHub
+        id: syft-backend-build
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages
+          file: ./packages/grid/backend/backend.dockerfile
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          target: backend
+          outputs: type=image,name=openmined/syft-backend,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+
+      - name: Export digest for syft-backend
+        run: |
+          mkdir -p /tmp/digests/syft-backend
+          digest="${{ steps.syft-backend-build.outputs.digest }}"
+          touch "/tmp/digests/syft-backend/${digest#sha256:}"
+
+      - name: Build and push `syft-frontend` image to DockerHub
+        id: syft-frontend-build
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages/grid/frontend
+          file: ./packages/grid/frontend/frontend.dockerfile
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          outputs: type=image,name=openmined/syft-frontend,push-by-digest=true,name-canonical=true,push=true
+          target: syft-ui-development
+          cache-from: type=registry,ref=openmined/syft-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+
+      - name: Export digest for syft-frontend
+        run: |
+          mkdir -p /tmp/digests/syft-frontend
+          digest="${{ steps.syft-frontend-build.outputs.digest }}"
+          touch "/tmp/digests/syft-frontend/${digest#sha256:}"
+
+      - name: Build and push `syft-seaweedfs` image to DockerHub
+        id: syft-seaweedfs-build
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages/grid/seaweedfs
+          file: ./packages/grid/seaweedfs/seaweedfs.dockerfile
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          outputs: type=image,name=openmined/syft-seaweedfs,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+
+      - name: Export digest for syft-seaweedfs
+        run: |
+          mkdir -p /tmp/digests/syft-seaweedfs
+          digest="${{ steps.syft-seaweedfs-build.outputs.digest }}"
+          touch "/tmp/digests/syft-seaweedfs/${digest#sha256:}"
+
+      # Some of the dependencies of syft-enclave-attestation are not available for arm64
+      # Hence, we are building syft-enclave-attestation only for x64 (see the `if` conditional)
+      - name: Build and push `syft-enclave-attestation` image to DockerHub
+        if: ${{ endsWith(matrix.runner, '-x64') }}
+        id: syft-enclave-attestation-build
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages/grid/enclave/attestation
+          file: ./packages/grid/enclave/attestation/attestation.dockerfile
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          outputs: type=image,name=openmined/syft-enclave-attestation,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+
+      - name: Export digest for syft-enclave-attestation
+        if: ${{ endsWith(matrix.runner, '-x64') }}
+        run: |
+          mkdir -p /tmp/digests/syft-enclave-attestation
+          digest="${{ steps.syft-enclave-attestation-build.outputs.digest }}"
+          touch "/tmp/digests/syft-enclave-attestation/${digest#sha256:}"
+
+      - name: Build and push `syft` image to registry
+        id: syft-build
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages/
+          file: ./packages/grid/syft-client/syft.Dockerfile
+          outputs: type=image,name=openmined/syft-client,push-by-digest=true,name-canonical=true,push=true
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          cache-from: type=registry,ref=openmined/syft-client:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-client:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+
+      - name: Export digest for `syft` image
+        run: |
+          mkdir -p /tmp/digests/syft
+          digest="${{ steps.syft-build.outputs.digest }}"
+          touch "/tmp/digests/syft/${digest#sha256:}"
+
+      - name: Upload digests
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ steps.release_metadata.outputs.server_version }}-${{ steps.release_metadata.outputs.short_release_platform }}
+          path: /tmp/digests/*
+          if-no-files-found: error
+          retention-days: 1
+
+  # Used to merge x64 and arm64 into one docker image
+  merge-docker-images:
+    needs: [build-and-push-docker-images]
+    if: always() && (needs.build-and-push-docker-images.result == 'success')
+
+    runs-on: sh-arc-linux-x64
+
+    outputs:
+      server_version: ${{ needs.build-and-push-docker-images.outputs.server_version }}
+
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@v4
+        with:
+          path: /tmp/digests
+          pattern: digests-${{ needs.build-and-push-docker-images.outputs.server_version }}-*
+          merge-multiple: true
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to Docker
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_LOGIN }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Create manifest list and push for syft-backend
+        working-directory: /tmp/digests/syft-backend
+        run: |
+          docker buildx imagetools create \
+            -t openmined/syft-backend:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            $(printf 'openmined/syft-backend@sha256:%s ' *)
+
+      - name: Create manifest list and push for syft-frontend
+        working-directory: /tmp/digests/syft-frontend
+        run: |
+          docker buildx imagetools create \
+            -t openmined/syft-frontend:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            $(printf 'openmined/syft-frontend@sha256:%s ' *)
+
+      - name: Create manifest list and push for syft-seaweedfs
+        working-directory: /tmp/digests/syft-seaweedfs
+        run: |
+          docker buildx imagetools create \
+            -t openmined/syft-seaweedfs:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            $(printf 'openmined/syft-seaweedfs@sha256:%s ' *)
+
+      - name: Create manifest list and push for syft-enclave-attestation
+        working-directory: /tmp/digests/syft-enclave-attestation
+        run: |
+          docker buildx imagetools create \
+            -t openmined/syft-enclave-attestation:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            $(printf 'openmined/syft-enclave-attestation@sha256:%s ' *)
+
+      - name: Create manifest list and push for syft client
+        working-directory: /tmp/digests/syft
+        run: |
+          docker buildx imagetools create \
+            -t openmined/syft-client:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            $(printf 'openmined/syft-client@sha256:%s ' *)
+
+  deploy-syft:
+    needs: [merge-docker-images]
+    if: always() && needs.merge-docker-images.result == 'success'
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Permission to home directory
+        run: |
+          sudo chown -R $USER:$USER $HOME
+
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.SYFT_BOT_COMMIT_TOKEN }}
+          ref: ${{ github.event.inputs.release_branch }}
+
+      # free 10GB of space
+      - name: Remove unnecessary files
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          docker image prune --all --force
+          docker builder prune --all --force
+          docker system prune --all --force
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install uv==0.2.17 tox tox-uv==1.9.0 setuptools wheel twine bump2version PyYAML
+          uv --version
+
+      - name: Bump to Final Release version
+        run: |
+          python scripts/bump_version.py --bump-to-stable ${{ needs.merge-docker-images.outputs.server_version }}
+
+      - name: Build Helm Chart
+        shell: bash
+        run: |
+          # install k3d
+          K3D_VERSION=v5.6.3
+          wget https://github.com/k3d-io/k3d/releases/download/${K3D_VERSION}/k3d-linux-amd64
+          mv k3d-linux-amd64 k3d
+          chmod +x k3d
+          export PATH=`pwd`:$PATH
+          k3d version
+
+          # install devspace
+          DEVSPACE_VERSION=v6.3.12
+          curl -sSL https://github.com/loft-sh/devspace/releases/download/${DEVSPACE_VERSION}/devspace-linux-amd64 -o ./devspace
+          chmod +x devspace
+          devspace version
+
+          # install helm
+          curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
+          helm version
+
+          tox -e syft.build.helm
+          tox -e syft.package.helm
+
+      - name: Linting
+        run: |
+          tox -e lint || true
+
+      - name: Manual Build and Publish
+        run: |
+          tox -e syft.publish
+          if [[ "${{ github.event.inputs.release_platform }}" == "TEST_PYPI" ]]; then
+            twine upload -r testpypi -u __token__ -p ${{ secrets.OM_SYFT_TEST_PYPI_TOKEN }} packages/syft/dist/*
+          fi
+
+      # Checkout to gh-pages and update helm repo
+      - name: Checkout to gh-pages
+        uses: actions/checkout@v4
+        with:
+          ref: gh-pages
+          token: ${{ secrets.SYFT_BOT_COMMIT_TOKEN }}
+          path: ghpages
+
+      - name: Copy helm repo files from Syft Repo
+        run: |
+          rm -rf ghpages/helm/*
+          cp -R packages/grid/helm/repo/. ghpages/helm/
+
+      - name: Commit changes to gh-pages
+        uses: EndBug/add-and-commit@v9
+        with:
+          author_name: ${{ secrets.OM_BOT_NAME }}
+          author_email: ${{ secrets.OM_BOT_EMAIL }}
+          message: "Update Helm package from Syft Repo"
+          add: "helm/"
+          push: "origin gh-pages"
+          cwd: "./ghpages/"
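(Aside on the workflow above: its `Generate Release Metadata` step composes the feature build's version as `release_version` + `release_id` + short commit SHA, i.e. a PEP 440 local version. A minimal Python sketch of that composition — the helper name and the values are illustrative, not part of this diff:)

```python
import re

# Hypothetical helper mirroring the workflow's `Generate Release Metadata` step
def make_server_version(release_version: str, release_id: str, short_sha: str) -> str:
    # The workflow input docs say release_id may contain only [A-Z][a-z][0-9][.]
    if not re.fullmatch(r"[A-Za-z0-9.]+", release_id):
        raise ValueError("release_id may contain only [A-Z][a-z][0-9][.]")
    # PEP 440 local version: <public version>+<local identifier>
    return f"{release_version}+{release_id}.{short_sha}"

# Illustrative values, not taken from a real run
print(make_server_version("0.8.8", "myfeature", "abc1234"))  # 0.8.8+myfeature.abc1234
```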
diff --git a/.github/workflows/cd-syft-dev.yml b/.github/workflows/cd-syft-dev.yml
index b28510a2906..0b9ce3fe27d 100644
--- a/.github/workflows/cd-syft-dev.yml
+++ b/.github/workflows/cd-syft-dev.yml
@@ -77,11 +77,11 @@ jobs:
           username: ${{ secrets.ACR_USERNAME }}
           password: ${{ secrets.ACR_PASSWORD }}

-      - name: Set Grid package version
-        id: grid
+      - name: Set Server package version
+        id: server
         shell: bash
         run: |
-          echo "GRID_VERSION=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT
+          echo "SERVER_VERSION=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT

       - name: Build and push `syft` image to registry
         uses: docker/build-push-action@v6
         with:
@@ -92,9 +92,9 @@
           tags: |
             ${{ secrets.ACR_SERVER }}/openmined/syft-client:dev
             ${{ secrets.ACR_SERVER }}/openmined/syft-client:dev-${{ github.sha }}
-            ${{ secrets.ACR_SERVER }}/openmined/syft-client:${{ steps.grid.outputs.GRID_VERSION }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-client:${{ steps.server.outputs.SERVER_VERSION }}

-      - name: Build and push `grid-backend` image to registry
+      - name: Build and push `syft-backend` image to registry
         uses: docker/build-push-action@v6
         with:
           context: ./packages
@@ -102,43 +102,43 @@
           push: true
           target: backend
           tags: |
-            ${{ secrets.ACR_SERVER }}/openmined/grid-backend:dev
-            ${{ secrets.ACR_SERVER }}/openmined/grid-backend:dev-${{ github.sha }}
-            ${{ secrets.ACR_SERVER }}/openmined/grid-backend:${{ steps.grid.outputs.GRID_VERSION }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-backend:dev
+            ${{ secrets.ACR_SERVER }}/openmined/syft-backend:dev-${{ github.sha }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-backend:${{ steps.server.outputs.SERVER_VERSION }}

-      - name: Build and push `grid-frontend` image to registry
+      - name: Build and push `syft-frontend` image to registry
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/frontend
           file: ./packages/grid/frontend/frontend.dockerfile
           push: true
           tags: |
-            ${{ secrets.ACR_SERVER }}/openmined/grid-frontend:dev
-            ${{ secrets.ACR_SERVER }}/openmined/grid-frontend:dev-${{ github.sha }}
-            ${{ secrets.ACR_SERVER }}/openmined/grid-frontend:${{ steps.grid.outputs.GRID_VERSION }}
-          target: grid-ui-development
+            ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:dev
+            ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:dev-${{ github.sha }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:${{ steps.server.outputs.SERVER_VERSION }}
+          target: syft-ui-development

-      - name: Build and push `grid-seaweedfs` image to registry
+      - name: Build and push `syft-seaweedfs` image to registry
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/seaweedfs
           file: ./packages/grid/seaweedfs/seaweedfs.dockerfile
           push: true
           tags: |
-            ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:dev
-            ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:dev-${{ github.sha }}
-            ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:${{ steps.grid.outputs.GRID_VERSION }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:dev
+            ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:dev-${{ github.sha }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:${{ steps.server.outputs.SERVER_VERSION }}

-      - name: Build and push `grid-enclave-attestation` image to registry
+      - name: Build and push `syft-enclave-attestation` image to registry
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/enclave/attestation
           file: ./packages/grid/enclave/attestation/attestation.dockerfile
           push: true
           tags: |
-            ${{ secrets.ACR_SERVER }}/openmined/grid-enclave-attestation:dev
-            ${{ secrets.ACR_SERVER }}/openmined/grid-enclave-attestation:dev-${{ github.sha }}
-            ${{ secrets.ACR_SERVER }}/openmined/grid-enclave-attestation:${{ steps.grid.outputs.GRID_VERSION }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:dev
+            ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:dev-${{ github.sha }}
+            ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:${{ steps.server.outputs.SERVER_VERSION }}

       - name: Build Helm Chart & Copy to infra
         if: github.ref == 'refs/heads/dev' || github.event.inputs.deploy-helm == 'true'
diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml
index d0610ac7956..f9b0cd12295 100644
--- a/.github/workflows/cd-syft.yml
+++ b/.github/workflows/cd-syft.yml
@@ -90,7 +90,7 @@ jobs:
     outputs:
       release_tag: ${{ steps.get_release_tag.outputs.release_tag }}
-      grid_version: ${{ steps.release_metadata.outputs.grid_version }}
+      server_version: ${{ steps.release_metadata.outputs.server_version }}

     steps:
       - uses: actions/checkout@v4
@@ -166,7 +166,7 @@
             echo "release_platform=linux/arm64" >> $GITHUB_OUTPUT
             echo "short_release_platform=arm64" >> $GITHUB_OUTPUT
           fi
-          echo "grid_version=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT
+          echo "server_version=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT

       # TODO: Optimize redundant bump protocol version checks
       - name: Check and Bump Protocol Version
@@ -185,79 +185,79 @@
           username: ${{ secrets.DOCKER_LOGIN }}
           password: ${{ secrets.DOCKER_PASSWORD }}

-      - name: Build and push `grid-backend` image to DockerHub
-        id: grid-backend-build
+      - name: Build and push `syft-backend` image to DockerHub
+        id: syft-backend-build
         uses: docker/build-push-action@v6
         with:
           context: ./packages
           file: ./packages/grid/backend/backend.dockerfile
           platforms: ${{ steps.release_metadata.outputs.release_platform }}
           target: backend
-          outputs: type=image,name=openmined/grid-backend,push-by-digest=true,name-canonical=true,push=true
-          cache-from: type=registry,ref=openmined/grid-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
-          cache-to: type=registry,ref=openmined/grid-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max
+          outputs: type=image,name=openmined/syft-backend,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-backend:cache-${{ steps.release_metadata.outputs.short_release_platform }},mode=max

-      - name: Export digest for grid-backend
+      - name: Export digest for syft-backend
         run: |
-          mkdir -p /tmp/digests/grid-backend
-          digest="${{ steps.grid-backend-build.outputs.digest }}"
-          touch "/tmp/digests/grid-backend/${digest#sha256:}"
+          mkdir -p /tmp/digests/syft-backend
+          digest="${{ steps.syft-backend-build.outputs.digest }}"
+          touch "/tmp/digests/syft-backend/${digest#sha256:}"

-      - name: Build and push `grid-frontend` image to DockerHub
-        id: grid-frontend-build
+      - name: Build and push `syft-frontend` image to DockerHub
+        id: syft-frontend-build
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/frontend
           file: ./packages/grid/frontend/frontend.dockerfile
           platforms: ${{ steps.release_metadata.outputs.release_platform }}
-          outputs: type=image,name=openmined/grid-frontend,push-by-digest=true,name-canonical=true,push=true
-          target: grid-ui-development
-          cache-from: type=registry,ref=openmined/grid-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
-          cache-to: type=registry,ref=openmined/grid-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max
+          outputs: type=image,name=openmined/syft-frontend,push-by-digest=true,name-canonical=true,push=true
+          target: syft-ui-development
+          cache-from: type=registry,ref=openmined/syft-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-frontend:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max

-      - name: Export digest for grid-frontend
+      - name: Export digest for syft-frontend
         run: |
-          mkdir -p /tmp/digests/grid-frontend
-          digest="${{ steps.grid-frontend-build.outputs.digest }}"
-          touch "/tmp/digests/grid-frontend/${digest#sha256:}"
+          mkdir -p /tmp/digests/syft-frontend
+          digest="${{ steps.syft-frontend-build.outputs.digest }}"
+          touch "/tmp/digests/syft-frontend/${digest#sha256:}"

-      - name: Build and push `grid-seaweedfs` image to DockerHub
-        id: grid-seaweedfs-build
+      - name: Build and push `syft-seaweedfs` image to DockerHub
+        id: syft-seaweedfs-build
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/seaweedfs
           file: ./packages/grid/seaweedfs/seaweedfs.dockerfile
           platforms: ${{ steps.release_metadata.outputs.release_platform }}
-          outputs: type=image,name=openmined/grid-seaweedfs,push-by-digest=true,name-canonical=true,push=true
-          cache-from: type=registry,ref=openmined/grid-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform }}
-          cache-to: type=registry,ref=openmined/grid-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max
+          outputs: type=image,name=openmined/syft-seaweedfs,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-seaweedfs:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max

-      - name: Export digest for grid-seaweedfs
+      - name: Export digest for syft-seaweedfs
         run: |
-          mkdir -p /tmp/digests/grid-seaweedfs
-          digest="${{ steps.grid-seaweedfs-build.outputs.digest }}"
-          touch "/tmp/digests/grid-seaweedfs/${digest#sha256:}"
+          mkdir -p /tmp/digests/syft-seaweedfs
+          digest="${{ steps.syft-seaweedfs-build.outputs.digest }}"
+          touch "/tmp/digests/syft-seaweedfs/${digest#sha256:}"

-      # Some of the dependencies of grid-enclave-attestation are not available for arm64
-      # Hence, we are building grid-enclave-attestation only for x64 (see the `if` conditional)
-      - name: Build and push `grid-enclave-attestation` image to DockerHub
+      # Some of the dependencies of syft-enclave-attestation are not available for arm64
+      # Hence, we are building syft-enclave-attestation only for x64 (see the `if` conditional)
+      - name: Build and push `syft-enclave-attestation` image to DockerHub
         if: ${{ endsWith(matrix.runner, '-x64') }}
-        id: grid-enclave-attestation-build
+        id: syft-enclave-attestation-build
         uses: docker/build-push-action@v6
         with:
           context: ./packages/grid/enclave/attestation
           file: ./packages/grid/enclave/attestation/attestation.dockerfile
           platforms: ${{ steps.release_metadata.outputs.release_platform }}
-          outputs: type=image,name=openmined/grid-enclave-attestation,push-by-digest=true,name-canonical=true,push=true
-          cache-from: type=registry,ref=openmined/grid-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform }}
-          cache-to: type=registry,ref=openmined/grid-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max
+          outputs: type=image,name=openmined/syft-enclave-attestation,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/syft-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/syft-enclave-attestation:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max

-      - name: Export digest for grid-enclave-attestation
+      - name: Export digest for syft-enclave-attestation
         if: ${{ endsWith(matrix.runner, '-x64') }}
         run: |
-          mkdir -p /tmp/digests/grid-enclave-attestation
-          digest="${{ steps.grid-enclave-attestation-build.outputs.digest }}"
-          touch "/tmp/digests/grid-enclave-attestation/${digest#sha256:}"
+          mkdir -p /tmp/digests/syft-enclave-attestation
+          digest="${{ steps.syft-enclave-attestation-build.outputs.digest }}"
+          touch "/tmp/digests/syft-enclave-attestation/${digest#sha256:}"

       - name: Build and push `syft` image to registry
         id: syft-build
@@ -279,7 +279,7 @@
       - name: Upload digests
         uses: actions/upload-artifact@v4
         with:
-          name: digests-${{ steps.release_metadata.outputs.grid_version }}-${{ steps.release_metadata.outputs.short_release_platform }}
+          name: digests-${{ steps.release_metadata.outputs.server_version }}-${{ steps.release_metadata.outputs.short_release_platform }}
           path: /tmp/digests/*
           if-no-files-found: error
           retention-days: 1
@@ -299,7 +299,7 @@
         uses: actions/download-artifact@v4
         with:
           path: /tmp/digests
-          pattern: digests-${{ needs.build-and-push-docker-images.outputs.grid_version }}-*
+          pattern: digests-${{ needs.build-and-push-docker-images.outputs.server_version }}-*
           merge-multiple: true

       - name: Set up Docker Buildx
@@ -311,43 +311,43 @@
           username: ${{ secrets.DOCKER_LOGIN }}
           password: ${{ secrets.DOCKER_PASSWORD }}

-      - name: Create manifest list and push for grid-backend
-        working-directory: /tmp/digests/grid-backend
+      - name: Create manifest list and push for syft-backend
+        working-directory: /tmp/digests/syft-backend
         run: |
           docker buildx imagetools create \
-            -t openmined/grid-backend:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
-            -t openmined/grid-backend:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
-            $(printf 'openmined/grid-backend@sha256:%s ' *)
+            -t openmined/syft-backend:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            -t openmined/syft-backend:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
+            $(printf 'openmined/syft-backend@sha256:%s ' *)

-      - name: Create manifest list and push for grid-frontend
-        working-directory: /tmp/digests/grid-frontend
+      - name: Create manifest list and push for syft-frontend
+        working-directory: /tmp/digests/syft-frontend
         run: |
           docker buildx imagetools create \
-            -t openmined/grid-frontend:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
-            -t openmined/grid-frontend:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
-            $(printf 'openmined/grid-frontend@sha256:%s ' *)
+            -t openmined/syft-frontend:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            -t openmined/syft-frontend:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
+            $(printf 'openmined/syft-frontend@sha256:%s ' *)

-      - name: Create manifest list and push for grid-seaweedfs
-        working-directory: /tmp/digests/grid-seaweedfs
+      - name: Create manifest list and push for syft-seaweedfs
+        working-directory: /tmp/digests/syft-seaweedfs
         run: |
           docker buildx imagetools create \
-            -t openmined/grid-seaweedfs:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
-            -t openmined/grid-seaweedfs:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
-            $(printf 'openmined/grid-seaweedfs@sha256:%s ' *)
+            -t openmined/syft-seaweedfs:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            -t openmined/syft-seaweedfs:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
+            $(printf 'openmined/syft-seaweedfs@sha256:%s ' *)

-      - name: Create manifest list and push for grid-enclave-attestation
-        working-directory: /tmp/digests/grid-enclave-attestation
+      - name: Create manifest list and push for syft-enclave-attestation
+        working-directory: /tmp/digests/syft-enclave-attestation
         run: |
           docker buildx imagetools create \
-            -t openmined/grid-enclave-attestation:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
-            -t openmined/grid-enclave-attestation:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
-            $(printf 'openmined/grid-enclave-attestation@sha256:%s ' *)
+            -t openmined/syft-enclave-attestation:${{ needs.build-and-push-docker-images.outputs.server_version }} \
+            -t openmined/syft-enclave-attestation:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
+            $(printf 'openmined/syft-enclave-attestation@sha256:%s ' *)

       - name: Create manifest list and push for syft
         working-directory: /tmp/digests/syft
         run: |
           docker buildx imagetools create \
-            -t openmined/syft-client:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
+            -t openmined/syft-client:${{ needs.build-and-push-docker-images.outputs.server_version }} \
             -t openmined/syft-client:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
             $(printf 'openmined/syft-client@sha256:%s ' *)
diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml
index deef4da0680..2b9f466933c 100644
--- a/.github/workflows/e2e-tests-notebook.yml
+++ b/.github/workflows/e2e-tests-notebook.yml
@@ -7,12 +7,12 @@ on:
         description: "Syft version to test"
         required: true
         type: string
-      node_url:
-        description: "Node URL to use"
+      server_url:
+        description: "Server URL to use"
         required: true
         type: string
-      node_port:
-        description: "Node port"
+      server_port:
+        description: "Server port"
         required: true
         type: number
       exclude_notebooks:
@@ -26,12 +26,12 @@
         description: "Syft version to test"
         required: true
         type: string
-      node_url:
-        description: "Node URL to use"
+      server_url:
+        description: "Server URL to use"
         required: true
         type: string
-      node_port:
-        description: "Node port"
+      server_port:
+        description: "Server port"
         required: true
         type: number
       exclude_notebooks:
@@ -84,8 +84,8 @@
       - name: Run Notebook tests
         env:
           SYFT_VERSION: ${{ inputs.syft_version }}
-          NODE_URL: ${{ inputs.node_url }}
-          NODE_PORT: ${{ inputs.node_port }}
+          SERVER_URL: ${{ inputs.server_url }}
+          SERVER_PORT: ${{ inputs.server_port }}
           EXCLUDE_NOTEBOOKS: ${{ inputs.exclude_notebooks }}
         run: |
           tox -e e2e.test.notebook
diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml
index d94230d155e..d3d42279e85 100644
--- a/.github/workflows/pr-tests-stack.yml
+++ b/.github/workflows/pr-tests-stack.yml
@@ -60,7 +60,9 @@ jobs:
         if: steps.changes.outputs.stack == 'true'
         timeout-minutes: 60
         run: |
-          tox -e backend.test.basecpu
+          echo "Skipping pr image test"
+          # run: |
+          #   tox -e backend.test.basecpu

   pr-tests-syft-integration:
     strategy:
@@ -68,7 +70,7 @@
       max-parallel: 99
       matrix:
         os: [ubuntu-latest]
         python-version: ["3.12"]
-        pytest-modules: ["local_node"]
+        pytest-modules: ["local_server"]
         fail-fast: false

     runs-on: ${{matrix.os}}
@@ -252,9 +254,9 @@
       run: |
         mkdir -p ./k8s-logs
         kubectl describe all -A --context k3d-test-gateway-1 --namespace syft > ./k8s-logs/test-gateway-1-desc-${{ steps.date.outputs.date }}.txt
-        kubectl describe all -A --context k3d-test-domain-1 --namespace syft > ./k8s-logs/test-domain-1-desc-${{ steps.date.outputs.date }}.txt
+        kubectl describe all -A --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-desc-${{ steps.date.outputs.date }}.txt
         kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-gateway-1 --namespace syft > ./k8s-logs/test-gateway-1-logs-${{ steps.date.outputs.date }}.txt
-        kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-domain-1 --namespace syft > ./k8s-logs/test-domain-1-logs-${{ steps.date.outputs.date }}.txt
+        kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-logs-${{ steps.date.outputs.date }}.txt
         ls -la ./k8s-logs

     - name: Upload logs to GitHub
@@ -270,7 +272,7 @@
       run: |
         export PATH=`pwd`:$PATH
         k3d cluster delete test-gateway-1 || true
-        k3d cluster delete test-domain-1 || true
+        k3d cluster delete test-datasite-1 || true
         k3d registry delete k3d-registry.localhost || true

   pr-tests-notebook-k8s:
@@ -395,9 +397,9 @@
       run: |
         mkdir -p ./k8s-logs
         kubectl describe all -A --context k3d-test-gateway-1 --namespace syft > ./k8s-logs/test-gateway-1-desc-${{ steps.date.outputs.date }}.txt
-        kubectl describe all -A --context k3d-test-domain-1 --namespace syft > ./k8s-logs/test-domain-1-desc-${{ steps.date.outputs.date }}.txt
+        kubectl describe all -A --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-desc-${{ steps.date.outputs.date }}.txt
         kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-gateway-1 --namespace syft > ./k8s-logs/test-gateway-1-logs-${{ steps.date.outputs.date }}.txt
-        kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-domain-1 --namespace syft > ./k8s-logs/test-domain-1-logs-${{ steps.date.outputs.date }}.txt
+        kubectl logs -l app.kubernetes.io/name!=random --prefix=true --context k3d-test-datasite-1 --namespace syft > ./k8s-logs/test-datasite-1-logs-${{ steps.date.outputs.date }}.txt
         ls -la ./k8s-logs

     - name: Upload logs to GitHub
@@ -413,10 +415,11 @@
       run: |
         export PATH=`pwd`:$PATH
         k3d cluster delete test-gateway-1 || true
-        k3d cluster delete test-domain-1 || true
+        k3d cluster delete test-datasite-1 || true
         k3d registry delete k3d-registry.localhost || true

   pr-tests-migrations:
+    if: false
     strategy:
       max-parallel: 99
       matrix:
diff --git a/.isort.cfg b/.isort.cfg
index 5c9360d3549..aeb09bb8f36 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -2,18 +2,18 @@
 profile=black
 force_single_line=True
 known_syft=syft
-known_grid=grid
+known_server=grid
 known_syftcli=syftcli
 known_first_party=src
 remove_redundant_aliases=True
-sections=FUTURE,STDLIB,THIRDPARTY,SYFT,GRID,SYFTCLI,FIRSTPARTY,LOCALFOLDER
+sections=FUTURE,STDLIB,THIRDPARTY,SYFT,SERVER,SYFTCLI,FIRSTPARTY,LOCALFOLDER
 lines_between_types=0
 force_sort_within_sections=True
 import_heading_future=future
 import_heading_stdlib=stdlib
 import_heading_thirdparty=third party
 import_heading_syft=syft absolute
-import_heading_grid=grid absolute
+import_heading_server=server absolute
 import_heading_syftcli=syftcli absolute
 import_heading_firstparty=first party
 import_heading_localfolder=relative
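(Context for the `.isort.cfg` hunk above: each `import_heading_*` value becomes a comment that isort writes above the matching import section, so the rename swaps the generated `# grid absolute` heading for `# server absolute` on imports matched by `known_server=grid`. A minimal sketch of the resulting layout; the imported names are illustrative, not from the repo:)

```python
# future
from __future__ import annotations

# stdlib
import os

# third party
import requests

# syft absolute
import syft

# server absolute
from grid import core  # illustrative; any import matched by known_server=grid

# relative
from .utils import helper
```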
diff --git a/README.md b/README.md
index 8c602a27f0f..b89130201c7 100644
--- a/README.md
+++ b/README.md
@@ -23,9 +23,9 @@ $ pip install -U syft[data_science]
 ```python
 # from Jupyter / Python
 import syft as sy
-sy.requires(">=0.8.6,<0.8.7")
-node = sy.orchestra.launch(
-    name="my-domain",
+sy.requires(">=0.8.7,<0.8.8")
+server = sy.orchestra.launch(
+    name="my-datasite",
     port=8080,
     create_producer=True,
     n_consumers=1,
@@ -36,9 +36,9 @@
 ```bash
 # or from the command line
-$ syft launch --name=my-domain --port=8080 --reset=True
+$ syft launch --name=my-datasite --port=8080 --reset=True

-Starting syft-node server on 0.0.0.0:8080
+Starting syft-datasite server on 0.0.0.0:8080
 ```

 ## Launch Client

 ```python
 import syft as sy
-sy.requires(">=0.8.6,<0.8.7")
-domain_client = sy.login(
+sy.requires(">=0.8.7,<0.8.8")
+datasite_client = sy.login(
     port=8080,
     email="info@openmined.org",
     password="changethis"
@@ -64,7 +64,7 @@ domain_client = sy.login(
 - 04-pytorch-example.ipynb
 - 05-custom-policy.ipynb
 - 06-multiple-code-requests.ipynb
-- 07-domain-register-control-flow.ipynb
+- 07-datasite-register-control-flow.ipynb
 - 08-code-version.ipynb
 - 09-blob-storage.ipynb
 - 10-container-images.ipynb
@@ -104,7 +104,7 @@ SYFT_VERSION=""

 #### 4. Provisioning Helm Charts

 ```sh
-helm install my-domain openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className="traefik"
+helm install my-datasite openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className="traefik"
 ```

 ### Ingress Controllers
@@ -141,16 +141,17 @@ helm install ... --set ingress.class="gce"

 # Install Notes

 - PySyft 0.8.6 Requires: 🐍 `python 3.10 - 3.12` - Run: `pip install -U syft`
-- PyGrid Requires: 🐳 `docker` or ☸️ `kubernetes`
+- Syft Server Requires: 🐳 `docker` or ☸️ `kubernetes`

 # Versions

 `0.9.0` - Coming soon...
-`0.8.7` (Beta) - `dev` branch 👈🏽 API - Coming soon...
-`0.8.6` (Stable) - API
+`0.8.8` (Beta) - `dev` branch 👈🏽 API - Coming soon...
+`0.8.7` (Stable) - API

 Deprecated:

+- `0.8.6` - API
 - `0.8.5-post.2` - API
 - `0.8.4` - API
 - `0.8.3` - API
@@ -162,7 +163,7 @@ Deprecated:
 - `0.5.1` - Course 2 + M1 Hotfix
 - `0.2.0` - `0.5.0`

-PySyft and PyGrid use the same `version` and its best to match them up where possible. We release weekly betas which can be used in each context:
+PySyft and Syft Server use the same `version` and it's best to match them up where possible. We release weekly betas which can be used in each context:

 PySyft (Stable): `pip install -U syft`
@@ -179,9 +180,9 @@ PySyft (Beta): `pip install -U syft --pre`

 ### Why should I use Syft?

-`Syft` allows a `Data Scientist` to ask `questions` about a `dataset` and, within `privacy limits` set by the `data owner`, get `answers` to those `questions`, all without obtaining a `copy` of the data itself. We call this process `Remote Data Science`. It means in a wide variety of `domains` across society, the current `risks` of sharing information (`copying` data) with someone such as, privacy invasion, IP theft and blackmail will no longer prevent the vast `benefits` such as innovation, insights and scientific discovery which secure access will provide.
+`Syft` allows a `Data Scientist` to ask `questions` about a `dataset` and, within `privacy limits` set by the `data owner`, get `answers` to those `questions`, all without obtaining a `copy` of the data itself. We call this process `Remote Data Science`. It means that in a wide variety of `datasites` across society, the current `risks` of sharing information (`copying` data) with someone, such as privacy invasion, IP theft and blackmail, will no longer prevent the vast `benefits` such as innovation, insights and scientific discovery which secure access will provide.

-No more cold calls to get `access` to a dataset. No more weeks of `wait times` to get a `result` on your `query`. It also means `1000x more data` in every domain. PySyft opens the doors to a streamlined Data Scientist `workflow`, all with the individual's `privacy` at its heart.
+No more cold calls to get `access` to a dataset. No more weeks of `wait times` to get a `result` on your `query`. It also means `1000x more data` in every datasite. PySyft opens the doors to a streamlined Data Scientist `workflow`, all with the individual's `privacy` at its heart.

-Provides services to a group of `Data Owners` and `Data Scientists`, such as dataset `search` and bulk `project approval` (legal / technical) to participate in a project. A gateway server acts as a bridge between it's members (`Domains`) and their subscribers (`Data Scientists`) and can provide access to a collection of `domains` at once.
+Provides services to a group of `Data Owners` and `Data Scientists`, such as dataset `search` and bulk `project approval` (legal / technical) to participate in a project. A gateway server acts as a bridge between its members (`Datasites`) and their subscribers (`Data Scientists`) and can provide access to a collection of `datasites` at once.
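(The README hunks above change the quickstart; stitched together they read as follows. This is a minimal sketch assuming a local beta install via `pip install -U syft --pre`; the names and credentials mirror the README defaults, and `server.land()` is the usual orchestra teardown call, included as an assumption rather than something this diff shows:)

```python
# from Jupyter / Python
import syft as sy

sy.requires(">=0.8.7,<0.8.8")

# launch an in-process datasite server (formerly a "domain node")
server = sy.orchestra.launch(
    name="my-datasite",
    port=8080,
    create_producer=True,
    n_consumers=1,
)

# log in with the default root credentials shown in the README
datasite_client = sy.login(
    port=8080,
    email="info@openmined.org",
    password="changethis",
)

server.land()  # assumption: shut the local server down when finished
```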
diff --git a/VERSION b/VERSION
index df1dec5602a..8742f2ae357 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
 # Mono Repo Global Version
-__version__ = "0.8.7-beta.13"
+__version__ = "0.8.8-beta.1"
 # elsewhere we can call this file: `python VERSION` and simply take the stdout
 # stdlib
diff --git a/docs/source/_static/install_tutorials/pygrid_ui.png b/docs/source/_static/install_tutorials/pygrid_ui.png
deleted file mode 100644
index a31db3d6111..00000000000
Binary files a/docs/source/_static/install_tutorials/pygrid_ui.png and /dev/null differ
diff --git a/docs/source/_static/personas-image/data-owner/00-deploy-domain-00.gif b/docs/source/_static/personas-image/data-owner/00-deploy-datasite-00.gif
similarity index 100%
rename from docs/source/_static/personas-image/data-owner/00-deploy-domain-00.gif
rename to docs/source/_static/personas-image/data-owner/00-deploy-datasite-00.gif
diff --git a/docs/source/_static/personas-image/data-owner/00-deploy-domain-01.jpg b/docs/source/_static/personas-image/data-owner/00-deploy-datasite-01.jpg
similarity index 100%
rename from docs/source/_static/personas-image/data-owner/00-deploy-domain-01.jpg
rename to docs/source/_static/personas-image/data-owner/00-deploy-datasite-01.jpg
diff --git a/docs/source/api_reference/index.rst b/docs/source/api_reference/index.rst
index 7d7e85d02a8..d87a01b35e9 100644
--- a/docs/source/api_reference/index.rst
+++ b/docs/source/api_reference/index.rst
@@ -22,7 +22,7 @@ objects, functions and methods.

    syft.client
    syft.external
-   syft.node
+   syft.server
    syft.serde
    syft.service
    syft.store
diff --git a/docs/source/api_reference/syft.client.api.rst b/docs/source/api_reference/syft.client.api.rst
index 9e7889482c6..3022a1c5086 100644
--- a/docs/source/api_reference/syft.client.api.rst
+++ b/docs/source/api_reference/syft.client.api.rst
@@ -27,7 +27,7 @@ syft.client.api

    APIEndpoint
    APIModule
    APIRegistry
-   NodeView
+   ServerView
    SignedSyftAPICall
    SyftAPI
    SyftAPICall
diff --git a/docs/source/api_reference/syft.client.connection.rst b/docs/source/api_reference/syft.client.connection.rst
index 912c3b50596..139d9eee60e 100644
--- a/docs/source/api_reference/syft.client.connection.rst
+++ b/docs/source/api_reference/syft.client.connection.rst
@@ -17,7 +17,7 @@ syft.client.connection

 .. autosummary::

-   NodeConnection
+   ServerConnection

diff --git a/docs/source/api_reference/syft.client.registry.rst b/docs/source/api_reference/syft.client.registry.rst
index 57f0136d312..9b2987bfb78 100644
--- a/docs/source/api_reference/syft.client.registry.rst
+++ b/docs/source/api_reference/syft.client.registry.rst
@@ -17,7 +17,7 @@ syft.client.registry

 .. autosummary::

-   DomainRegistry
+   DatasiteRegistry
    NetworkRegistry

diff --git a/docs/source/api_reference/syft.node.credentials.rst b/docs/source/api_reference/syft.node.credentials.rst
index a1888491bcc..99e54dd153e 100644
--- a/docs/source/api_reference/syft.node.credentials.rst
+++ b/docs/source/api_reference/syft.node.credentials.rst
@@ -1,7 +1,7 @@
-syft.node.credentials
-=====================
+syft.server.credentials
+=======================

-.. automodule:: syft.node.credentials
+.. automodule:: syft.server.credentials
diff --git a/docs/source/api_reference/syft.node.domain.rst b/docs/source/api_reference/syft.node.datasite.rst
similarity index 63%
rename from docs/source/api_reference/syft.node.domain.rst
rename to docs/source/api_reference/syft.node.datasite.rst
index eb4ff6334a5..687eb004816 100644
--- a/docs/source/api_reference/syft.node.domain.rst
+++ b/docs/source/api_reference/syft.node.datasite.rst
@@ -1,7 +1,7 @@
-syft.node.domain
-================
+syft.server.datasite
+====================

-.. automodule:: syft.node.domain
+.. automodule:: syft.server.datasite
@@ -17,7 +17,7 @@ syft.node.domain

 .. autosummary::

-   Domain
+   Datasite

diff --git a/docs/source/api_reference/syft.node.gateway.rst b/docs/source/api_reference/syft.node.gateway.rst
index 410841b9447..8b6d6335fd1 100644
--- a/docs/source/api_reference/syft.node.gateway.rst
+++ b/docs/source/api_reference/syft.node.gateway.rst
@@ -1,7 +1,7 @@
-syft.node.gateway
-=================
+syft.server.gateway
+===================

-.. automodule:: syft.node.gateway
+.. automodule:: syft.server.gateway
diff --git a/docs/source/api_reference/syft.node.node.rst b/docs/source/api_reference/syft.node.node.rst
index 1c71dd53574..e3f263f97d0 100644
--- a/docs/source/api_reference/syft.node.node.rst
+++ b/docs/source/api_reference/syft.node.node.rst
@@ -1,7 +1,7 @@
-syft.node.node
-==============
+syft.server.server
+==================

-.. automodule:: syft.node.node
+.. automodule:: syft.server.server
@@ -18,7 +18,7 @@ syft.node.node
    get_default_root_email
    get_default_root_password
    get_env
-   get_node_uid_env
+   get_server_uid_env
    get_private_key_env
    gipc_decoder
    gipc_encoder
@@ -35,7 +35,7 @@

 .. autosummary::

-   Node
+   Server

diff --git a/docs/source/api_reference/syft.node.routes.rst b/docs/source/api_reference/syft.node.routes.rst
index a5bce85bd46..926814c5b62 100644
--- a/docs/source/api_reference/syft.node.routes.rst
+++ b/docs/source/api_reference/syft.node.routes.rst
@@ -1,7 +1,7 @@
-syft.node.routes
-================
+syft.server.routes
+==================

-.. automodule:: syft.node.routes
+.. automodule:: syft.server.routes
diff --git a/docs/source/api_reference/syft.node.rst b/docs/source/api_reference/syft.node.rst
index a94e20d906d..183e2e7f281 100644
--- a/docs/source/api_reference/syft.node.rst
+++ b/docs/source/api_reference/syft.node.rst
@@ -1,7 +1,7 @@
-syft.node
-=========
+syft.server
+===========

-.. automodule:: syft.node
+.. automodule:: syft.server
@@ -27,13 +27,13 @@
    :toctree:
    :recursive:

-   syft.node.credentials
-   syft.node.domain
-   syft.node.gateway
-   syft.node.node
-   syft.node.routes
-   syft.node.run
-   syft.node.server
-   syft.node.worker
-   syft.node.worker_settings
+   syft.server.credentials
+   syft.server.datasite
+   syft.server.gateway
+   syft.server.server
+   syft.server.routes
+   syft.server.run
+   syft.server.server
+   syft.server.worker
+   syft.server.worker_settings

diff --git a/docs/source/api_reference/syft.node.run.rst b/docs/source/api_reference/syft.node.run.rst
index 3f8cc943294..588a53322d9 100644
--- a/docs/source/api_reference/syft.node.run.rst
+++ b/docs/source/api_reference/syft.node.run.rst
@@ -1,7 +1,7 @@
-syft.node.run
-=============
+syft.server.run
+===============

-.. automodule:: syft.node.run
+.. automodule:: syft.server.run
diff --git a/docs/source/api_reference/syft.node.server.rst b/docs/source/api_reference/syft.node.server.rst
index efcc7d9642a..1484aee73f4 100644
--- a/docs/source/api_reference/syft.node.server.rst
+++ b/docs/source/api_reference/syft.node.server.rst
@@ -1,7 +1,7 @@
-syft.node.server
-================
+syft.server.server
+==================

-.. automodule:: syft.node.server
+.. automodule:: syft.server.server
@@ -17,7 +17,7 @@ syft.node.server
    kill_process
    make_app
    run_uvicorn
-   serve_node
+   serve_server

diff --git a/docs/source/api_reference/syft.node.worker.rst b/docs/source/api_reference/syft.node.worker.rst
index 510732d7c39..f2da3a94e81 100644
--- a/docs/source/api_reference/syft.node.worker.rst
+++ b/docs/source/api_reference/syft.node.worker.rst
@@ -1,7 +1,7 @@
-syft.node.worker
-================
+syft.server.worker
+==================

-.. automodule:: syft.node.worker
+.. automodule:: syft.server.worker
diff --git a/docs/source/api_reference/syft.node.worker_settings.rst b/docs/source/api_reference/syft.node.worker_settings.rst
index 0b5521ddaef..a9880847ec0 100644
--- a/docs/source/api_reference/syft.node.worker_settings.rst
+++ b/docs/source/api_reference/syft.node.worker_settings.rst
@@ -1,7 +1,7 @@
-syft.node.worker\_settings
-==========================
+syft.server.worker\_settings
+============================

-.. automodule:: syft.node.worker_settings
+.. automodule:: syft.server.worker_settings
diff --git a/docs/source/api_reference/syft.rst b/docs/source/api_reference/syft.rst
index d10b471583a..0e930b08dce 100644
--- a/docs/source/api_reference/syft.rst
+++ b/docs/source/api_reference/syft.rst
@@ -15,7 +15,7 @@ Subpackages

    syft.capnp
    syft.client
    syft.external
-   syft.node
+   syft.server
    syft.serde
    syft.service
    syft.store
@@ -25,10 +25,10 @@ Subpackages

 Submodules
 ----------

-syft.abstract\_node module
---------------------------
+syft.abstract\_server module
+----------------------------

-.. automodule:: syft.abstract_node
+.. automodule:: syft.abstract_server
    :members:
    :undoc-members:
    :show-inheritance:
diff --git a/docs/source/api_reference/syft.service.action.action_object.rst b/docs/source/api_reference/syft.service.action.action_object.rst
index 014e584d460..89424403841 100644
--- a/docs/source/api_reference/syft.service.action.action_object.rst
+++ b/docs/source/api_reference/syft.service.action.action_object.rst
@@ -18,7 +18,7 @@ syft.service.action.action\_object
    has_action_data_empty
    is_action_data_empty
    make_action_side_effect
-   propagate_node_uid
+   propagate_server_uid
    send_action_side_effect

diff --git a/docs/source/api_reference/syft.service.context.rst b/docs/source/api_reference/syft.service.context.rst
index 276c578330b..990de2487d1 100644
--- a/docs/source/api_reference/syft.service.context.rst
+++ b/docs/source/api_reference/syft.service.context.rst
@@ -19,7 +19,7 @@ syft.service.context

    AuthedServiceContext
    ChangeContext
-   NodeServiceContext
+   ServerServiceContext
    UnauthedServiceContext

diff --git a/docs/source/api_reference/syft.service.policy.policy.rst b/docs/source/api_reference/syft.service.policy.policy.rst
index 74b52aa14cb..4e6cb3820dc 100644
--- a/docs/source/api_reference/syft.service.policy.policy.rst
+++ b/docs/source/api_reference/syft.service.policy.policy.rst
@@ -29,7 +29,7 @@ syft.service.policy.policy
    init_policy
    load_policy_code
    new_getfile
-   partition_by_node
+   partition_by_server
    process_class_code
    retrieve_from_db
    submit_policy_code_to_user_code
diff --git a/docs/source/api_reference/syft.types.rst b/docs/source/api_reference/syft.types.rst
index 67c05d63fd2..646214ca364 100644
--- a/docs/source/api_reference/syft.types.rst
+++ b/docs/source/api_reference/syft.types.rst
@@ -29,7 +29,7 @@ syft.types.base

    syft.types.datetime
-   syft.types.grid_url
+   syft.types.server_url
    syft.types.syft_metaclass
    syft.types.syft_object
    syft.types.transforms
diff --git a/docs/source/api_reference/syft.types.grid_url.rst b/docs/source/api_reference/syft.types.server_url.rst
similarity index 62%
rename from docs/source/api_reference/syft.types.grid_url.rst
rename to docs/source/api_reference/syft.types.server_url.rst
index 123f9f34af6..c9ad2ab0625 100644
--- a/docs/source/api_reference/syft.types.grid_url.rst
+++ b/docs/source/api_reference/syft.types.server_url.rst
@@ -1,7 +1,7 @@
-syft.types.grid\_url
-====================
+syft.types.server\_url
+======================

-.. automodule:: syft.types.grid_url
+.. automodule:: syft.types.server_url
@@ -17,7 +17,7 @@ syft.types.grid\_url

 .. autosummary::

-   GridURL
+   ServerURL

diff --git a/docs/source/api_reference/syft.types.transforms.rst b/docs/source/api_reference/syft.types.transforms.rst
index 374052d54f7..ef575c10e7b 100644
--- a/docs/source/api_reference/syft.types.transforms.rst
+++ b/docs/source/api_reference/syft.types.transforms.rst
@@ -14,7 +14,7 @@ syft.types.transforms
 .. autosummary::

    add_credentials_for_key
-   add_node_uid_for_key
+   add_server_uid_for_key
    convert_types
    drop
    generate_id
if a user agrees to a ``Network Agreement``, then they automatically agree to the conditions to the Domains enlisted in that Network. +A server which exists outside of any data owner's institution, providing services to the network of data owners and data scientists such as dataset search and bulk project approval (simultaneous legal/technical approval to participate in a project across groups of datasites and data scientists at a time). A Network acts as a bridge between between its members and subscribers. The members are ``Datasites`` while subscribers are the ``end users`` (e.g. Data Scientist) who explore and perform analysis on the datasets hosted by the members. +A network is used to provide access to a collection of datasites at once i.e. if a user agrees to a ``Network Agreement``, then they automatically agree to the conditions to the Datasites enlisted in that Network. Privacy Budget ~~~~~~~~~~~~~~~~~~~~~ @@ -39,9 +39,9 @@ PySyft ~~~~~~~~~~~~~~~~~~~~~ An open-source platform that enables remote data science experiments by combining ``federated learning`` and ``differentialy privacy`` techniques. -PyGrid +Syft Server ~~~~~~~~~~~~~~~~~~~~~ -``PyGrid`` is a ``peer-to-peer network`` of data owners and data scientists who can collectively train AI models using ``PySyft``. ``PyGrid`` is also the central server for conducting both model-centric and data-centric ``federated learning``. You may control PyGrid via our user-interface, ``PyGrid Admin``. +``Syft Server`` is also the central server for conducting both model-centric and data-centric ``federated learning``. You may control Syft Server via our user-interface, ``Syft UI``. Remote Data Science ~~~~~~~~~~~~~~~~~~~~~ @@ -54,25 +54,25 @@ Data Owner ~~~~~~~~~~~~~~~~~~~~~ Within the field of remote data science, a data owner is someone who has a (digital) dataset which they would like to make available for study by an outside party whom they may or may not fully trust to have good intentions. -Domain Owner +Datasite Owner ~~~~~~~~~~~~~~~~~~~~~ -A user of ``PyGrid`` who has deployed a domain node. +A user of ``Syft Server`` who has deployed a datasite server. Network Owner ~~~~~~~~~~~~~~~~~~~~~ -Within the field of remote data science, a network owner provides technical and legal services helping to connect data scientists with data owners (domains) by helping them find each other (dataset search) and by helping them enter into bulk legal agreements through the hosting of a network-level data consortium to which such data owners and data scientist may apply. +Within the field of remote data science, a network owner provides technical and legal services helping to connect data scientists with data owners (datasites) by helping them find each other (dataset search) and by helping them enter into bulk legal agreements through the hosting of a network-level data consortium to which such data owners and data scientist may apply. Data Scientist ~~~~~~~~~~~~~~~~~~~~~ -Within the context of remote data science, a data scientist is a persona which desires to answer a specific question using data owned by someone else. This user is required to sign a ``Data Access Agreement`` if you have required one in the ``Domain Settings Configurations``. +Within the context of remote data science, a data scientist is a persona which desires to answer a specific question using data owned by someone else. This user is required to sign a ``Data Access Agreement`` if you have required one in the ``Datasite Settings Configurations``. 
-Domain Compliance Officer +Datasite Compliance Officer ~~~~~~~~~~~~~~~~~~~~~~~~~~~ All the personas in an institution that are in charge of making sure that the utilization of data at an institution occurs within legal boundaries and under their supervision and with their liability/responsibility. Network Compliance Officer ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -All the personas in an institution that are in charge of making sure that the access and utilization of data between their network's domains and members (data scientists) fall within the bounds outlined in the network's legal agreements. +All the personas in an institution that are in charge of making sure that the access and utilization of data between their network's datasites and members (data scientists) fall within the bounds outlined in the network's legal agreements. User roles ============ @@ -82,7 +82,7 @@ Default roles Data Scientist """""""""""""" -This role is for users who will be performing computations on your datasets. They may be users you know directly or those who found your domain through search and discovery. By default this user can see a ``list of your datasets`` and can request to get results. This user will also be required to sign a ``Data Access Agreement`` if you have required one in the ``Domain Settings Configurations``. +This role is for users who will be performing computations on your datasets. They may be users you know directly or those who found your datasite through search and discovery. By default this user can see a ``list of your datasets`` and can request to get results. This user will also be required to sign a ``Data Access Agreement`` if you have required one in the ``Datasite Settings Configurations``. Default permissions: @@ -90,7 +90,7 @@ Default permissions: Compliance Officer """""""""""""""""""" -This role is for users who will help you manage requests made on your node. They should be users you trust. They are not able to change ``Domain Settings`` or edit roles but they are by default able to accept or deny ``user requests`` on behalf of the ``domain node``. +This role is for users who will help you manage requests made on your server. They should be users you trust. They are not able to change ``Datasite Settings`` or edit roles, but they are by default able to accept or deny ``user requests`` on behalf of the ``datasite server``. Default permissions: @@ -100,7 +100,7 @@ Default permissions: Admin """""" -This role is for users who will help you manage your node. This should be users you trust. The main difference between this ``user`` and a ``Compliance Officer`` is that this user by default not only can manage requests but can also edit ``Domain Settings.`` This is the highest level permission outside of an Owner. +This role is for users who will help you manage your server. These should be users you trust. The main difference between this ``user`` and a ``Compliance Officer`` is that this user by default not only can manage requests but can also edit ``Datasite Settings``. This is the highest level permission outside of an Owner. Default permissions: @@ -108,7 +108,7 @@ Default permissions: Owner """""""" -There is only one Owner account assigned to any one domain node. The owner account is the highest level permission and is a requirement for deploying a domain node. If you should ever want to transfer ownership of your domain node to someone, please contact us at support@openmined.org. +There is only one Owner account assigned to any one datasite server. 
The owner account is the highest level permission and is a requirement for deploying a datasite server. If you should ever want to transfer ownership of your datasite server to someone, please contact us at support@openmined.org. Default permissions: @@ -116,16 +116,16 @@ Default permissions: * Cannot disable permissions by default -Domain membership roles +Datasite membership roles ~~~~~~~~~~~~~~~~~~~~~~~~~~ Guest """""""""""""" -The lowest level of ``network membership``, a guest domain is listed within a network node's registry and its datasets are searchable/discoverable by all users of the network, but the network has no legal relationship to the domain nor any authority to grant data scientists access to its data. As such, upon discovering a domain on the network, such a data scientist must apply directly to the domain for access by creating an account on such a domain and signing a legal agreement (a "data-sharing agreement") directly with its corresponding data owner. +The lowest level of ``network membership``, a guest datasite is listed within a network server's registry and its datasets are searchable/discoverable by all users of the network, but the network has no legal relationship to the datasite nor any authority to grant data scientists access to its data. As such, upon discovering a datasite on the network, such a data scientist must apply directly to the datasite for access by creating an account on such a datasite and signing a legal agreement (a "data-sharing agreement") directly with its corresponding data owner. Member """""""""""""" -The highest level of ``network membership``, a full domain member is greater than a guest member because, beyond its listing within a network node's registry, the domain has entered into a legal relationship with the network owner such that the network owner can unilaterally give its full data scientists access to data hosted by the domain. Note that this does not mean that the network can control access to all potential users of the ``registered domain``, because the domain's membership in the network is non-exclusive (domains can register in multiple networks and also accept direct data-scientist users on the side). A network node only has authority to give its own full data scientists access to any full domain within its registry. +The highest level of ``network membership``, a full datasite member is greater than a guest member because, beyond its listing within a network server's registry, the datasite has entered into a legal relationship with the network owner such that the network owner can unilaterally give its full data scientists access to data hosted by the datasite. Note that this does not mean that the network can control access to all potential users of the ``registered datasite``, because the datasite's membership in the network is non-exclusive (datasites can register in multiple networks and also accept direct data-scientist users on the side). A network server only has authority to give its own full data scientists access to any full datasite within its registry. .. |image0| image:: ../_static/deployment/image2.png :width: 95% diff --git a/docs/source/getting_started/index.rst b/docs/source/getting_started/index.rst index 4633130e472..e2264459c00 100644 --- a/docs/source/getting_started/index.rst +++ b/docs/source/getting_started/index.rst @@ -7,9 +7,9 @@ Getting Started .. toctree:: :maxdepth: 3 -Welcome to the domain deployment installation tutorials! +Welcome to the datasite deployment installation tutorials! 
This section of our documentation is designed to be the -simplest way to get you started deploying your data to a domain node +simplest way to get you started deploying your data to a datasite server on an OSX, Linux, or Windows machine and interacting with it as a data scientist using PySyft. @@ -20,7 +20,7 @@ as a data scientist using PySyft. `advanced deployment documentation `__. The purpose of these tutorials is to help you install everything -you need to run a Domain node from your personal machine (such +you need to run a Datasite server from your personal machine (such as if you're running through OpenMined `courses `__ or @@ -30,7 +30,7 @@ notebooks with PySyft installed, such as if you're pretending to be both Data Owner and Data Scientist as a part of a tutorial or course. -**We will be setting up the following dependencies before PySyft and PyGrid:** +**We will be setting up the following dependencies before PySyft and Syft Server:** * Python >=3.9 * pip diff --git a/docs/source/guides/index.rst b/docs/source/guides/index.rst index ff7da37d0b1..7f60ef9e966 100644 --- a/docs/source/guides/index.rst +++ b/docs/source/guides/index.rst @@ -34,12 +34,12 @@ an ``outside party`` they may or may not ``fully trust`` has good intentions. You Will Learn ⬇️ """""""""""""""""""" -| :doc:`Part 1: Deploying your own Domain Server ` -| :doc:`Part 2: Uploading Private Data to a Domain Server ` -| :doc:`Part 3: Creating User Accounts on your Domain Server ` +| :doc:`Part 1: Deploying your own Datasite Server ` +| :doc:`Part 2: Uploading Private Data to a Datasite Server ` +| :doc:`Part 3: Creating User Accounts on your Datasite Server ` | :doc:`Part 4: Joining a Network ` | :doc:`Part 5: Creating a Network <04-create-network>` -| :doc:`Part 6: Configuring Privacy Budget on your Domain Server <05-configure-pb>` +| :doc:`Part 6: Configuring Privacy Budget on your Datasite Server <05-configure-pb>` B. Getting Started with Data Scientist 👩🏽‍🔬 @@ -50,9 +50,9 @@ specific ``question`` using one or more data owners' ``datasets``. You Will Learn ⬇️ """""""""""""""""""" -| :doc:`Part 7: Connect to a Domain` -| :doc:`Part 8: Searching for Datasets on the Domain` -| :doc:`Part 9: Exploring a Dataset in the Domain` +| :doc:`Part 7: Connect to a Datasite` +| :doc:`Part 8: Searching for Datasets on the Datasite` +| :doc:`Part 9: Exploring a Dataset in the Datasite` | :doc:`Part 10: Training a Model` | :doc:`Part 11: Retrieving Secure Results <>` diff --git a/docs/source/index.rst b/docs/source/index.rst index 4f2244cd612..3386ad515cd 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -75,7 +75,7 @@ use and development of PySyft! 
syft.client syft.external - syft.node + syft.server syft.serde syft.service syft.store diff --git a/notebooks/admin/Custom API + Custom Worker.ipynb b/notebooks/admin/Custom API + Custom Worker.ipynb deleted file mode 100644 index d50c1f1b4f2..00000000000 --- a/notebooks/admin/Custom API + Custom Worker.ipynb +++ /dev/null @@ -1,523 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "0", - "metadata": {}, - "source": [ - "## Custom API + Custom Worker" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "#### Import dependencies" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "import syft as sy\n", - "from syft.service.settings.settings import NodeSettingsUpdate\n", - "from syft.service.worker.worker_image import SyftWorkerImage" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "## remote mode\n", - "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", - "# os.environ[\"DEV_MODE\"] = \"True\"\n", - "domain_client = sy.login(\n", - " email=\"info@openmined.org\",\n", - " url=\"http://127.0.0.1\",\n", - " password=\"\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# # python mode\n", - "# # !uv pip install google-cloud-bigquery db_dtypes\n", - "# node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True, reset=True)\n", - "# domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "domain_client.worker_pools" - ] - }, - { - "cell_type": "markdown", - "id": "6", - "metadata": {}, - "source": [ - "## Register a custom Image" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "registry = \"us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us\"\n", - "backend_version = None\n", - "assert backend_version is not None\n", - "\n", - "custom_dockerfile_str = f\"\"\"\n", - "FROM {registry}/openmined/grid-backend:{backend_version}\n", - "\n", - "RUN uv pip install google-cloud-bigquery[all]==3.20.1 db-dtypes==1.2.0\n", - "\n", - "\"\"\".strip()\n", - "print(custom_dockerfile_str)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": {}, - "outputs": [], - "source": [ - "docker_config = sy.DockerWorkerConfig(dockerfile=custom_dockerfile_str)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "submit_result = domain_client.api.services.worker_image.submit(\n", - " worker_config=docker_config\n", - ")\n", - "submit_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "dockerfile_list = domain_client.images.get_all()\n", - "dockerfile_list" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "workerimage = next(\n", - " (\n", - " image\n", - " for image in dockerfile_list\n", - " if not image.is_prebuilt and image.config.dockerfile == custom_dockerfile_str\n", - " ),\n", - " None,\n", - ")\n", - "\n", - "assert isinstance(workerimage, SyftWorkerImage), 
str(workerimage)\n", - "workerimage" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "external_registry = registry\n", - "worker_docker_tag = \"openmined/bigquery:0.0.1\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "registry_add_result = domain_client.api.services.image_registry.add(external_registry)\n", - "registry_add_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "registries = domain_client.api.services.image_registry.get_all()\n", - "registry_uid = next((r.id for r in registries if r.url == external_registry), None)\n", - "registry_uid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# build with registry_uid\n", - "docker_build_result = domain_client.api.services.worker_image.build(\n", - " image_uid=workerimage.id,\n", - " tag=worker_docker_tag,\n", - " registry_uid=registry_uid,\n", - ")\n", - "print(docker_build_result)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "image_list = domain_client.images.get_all()\n", - "# we can also index with string using the repo_with_tag format\n", - "workerimage = next((image for image in image_list if image.id == workerimage.id), None)\n", - "assert workerimage.is_built is True\n", - "workerimage" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "push_result = domain_client.api.services.worker_image.push(workerimage.id)\n", - "push_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "worker_pool_name = \"bigquery-pool\"\n", - "domain_client.api.services.worker_pool.launch(\n", - " pool_name=worker_pool_name,\n", - " image_uid=workerimage.id,\n", - " num_workers=1,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "domain_client.worker_pools[1]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "new_default_worker_pool = NodeSettingsUpdate(default_worker_pool=worker_pool_name)\n", - "domain_client.settings.update(settings=new_default_worker_pool)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "SERVICE_ACCOUNT = {}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "# debug manually\n", - "# from google.oauth2 import service_account\n", - "# from google.cloud import bigquery\n", - "# credentials = service_account.Credentials.from_service_account_info(SERVICE_ACCOUNT)\n", - "# scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/bigquery'])\n", - "# scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/cloud-platform'])\n", - "\n", - "# client = bigquery.Client(\n", - "# credentials=scoped_credentials,\n", - "# location=\"us-west1\",\n", - "# )\n", - "# sql=\"SELECT * FROM reddit-testing-415005.test_1gb.accounts limit 10\"\n", - "# rows = client.query_and_wait(\n", - "# sql\n", - "# )\n", - "# g = 
sy.ActionObject.from_obj(rows)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.api_endpoint_method(\n", - " settings={\"credentials\": SERVICE_ACCOUNT, \"project_id\": \"reddit-testing-415005\"}\n", - ")\n", - "def public_function(context, sql: str) -> str:\n", - " # third party\n", - " from google.cloud import bigquery\n", - " from google.oauth2 import service_account\n", - "\n", - " credentials = service_account.Credentials.from_service_account_info(\n", - " context.settings[\"credentials\"]\n", - " )\n", - " scoped_credentials = credentials.with_scopes(\n", - " [\"https://www.googleapis.com/auth/bigquery\"]\n", - " )\n", - " scoped_credentials = credentials.with_scopes(\n", - " [\"https://www.googleapis.com/auth/cloud-platform\"]\n", - " )\n", - "\n", - " client = bigquery.Client(\n", - " credentials=scoped_credentials,\n", - " location=\"us-west1\",\n", - " )\n", - "\n", - " rows = client.query_and_wait(\n", - " sql,\n", - " project=context.settings[\"project_id\"],\n", - " )\n", - "\n", - " return rows\n", - "\n", - "\n", - "@sy.api_endpoint_method(\n", - " settings={\"credentials\": SERVICE_ACCOUNT, \"project_id\": \"reddit-testing-415005\"}\n", - ")\n", - "def private_function(context, sql: str) -> str:\n", - " # third party\n", - " from google.cloud import bigquery\n", - " from google.oauth2 import service_account\n", - "\n", - " credentials = service_account.Credentials.from_service_account_info(\n", - " context.settings[\"credentials\"]\n", - " )\n", - " scoped_credentials = credentials.with_scopes(\n", - " [\"https://www.googleapis.com/auth/bigquery\"]\n", - " )\n", - " scoped_credentials = credentials.with_scopes(\n", - " [\"https://www.googleapis.com/auth/cloud-platform\"]\n", - " )\n", - "\n", - " client = bigquery.Client(\n", - " credentials=scoped_credentials,\n", - " location=\"us-west1\",\n", - " )\n", - "\n", - " rows = client.query_and_wait(\n", - " sql,\n", - " project=context.settings[\"project_id\"],\n", - " )\n", - "\n", - " return rows" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "new_endpoint = sy.TwinAPIEndpoint(\n", - " path=\"bigquery.query\",\n", - " mock_function=public_function,\n", - " private_function=private_function,\n", - " description=\"Lore ipsulum ...\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "response = domain_client.api.services.api.delete(endpoint_path=\"bigquery.query\")\n", - "response" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "response = domain_client.api.services.api.add(endpoint=new_endpoint)\n", - "response" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "domain_client.refresh()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.syft_function_single_use(\n", - " endpoint=domain_client.api.services.bigquery.query,\n", - " worker_pool_name=worker_pool_name,\n", - ")\n", - "def job_function(endpoint):\n", - " result = endpoint(\n", - " sql=\"SELECT * FROM reddit-testing-415005.test_1gb.accounts limit 10\"\n", - " )\n", - " return result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - 
"outputs": [], - "source": [ - "new_project = sy.Project(\n", - " name=\"My Cool UN Project\",\n", - " description=\"Hi, I want to calculate the trade volume in million's with my cool code.\",\n", - " members=[domain_client],\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "result = new_project.create_code_request(job_function, domain_client)\n", - "domain_client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "domain_client.settings.get().default_worker_pool" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [ - "job = domain_client.code.job_function(\n", - " endpoint=domain_client.api.services.bigquery.query, blocking=False\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "job" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [ - "domain_client.jobs" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/api/0.8/00-load-data.ipynb b/notebooks/api/0.8/00-load-data.ipynb index ad98a5ac361..5370bd2e595 100644 --- a/notebooks/api/0.8/00-load-data.ipynb +++ b/notebooks/api/0.8/00-load-data.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Loading data into Syft Domain Server as a Data Owner\n", + "# Loading data into Syft Datasite Server as a Data Owner\n", "\n", "Welcome to Syft! 
This tutorial consists of 4 Jupyter notebooks that covers the basics of Syft which includes\n", "* [Uploading a private dataset as a Data Owner](./00-load-data.ipynb)\n", @@ -55,7 +55,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Launch a Syft Domain Server" + "### Launch a Syft Datasite Server" ] }, { @@ -66,8 +66,10 @@ }, "outputs": [], "source": [ - "# Launch a fresh domain server named \"test-domain-1\" in dev mode on the local machine\n", - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True, reset=True)" + "# Launch a fresh datasite server named \"test-datasite-1\" in dev mode on the local machine\n", + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\", port=\"auto\", dev_mode=True, reset=True\n", + ")" ] }, { @@ -78,8 +80,17 @@ }, "outputs": [], "source": [ - "# log into the node with default root credentials\n", - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "# log into the server with default root credentials\n", + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "datasite_client" ] }, { @@ -91,7 +102,7 @@ "outputs": [], "source": [ "# List the available API\n", - "domain_client.api" + "datasite_client.api" ] }, { @@ -114,7 +125,7 @@ "outputs": [], "source": [ "# Check for existing Data Subjects\n", - "data_subjects = domain_client.data_subject_registry.get_all()" + "data_subjects = datasite_client.data_subject_registry.get_all()" ] }, { @@ -206,7 +217,7 @@ "outputs": [], "source": [ "# Adds the data subject and all its members to the registry\n", - "response = domain_client.data_subject_registry.add_data_subject(country)\n", + "response = datasite_client.data_subject_registry.add_data_subject(country)\n", "response" ] }, @@ -230,7 +241,7 @@ "outputs": [], "source": [ "# Lets look at the data subjects added to the data\n", - "data_subjects = domain_client.data_subject_registry.get_all()\n", + "data_subjects = datasite_client.data_subject_registry.get_all()\n", "data_subjects" ] }, @@ -375,7 +386,7 @@ "dataset.add_contributor(\n", " name=\"Andrew Trask\",\n", " email=\"andrew@openmined.org\",\n", - " note=\"Andrew runs this domain and prepared the dataset metadata.\",\n", + " note=\"Andrew runs this datasite and prepared the dataset metadata.\",\n", ")\n", "\n", "dataset.add_contributor(\n", @@ -439,7 +450,7 @@ "ctf.add_contributor(\n", " name=\"Andrew Trask\",\n", " email=\"andrew@openmined.org\",\n", - " note=\"Andrew runs this domain and prepared the asset.\",\n", + " note=\"Andrew runs this datasite and prepared the asset.\",\n", ")" ] }, @@ -539,7 +550,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Upload Syft Dataset to Domain Server" + "### Upload Syft Dataset to Datasite Server" ] }, { @@ -550,7 +561,7 @@ }, "outputs": [], "source": [ - "upload_res = domain_client.upload_dataset(dataset)\n", + "upload_res = datasite_client.upload_dataset(dataset)\n", "upload_res" ] }, @@ -569,8 +580,8 @@ "metadata": {}, "outputs": [], "source": [ - "# We can list all the datasets on the Domain Server by invoking the following\n", - "datasets = domain_client.datasets.get_all()\n", + "# We can list all the datasets on the Datasite Server by invoking the following\n", + "datasets = datasite_client.datasets.get_all()\n", "datasets" ] }, @@ -604,7 +615,7 @@ } }, "source": [ - "### Reading the Syft Dataset from Domain 
Server\n", + "### Reading the Syft Dataset from Datasite Server\n", "\n", "Following the logical hierarchy of `Dataset`, `Asset`, and its variant, we can read the data as follows" ] @@ -618,7 +629,7 @@ "outputs": [], "source": [ "# Reading the mock dataset\n", - "mock = domain_client.datasets[0].assets[0].mock" + "mock = datasite_client.datasets[0].assets[0].mock" ] }, { @@ -642,7 +653,7 @@ "source": [ "# Reading the real dataset\n", "# NOTE: Private data can be accessed by the Data Owners, but NOT the Data Scientists\n", - "real = domain_client.datasets[0].assets[0].data" + "real = datasite_client.datasets[0].assets[0].data" ] }, { @@ -660,12 +671,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Create a new Data Scientist account on the Domain Server\n", + "### Create a new Data Scientist account on the Datasite Server\n", "\n", "Signup is disabled by default.\n", - "An Admin/DO can enable it by `domain_client.settings.allow_guest_signup(enable=True)`\n", + "An Admin/DO can enable it by `datasite_client.settings.allow_guest_signup(enable=True)`\n", "\n", - "Refer to notebook [07-domain-register-control-flow](./07-domain-register-control-flow.ipynb) for more information." + "Refer to notebook [07-datasite-register-control-flow](./07-datasite-register-control-flow.ipynb) for more information." ] }, { @@ -674,7 +685,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.register(\n", + "datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -692,9 +703,9 @@ }, "outputs": [], "source": [ - "# Cleanup local domain server\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "# Cleanup local datasite server\n", + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/01-submit-code.ipynb b/notebooks/api/0.8/01-submit-code.ipynb index 3d360c88b18..74bb381b81d 100644 --- a/notebooks/api/0.8/01-submit-code.ipynb +++ b/notebooks/api/0.8/01-submit-code.ipynb @@ -40,7 +40,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.client.api import NodeIdentity\n", + "from syft.client.api import ServerIdentity\n", "from syft.service.request.request import RequestStatus\n", "\n", "sy.requires(SYFT_VERSION)" @@ -50,7 +50,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Launch a Syft Domain Server" + "### Launch a Syft Datasite Server" ] }, { @@ -61,15 +61,15 @@ }, "outputs": [], "source": [ - "# Launch and connect to test-domain-1 server we setup in the previous notebook\n", - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)" + "# Launch and connect to test-datasite-1 server we setup in the previous notebook\n", + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=\"auto\", dev_mode=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Every `node` exposes a \"guest\" client that allows some basic read operations on the node without creating an account." + "Every `server` exposes a \"guest\" client that allows some basic read operations on the server without creating an account." 
] }, { @@ -80,7 +80,7 @@ }, "outputs": [], "source": [ - "guest_domain_client = node.client" + "guest_datasite_client = server.client" ] }, { @@ -92,7 +92,7 @@ "outputs": [], "source": [ "# Print this to see the few commands that are available for the guest client\n", - "guest_domain_client" + "guest_datasite_client" ] }, { @@ -102,14 +102,14 @@ "outputs": [], "source": [ "# This will return the public credentials of the guest client\n", - "guest_credentials = guest_domain_client.credentials" + "guest_credentials = guest_datasite_client.credentials" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Login into the Domain with Data Scientist credentials that we created in [00-load-data.ipynb](./00-load-data.ipynb) notebook" + "Login into the Datasite with Data Scientist credentials that we created in [00-load-data.ipynb](./00-load-data.ipynb) notebook" ] }, { @@ -120,7 +120,7 @@ }, "outputs": [], "source": [ - "jane_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")\n", + "jane_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")\n", "jane_client" ] }, @@ -139,7 +139,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Explore available Syft Datasets in the Domain Node" + "### Explore available Syft Datasets in the Datasite Server" ] }, { @@ -338,7 +338,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "You can validate your code against the mock data, before submitting it to the Domain Server" + "You can validate your code against the mock data, before submitting it to the Datasite Server" ] }, { @@ -385,11 +385,11 @@ "source": [ "# Tests\n", "assert len(sum_trade_value_mil.kwargs) == 1\n", - "node_identity = NodeIdentity.from_api(jane_client.api)\n", - "assert node_identity in sum_trade_value_mil.kwargs\n", - "assert \"trade_data\" in sum_trade_value_mil.kwargs[node_identity]\n", + "server_identity = ServerIdentity.from_api(jane_client.api)\n", + "assert server_identity in sum_trade_value_mil.kwargs\n", + "assert \"trade_data\" in sum_trade_value_mil.kwargs[server_identity]\n", "assert (\n", - " sum_trade_value_mil.input_policy_init_kwargs[node_identity][\"trade_data\"]\n", + " sum_trade_value_mil.input_policy_init_kwargs[server_identity][\"trade_data\"]\n", " == asset.action_id\n", ")" ] @@ -409,7 +409,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Submit your code to the Domain Server\n", + "### Submit your code to the Datasite Server\n", "\n", "We start by creating new Syft Project" ] @@ -481,7 +481,7 @@ }, "outputs": [], "source": [ - "# Once we start the project, it will submit the project along with the code request to the Domain Server\n", + "# Once we start the project, it will submit the project along with the code request to the Datasite Server\n", "project = new_project.send()\n", "project" ] @@ -569,10 +569,10 @@ }, "outputs": [], "source": [ - "# Cleanup local domain server\n", + "# Cleanup local datasite server\n", "\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/02-review-code-and-approve.ipynb b/notebooks/api/0.8/02-review-code-and-approve.ipynb index cd84910030b..0d1f81dc9ef 100644 --- a/notebooks/api/0.8/02-review-code-and-approve.ipynb +++ b/notebooks/api/0.8/02-review-code-and-approve.ipynb @@ -45,7 +45,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Login to Syft Domain Server" + "### Login to Syft Datasite 
Server" ] }, { @@ -56,8 +56,8 @@ }, "outputs": [], "source": [ - "# Launch and connect to test-domain-1 server we setup in the previous notebook\n", - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)" + "# Launch and connect to test-datasite-1 server we setup in the previous notebook\n", + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=\"auto\", dev_mode=True)" ] }, { @@ -68,17 +68,17 @@ }, "outputs": [], "source": [ - "# Log into the node with default root credentials\n", - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "# Log into the server with default root credentials\n", + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Selecting Project in the Syft Domain Server\n", + "### Selecting Project in the Syft Datasite Server\n", "\n", - "Let's see all the projects that are created by Data Scientists in this Domain Server" + "Let's see all the projects that are created by Data Scientists in this Datasite Server" ] }, { @@ -89,7 +89,7 @@ }, "outputs": [], "source": [ - "domain_client.projects" + "datasite_client.projects" ] }, { @@ -101,7 +101,7 @@ "outputs": [], "source": [ "# Select the project you want to work with\n", - "project = domain_client.projects[0]\n", + "project = datasite_client.projects[0]\n", "project" ] }, @@ -471,10 +471,10 @@ }, "outputs": [], "source": [ - "# Cleanup local domain server\n", + "# Cleanup local datasite server\n", "\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/03-data-scientist-download-result.ipynb b/notebooks/api/0.8/03-data-scientist-download-result.ipynb index 81ce4f783fc..47b6cfe001a 100644 --- a/notebooks/api/0.8/03-data-scientist-download-result.ipynb +++ b/notebooks/api/0.8/03-data-scientist-download-result.ipynb @@ -43,7 +43,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Login to Syft Domain Server" + "### Login to Syft Datasite Server" ] }, { @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", dev_mode=True)" + "server = sy.orchestra.launch(name=\"test-datasite-1\", dev_mode=True)" ] }, { @@ -65,7 +65,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "datasite_client = server.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -86,7 +86,7 @@ "outputs": [], "source": [ "# Get the canada_trade_flow asset from the Canada Trade dataset\n", - "asset = domain_client.datasets[0].assets[0]\n", + "asset = datasite_client.datasets[0].assets[0]\n", "asset" ] }, @@ -103,7 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.code.sum_trade_value_mil" + "datasite_client.code.sum_trade_value_mil" ] }, { @@ -121,7 +121,7 @@ }, "outputs": [], "source": [ - "result_pointer = domain_client.code.sum_trade_value_mil(trade_data=asset)\n", + "result_pointer = datasite_client.code.sum_trade_value_mil(trade_data=asset)\n", "result_pointer" ] }, @@ -161,7 +161,7 @@ }, "outputs": [], "source": [ - "ops = domain_client.code[-1].output_policy\n", + "ops = datasite_client.code[-1].output_policy\n", "ops" ] }, @@ -171,7 +171,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.code" + "datasite_client.code" ] }, { @@ -201,10 +201,10 @@ }, "outputs": [], "source": [ - "# Cleanup local 
domain\n", + "# Cleanup local datasite\n", "\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/04-pytorch-example.ipynb b/notebooks/api/0.8/04-pytorch-example.ipynb index 5cbc89ecaea..dcfc34acbc2 100644 --- a/notebooks/api/0.8/04-pytorch-example.ipynb +++ b/notebooks/api/0.8/04-pytorch-example.ipynb @@ -43,7 +43,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", dev_mode=True, reset=True)" + "server = sy.orchestra.launch(name=\"test-datasite-1\", dev_mode=True, reset=True)" ] }, { @@ -55,7 +55,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -128,8 +128,8 @@ }, "outputs": [], "source": [ - "train_domain_obj = train.send(domain_client)\n", - "type(train_domain_obj)" + "train_datasite_obj = train.send(datasite_client)\n", + "type(train_datasite_obj)" ] }, { @@ -139,7 +139,7 @@ "metadata": {}, "outputs": [], "source": [ - "train_domain_obj" + "train_datasite_obj" ] }, { @@ -149,7 +149,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert torch.round(train_domain_obj.syft_action_data.sum()) == 1557" + "assert torch.round(train_datasite_obj.syft_action_data.sum()) == 1557" ] }, { @@ -233,7 +233,7 @@ }, "outputs": [], "source": [ - "weight_domain_obj = w.send(domain_client)" + "weight_datasite_obj = w.send(datasite_client)" ] }, { @@ -246,7 +246,9 @@ "outputs": [], "source": [ "@sy.syft_function(\n", - " input_policy=sy.ExactMatch(weights=weight_domain_obj.id, data=train_domain_obj.id),\n", + " input_policy=sy.ExactMatch(\n", + " weights=weight_datasite_obj.id, data=train_datasite_obj.id\n", + " ),\n", " output_policy=sy.SingleExecutionExactOutput(),\n", ")\n", "def train_mlp(weights, data):\n", @@ -292,7 +294,7 @@ }, "outputs": [], "source": [ - "pointer = train_mlp(weights=weight_domain_obj, data=train_domain_obj)\n", + "pointer = train_mlp(weights=weight_datasite_obj, data=train_datasite_obj)\n", "output = pointer.get()" ] }, @@ -315,7 +317,7 @@ }, "outputs": [], "source": [ - "request = domain_client.code.request_code_execution(train_mlp)\n", + "request = datasite_client.code.request_code_execution(train_mlp)\n", "request" ] }, @@ -340,8 +342,8 @@ }, "outputs": [], "source": [ - "domain_client._api = None\n", - "_ = domain_client.api" + "datasite_client._api = None\n", + "_ = datasite_client.api" ] }, { @@ -353,7 +355,7 @@ }, "outputs": [], "source": [ - "result_ptr = domain_client.code.train_mlp(weights=w.id, data=train.id)" + "result_ptr = datasite_client.code.train_mlp(weights=w.id, data=train.id)" ] }, { @@ -387,8 +389,8 @@ }, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index fee03727122..69b25ed410b 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -41,7 +41,9 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True, reset=True)" + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\", port=\"auto\", dev_mode=True, reset=True\n", + ")" ] }, { @@ -53,7 +55,7 @@ }, "outputs": [], "source": [ - "domain_client = 
node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -63,7 +65,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.register(\n", + "datasite_client.register(\n", " email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n", ")" ] @@ -75,7 +77,7 @@ "metadata": {}, "outputs": [], "source": [ - "client_low_ds = node.login(email=\"newuser@openmined.org\", password=\"pw\")" + "client_low_ds = server.login(email=\"newuser@openmined.org\", password=\"pw\")" ] }, { @@ -231,7 +233,7 @@ "metadata": {}, "outputs": [], "source": [ - "x_pointer = x_pointer.send(domain_client)" + "x_pointer = x_pointer.send(datasite_client)" ] }, { @@ -247,7 +249,7 @@ "\n", "# syft absolute\n", "from syft.client.api import AuthedServiceContext\n", - "from syft.client.api import NodeIdentity\n", + "from syft.client.api import ServerIdentity\n", "\n", "\n", "class CustomExactMatch(sy.CustomInputPolicy):\n", @@ -272,21 +274,21 @@ "\n", " def retrieve_from_db(self, code_item_id, allowed_inputs, context):\n", " # syft absolute\n", - " from syft import NodeType\n", + " from syft import ServerType\n", " from syft.service.action.action_object import TwinMode\n", "\n", - " action_service = context.node.get_service(\"actionservice\")\n", + " action_service = context.server.get_service(\"actionservice\")\n", " code_inputs = {}\n", "\n", - " # When we are retrieving the code from the database, we need to use the node's\n", + " # When we are retrieving the code from the database, we need to use the server's\n", " # verify key as the credentials. This is because when we approve the code, we\n", " # we allow the private data to be used only for this specific code.\n", " # but we are not modifying the permissions of the private data\n", "\n", " root_context = AuthedServiceContext(\n", - " node=context.node, credentials=context.node.verify_key\n", + " server=context.server, credentials=context.server.verify_key\n", " )\n", - " if context.node.node_type == NodeType.DOMAIN:\n", + " if context.server.server_type == ServerType.DATASITE:\n", " for var_name, arg_id in allowed_inputs.items():\n", " kwarg_value = action_service._get(\n", " context=root_context,\n", @@ -299,7 +301,7 @@ " code_inputs[var_name] = kwarg_value.ok()\n", " else:\n", " raise Exception(\n", - " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", + " f\"Invalid Server Type for Code Submission:{context.server.server_type}\"\n", " )\n", " return Ok(code_inputs)\n", "\n", @@ -310,19 +312,19 @@ " context,\n", " ):\n", " # syft absolute\n", - " from syft import NodeType\n", + " from syft import ServerType\n", " from syft import UID\n", "\n", - " if context.node.node_type == NodeType.DOMAIN:\n", - " node_identity = NodeIdentity(\n", - " node_name=context.node.name,\n", - " node_id=context.node.id,\n", - " verify_key=context.node.signing_key.verify_key,\n", + " if context.server.server_type == ServerType.DATASITE:\n", + " server_identity = ServerIdentity(\n", + " server_name=context.server.name,\n", + " server_id=context.server.id,\n", + " verify_key=context.server.signing_key.verify_key,\n", " )\n", - " allowed_inputs = allowed_inputs.get(node_identity, {})\n", + " allowed_inputs = allowed_inputs.get(server_identity, {})\n", " else:\n", " raise Exception(\n", - " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", + " f\"Invalid Server Type for Code 
Submission:{context.server.server_type}\"\n", " )\n", " filtered_kwargs = {}\n", " for key in allowed_inputs.keys():\n", @@ -379,20 +381,20 @@ " context,\n", "):\n", " # syft absolute\n", - " from syft import NodeType\n", + " from syft import ServerType\n", " from syft import UID\n", - " from syft.client.api import NodeIdentity\n", + " from syft.client.api import ServerIdentity\n", "\n", - " if context.node.node_type == NodeType.DOMAIN:\n", - " node_identity = NodeIdentity(\n", - " node_name=context.node.name,\n", - " node_id=context.node.id,\n", - " verify_key=context.node.signing_key.verify_key,\n", + " if context.server.server_type == ServerType.DATASITE:\n", + " server_identity = ServerIdentity(\n", + " server_name=context.server.name,\n", + " server_id=context.server.id,\n", + " verify_key=context.server.signing_key.verify_key,\n", " )\n", - " allowed_inputs = allowed_inputs.get(node_identity, {})\n", + " allowed_inputs = allowed_inputs.get(server_identity, {})\n", " else:\n", " raise Exception(\n", - " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", + " f\"Invalid Server Type for Code Submission:{context.server.server_type}\"\n", " )\n", " filtered_kwargs = {}\n", " for key in allowed_inputs.keys():\n", @@ -469,7 +471,7 @@ "metadata": {}, "outputs": [], "source": [ - "for request in domain_client.requests:\n", + "for request in datasite_client.requests:\n", " if request.id == request_id:\n", " break" ] @@ -589,7 +591,7 @@ }, "outputs": [], "source": [ - "for code in domain_client.code.get_all():\n", + "for code in datasite_client.code.get_all():\n", " if code.service_func_name == \"func\":\n", " break\n", "print(code.output_policy.state)\n", @@ -605,8 +607,8 @@ }, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] } ], diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb index a52b5c6b38f..85f4fe0b88d 100644 --- a/notebooks/api/0.8/06-multiple-code-requests.ipynb +++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb @@ -41,7 +41,9 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", reset=True, dev_mode=True)" + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\", port=\"auto\", reset=True, dev_mode=True\n", + ")" ] }, { @@ -53,7 +55,7 @@ }, "outputs": [], "source": [ - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -142,7 +144,7 @@ }, "outputs": [], "source": [ - "ds_client = node.login(email=\"sheldon@caltech.edu\", password=\"abc123\")" + "ds_client = server.login(email=\"sheldon@caltech.edu\", password=\"abc123\")" ] }, { @@ -358,7 +360,7 @@ "metadata": {}, "outputs": [], "source": [ - "# The Domain Owner retrieves by name or uid for approval\n", + "# The Datasite Owner retrieves by name or uid for approval\n", "root_client_project = root_client.projects.get_by_uid(project.id)\n", "assert isinstance(root_client_project, sy.service.project.project.Project)" ] @@ -549,8 +551,8 @@ }, "outputs": [], "source": [ - "if node.deployment_type.value in [\"python\", \"single_container\"]:\n", - " node.land()" + "if server.deployment_type.value in [\"python\", \"single_container\"]:\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/07-domain-register-control-flow.ipynb 
b/notebooks/api/0.8/07-datasite-register-control-flow.ipynb similarity index 86% rename from notebooks/api/0.8/07-domain-register-control-flow.ipynb rename to notebooks/api/0.8/07-datasite-register-control-flow.ipynb index 5bd493a47c9..3f19b01311a 100644 --- a/notebooks/api/0.8/07-domain-register-control-flow.ipynb +++ b/notebooks/api/0.8/07-datasite-register-control-flow.ipynb @@ -5,9 +5,9 @@ "id": "0", "metadata": {}, "source": [ - "# Registering Users in Syft Domain Server\n", + "# Registering Users in Syft Datasite Server\n", "\n", - "By default users are not allowed to create a new account on the Syft Domain Server. This notebook is a tutorial for Data Owners to enable guest signups on their deployments." + "By default users are not allowed to create a new account on the Syft Datasite Server. This notebook is a tutorial for Data Owners to enable guest signups on their deployments." ] }, { @@ -48,7 +48,7 @@ "id": "4", "metadata": {}, "source": [ - "### Launch a Syft Domain Server" + "### Launch a Syft Datasite Server" ] }, { @@ -58,7 +58,9 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True, reset=True)" + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\", port=\"auto\", dev_mode=True, reset=True\n", + ")" ] }, { @@ -68,8 +70,8 @@ "metadata": {}, "outputs": [], "source": [ - "# log into the node with default root credentials\n", - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "# log into the server with default root credentials\n", + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -87,7 +89,7 @@ "metadata": {}, "outputs": [], "source": [ - "# The assumed state of this test is a node with signup set to False\n", + "# The assumed state of this test is a server with signup set to False\n", "# however if the tox task has set it to True you need to overwrite the setting\n", "# before running the tests\n", "# root_client.settings.allow_guest_signup(enable=False)" @@ -118,7 +120,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_2 = node.register(\n", + "response_2 = server.register(\n", " email=\"batman@gotham.com\",\n", " password=\"1rIzHAx6uQaP\",\n", " password_verify=\"1rIzHAx6uQaP\",\n", @@ -135,7 +137,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_3 = node.register(\n", + "response_3 = server.register(\n", " email=\"robin@gotham.com\",\n", " password=\"5v1ei4OM2N4m\",\n", " password_verify=\"5v1ei4OM2N4m\",\n", @@ -172,7 +174,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Get the current settings of the node\n", + "# Get the current settings of the server\n", "root_client.settings.get()" ] }, @@ -195,7 +197,7 @@ "outputs": [], "source": [ "# Refresh the root client to fetch the updated settings\n", - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -206,7 +208,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_2 = node.register(\n", + "response_2 = server.register(\n", " email=\"batman@gotham.com\",\n", " password=\"1rIzHAx6uQaP\",\n", " password_verify=\"1rIzHAx6uQaP\",\n", @@ -223,7 +225,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_3 = node.register(\n", + "response_3 = server.register(\n", " email=\"robin@gotham.com\",\n", " 
password=\"5v1ei4OM2N4m\",\n", " password_verify=\"5v1ei4OM2N4m\",\n", @@ -272,7 +274,7 @@ "outputs": [], "source": [ "# Refresh the root client to fetch the updated settings\n", - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -283,7 +285,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_2 = node.register(\n", + "response_2 = server.register(\n", " email=\"bane@gotham.com\",\n", " password=\"SKY5cC2zQPRP\",\n", " password_verify=\"SKY5cC2zQPRP\",\n", @@ -300,7 +302,7 @@ "outputs": [], "source": [ "# Register a new user as a GUEST\n", - "response_3 = node.register(\n", + "response_3 = server.register(\n", " email=\"riddler@gotham.com\",\n", " password=\"7eVGUuNDyH8P\",\n", " password_verify=\"7eVGUuNDyH8P\",\n", @@ -329,10 +331,10 @@ "metadata": {}, "outputs": [], "source": [ - "# Cleanup local domain server\n", + "# Cleanup local datasite server\n", "\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/08-code-version.ipynb b/notebooks/api/0.8/08-code-version.ipynb index 4168770ca88..b67ffb54f0d 100644 --- a/notebooks/api/0.8/08-code-version.ipynb +++ b/notebooks/api/0.8/08-code-version.ipynb @@ -41,7 +41,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Launch a Syft Domain Server" + "### Launch a Syft Datasite Server" ] }, { @@ -50,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)" + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=\"auto\", dev_mode=True)" ] }, { @@ -59,15 +59,15 @@ "metadata": {}, "outputs": [], "source": [ - "# log into the node with default root credentials\n", - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "# log into the server with default root credentials\n", + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Create a new Data Scientist account on the Domain Server\n" + "### Create a new Data Scientist account on the Datasite Server\n" ] }, { @@ -76,7 +76,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.register(\n", + "datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -92,7 +92,7 @@ "metadata": {}, "outputs": [], "source": [ - "jane_client = node.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "jane_client = server.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -214,7 +214,7 @@ "metadata": {}, "outputs": [], "source": [ - "admin_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "admin_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -268,9 +268,9 @@ "metadata": {}, "outputs": [], "source": [ - "# # Cleanup local domain server\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "# # Cleanup local datasite server\n", + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/09-blob-storage.ipynb b/notebooks/api/0.8/09-blob-storage.ipynb index 93491499896..57c1837effd 100644 --- a/notebooks/api/0.8/09-blob-storage.ipynb +++ b/notebooks/api/0.8/09-blob-storage.ipynb @@ -33,8 +33,8 @@ 
"metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"test-domain-1\",\n", + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\",\n", " dev_mode=True,\n", " reset=True,\n", " create_producer=True,\n", @@ -47,7 +47,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -89,7 +89,7 @@ "metadata": {}, "outputs": [], "source": [ - "data_ptr = domain_client.upload_files([a_file, b_file])" + "data_ptr = datasite_client.upload_files([a_file, b_file])" ] }, { @@ -127,7 +127,7 @@ "metadata": {}, "outputs": [], "source": [ - "lines_file_pptr = domain_client.upload_files(x_file)[0].syft_action_data" + "lines_file_pptr = datasite_client.upload_files(x_file)[0].syft_action_data" ] }, { @@ -169,7 +169,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.upload_dataset(ds)" + "datasite_client.upload_dataset(ds)" ] }, { @@ -219,7 +219,7 @@ "metadata": {}, "outputs": [], "source": [ - "single_data_ptr = domain_client.upload_files(a_file)\n", + "single_data_ptr = datasite_client.upload_files(a_file)\n", "single_data_ptr" ] }, @@ -237,14 +237,14 @@ "outputs": [], "source": [ "if False:\n", - " domain_client.upload_files(\"./path/to/folder\")" + " datasite_client.upload_files(\"./path/to/folder\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### Cleanup local domain server" + "#### Cleanup local datasite server" ] }, { @@ -253,8 +253,8 @@ "metadata": {}, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index 6ce97e012e1..ab3f6d0338b 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -101,8 +101,8 @@ "metadata": {}, "outputs": [], "source": [ - "domain = sy.orchestra.launch(\n", - " name=\"test-domain-1\",\n", + "datasite = sy.orchestra.launch(\n", + " name=\"test-datasite-1\",\n", " dev_mode=True,\n", " create_producer=True,\n", " reset=True,\n", @@ -117,7 +117,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client = domain.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = datasite.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -135,7 +135,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.worker_pools" + "datasite_client.worker_pools" ] }, { @@ -150,7 +150,8 @@ " if (bool(os.environ[\"DEV_MODE\"]) and running_as_container)\n", " else sy.__version__\n", ")\n", - "syft_base_worker_tag" + "# TODO: Remove this once the stable syft version is released\n", + "syft_base_worker_tag = \"0.8.7-beta.15\"" ] }, { @@ -169,7 +170,7 @@ "outputs": [], "source": [ "opendp_dockerfile_str = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/syft-backend:{syft_base_worker_tag}\n", "\n", "RUN uv pip install opendp\n", "\n", @@ -227,7 +228,7 @@ "metadata": {}, "outputs": [], "source": [ - "submit_result = domain_client.api.services.worker_image.submit(\n", + "submit_result = datasite_client.api.services.worker_image.submit(\n", " worker_config=docker_config\n", ")" ] @@ -259,7 +260,7 @@ "metadata": {}, "outputs": [], "source": [ - "dockerfile_list = domain_client.images.get_all()\n", + 
"dockerfile_list = datasite_client.images.get_all()\n", "dockerfile_list" ] }, @@ -270,7 +271,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.images.get_all()) == 2" + "assert len(datasite_client.images.get_all()) == 2" ] }, { @@ -314,7 +315,7 @@ "metadata": {}, "outputs": [], "source": [ - "registry_add_result = domain_client.api.services.image_registry.add(\"localhost:5678\")\n", + "registry_add_result = datasite_client.api.services.image_registry.add(\"localhost:5678\")\n", "registry_add_result" ] }, @@ -335,7 +336,7 @@ "metadata": {}, "outputs": [], "source": [ - "images = domain_client.api.services.image_registry.get_all()\n", + "images = datasite_client.api.services.image_registry.get_all()\n", "assert len(images) == 1\n", "images" ] @@ -389,7 +390,7 @@ "source": [ "registry_uid = local_registry.id if running_as_container else local_registry.id\n", "\n", - "docker_build_result = domain_client.api.services.worker_image.build(\n", + "docker_build_result = datasite_client.api.services.worker_image.build(\n", " image_uid=workerimage.id,\n", " tag=docker_tag,\n", " registry_uid=registry_uid,\n", @@ -425,7 +426,7 @@ "metadata": {}, "outputs": [], "source": [ - "image_list = domain_client.images.get_all()\n", + "image_list = datasite_client.images.get_all()\n", "image_list" ] }, @@ -501,7 +502,7 @@ " local_registry_container.start()\n", " sleep(5)\n", "\n", - " push_result = domain_client.api.services.worker_image.push(workerimage.id)\n", + " push_result = datasite_client.api.services.worker_image.push(workerimage.id)\n", " assert isinstance(push_result, sy.SyftSuccess), str(push_result)" ] }, @@ -599,7 +600,7 @@ "outputs": [], "source": [ "worker_pool_name = \"opendp-pool\"\n", - "worker_pool_res = domain_client.api.services.worker_pool.launch(\n", + "worker_pool_res = datasite_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", " image_uid=workerimage.id,\n", " num_workers=2,\n", @@ -638,7 +639,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_pool_list = domain_client.worker_pools\n", + "worker_pool_list = datasite_client.worker_pools\n", "worker_pool_list" ] }, @@ -649,7 +650,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.worker_pools.get_all()) == 2\n", + "assert len(datasite_client.worker_pools.get_all()) == 2\n", "worker_pool = None\n", "for pool in worker_pool_list:\n", " if pool.name == worker_pool_name:\n", @@ -667,7 +668,7 @@ "outputs": [], "source": [ "# We can filter pools based on the image id upon which the pools were built\n", - "domain_client.api.services.worker_pool.filter_by_image_id(image_uid=workerimage.id)" + "datasite_client.api.services.worker_pool.filter_by_image_id(image_uid=workerimage.id)" ] }, { @@ -706,7 +707,7 @@ "metadata": {}, "outputs": [], "source": [ - "raw_worker_logs = domain_client.api.services.worker.logs(\n", + "raw_worker_logs = datasite_client.api.services.worker.logs(\n", " uid=second_worker.id,\n", " raw=True,\n", ")\n", @@ -730,7 +731,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_logs = domain_client.api.services.worker.logs(\n", + "worker_logs = datasite_client.api.services.worker.logs(\n", " uid=second_worker.id,\n", ")\n", "worker_logs" @@ -761,7 +762,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_delete_res = domain_client.api.services.worker.delete(\n", + "worker_delete_res = datasite_client.api.services.worker.delete(\n", " uid=second_worker.id, force=True\n", ")" ] @@ -795,7 +796,7 @@ "source": [ "# Refetch the worker pool\n", 
"# Ensure that the deleted worker's id is not present\n", - "for pool in domain_client.api.services.worker_pool.get_all():\n", + "for pool in datasite_client.api.services.worker_pool.get_all():\n", " if pool.name == worker_pool_name:\n", " worker_pool = pool\n", "assert len(worker_pool.workers) == 1\n", @@ -831,7 +832,7 @@ "data = np.array([1, 2, 3])\n", "data_action_obj = sy.ActionObject.from_obj(data)\n", "\n", - "data_pointer = data_action_obj.send(domain_client)\n", + "data_pointer = data_action_obj.send(datasite_client)\n", "data_pointer" ] }, @@ -880,7 +881,7 @@ "metadata": {}, "outputs": [], "source": [ - "request = domain_client.code.request_code_execution(custom_worker_func)\n", + "request = datasite_client.code.request_code_execution(custom_worker_func)\n", "request" ] }, @@ -891,7 +892,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.requests[-1].approve(approve_nested=True)" + "datasite_client.requests[-1].approve(approve_nested=True)" ] }, { @@ -901,7 +902,7 @@ "metadata": {}, "outputs": [], "source": [ - "job = domain_client.code.custom_worker_func(x=data_pointer, blocking=False)\n", + "job = datasite_client.code.custom_worker_func(x=data_pointer, blocking=False)\n", "job" ] }, @@ -912,7 +913,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_pool = domain_client.worker_pools[worker_pool_name]\n", + "worker_pool = datasite_client.worker_pools[worker_pool_name]\n", "worker_pool" ] }, @@ -943,7 +944,7 @@ "metadata": {}, "outputs": [], "source": [ - "job = domain_client.jobs[-1]\n", + "job = datasite_client.jobs[-1]\n", "job" ] }, @@ -988,7 +989,7 @@ "source": [ "# Once the work is done by the worker, its state is returned to idle again.\n", "consuming_worker_is_now_idle = False\n", - "for worker in domain_client.worker_pools[worker_pool_name].workers:\n", + "for worker in datasite_client.worker_pools[worker_pool_name].workers:\n", " if worker.id == job.job_worker_id:\n", " consuming_worker_is_now_idle = worker.consumer_state.value.lower() == \"idle\"\n", "\n", @@ -1025,7 +1026,7 @@ "source": [ "# delete the remaining workers\n", "for worker in worker_pool.workers:\n", - " res = domain_client.api.services.worker.delete(\n", + " res = datasite_client.api.services.worker.delete(\n", " uid=worker.id,\n", " )\n", " assert isinstance(res, sy.SyftSuccess), str(res)" @@ -1038,7 +1039,7 @@ "metadata": {}, "outputs": [], "source": [ - "delete_res = domain_client.api.services.worker_image.remove(workerimage.id)\n", + "delete_res = datasite_client.api.services.worker_image.remove(workerimage.id)\n", "delete_res" ] }, @@ -1080,7 +1081,7 @@ "outputs": [], "source": [ "custom_dockerfile_str_2 = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/syft-backend:{syft_base_worker_tag}\n", "\n", "RUN uv pip install opendp\n", "\"\"\".strip()\n", @@ -1095,7 +1096,7 @@ "metadata": {}, "outputs": [], "source": [ - "submit_result = domain_client.api.services.worker_image.submit(\n", + "submit_result = datasite_client.api.services.worker_image.submit(\n", " worker_config=docker_config_2\n", ")\n", "submit_result" @@ -1108,7 +1109,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.images" + "datasite_client.images" ] }, { @@ -1120,7 +1121,7 @@ "source": [ "# get the image that's not built\n", "workerimage_2 = None\n", - "for im in domain_client.images:\n", + "for im in datasite_client.images:\n", " if im.config == docker_config_2:\n", " workerimage_2 = im" ] @@ -1142,7 +1143,7 @@ "source": [ "docker_tag_2 = 
\"openmined/custom-worker-opendp:latest\"\n", "\n", - "docker_build_result = domain_client.api.services.worker_image.build(\n", + "docker_build_result = datasite_client.api.services.worker_image.build(\n", " image_uid=workerimage_2.id,\n", " tag=docker_tag_2,\n", " pull=pull,\n", @@ -1158,7 +1159,7 @@ "outputs": [], "source": [ "opendp_pool_name = \"second-opendp-pool\"\n", - "pool_create_request = domain_client.api.services.worker_pool.pool_creation_request(\n", + "pool_create_request = datasite_client.api.services.worker_pool.pool_creation_request(\n", " pool_name=opendp_pool_name, num_workers=2, image_uid=workerimage_2.id\n", ")\n", "pool_create_request" @@ -1203,7 +1204,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.worker_pools[opendp_pool_name]" + "datasite_client.worker_pools[opendp_pool_name]" ] }, { @@ -1213,8 +1214,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert domain_client.worker_pools[opendp_pool_name]\n", - "assert len(domain_client.worker_pools[opendp_pool_name].workers) == 2" + "assert datasite_client.worker_pools[opendp_pool_name]\n", + "assert len(datasite_client.worker_pools[opendp_pool_name].workers) == 2" ] }, { @@ -1225,7 +1226,7 @@ "outputs": [], "source": [ "# default, opendp-pool, second-opendp-pool\n", - "assert len(domain_client.worker_pools.get_all()) == 3" + "assert len(datasite_client.worker_pools.get_all()) == 3" ] }, { @@ -1243,11 +1244,11 @@ "metadata": {}, "outputs": [], "source": [ - "for worker in domain_client.worker_pools[\"second-opendp-pool\"].workers:\n", - " res = domain_client.api.services.worker.delete(uid=worker.id, force=True)\n", + "for worker in datasite_client.worker_pools[\"second-opendp-pool\"].workers:\n", + " res = datasite_client.api.services.worker.delete(uid=worker.id, force=True)\n", " assert isinstance(res, sy.SyftSuccess), str(res)\n", "\n", - "assert len(domain_client.worker_pools[\"second-opendp-pool\"].workers) == 0" + "assert len(datasite_client.worker_pools[\"second-opendp-pool\"].workers) == 0" ] }, { @@ -1265,7 +1266,7 @@ "metadata": {}, "outputs": [], "source": [ - "delete_res = domain_client.api.services.worker_image.remove(workerimage_2.id)\n", + "delete_res = datasite_client.api.services.worker_image.remove(workerimage_2.id)\n", "delete_res" ] }, @@ -1296,7 +1297,7 @@ "outputs": [], "source": [ "custom_dockerfile_str_3 = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/syft-backend:{syft_base_worker_tag}\n", "\n", "RUN uv pip install recordlinkage\n", "\"\"\".strip()\n", @@ -1315,7 +1316,7 @@ "source": [ "recordlinkage_pool_name = \"recordlinkage-pool\"\n", "pool_image_create_request = (\n", - " domain_client.api.services.worker_pool.create_image_and_pool_request(\n", + " datasite_client.api.services.worker_pool.create_image_and_pool_request(\n", " pool_name=recordlinkage_pool_name,\n", " num_workers=2,\n", " tag=docker_tag_3,\n", @@ -1370,7 +1371,7 @@ "outputs": [], "source": [ "# Get updated request object and status\n", - "for req in domain_client.requests:\n", + "for req in datasite_client.requests:\n", " if req.id == pool_image_create_request.id:\n", " pool_image_create_request = req\n", "\n", @@ -1387,7 +1388,7 @@ "image_exists = False\n", "recordlinkage_image = None\n", "\n", - "for im in domain_client.images.get_all():\n", + "for im in datasite_client.images.get_all():\n", " if im.image_identifier and im.image_identifier.repo_with_tag == docker_tag_3:\n", " image_exists = True\n", " recordlinkage_image = im\n", @@ -1403,7 +1404,7 @@ "metadata": 
{}, "outputs": [], "source": [ - "recordlinkage_pool = domain_client.worker_pools[recordlinkage_pool_name]\n", + "recordlinkage_pool = datasite_client.worker_pools[recordlinkage_pool_name]\n", "\n", "assert recordlinkage_pool\n", "assert len(recordlinkage_pool.workers) == 2" @@ -1425,7 +1426,7 @@ "outputs": [], "source": [ "for worker in recordlinkage_pool.workers:\n", - " res = domain_client.api.services.worker.delete(uid=worker.id, force=True)\n", + " res = datasite_client.api.services.worker.delete(uid=worker.id, force=True)\n", " assert isinstance(res, sy.SyftSuccess), str(res)" ] }, @@ -1444,7 +1445,7 @@ "metadata": {}, "outputs": [], "source": [ - "delete_res = domain_client.api.services.worker_image.remove(recordlinkage_image.id)\n", + "delete_res = datasite_client.api.services.worker_image.remove(recordlinkage_image.id)\n", "delete_res" ] }, @@ -1455,7 +1456,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain.land()" + "datasite.land()" ] }, { diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index 5b78acff06e..68620d9c114 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -82,8 +82,8 @@ "os.environ[\"DEV_MODE\"] = \"True\"\n", "\n", "# Uncomment this to add custom values\n", - "# os.environ[\"NODE_URL\"] = \"http://localhost\"\n", - "# os.environ[\"NODE_PORT\"] = \"8080\"" + "# os.environ[\"SERVER_URL\"] = \"http://localhost\"\n", + "# os.environ[\"SERVER_PORT\"] = \"8080\"" ] }, { @@ -93,8 +93,8 @@ "metadata": {}, "outputs": [], "source": [ - "domain = sy.orchestra.launch(\n", - " name=\"test-domain-1\",\n", + "datasite = sy.orchestra.launch(\n", + " name=\"test-datasite-1\",\n", " dev_mode=True,\n", ")" ] @@ -106,8 +106,8 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client = domain.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "domain_client" + "datasite_client = datasite.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "datasite_client" ] }, { @@ -133,7 +133,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.worker_pools" + "datasite_client.worker_pools" ] }, { @@ -151,7 +151,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_client.api.services.worker_pool.scale(\n", + "result = datasite_client.api.services.worker_pool.scale(\n", " number=3, pool_name=\"default-pool\"\n", ")\n", "assert not isinstance(result, sy.SyftError), str(result)\n", @@ -165,7 +165,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_client.api.services.worker_pool.get_by_name(pool_name=\"default-pool\")\n", + "result = datasite_client.api.services.worker_pool.get_by_name(pool_name=\"default-pool\")\n", "assert len(result.workers) == 3, str(result.to_dict())\n", "result" ] @@ -199,7 +199,7 @@ "metadata": {}, "outputs": [], "source": [ - "default_pool_scale_res = domain_client.api.services.worker_pool.scale(\n", + "default_pool_scale_res = datasite_client.api.services.worker_pool.scale(\n", " number=1, pool_name=\"default-pool\"\n", ")\n", "assert not isinstance(default_pool_scale_res, sy.SyftError), str(default_pool_scale_res)\n", @@ -213,7 +213,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_client.api.services.worker_pool.get_by_name(pool_name=\"default-pool\")\n", + "result = datasite_client.api.services.worker_pool.get_by_name(pool_name=\"default-pool\")\n", "assert len(result.workers) == 1, str(result.to_dict())\n", "result" ] @@ -225,7 +225,7 @@ 
"metadata": {}, "outputs": [], "source": [ - "default_worker_pool = domain_client.api.services.worker_pool.get_by_name(\n", + "default_worker_pool = datasite_client.api.services.worker_pool.get_by_name(\n", " pool_name=\"default-pool\"\n", ")\n", "default_worker_pool" @@ -250,7 +250,7 @@ "from syft.util.util import get_latest_tag\n", "\n", "registry = os.getenv(\"SYFT_BASE_IMAGE_REGISTRY\", \"docker.io\")\n", - "repo = \"openmined/grid-backend\"\n", + "repo = \"openmined/syft-backend\"\n", "\n", "if \"k3d\" in registry:\n", " tag = get_latest_tag(registry, repo)\n", @@ -300,7 +300,7 @@ "metadata": {}, "outputs": [], "source": [ - "submit_result = domain_client.api.services.worker_image.submit(\n", + "submit_result = datasite_client.api.services.worker_image.submit(\n", " worker_config=docker_config\n", ")\n", "submit_result" @@ -323,7 +323,7 @@ "metadata": {}, "outputs": [], "source": [ - "dockerfile_list = domain_client.images.get_all()\n", + "dockerfile_list = datasite_client.images.get_all()\n", "dockerfile_list" ] }, @@ -389,7 +389,7 @@ "metadata": {}, "outputs": [], "source": [ - "registry_add_result = domain_client.api.services.image_registry.add(external_registry)\n", + "registry_add_result = datasite_client.api.services.image_registry.add(external_registry)\n", "registry_add_result" ] }, @@ -410,7 +410,7 @@ "metadata": {}, "outputs": [], "source": [ - "image_registry_list = domain_client.api.services.image_registry.get_all()\n", + "image_registry_list = datasite_client.api.services.image_registry.get_all()\n", "image_registry_list" ] }, @@ -474,7 +474,7 @@ "docker_tag = \"openmined/custom-worker:0.7.8\"\n", "\n", "\n", - "docker_build_result = domain_client.api.services.worker_image.build(\n", + "docker_build_result = datasite_client.api.services.worker_image.build(\n", " image_uid=workerimage.id,\n", " tag=docker_tag,\n", " registry_uid=registry_uid,\n", @@ -499,7 +499,7 @@ "metadata": {}, "outputs": [], "source": [ - "image_list = domain_client.images.get_all()\n", + "image_list = datasite_client.images.get_all()\n", "image_list" ] }, @@ -545,7 +545,7 @@ "outputs": [], "source": [ "push_result = None\n", - "push_result = domain_client.api.services.worker_image.push(\n", + "push_result = datasite_client.api.services.worker_image.push(\n", " workerimage.id,\n", " username=external_registry_username,\n", " password=external_registry_password,\n", @@ -603,7 +603,7 @@ "worker_pool_name = \"custom-pool\"\n", "custom_pool_pod_annotations = {\"test-custom-pool\": \"Test annotation for custom pool\"}\n", "custom_pool_pod_labels = {\"test-custom-pool\": \"test_label_for_custom_pool\"}\n", - "worker_pool_res = domain_client.api.services.worker_pool.launch(\n", + "worker_pool_res = datasite_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", " image_uid=workerimage.id,\n", " num_workers=3,\n", @@ -643,7 +643,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_pool_list = domain_client.worker_pools.get_all()\n", + "worker_pool_list = datasite_client.worker_pools.get_all()\n", "worker_pool_list" ] }, @@ -711,7 +711,7 @@ "outputs": [], "source": [ "# We can filter pools based on the image id upon which the pools were built\n", - "filtered_result = domain_client.api.services.worker_pool.filter_by_image_id(\n", + "filtered_result = datasite_client.api.services.worker_pool.filter_by_image_id(\n", " image_uid=workerimage.id\n", ")\n", "filtered_result" @@ -753,7 +753,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_logs = 
domain_client.api.services.worker.logs(\n", + "worker_logs = datasite_client.api.services.worker.logs(\n", " uid=second_worker.id,\n", ")\n", "worker_logs" @@ -797,7 +797,7 @@ "data = np.array([1, 2, 3])\n", "data_action_obj = sy.ActionObject.from_obj(data)\n", "\n", - "data_pointer = data_action_obj.send(domain_client)\n", + "data_pointer = data_action_obj.send(datasite_client)\n", "data_pointer" ] }, @@ -848,7 +848,7 @@ "metadata": {}, "outputs": [], "source": [ - "request = domain_client.code.request_code_execution(custom_worker_func)\n", + "request = datasite_client.code.request_code_execution(custom_worker_func)\n", "request" ] }, @@ -859,7 +859,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.requests[-1].approve(approve_nested=True)" + "datasite_client.requests[-1].approve(approve_nested=True)" ] }, { @@ -869,7 +869,7 @@ "metadata": {}, "outputs": [], "source": [ - "job = domain_client.code.custom_worker_func(x=data_pointer, blocking=False)\n", + "job = datasite_client.code.custom_worker_func(x=data_pointer, blocking=False)\n", "job" ] }, @@ -880,7 +880,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_pool = domain_client.worker_pools[worker_pool_name]\n", + "worker_pool = datasite_client.worker_pools[worker_pool_name]\n", "worker_pool" ] }, @@ -911,7 +911,7 @@ "metadata": {}, "outputs": [], "source": [ - "job_list = domain_client.jobs.get_by_user_code_id(job.user_code_id)" + "job_list = datasite_client.jobs.get_by_user_code_id(job.user_code_id)" ] }, { @@ -956,7 +956,7 @@ "outputs": [], "source": [ "# Scale Down the workers\n", - "custom_pool_scale_res = domain_client.api.services.worker_pool.scale(\n", + "custom_pool_scale_res = datasite_client.api.services.worker_pool.scale(\n", " number=1, pool_name=worker_pool_name\n", ")\n", "assert not isinstance(custom_pool_scale_res, sy.SyftError), str(custom_pool_scale_res)\n", @@ -970,7 +970,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.worker_pools[worker_pool_name].worker_list) == 1" + "assert len(datasite_client.worker_pools[worker_pool_name].worker_list) == 1" ] }, { @@ -1005,7 +1005,7 @@ "outputs": [], "source": [ "submit_result = None\n", - "submit_result = domain_client.api.services.worker_image.submit(\n", + "submit_result = datasite_client.api.services.worker_image.submit(\n", " worker_config=docker_config_opendp\n", ")\n", "submit_result" @@ -1028,7 +1028,7 @@ "metadata": {}, "outputs": [], "source": [ - "_images = domain_client.images\n", + "_images = datasite_client.images\n", "assert not isinstance(_images, sy.SyftError), str(_images)" ] }, @@ -1063,7 +1063,7 @@ "source": [ "docker_tag_opendp = \"openmined/custom-worker-opendp:latest\"\n", "\n", - "docker_build_result = domain_client.api.services.worker_image.build(\n", + "docker_build_result = datasite_client.api.services.worker_image.build(\n", " image_uid=workerimage_opendp.id,\n", " tag=docker_tag_opendp,\n", " registry_uid=registry_uid,\n", @@ -1089,7 +1089,7 @@ "metadata": {}, "outputs": [], "source": [ - "_images = domain_client.images\n", + "_images = datasite_client.images\n", "assert not isinstance(_images, sy.SyftError), str(_images)" ] }, @@ -1125,7 +1125,7 @@ "source": [ "# Push OpenDP Image to registry\n", "push_result = None\n", - "push_result = domain_client.api.services.worker_image.push(\n", + "push_result = datasite_client.api.services.worker_image.push(\n", " workerimage_opendp.id,\n", " username=external_registry_username,\n", " password=external_registry_password,\n", @@ -1143,7 +1143,7 @@ 
"pool_name_opendp = \"opendp-pool\"\n", "opendp_pod_annotations = {\"test-opendp-pool\": \"Test annotation for opendp pool\"}\n", "opendp_pod_labels = {\"test-opendp-pool\": \"test_label_for_opendp_pool\"}\n", - "pool_create_request = domain_client.api.services.worker_pool.pool_creation_request(\n", + "pool_create_request = datasite_client.api.services.worker_pool.pool_creation_request(\n", " pool_name=pool_name_opendp,\n", " num_workers=3,\n", " image_uid=workerimage_opendp.id,\n", @@ -1196,7 +1196,7 @@ "metadata": {}, "outputs": [], "source": [ - "pool_opendp = domain_client.worker_pools[pool_name_opendp]\n", + "pool_opendp = datasite_client.worker_pools[pool_name_opendp]\n", "assert not isinstance(pool_opendp, sy.SyftError), str(pool_opendp)\n", "assert len(pool_opendp.worker_list) == 3" ] @@ -1208,7 +1208,7 @@ "metadata": {}, "outputs": [], "source": [ - "worker_pool_list = domain_client.worker_pools.get_all()\n", + "worker_pool_list = datasite_client.worker_pools.get_all()\n", "\n", "assert not isinstance(worker_pool_list, sy.SyftError), str(worker_pool_list)\n", "assert len(worker_pool_list) == 3" @@ -1251,7 +1251,7 @@ "outputs": [], "source": [ "# Scale Down the workers\n", - "opendp_pool_scale_res = domain_client.api.services.worker_pool.scale(\n", + "opendp_pool_scale_res = datasite_client.api.services.worker_pool.scale(\n", " number=1, pool_name=pool_name_opendp\n", ")\n", "assert not isinstance(opendp_pool_scale_res, sy.SyftError), str(opendp_pool_scale_res)\n", @@ -1265,7 +1265,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.worker_pools[pool_name_opendp].worker_list) == 1" + "assert len(datasite_client.worker_pools[pool_name_opendp].worker_list) == 1" ] }, { @@ -1308,7 +1308,7 @@ "recordlinkage_pod_labels = {\n", " \"test-recordlinkage-pool\": \"test_label_for_recordlinkage_pool\"\n", "}\n", - "pool_image_create_request = domain_client.api.services.worker_pool.create_image_and_pool_request(\n", + "pool_image_create_request = datasite_client.api.services.worker_pool.create_image_and_pool_request(\n", " pool_name=pool_name_recordlinkage,\n", " num_workers=2,\n", " tag=docker_tag_recordlinkage,\n", @@ -1377,7 +1377,7 @@ "metadata": {}, "outputs": [], "source": [ - "_requests = domain_client.requests\n", + "_requests = datasite_client.requests\n", "assert not isinstance(_requests, sy.SyftError), str(_requests)" ] }, @@ -1430,7 +1430,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.images" + "datasite_client.images" ] }, { @@ -1441,7 +1441,7 @@ "outputs": [], "source": [ "image_exists = False\n", - "for im in domain_client.images.get_all():\n", + "for im in datasite_client.images.get_all():\n", " if (\n", " im.image_identifier\n", " and im.image_identifier.repo_with_tag == docker_tag_recordlinkage\n", @@ -1458,8 +1458,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert domain_client.worker_pools[pool_name_recordlinkage]\n", - "assert len(domain_client.worker_pools[pool_name_recordlinkage].worker_list) == 2" + "assert datasite_client.worker_pools[pool_name_recordlinkage]\n", + "assert len(datasite_client.worker_pools[pool_name_recordlinkage].worker_list) == 2" ] }, { @@ -1470,7 +1470,7 @@ "outputs": [], "source": [ "# Scale down the workers\n", - "recordlinkage_pool_scale_res = domain_client.api.services.worker_pool.scale(\n", + "recordlinkage_pool_scale_res = datasite_client.api.services.worker_pool.scale(\n", " number=1, pool_name=pool_name_recordlinkage\n", ")\n", "assert not isinstance(recordlinkage_pool_scale_res, sy.SyftError), 
str(\n", @@ -1486,7 +1486,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.worker_pools[pool_name_recordlinkage].worker_list) == 1" + "assert len(datasite_client.worker_pools[pool_name_recordlinkage].worker_list) == 1" ] } ], diff --git a/notebooks/api/0.8/12-custom-api-endpoint.ipynb b/notebooks/api/0.8/12-custom-api-endpoint.ipynb index 20c269ea286..478446bcd17 100644 --- a/notebooks/api/0.8/12-custom-api-endpoint.ipynb +++ b/notebooks/api/0.8/12-custom-api-endpoint.ipynb @@ -11,7 +11,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Initialize the Node" + "### Initialize the Server" ] }, { @@ -28,8 +28,8 @@ "from syft import SyftError\n", "from syft import SyftSuccess\n", "\n", - "node = sy.orchestra.launch(\n", - " name=\"test-domain-1\",\n", + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\",\n", " dev_mode=True,\n", " create_producer=True,\n", " n_consumers=3,\n", @@ -37,14 +37,14 @@ " port=8081,\n", ")\n", "\n", - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "domain_client.register(\n", + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "datasite_client.register(\n", " email=\"user@openmined.org\",\n", " password=\"verysecurepassword\",\n", " password_verify=\"verysecurepassword\",\n", " name=\"New User\",\n", ")\n", - "domain_guest = node.login(email=\"user@openmined.org\", password=\"verysecurepassword\")" + "datasite_guest = server.login(email=\"user@openmined.org\", password=\"verysecurepassword\")" ] }, { @@ -53,7 +53,7 @@ "source": [ "### Create a public custom API Endpoint by using the decorator\n", "\n", - "This allows node admin to create a new public endpoint by using only the decorator." + "This allows server admin to create a new public endpoint by using only the decorator." 
] }, { @@ -73,8 +73,8 @@ " return context.settings[\"key\"] == \"value\"\n", "\n", "\n", - "# Add it to the node.\n", - "response = domain_client.api.services.api.add(endpoint=public_endpoint_method)\n", + "# Add it to the server.\n", + "response = datasite_client.api.services.api.add(endpoint=public_endpoint_method)\n", "response" ] }, @@ -93,7 +93,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.api.services.api.api_endpoints()" + "datasite_client.api.services.api.api_endpoints()" ] }, { @@ -102,7 +102,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.api.services.api.api_endpoints()) == 1" + "assert len(datasite_client.api.services.api.api_endpoints()) == 1" ] }, { @@ -112,8 +112,8 @@ "outputs": [], "source": [ "# Once api refresh is done, remove this cell\n", - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "domain_guest = node.login(email=\"user@openmined.org\", password=\"verysecurepassword\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "datasite_guest = server.login(email=\"user@openmined.org\", password=\"verysecurepassword\")" ] }, { @@ -122,7 +122,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert domain_client.api.services.first.query(query=\"SELECT *\")" + "assert datasite_client.api.services.first.query(query=\"SELECT *\")" ] }, { @@ -131,7 +131,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_guest.api.services.first.query(query=\"SELECT *\")\n", + "result = datasite_guest.api.services.first.query(query=\"SELECT *\")\n", "result" ] }, @@ -171,8 +171,8 @@ " description=\"Lore ipsulum ...\",\n", ")\n", "\n", - "# # Add it to the node.\n", - "response = domain_client.api.services.api.add(endpoint=new_endpoint)" + "# # Add it to the server.\n", + "response = datasite_client.api.services.api.add(endpoint=new_endpoint)" ] }, { @@ -181,7 +181,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.api.services.api.api_endpoints()" + "datasite_client.api.services.api.api_endpoints()" ] }, { @@ -191,7 +191,7 @@ "outputs": [], "source": [ "assert isinstance(response, SyftSuccess)\n", - "assert len(domain_client.api.services.api.api_endpoints()) == 2" + "assert len(datasite_client.api.services.api.api_endpoints()) == 2" ] }, { @@ -201,8 +201,8 @@ "outputs": [], "source": [ "# Once api refresh is done, remove this cell\n", - "domain_client.refresh()\n", - "domain_guest.refresh()" + "datasite_client.refresh()\n", + "datasite_guest.refresh()" ] }, { @@ -211,7 +211,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.api.services.third.query()" + "datasite_client.api.services.third.query()" ] }, { @@ -220,7 +220,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert domain_client.api.services.third.query() == \"Private Function Execution\"" + "assert datasite_client.api.services.third.query() == \"Private Function Execution\"" ] }, { @@ -229,7 +229,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert domain_guest.api.services.third.query() == \"Public Function Execution\"" + "assert datasite_guest.api.services.third.query() == \"Public Function Execution\"" ] }, { @@ -238,7 +238,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_guest.api.services.third.query()" + "datasite_guest.api.services.third.query()" ] }, { @@ -248,7 +248,7 @@ "outputs": [], "source": [ "@sy.syft_function_single_use(\n", - " endpoint=domain_guest.api.services.third.query,\n", + " 
endpoint=datasite_guest.api.services.third.query,\n", ")\n", "def job_function(endpoint):\n", " return endpoint()\n", @@ -258,10 +258,10 @@ "new_project = sy.Project(\n", " name=\"My Cool UN Project\",\n", " description=\"Hi, I want to calculate the trade volume in million's with my cool code.\",\n", - " members=[domain_guest],\n", + " members=[datasite_guest],\n", ")\n", "\n", - "result = new_project.create_code_request(job_function, domain_guest)\n", + "result = new_project.create_code_request(job_function, datasite_guest)\n", "assert isinstance(result, SyftSuccess)" ] }, @@ -272,7 +272,7 @@ "outputs": [], "source": [ "res = None\n", - "for r in domain_client.requests.get_all():\n", + "for r in datasite_client.requests.get_all():\n", " if r.requesting_user_email == \"user@openmined.org\":\n", " res = r.approve()\n", "assert res is not None, res\n", @@ -285,7 +285,9 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_guest.code.job_function(endpoint=domain_client.api.services.third.query)\n", + "result = datasite_guest.code.job_function(\n", + " endpoint=datasite_client.api.services.third.query\n", + ")\n", "result" ] }, @@ -295,7 +297,9 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_guest.code.job_function(endpoint=domain_client.api.services.third.query)\n", + "result = datasite_guest.code.job_function(\n", + " endpoint=datasite_client.api.services.third.query\n", + ")\n", "result" ] }, @@ -305,7 +309,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.api.services.third.query" + "datasite_client.api.services.third.query" ] }, { @@ -314,7 +318,9 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_guest.code.job_function(endpoint=domain_client.api.services.third.query)\n", + "result = datasite_guest.code.job_function(\n", + " endpoint=datasite_client.api.services.third.query\n", + ")\n", "result" ] }, @@ -342,7 +348,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert isinstance(domain_guest.api.services.third.query.private(), SyftError)" + "assert isinstance(datasite_guest.api.services.third.query.private(), SyftError)" ] }, { @@ -351,7 +357,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = domain_client.api.services.api.delete(endpoint_path=\"third.query\")\n", + "result = datasite_client.api.services.api.delete(endpoint_path=\"third.query\")\n", "assert isinstance(result, SyftSuccess), result" ] }, @@ -361,7 +367,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert len(domain_client.api.services.api.api_endpoints()) == 1" + "assert len(datasite_client.api.services.api.api_endpoints()) == 1" ] }, { @@ -395,8 +401,8 @@ " return context.settings[\"key\"] == \"value\"\n", "\n", "\n", - "# Add it to the node.\n", - "response = domain_client.api.services.api.add(endpoint=new_public_function)\n", + "# Add it to the server.\n", + "response = datasite_client.api.services.api.add(endpoint=new_public_function)\n", "assert isinstance(response, SyftSuccess), response\n", "response" ] @@ -421,7 +427,7 @@ " return \"Updated Public Function Execution\"\n", "\n", "\n", - "response = domain_client.api.services.api.update(\n", + "response = datasite_client.api.services.api.update(\n", " endpoint_path=\"test.update\", mock_function=updated_public_function\n", ")\n", "assert isinstance(response, SyftSuccess), response\n", @@ -448,7 +454,7 @@ " return \"Updated Private Function Execution\"\n", "\n", "\n", - "response = domain_client.api.services.api.update(\n", + "response = datasite_client.api.services.api.update(\n", " 
endpoint_path=\"test.update\", private_function=updated_private_function\n", ")\n", "assert isinstance(response, SyftSuccess), response\n", @@ -478,7 +484,7 @@ " return \"Updated Private Function Execution\"\n", "\n", "\n", - "response = domain_client.api.services.api.update(\n", + "response = datasite_client.api.services.api.update(\n", " endpoint_path=\"test.update\",\n", " mock_function=new_sig_public_function,\n", " private_function=new_sig_private_function,\n", @@ -510,7 +516,7 @@ "metadata": {}, "outputs": [], "source": [ - "response = domain_client.api.services.api.update(endpoint_path=\"test.update\")\n", + "response = datasite_client.api.services.api.update(endpoint_path=\"test.update\")\n", "assert isinstance(response, SyftError), response" ] }, @@ -532,7 +538,7 @@ " return \"Updated Public Function Execution\"\n", "\n", "\n", - "response = domain_client.api.services.api.update(\n", + "response = datasite_client.api.services.api.update(\n", " endpoint_path=\"test.update\", mock_function=bad_public_function\n", ")\n", "assert isinstance(response, SyftError), response" @@ -551,7 +557,7 @@ "metadata": {}, "outputs": [], "source": [ - "response = domain_client.api.services.api.update(\n", + "response = datasite_client.api.services.api.update(\n", " endpoint_path=\"nonexistent\", mock_function=bad_public_function\n", ")\n", "assert isinstance(response, SyftError), response" diff --git a/notebooks/experimental/enclaves/V1/00-do-setup-domain.ipynb b/notebooks/experimental/enclaves/V1/00-do-setup-domain.ipynb index 0cbeaa20897..de366a74e7e 100644 --- a/notebooks/experimental/enclaves/V1/00-do-setup-domain.ipynb +++ b/notebooks/experimental/enclaves/V1/00-do-setup-domain.ipynb @@ -19,11 +19,11 @@ "\n", "## Data Owners Workflow - Part 1\n", "[./00-do-setup-domain.ipynb](./00-do-setup-domain.ipynb)\n", - "- Launch two domain nodes for providing data to the data scientist.\n", - "- Launch one enclave node for performing the secure computation using data from both the domain nodes.\n", - "- Upload datasets to both the domain nodes.\n", - "- Register an account for the data scientist in both the domain nodes.\n", - "- Register the enclave node with one of the domain nodes for discoverability by the data scientist.\n", + "- Launch two domain servers for providing data to the data scientist.\n", + "- Launch one enclave server for performing the secure computation using data from both the domain servers.\n", + "- Upload datasets to both the domain servers.\n", + "- Register an account for the data scientist in both the domain servers.\n", + "- Register the enclave server with one of the domain servers for discoverability by the data scientist.\n", "\n", "## Data Scientist Workflow - Part 1\n", "[./01-ds-submit-project.ipynb](./01-ds-submit-project.ipynb)\n", @@ -58,8 +58,8 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", - "from syft.service.network.routes import HTTPNodeRoute\n", + "from syft.abstract_server import ServerType\n", + "from syft.service.network.routes import HTTPServerRoute\n", "from syft.service.response import SyftAttributeError\n", "from syft.service.response import SyftSuccess\n", "\n", @@ -72,9 +72,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Launch nodes\n", + "# Launch servers\n", "\n", - "We will begin by launching two domain nodes and an enclave node." + "We will begin by launching two domain servers and an enclave server." 
] }, { @@ -83,15 +83,15 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node = sy.orchestra.launch(\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", "canada_enclave = sy.orchestra.launch(\n", " name=\"canada-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=ServerType.ENCLAVE,\n", " port=CANADA_ENCLAVE_PORT,\n", " dev_mode=True,\n", " reset=True,\n", @@ -104,11 +104,13 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "\n", - "assert do_canada_client.metadata.node_type == NodeType.DOMAIN\n", - "assert do_italy_client.metadata.node_type == NodeType.DOMAIN" + "assert do_canada_client.metadata.server_type == ServerType.DOMAIN\n", + "assert do_italy_client.metadata.server_type == ServerType.DOMAIN" ] }, { @@ -202,7 +204,7 @@ "metadata": {}, "outputs": [], "source": [ - "route = HTTPNodeRoute(host_or_ip=\"localhost\", port=CANADA_ENCLAVE_PORT)\n", + "route = HTTPServerRoute(host_or_ip=\"localhost\", port=CANADA_ENCLAVE_PORT)\n", "do_canada_client.enclaves.add(route=route)" ] }, @@ -236,7 +238,7 @@ "# Data scientist should not be able to add enclave to the domain\n", "with pytest.raises(SyftAttributeError) as exc_info:\n", " ds_canada_client.enclaves.add(\n", - " name=\"Dummy Enclave\", route=HTTPNodeRoute(host_or_ip=\"localhost\", port=9084)\n", + " name=\"Dummy Enclave\", route=HTTPServerRoute(host_or_ip=\"localhost\", port=9084)\n", " )\n", "print(exc_info.value)" ] @@ -265,11 +267,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" diff --git a/notebooks/experimental/enclaves/V1/01-ds-submit-project.ipynb b/notebooks/experimental/enclaves/V1/01-ds-submit-project.ipynb index 786b30cff55..8ea9b677d7d 100644 --- a/notebooks/experimental/enclaves/V1/01-ds-submit-project.ipynb +++ b/notebooks/experimental/enclaves/V1/01-ds-submit-project.ipynb @@ -40,7 +40,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Log in to the domain nodes as a data scientist" + "# Log in to the domain servers as a data scientist" ] }, { @@ -49,11 +49,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " 
name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")" ] @@ -64,8 +64,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -220,8 +222,8 @@ "metadata": {}, "outputs": [], "source": [ - "assert project.requests[0].node_uid == ds_canada_client.id\n", - "assert project.requests[1].node_uid == ds_italy_client.id" + "assert project.requests[0].server_uid == ds_canada_client.id\n", + "assert project.requests[1].server_uid == ds_italy_client.id" ] }, { @@ -230,7 +232,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Test: data scientist should not be able to request execution without approval of all nodes\n", + "# Test: data scientist should not be able to request execution without approval of all servers\n", "assert project.pending_requests != 0 # There are pending requests\n", "with pytest.raises(SyftException) as exc_info:\n", " project.request_execution(blocking=True)\n", @@ -262,11 +264,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()" + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()" ] } ], diff --git a/notebooks/experimental/enclaves/V1/02-do-review-code.ipynb b/notebooks/experimental/enclaves/V1/02-do-review-code.ipynb index 88b09189c95..4dc3452f817 100644 --- a/notebooks/experimental/enclaves/V1/02-do-review-code.ipynb +++ b/notebooks/experimental/enclaves/V1/02-do-review-code.ipynb @@ -35,7 +35,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Log in to the first domain node as the data owner" + "# Log in to the first domain server as the data owner" ] }, { @@ -44,8 +44,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")" ] @@ -56,7 +56,9 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")" ] }, { @@ -168,7 +170,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Approve the code from the other domain node" + "# Approve the code from the other domain server" ] }, { @@ -177,10 +179,10 @@ "metadata": {}, "outputs": [], "source": [ - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "\n", "projects = do_italy_client.projects.get_all()\n", "assert len(projects) == 1\n", 
@@ -210,11 +212,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()" + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()" ] } ], diff --git a/notebooks/experimental/enclaves/V1/03-ds-execute-code.ipynb b/notebooks/experimental/enclaves/V1/03-ds-execute-code.ipynb index 82a9768b4be..4ab56eb29a9 100644 --- a/notebooks/experimental/enclaves/V1/03-ds-execute-code.ipynb +++ b/notebooks/experimental/enclaves/V1/03-ds-execute-code.ipynb @@ -26,7 +26,7 @@ "source": [ "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import ServerType\n", "\n", "CANADA_DOMAIN_PORT = 9081\n", "ITALY_DOMAIN_PORT = 9082\n", @@ -39,16 +39,16 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")\n", "canada_enclave = sy.orchestra.launch(\n", " name=\"canada-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=ServerType.ENCLAVE,\n", " port=CANADA_ENCLAVE_PORT,\n", " dev_mode=True,\n", " create_producer=True,\n", @@ -63,8 +63,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -124,11 +126,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" diff --git a/notebooks/experimental/enclaves/V2/00-do-setup-domain.ipynb b/notebooks/experimental/enclaves/V2/00-do-setup-domain.ipynb index 3f5f17c1afc..fc215ade52e 100644 --- a/notebooks/experimental/enclaves/V2/00-do-setup-domain.ipynb +++ b/notebooks/experimental/enclaves/V2/00-do-setup-domain.ipynb @@ -19,11 +19,11 @@ "\n", "## Data Owners Workflow - Part 1\n", "[./00-do-setup-domain.ipynb](./00-do-setup-domain.ipynb)\n", - "- Launch two domain nodes for providing data to the data scientist.\n", - "- Launch one enclave node for performing the secure computation using data from both the domain nodes.\n", - "- Upload datasets to both the domain nodes.\n", - "- Register an account for the data scientist in both the domain nodes.\n", - "- Register the enclave node with one of the domain nodes for discoverability by the data 
scientist.\n", + "- Launch two domain servers for providing data to the data scientist.\n", + "- Launch one enclave server for performing the secure computation using data from both the domain servers.\n", + "- Upload datasets to both the domain servers.\n", + "- Register an account for the data scientist in both the domain servers.\n", + "- Register the enclave server with one of the domain servers for discoverability by the data scientist.\n", "\n", "## Data Scientist Workflow - Part 1\n", "[./01-ds-submit-project.ipynb](./01-ds-submit-project.ipynb)\n", @@ -58,8 +58,8 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", - "from syft.service.network.routes import HTTPNodeRoute\n", + "from syft.abstract_server import ServerType\n", + "from syft.service.network.routes import HTTPServerRoute\n", "from syft.service.response import SyftAttributeError\n", "from syft.service.response import SyftSuccess\n", "\n", @@ -72,9 +72,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Launch nodes\n", + "# Launch servers\n", "\n", - "We will begin by launching two domain nodes and an enclave node." + "We will begin by launching two domain servers and an enclave server." ] }, { @@ -83,15 +83,15 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node = sy.orchestra.launch(\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", "canada_enclave = sy.orchestra.launch(\n", " name=\"canada-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=ServerType.ENCLAVE,\n", " port=CANADA_ENCLAVE_PORT,\n", " dev_mode=True,\n", " reset=True,\n", @@ -104,11 +104,13 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "\n", - "assert do_canada_client.metadata.node_type == NodeType.DOMAIN\n", - "assert do_italy_client.metadata.node_type == NodeType.DOMAIN" + "assert do_canada_client.metadata.server_type == ServerType.DOMAIN\n", + "assert do_italy_client.metadata.server_type == ServerType.DOMAIN" ] }, { @@ -202,7 +204,7 @@ "metadata": {}, "outputs": [], "source": [ - "route = HTTPNodeRoute(host_or_ip=\"localhost\", port=CANADA_ENCLAVE_PORT)\n", + "route = HTTPServerRoute(host_or_ip=\"localhost\", port=CANADA_ENCLAVE_PORT)\n", "do_canada_client.enclaves.add(route=route)" ] }, @@ -236,7 +238,7 @@ "# Data scientist should not be able to add enclave to the domain\n", "with pytest.raises(SyftAttributeError) as exc_info:\n", " ds_canada_client.enclaves.add(\n", - " name=\"Dummy Enclave\", route=HTTPNodeRoute(host_or_ip=\"localhost\", port=9084)\n", + " name=\"Dummy Enclave\", route=HTTPServerRoute(host_or_ip=\"localhost\", port=9084)\n", " )\n", "print(exc_info.value)" ] @@ -265,11 +267,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", 
"\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" diff --git a/notebooks/experimental/enclaves/V2/01-connect-domains.ipynb b/notebooks/experimental/enclaves/V2/01-connect-domains.ipynb index da03fd9c57c..386694c974c 100644 --- a/notebooks/experimental/enclaves/V2/01-connect-domains.ipynb +++ b/notebooks/experimental/enclaves/V2/01-connect-domains.ipynb @@ -13,11 +13,11 @@ "\n", "3. The Data Scientist is able to create a project if and only if , all the domains could talk to each other. (i.e they have beeen previously associated)\n", "\n", - "4. This would mean that in our semi-Decentralized, leader based system , when ever a node would like to add an event/message to the project, it has to be sent to the leader, which is then broadcasted to all the other nodes.\n", + "4. This would mean that in our semi-Decentralized, leader based system , when ever a server would like to add an event/message to the project, it has to be sent to the leader, which is then broadcasted to all the other servers.\n", "\n", - "5. For other situations, for example when a node would like to asset metadata of another node in a Multi Domain User Code request, they could directly contact the node, to retrieve the info, instead of going through the leader.\n", + "5. For other situations, for example when a server would like to asset metadata of another server in a Multi Domain User Code request, they could directly contact the server, to retrieve the info, instead of going through the leader.\n", "\n", - "6. This would require us to create a Full Mesh Network Topology, where each node is connected to each other." + "6. This would require us to create a Full Mesh Network Topology, where each server is connected to each other." ] }, { @@ -28,8 +28,8 @@ "source": [ "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", - "from syft.service.network.node_peer import NodePeer\n", + "from syft.abstract_server import ServerType\n", + "from syft.service.network.server_peer import ServerPeer\n", "\n", "CANADA_DOMAIN_PORT = 9081\n", "ITALY_DOMAIN_PORT = 9082" @@ -39,9 +39,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Launch nodes\n", + "# Launch servers\n", "\n", - "We will begin by launching two domain nodes and an enclave node." + "We will begin by launching two domain servers and an enclave server." 
] }, { @@ -50,12 +50,12 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node = sy.orchestra.launch(\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\",\n", " port=CANADA_DOMAIN_PORT,\n", " dev_mode=True,\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\",\n", " port=ITALY_DOMAIN_PORT,\n", " dev_mode=True,\n", @@ -68,11 +68,13 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", "\n", - "assert ds_canada_client.metadata.node_type == NodeType.DOMAIN\n", - "assert ds_italy_client.metadata.node_type == NodeType.DOMAIN" + "assert ds_canada_client.metadata.server_type == ServerType.DOMAIN\n", + "assert ds_italy_client.metadata.server_type == ServerType.DOMAIN" ] }, { @@ -88,8 +90,8 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node_peer = NodePeer.from_client(ds_canada_client)\n", - "canada_node_peer" + "canada_server_peer = ServerPeer.from_client(ds_canada_client)\n", + "canada_server_peer" ] }, { @@ -98,8 +100,8 @@ "metadata": {}, "outputs": [], "source": [ - "italy_node_peer = NodePeer.from_client(ds_italy_client)\n", - "italy_node_peer" + "italy_server_peer = ServerPeer.from_client(ds_italy_client)\n", + "italy_server_peer" ] }, { @@ -108,7 +110,7 @@ "metadata": {}, "outputs": [], "source": [ - "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_node_peer)\n", + "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_server_peer)\n", "canada_conn_req" ] }, @@ -118,7 +120,7 @@ "metadata": {}, "outputs": [], "source": [ - "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_node_peer)\n", + "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_server_peer)\n", "italy_conn_req" ] }, @@ -135,8 +137,10 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -228,11 +232,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()" + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()" ] }, { diff --git a/notebooks/experimental/enclaves/V2/02-ds-submit-project.ipynb b/notebooks/experimental/enclaves/V2/02-ds-submit-project.ipynb index ba6d6fc0147..105b26d2ade 100644 --- a/notebooks/experimental/enclaves/V2/02-ds-submit-project.ipynb +++ b/notebooks/experimental/enclaves/V2/02-ds-submit-project.ipynb @@ -37,7 +37,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Log in to the domain nodes as a data scientist" + "# Log in to the domain 
servers as a data scientist" ] }, { @@ -46,11 +46,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")" ] @@ -61,8 +61,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -252,11 +254,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()" + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()" ] }, { diff --git a/notebooks/experimental/enclaves/V2/03-do-review-code.ipynb b/notebooks/experimental/enclaves/V2/03-do-review-code.ipynb index 97d88f05b6f..9b4a5dfd1d3 100644 --- a/notebooks/experimental/enclaves/V2/03-do-review-code.ipynb +++ b/notebooks/experimental/enclaves/V2/03-do-review-code.ipynb @@ -44,12 +44,12 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")\n", "\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")" ] @@ -58,7 +58,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Log in to the first domain node as the data owner" + "# Log in to the first domain server as the data owner" ] }, { @@ -67,7 +67,9 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")" ] }, { @@ -222,7 +224,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Approve the code from the other domain node" + "# Approve the code from the other domain server" ] }, { @@ -231,7 +233,7 @@ "metadata": {}, "outputs": [], "source": [ - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -326,11 +328,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()" + "if 
italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()" ] }, { diff --git a/notebooks/experimental/enclaves/V2/04-ds-execute-code.ipynb b/notebooks/experimental/enclaves/V2/04-ds-execute-code.ipynb index f6337840449..a16665b9cf7 100644 --- a/notebooks/experimental/enclaves/V2/04-ds-execute-code.ipynb +++ b/notebooks/experimental/enclaves/V2/04-ds-execute-code.ipynb @@ -26,7 +26,7 @@ "source": [ "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import ServerType\n", "\n", "CANADA_DOMAIN_PORT = 9081\n", "ITALY_DOMAIN_PORT = 9082\n", @@ -39,16 +39,16 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True\n", ")\n", "canada_enclave = sy.orchestra.launch(\n", " name=\"canada-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=ServerType.ENCLAVE,\n", " port=CANADA_ENCLAVE_PORT,\n", " dev_mode=True,\n", " create_producer=True,\n", @@ -63,8 +63,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -156,11 +158,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" diff --git a/notebooks/experimental/enclaves/V2/V2-Enclave-Single-Notebook.ipynb b/notebooks/experimental/enclaves/V2/V2-Enclave-Single-Notebook.ipynb index 90ebdd79de6..dcdda3a5e4c 100644 --- a/notebooks/experimental/enclaves/V2/V2-Enclave-Single-Notebook.ipynb +++ b/notebooks/experimental/enclaves/V2/V2-Enclave-Single-Notebook.ipynb @@ -12,24 +12,24 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import ServerType\n", "from syft.service.code.user_code import UserCodeStatus\n", - "from syft.service.network.node_peer import NodePeer\n", - "from syft.service.network.routes import HTTPNodeRoute\n", + "from syft.service.network.routes import HTTPServerRoute\n", + "from syft.service.network.server_peer import ServerPeer\n", "from syft.service.project.project import ProjectCode\n", "from syft.service.project.project import check_route_reachability\n", "from syft.service.response import SyftSuccess\n", "from syft.types.uid import UID\n", "\n", - "CANADA_DOMAIN_PORT = 9081\n", - "ITALY_DOMAIN_PORT = 9082\n", + "CANADA_DATASITE_PORT = 9081\n", + "ITALY_DATASITE_PORT = 9082\n", 
"CANADA_ENCLAVE_HOST = None\n", "CANADA_ENCLAVE_PORT = 9083\n", "\n", "CPU_ENCLAVE = \"13.90.101.161\"\n", "GPU_ENCLAVE = \"172.176.204.136\"\n", "#! Uncomment below line to run the code on a pre-provisioned remote Enclave\n", - "CANADA_ENCLAVE_HOST = CPU_ENCLAVE" + "CANADA_ENCLAVE_HOST = None" ] }, { @@ -37,9 +37,9 @@ "id": "1", "metadata": {}, "source": [ - "# Launch nodes\n", + "# Launch servers\n", "\n", - "We will begin by launching two domain nodes and an enclave node." + "We will begin by launching two datasite servers and an enclave server." ] }, { @@ -48,10 +48,10 @@ "metadata": {}, "source": [ "### For Kubernetes\n", - "To run the nodes in kubernetes, run the below commands and wait till the cluster becomes ready.\n", + "To run the servers in kubernetes, run the below commands and wait till the cluster becomes ready.\n", "```bash\n", - "CLUSTER_NAME=canada-domain CLUSTER_HTTP_PORT=9081 tox -e dev.k8s.launch.domain\n", - "CLUSTER_NAME=italy-domain CLUSTER_HTTP_PORT=9082 tox -e dev.k8s.launch.domain\n", + "CLUSTER_NAME=canada-datasite CLUSTER_HTTP_PORT=9081 tox -e dev.k8s.launch.datasite\n", + "CLUSTER_NAME=italy-datasite CLUSTER_HTTP_PORT=9082 tox -e dev.k8s.launch.datasite\n", "CLUSTER_NAME=canada-enclave CLUSTER_HTTP_PORT=9083 tox -e dev.k8s.launch.enclave\n", "```" ] @@ -63,15 +63,15 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node = sy.orchestra.launch(\n", - " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True, reset=True\n", + "canada_server = sy.orchestra.launch(\n", + " name=\"canada-datasite\", port=CANADA_DATASITE_PORT, dev_mode=True, reset=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", - " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True, reset=True\n", + "italy_server = sy.orchestra.launch(\n", + " name=\"italy-datasite\", port=ITALY_DATASITE_PORT, dev_mode=True, reset=True\n", ")\n", "enclave_kwargs = {\n", " \"name\": \"canada-enclave\",\n", - " \"node_type\": NodeType.ENCLAVE,\n", + " \"server_type\": ServerType.ENCLAVE,\n", " \"port\": CANADA_ENCLAVE_PORT,\n", " \"create_producer\": True,\n", " \"n_consumers\": 3,\n", @@ -91,11 +91,13 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "\n", - "assert do_canada_client.metadata.node_type == NodeType.DOMAIN\n", - "assert do_italy_client.metadata.node_type == NodeType.DOMAIN" + "assert do_canada_client.metadata.server_type == ServerType.DATASITE\n", + "assert do_italy_client.metadata.server_type == ServerType.DATASITE" ] }, { @@ -103,7 +105,7 @@ "id": "5", "metadata": {}, "source": [ - "# Upload datasets to both domains" + "# Upload datasets to both datasites" ] }, { @@ -162,7 +164,7 @@ "id": "9", "metadata": {}, "source": [ - "# Create account for data scientist on both the domains" + "# Create account for data scientist on both the datasites" ] }, { @@ -187,7 +189,7 @@ "id": "11", "metadata": {}, "source": [ - "# Register the enclave with Canada domain" + "# Register the enclave with Canada datasite" ] }, { @@ -197,7 +199,7 @@ "metadata": {}, "outputs": [], "source": [ - "route = HTTPNodeRoute(host_or_ip=canada_enclave.url, port=canada_enclave.port)\n", + "route = 
HTTPServerRoute(host_or_ip=canada_enclave.url, port=canada_enclave.port)\n", "do_canada_client.enclaves.add(route=route)" ] }, @@ -227,8 +229,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -246,8 +250,8 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node_peer = NodePeer.from_client(ds_canada_client)\n", - "canada_node_peer" + "canada_server_peer = ServerPeer.from_client(ds_canada_client)\n", + "canada_server_peer" ] }, { @@ -257,8 +261,8 @@ "metadata": {}, "outputs": [], "source": [ - "italy_node_peer = NodePeer.from_client(ds_italy_client)\n", - "italy_node_peer" + "italy_server_peer = ServerPeer.from_client(ds_italy_client)\n", + "italy_server_peer" ] }, { @@ -268,7 +272,7 @@ "metadata": {}, "outputs": [], "source": [ - "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_node_peer)\n", + "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_server_peer)\n", "canada_conn_req" ] }, @@ -279,7 +283,7 @@ "metadata": {}, "outputs": [], "source": [ - "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_node_peer)\n", + "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_server_peer)\n", "italy_conn_req" ] }, @@ -318,7 +322,7 @@ "id": "24", "metadata": {}, "source": [ - "# Find datasets across multiple domains" + "# Find datasets across multiple datasites" ] }, { @@ -643,7 +647,7 @@ "id": "48", "metadata": {}, "source": [ - "# Cleanup local domain servers" + "# Cleanup local datasite servers" ] }, { @@ -653,15 +657,23 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0b8747c8-5e4c-4115-aa8f-74c8a14c4461", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -680,7 +692,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.7" + "version": "3.11.8" } }, "nbformat": 4, diff --git a/notebooks/experimental/enclaves/V3/V3-Enclave-Model-HostingSingle-Notebook.ipynb b/notebooks/experimental/enclaves/V3/V3-Enclave-Model-HostingSingle-Notebook.ipynb index 608e3be0eb5..c9f244e75ca 100644 --- a/notebooks/experimental/enclaves/V3/V3-Enclave-Model-HostingSingle-Notebook.ipynb +++ b/notebooks/experimental/enclaves/V3/V3-Enclave-Model-HostingSingle-Notebook.ipynb @@ -11,10 +11,10 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import ServerType\n", "from syft.service.code.user_code import UserCodeStatus\n", - "from syft.service.network.node_peer import NodePeer\n", - "from syft.service.network.routes import HTTPNodeRoute\n", + "from syft.service.network.routes 
import HTTPServerRoute\n", + "from syft.service.network.server_peer import ServerPeer\n", "from syft.service.project.project import ProjectCode\n", "from syft.service.project.project import check_route_reachability\n", "from syft.service.response import SyftSuccess\n", @@ -33,9 +33,9 @@ "id": "1", "metadata": {}, "source": [ - "# Launch nodes\n", + "# Launch servers\n", "\n", - "We will begin by launching two domain nodes and an enclave node." + "We will begin by launching two domain servers and an enclave server." ] }, { @@ -44,7 +44,7 @@ "metadata": {}, "source": [ "### For Kubernetes\n", - "To run the nodes in kubernetes, run the below commands and wait till the cluster becomes ready.\n", + "To run the servers in kubernetes, run the below commands and wait till the cluster becomes ready.\n", "```bash\n", "CLUSTER_NAME=canada-domain CLUSTER_HTTP_PORT=9081 tox -e dev.k8s.launch.domain\n", "CLUSTER_NAME=italy-domain CLUSTER_HTTP_PORT=9082 tox -e dev.k8s.launch.domain\n", @@ -59,15 +59,15 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node = sy.orchestra.launch(\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=CANADA_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", - "italy_node = sy.orchestra.launch(\n", + "italy_server = sy.orchestra.launch(\n", " name=\"italy-domain\", port=ITALY_DOMAIN_PORT, dev_mode=True, reset=True\n", ")\n", "enclave_kwargs = {\n", " \"name\": \"canada-enclave\",\n", - " \"node_type\": NodeType.ENCLAVE,\n", + " \"server_type\": ServerType.ENCLAVE,\n", " \"port\": CANADA_ENCLAVE_PORT,\n", " \"create_producer\": True,\n", " \"n_consumers\": 3,\n", @@ -87,11 +87,13 @@ "metadata": {}, "outputs": [], "source": [ - "do_canada_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_italy_client = italy_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "do_canada_client = canada_server.login(\n", + " email=\"info@openmined.org\", password=\"changethis\"\n", + ")\n", + "do_italy_client = italy_server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "\n", - "assert do_canada_client.metadata.node_type == NodeType.DOMAIN\n", - "assert do_italy_client.metadata.node_type == NodeType.DOMAIN" + "assert do_canada_client.metadata.server_type == ServerType.DOMAIN\n", + "assert do_italy_client.metadata.server_type == ServerType.DOMAIN" ] }, { @@ -372,7 +374,7 @@ "metadata": {}, "outputs": [], "source": [ - "route = HTTPNodeRoute(host_or_ip=canada_enclave.url, port=canada_enclave.port)\n", + "route = HTTPServerRoute(host_or_ip=canada_enclave.url, port=canada_enclave.port)\n", "do_canada_client.enclaves.add(route=route)" ] }, @@ -402,8 +404,10 @@ "metadata": {}, "outputs": [], "source": [ - "ds_canada_client = canada_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")\n", - "ds_italy_client = italy_node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + "ds_canada_client = canada_server.login(\n", + " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", + ")\n", + "ds_italy_client = italy_server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { @@ -421,8 +425,8 @@ "metadata": {}, "outputs": [], "source": [ - "canada_node_peer = NodePeer.from_client(ds_canada_client)\n", - "canada_node_peer" + "canada_server_peer = ServerPeer.from_client(ds_canada_client)\n", + "canada_server_peer" ] }, { @@ -432,8 +436,8 @@ "metadata": {}, "outputs": [], "source": [ - "italy_node_peer = NodePeer.from_client(ds_italy_client)\n", - 
"italy_node_peer" + "italy_server_peer = ServerPeer.from_client(ds_italy_client)\n", + "italy_server_peer" ] }, { @@ -443,7 +447,7 @@ "metadata": {}, "outputs": [], "source": [ - "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_node_peer)\n", + "canada_conn_req = ds_canada_client.api.services.network.add_peer(italy_server_peer)\n", "canada_conn_req" ] }, @@ -454,7 +458,7 @@ "metadata": {}, "outputs": [], "source": [ - "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_node_peer)\n", + "italy_conn_req = ds_italy_client.api.services.network.add_peer(canada_server_peer)\n", "italy_conn_req" ] }, @@ -802,11 +806,11 @@ "metadata": {}, "outputs": [], "source": [ - "if canada_node.deployment_type.value == \"python\":\n", - " canada_node.land()\n", + "if canada_server.deployment_type.value == \"python\":\n", + " canada_server.land()\n", "\n", - "if italy_node.deployment_type.value == \"python\":\n", - " italy_node.land()\n", + "if italy_server.deployment_type.value == \"python\":\n", + " italy_server.land()\n", "\n", "if canada_enclave.deployment_type.value == \"python\":\n", " canada_enclave.land()" diff --git a/notebooks/experimental/model-hosting/3. model-hosting-iteration2.ipynb b/notebooks/experimental/model-hosting/3. model-hosting-iteration2.ipynb index ed4c7cf3a72..339899a5c62 100644 --- a/notebooks/experimental/model-hosting/3. model-hosting-iteration2.ipynb +++ b/notebooks/experimental/model-hosting/3. model-hosting-iteration2.ipynb @@ -85,7 +85,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: canada-domain in dev_mode. Don't run this in production.\n" + "WARNING: private key is based on server name: canada-domain in dev_mode. Don't run this in production.\n" ] }, { @@ -108,10 +108,10 @@ { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:53752.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:53752.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:53752.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:53752.It is intended only for local use." ] }, "metadata": {}, @@ -119,8 +119,8 @@ } ], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\", port=\"auto\", dev_mode=True, reset=True\n", ")" ] @@ -157,7 +157,7 @@ } ], "source": [ - "domain_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "domain_client = canada_server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -2940,10 +2940,10 @@ { "data": { "text/html": [ - "
SyftSuccess: Dataset uploaded to 'canada-domain'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" + "SyftSuccess: Dataset uploaded to 'canada-domain'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`
" ], "text/plain": [ - "SyftSuccess: Dataset uploaded to 'canada-domain'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" + "SyftSuccess: Dataset uploaded to 'canada-domain'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`" ] }, "execution_count": 19, @@ -5611,7 +5611,7 @@ } ], "source": [ - "ds_client = canada_node.login(\n", + "ds_client = canada_server.login(\n", " email=\"sheldon@caltech.edu\",\n", " password=\"changethis\",\n", ")" @@ -8279,7 +8279,7 @@ } ], "source": [ - "prompt_action_obj.syft_node_location = ds_client.id\n", + "prompt_action_obj.syft_server_location = ds_client.id\n", "prompt_action_obj.syft_client_verify_key = ds_client.api.signing_key.verify_key\n", "prompt_action_obj._save_to_blob_storage()" ] @@ -8392,16 +8392,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "SyftInfo: Closing the node after time_alive=300 (the default value)\n" + "SyftInfo: Closing the server after time_alive=300 (the default value)\n" ] }, { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:None.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:None.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:None.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:None.It is intended only for local use." ] }, "metadata": {}, @@ -8411,7 +8411,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Logged into as \n" + "Logged into as \n" ] }, { @@ -8433,8 +8433,8 @@ "INFO: 127.0.0.1:53833 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", "INFO: 127.0.0.1:53835 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", "INFO: 127.0.0.1:53837 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", - "Approving request on change run_eval for domain ephemeral_node_run_eval_1771\n", - "SyftInfo: Landing the ephmeral node...\n" + "Approving request on change run_eval for domain ephemeral_server_run_eval_1771\n", + "SyftInfo: Landing the ephmeral server...\n" ] }, { diff --git a/notebooks/model-hosting.ipynb b/notebooks/model-hosting.ipynb index bf4633cdc87..1a8b26eeb5f 100644 --- a/notebooks/model-hosting.ipynb +++ b/notebooks/model-hosting.ipynb @@ -58,8 +58,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Launch the domain nodes we setup in the previous notebook\n", - "canada_node = sy.orchestra.launch(\n", + "# Launch the domain servers we setup in the previous notebook\n", + "canada_server = sy.orchestra.launch(\n", " name=\"canada-domain\",\n", " port=\"auto\",\n", " dev_mode=True,\n", @@ -74,7 +74,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client = canada_node.login(email=\"info@openmined.org\", password=\"changethis\")" + "domain_client = canada_server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -665,9 +665,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "python311-leJXwuFJ", "language": "python", - "name": "python3" + "name": "python311-lejxwufj" }, "language_info": { "codemirror_mode": { diff --git a/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb b/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb index 47ff98c6d80..7fe15c502da 100644 --- a/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb +++ b/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb @@ -1,45 +1,45 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "248f62a8-94b6-481d-a32b-03d5c2b67ae0", - "metadata": {}, - "outputs": [], - "source": [ - "# -- adding custom endpoints that use custom pools\n", - "# -- mock vs private\n", - "# -- context.node\n", - "# -- settings and state\n", - "# -- service account via settings or k8s labels\n", - "# -- how to clear state\n", - "# -- how to implement a basic rate limiter using context.user\n", - "# -- creating a big query endpoint with a try catch\n", - "# -- how to delete / replace api endpoints\n", - "# -- testing endpoints and their mock / private versions\n", - "# -- executing a large query and checking blob storage" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "248f62a8-94b6-481d-a32b-03d5c2b67ae0", + "metadata": {}, + "outputs": [], + "source": [ + "# -- adding custom endpoints that use custom pools\n", + "# -- mock vs private\n", + "# -- context.server\n", + "# -- settings and state\n", + "# -- service account via settings or k8s labels\n", + "# -- how to clear state\n", + "# -- how to implement a basic rate limiter using context.user\n", + "# 
-- creating a big query endpoint with a try catch\n", + "# -- how to delete / replace api endpoints\n", + "# -- testing endpoints and their mock / private versions\n", + "# -- executing a large query and checking blob storage" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/notebooks/scenarios/enclave/01-primary-domain-setup.ipynb b/notebooks/scenarios/enclave/01-primary-datasite-setup.ipynb similarity index 99% rename from notebooks/scenarios/enclave/01-primary-domain-setup.ipynb rename to notebooks/scenarios/enclave/01-primary-datasite-setup.ipynb index aa25ebf9310..a2a2df5f6ad 100644 --- a/notebooks/scenarios/enclave/01-primary-domain-setup.ipynb +++ b/notebooks/scenarios/enclave/01-primary-datasite-setup.ipynb @@ -79,7 +79,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: model-owner in dev_mode. Don't run this in production.\n", + "WARNING: private key is based on server name: model-owner in dev_mode. Don't run this in production.\n", "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n", "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n", "INFO: 127.0.0.1:62672 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", @@ -89,10 +89,10 @@ { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:8081.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:8081.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." ] }, "metadata": {}, @@ -221,7 +221,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "ERROR:syft.node.routes:Register Error: User already exists with email: madhava@openmined.org. user={'id': , 'email': 'madhava@openmined.org', 'name': 'Ishan', 'role': None, 'password': 'changethis', 'password_verify': 'changethis', 'verify_key': None, 'institution': None, 'website': None, 'created_by': {'signing_key': }, 'mock_execution_permission': False}\n" + "ERROR:syft.server.routes:Register Error: User already exists with email: madhava@openmined.org. user={'id': , 'email': 'madhava@openmined.org', 'name': 'Ishan', 'role': None, 'password': 'changethis', 'password_verify': 'changethis', 'verify_key': None, 'institution': None, 'website': None, 'created_by': {'signing_key': }, 'mock_execution_permission': False}\n" ] }, { @@ -3151,7 +3151,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: model-owner in dev_mode. Don't run this in production.\n", + "WARNING: private key is based on server name: model-owner in dev_mode. Don't run this in production.\n", "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n", "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n", "INFO: 127.0.0.1:62771 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", @@ -3194,10 +3194,10 @@ { "data": { "text/html": [ - "
SyftSuccess: Model uploaded to 'model-owner'. To see the models uploaded by a client on this node, use command `[your_client].models`" + "SyftSuccess: Model uploaded to 'model-owner'. To see the models uploaded by a client on this server, use command `[your_client].models`
" ], "text/plain": [ - "SyftSuccess: Model uploaded to 'model-owner'. To see the models uploaded by a client on this node, use command `[your_client].models`" + "SyftSuccess: Model uploaded to 'model-owner'. To see the models uploaded by a client on this server, use command `[your_client].models`" ] }, "execution_count": 23, diff --git a/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb b/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb index 6d333d8eea5..ebc4d2f8f69 100644 --- a/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb +++ b/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb @@ -18,7 +18,7 @@ "metadata": {}, "outputs": [], "source": [ - "# create enclave node on Azure\n", + "# create enclave server on Azure\n", "# skip the need for custom worker pools and images by using transformers package already included" ] }, @@ -63,11 +63,11 @@ ], "source": [ "# syft absolute\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import NodeType\n", "\n", "azure_h100_enclave = sy.orchestra.launch(\n", " name=\"azure-h100-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=NodeType.ENCLAVE,\n", " port=8083,\n", " create_producer=True,\n", " n_consumers=3,\n", @@ -152,28 +152,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "50d923c1-4d66-4b0b-b580-a138ba9aee2c", "metadata": {}, "outputs": [], "source": [ "# request attestations?" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0ef1e299-a95f-4263-8d87-2d98f286ccf3", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "python311-leJXwuFJ", "language": "python", - "name": "python3" + "name": "python311-lejxwufj" }, "language_info": { "codemirror_mode": { @@ -185,9 +177,11 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" - } + "version": "3.11.8" + }, + "nbformat": 4, + "nbformat_minor": 5 }, "nbformat": 4, - "nbformat_minor": 5 + "nbformat_minor": 2 } diff --git a/notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb b/notebooks/scenarios/enclave/03-secondary-datasite-setup.ipynb similarity index 99% rename from notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb rename to notebooks/scenarios/enclave/03-secondary-datasite-setup.ipynb index b424caec0ce..1e93bd2eb16 100644 --- a/notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb +++ b/notebooks/scenarios/enclave/03-secondary-datasite-setup.ipynb @@ -69,7 +69,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: model-auditor in dev_mode. Don't run this in production.\n", + "WARNING: private key is based on server name: model-auditor in dev_mode. Don't run this in production.\n", "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/d7ffd135e5914ab0b2138fc7d4f72ec0/db/d7ffd135e5914ab0b2138fc7d4f72ec0.sqlite\n", "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/d7ffd135e5914ab0b2138fc7d4f72ec0/db/d7ffd135e5914ab0b2138fc7d4f72ec0.sqlite\n", "INFO: 127.0.0.1:58675 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", @@ -79,10 +79,10 @@ { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:8082.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8082.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:8082.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8082.It is intended only for local use." ] }, "metadata": {}, @@ -303,10 +303,10 @@ { "data": { "text/html": [ - "
SyftSuccess: Dataset uploaded to 'model-auditor'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" + "SyftSuccess: Dataset uploaded to 'model-auditor'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`
" ], "text/plain": [ - "SyftSuccess: Dataset uploaded to 'model-auditor'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" + "SyftSuccess: Dataset uploaded to 'model-auditor'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`" ] }, "execution_count": 11, @@ -5807,7 +5807,7 @@ "outputs": [], "source": [ "# syft absolute\n", - "from syft.service.network.node_peer import NodePeer" + "from syft.service.network.server_peer import NodePeer" ] }, { @@ -5823,7 +5823,7 @@ "class NodePeer:\n", " id: str = d7ffd135e5914ab0b2138fc7d4f72ec0\n", " name: str = \"model-auditor\"\n", - " node_type: str = \"domain\"\n", + " server_type: str = \"domain\"\n", " admin_email: str = \"\"\n", " ping_status: str = None\n", " ping_status_message: str = None\n", @@ -5832,7 +5832,7 @@ "```" ], "text/plain": [ - "syft.service.network.node_peer.NodePeer" + "syft.service.network.server_peer.NodePeer" ] }, "execution_count": 18, @@ -5890,7 +5890,7 @@ "class NodePeer:\n", " id: str = ea6fe92e4be5471da3d1a423d39773d0\n", " name: str = \"model-owner\"\n", - " node_type: str = \"domain\"\n", + " server_type: str = \"domain\"\n", " admin_email: str = \"\"\n", " ping_status: str = None\n", " ping_status_message: str = None\n", @@ -5899,7 +5899,7 @@ "```" ], "text/plain": [ - "syft.service.network.node_peer.NodePeer" + "syft.service.network.server_peer.NodePeer" ] }, "execution_count": 20, diff --git a/notebooks/scenarios/enclave/05-domains-review.ipynb b/notebooks/scenarios/enclave/05-datasites-review.ipynb similarity index 100% rename from notebooks/scenarios/enclave/05-domains-review.ipynb rename to notebooks/scenarios/enclave/05-datasites-review.ipynb diff --git a/notebooks/scenarios/enclave/06-manual-execution.ipynb b/notebooks/scenarios/enclave/06-manual-execution.ipynb index a37a1114f9f..77d3163be85 100644 --- a/notebooks/scenarios/enclave/06-manual-execution.ipynb +++ b/notebooks/scenarios/enclave/06-manual-execution.ipynb @@ -3516,7 +3516,7 @@ "data": { "text/html": [ "
SyftError: Exception calling enclave.request_enclave. Traceback (most recent call last):\n",
-       "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/node/node.py\", line 1249, in handle_api_call_with_unsigned_result\n",
+       "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/server/server.py\", line 1249, in handle_api_call_with_unsigned_result\n",
        "    result = method(context, *api_call.args, **api_call.kwargs)\n",
        "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
        "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/service/service.py\", line 368, in _decorator\n",
@@ -3530,7 +3530,7 @@
       ],
       "text/plain": [
        "SyftError: Exception calling enclave.request_enclave. Traceback (most recent call last):\n",
-       "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/node/node.py\", line 1249, in handle_api_call_with_unsigned_result\n",
+       "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/server/server.py\", line 1249, in handle_api_call_with_unsigned_result\n",
        "    result = method(context, *api_call.args, **api_call.kwargs)\n",
        "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
        "  File \"/Users/madhavajay/dev/PySyft/packages/syft/src/syft/service/service.py\", line 368, in _decorator\n",
diff --git a/notebooks/scenarios/enclave/reset_nodes.ipynb b/notebooks/scenarios/enclave/reset_nodes.ipynb
index 2b401201109..13a93154fcc 100644
--- a/notebooks/scenarios/enclave/reset_nodes.ipynb
+++ b/notebooks/scenarios/enclave/reset_nodes.ipynb
@@ -42,7 +42,7 @@
      "output_type": "stream",
      "text": [
       "\n",
-      "WARNING: private key is based on node name: model-owner in dev_mode. Don't run this in production.\n",
+      "WARNING: private key is based on server name: model-owner in dev_mode. Don't run this in production.\n",
       "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n",
       "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/ea6fe92e4be5471da3d1a423d39773d0/db/ea6fe92e4be5471da3d1a423d39773d0.sqlite\n",
       "INFO:     127.0.0.1:58346 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n",
@@ -52,10 +52,10 @@
     {
      "data": {
       "text/html": [
-       "SyftInfo: You have launched a development node at http://0.0.0.0:8081.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:8081.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." ] }, "metadata": {}, @@ -134,7 +134,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: azure-h100-enclave in dev_mode. Don't run this in production.\n", + "WARNING: private key is based on server name: azure-h100-enclave in dev_mode. Don't run this in production.\n", "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/3ea33ab41d1349d29d94545fde3b6721/db/3ea33ab41d1349d29d94545fde3b6721.sqlite\n", "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/3ea33ab41d1349d29d94545fde3b6721/db/3ea33ab41d1349d29d94545fde3b6721.sqlite\n", "INFO: 127.0.0.1:58354 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", @@ -144,10 +144,10 @@ { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:8083.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8083.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:8083.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8083.It is intended only for local use." ] }, "metadata": {}, @@ -175,11 +175,11 @@ "source": [ "# syft absolute\n", "# use to reset\n", - "from syft.abstract_node import NodeType\n", + "from syft.abstract_server import NodeType\n", "\n", "azure_h100_enclave = sy.orchestra.launch(\n", " name=\"azure-h100-enclave\",\n", - " node_type=NodeType.ENCLAVE,\n", + " server_type=NodeType.ENCLAVE,\n", " port=8083,\n", " create_producer=True,\n", " n_consumers=3,\n", @@ -235,7 +235,7 @@ "output_type": "stream", "text": [ "\n", - "WARNING: private key is based on node name: model-auditor in dev_mode. Don't run this in production.\n", + "WARNING: private key is based on server name: model-auditor in dev_mode. Don't run this in production.\n", "Document Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/d7ffd135e5914ab0b2138fc7d4f72ec0/db/d7ffd135e5914ab0b2138fc7d4f72ec0.sqlite\n", "Action Store's SQLite DB path: /var/folders/6_/7xx0tpq16h9cn40mq4w5gjk80000gn/T/syft/d7ffd135e5914ab0b2138fc7d4f72ec0/db/d7ffd135e5914ab0b2138fc7d4f72ec0.sqlite\n", "INFO: 127.0.0.1:58368 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", @@ -245,10 +245,10 @@ { "data": { "text/html": [ - "
SyftInfo: You have launched a development node at http://0.0.0.0:8082.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8082.It is intended only for local use.
" ], "text/plain": [ - "SyftInfo: You have launched a development node at http://0.0.0.0:8082.It is intended only for local use." + "SyftInfo: You have launched a development server at http://0.0.0.0:8082.It is intended only for local use." ] }, "metadata": {}, diff --git a/notebooks/scenarios/getting-started/.gitignore b/notebooks/scenarios/getting-started/.gitignore new file mode 100644 index 00000000000..e96c8f2ef1a --- /dev/null +++ b/notebooks/scenarios/getting-started/.gitignore @@ -0,0 +1,2 @@ +secrets.json +*.csv \ No newline at end of file diff --git a/notebooks/scenarios/getting-started/.ruff.toml b/notebooks/scenarios/getting-started/.ruff.toml new file mode 100644 index 00000000000..4df73d6ade3 --- /dev/null +++ b/notebooks/scenarios/getting-started/.ruff.toml @@ -0,0 +1,2 @@ +[lint] +ignore = ["E501"] diff --git a/notebooks/scenarios/getting-started/01-installing-syft.ipynb b/notebooks/scenarios/getting-started/01-installing-syft.ipynb index 1926fbff596..aa3583f1e29 100644 --- a/notebooks/scenarios/getting-started/01-installing-syft.ipynb +++ b/notebooks/scenarios/getting-started/01-installing-syft.ipynb @@ -17,8 +17,52 @@ { "cell_type": "code", "execution_count": null, + "id": "ca568da2-70a3-4e83-b511-1aca1614a4e3", + "metadata": {}, + "outputs": [], + "source": [ + "# !pip install -U syft" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "29409187-9946-4f01-bb37-e13bb6a3ea57", + "metadata": {}, + "outputs": [], + "source": [ + "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", + "package_string = f'\"syft{SYFT_VERSION}\"'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, "id": "5ddb5738-99a2-4d93-9f14-cfa3ac06d3c9", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "✅ The installed version of syft==0.8.7b14 matches the requirement >=0.8.7b0 and the requirement <0.9\n", + "0.8.7-beta.14\n" + ] + } + ], + "source": [ + "# syft absolute\n", + "import syft as sy\n", + "\n", + "sy.requires(SYFT_VERSION)\n", + "print(sy.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ecfb8e42-b07f-41b0-821a-58b89d45e642", + "metadata": {}, "outputs": [], "source": [] } diff --git a/notebooks/scenarios/getting-started/02-running-python-server.ipynb b/notebooks/scenarios/getting-started/02-running-python-server.ipynb index 54eca1a1431..e666cf6eb93 100644 --- a/notebooks/scenarios/getting-started/02-running-python-server.ipynb +++ b/notebooks/scenarios/getting-started/02-running-python-server.ipynb @@ -16,9 +16,493 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, + "id": "acdedc06-f086-4fc4-94c8-df03f86be75d", + "metadata": {}, + "outputs": [], + "source": [ + "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", + "package_string = f'\"syft{SYFT_VERSION}\"'" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "3d357db9-26e3-4401-ab32-1e1befd7af87", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "✅ The installed version of syft==0.8.7b14 matches the requirement >=0.8.7b0 and the requirement <0.9\n" + ] + } + ], + "source": [ + "# syft absolute\n", + "import syft as sy\n", + "\n", + "sy.requires(SYFT_VERSION)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, "id": "c204ff56-a6c6-4031-9ce2-f6b7a875af20", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting test-datasite-1 server on 0.0.0.0:8081\n", + "INFO: 127.0.0.1:53964 - \"GET /api/v2/metadata 
HTTP/1.1\" 200 OK\n", + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use.
" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# run a local webserver\n", + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=8081)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "057edd69-b843-4f1e-b777-1e8945abad59", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:53966 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "Logged into as GUEST\n", + "INFO: 127.0.0.1:53966 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:53966 - \"GET /api/v2/api?verify_key=8142e576c07f91243818f902b3ec8b2f49cd5a7eec2c126e8df11ce1bc36d7c2&communication_protocol=dev HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [15544]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n", + "ERROR: [Errno 48] error while attempting to bind on address ('0.0.0.0', 8081): address already in use\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:53968 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.
" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "96599a40-95dc-46f4-8921-27a9defcfde0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:53970 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + "
\n", + " \"Logo\"\n",\n", + "

Welcome to test-datasite-1

\n", + "
\n", + " URL: http://localhost:8081
\n", + " Server Description: This is the default description for a Datasite Server.
\n", + " Server Type: Datasite
\n", + " Server Side Type:High Side
\n", + " Syft Version: 0.8.7-beta.14
\n", + "\n", + "
\n", + "
\n", + " ⓘ \n", + " This datasite is run by the library PySyft to learn more about how it works visit\n", + " github.com/OpenMined/PySyft.\n", + "
\n", + "

Commands to Get Started

\n", + " \n", + "
    \n", + " \n", + "
  • <your_client>.projects - list projects
  • \n", + "
  • <your_client>.requests - list requests
  • \n", + "
  • <your_client>.users - list users
  • \n", + "\n", + "
\n", + " \n", + "

\n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "034f839d-f656-4deb-92c5-f1e42fe8561f", + "metadata": {}, + "outputs": [], + "source": [ + "# what minimum settings do we need to be able to run jobs?\n", + "# create_producer=True,\n", + "# n_consumers=1," + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "b5a02956-9196-405e-aec8-3c59c1b016e7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping test-datasite-1\n", + "INFO: 127.0.0.1:53987 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54156 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54204 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54204 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54204 - \"GET /api/v2/api?verify_key=8142e576c07f91243818f902b3ec8b2f49cd5a7eec2c126e8df11ce1bc36d7c2&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54206 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:root:Invalid SMTP credentials timed out\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:54209 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:root:Invalid SMTP credentials timed out\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:54495 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54618 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54621 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54658 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54660 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54667 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54669 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54791 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54808 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54810 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54812 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54815 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54817 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54819 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54823 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54825 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54827 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54829 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54831 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54833 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54835 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54837 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54839 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54856 - \"GET /api/v2/api?verify_key=e117b2b343c7bf0dc2c88a0b2e104e04118143220f0d7ff8ed2f178d1da0c167&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54856 - \"GET 
/api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54858 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:syft.server.routes:Register Error: You don't have permission to create an account on the datasite: a. Please contact the Datasite Owner.. user={'id': , 'email': 'a@b.com', 'name': 'a', 'role': None, 'password': 'c', 'password_verify': 'c', 'verify_key': None, 'institution': None, 'website': None, 'created_by': {'signing_key': }, 'mock_execution_permission': False}\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:54862 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:syft.server.routes:Register Error: You don't have permission to create an account on the datasite: a. Please contact the Datasite Owner.. user={'id': , 'email': 'a@b.com', 'name': 'a', 'role': None, 'password': 'c', 'password_verify': 'c', 'verify_key': None, 'institution': None, 'website': None, 'created_by': {'signing_key': }, 'mock_execution_permission': False}\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:54864 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54868 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54870 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:54872 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55056 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55058 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55062 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55127 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55129 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55131 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55134 - \"GET /api/v2/api?verify_key=ca989d05c9ef083b8f56c281b097570223ca1567761515eaeac9e2794d103f0a&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55134 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55136 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55149 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55152 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55155 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55157 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55159 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55177 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55180 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55182 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55184 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55186 - \"GET /api/v2/api?verify_key=863592d8f09e8410733073b3388c0a9219cda01aa4ab3c602ad35790adaf3a2a&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55188 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:syft.server.routes:Register Error: You don't have permission to create an account on the datasite: a. Please contact the Datasite Owner.. 
user={'id': , 'email': 'a@b.com', 'name': 'a', 'role': None, 'password': 'c', 'password_verify': 'c', 'verify_key': None, 'institution': None, 'website': None, 'created_by': {'signing_key': }, 'mock_execution_permission': False}\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:55186 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55193 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55196 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55198 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55200 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55200 - \"GET /api/v2/api?verify_key=3a22a2dbfdfff9241974522007c3aefb3526cb709742bbe80594b39d5026621c&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55202 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55204 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55479 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55496 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55526 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55612 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55612 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55612 - \"GET /api/v2/api?verify_key=8142e576c07f91243818f902b3ec8b2f49cd5a7eec2c126e8df11ce1bc36d7c2&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55614 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55617 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55619 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55621 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55623 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55625 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55627 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55633 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55635 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55637 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55639 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55641 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55643 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55645 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56162 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56164 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56166 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56168 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56170 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56172 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56174 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56177 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:56179 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + } + ], + "source": [ + "server.land()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbfbb1b1-8a3b-4386-9b30-e373ca2cf8d5", + "metadata": {}, "outputs": [], "source": [] } diff --git a/notebooks/scenarios/getting-started/03-configuring-datasite.ipynb b/notebooks/scenarios/getting-started/03-configuring-datasite.ipynb new file mode 
100644 index 00000000000..c4925c81311 --- /dev/null +++ b/notebooks/scenarios/getting-started/03-configuring-datasite.ipynb @@ -0,0 +1,1559 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "9278cdde-2ddc-493f-ab1a-7a224f2c2338", + "metadata": {}, + "outputs": [], + "source": [ + "# -- all the configuration and server settings options\n", + "# -- optional: adding email settings (smtp with sendgrid free account)\n", + "# -- changing passwords (importance of root account)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "f9cac3bb-9310-4c6e-bf02-08752e9d70df", + "metadata": {}, + "outputs": [], + "source": [ + "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", + "package_string = f'\"syft{SYFT_VERSION}\"'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "d01b99d1-999f-4e85-abf3-c74484df8bef", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "✅ The installed version of syft==0.8.7b14 matches the requirement >=0.8.7b0 and the requirement <0.9\n" + ] + } + ], + "source": [ + "# syft absolute\n", + "import syft as sy\n", + "\n", + "sy.requires(SYFT_VERSION)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "2045933d-bc99-4483-9ca1-89c6ab166a0f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting test-datasite-1 server on 0.0.0.0:8081\n", + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftInfo:
You have launched a development server at http://0.0.0.0:8081. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# run a local webserver\n", + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=8081)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "15470a8b-8760-49b5-8ab9-2947a4518075", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "84cdb3e3-a165-4f7b-9390-56ebb3cf02a4", + "metadata": {}, + "outputs": [], + "source": [ + "# email" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "1a0738a4-7acd-4e00-b659-651bf0af8a4e", + "metadata": {}, + "outputs": [], + "source": [ + "secrets = sy.get_nb_secrets(defaults={\"smtp_token\": \"ADD_POSTMARK_TOKEN\"})" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "488d9fd4-6de9-46e0-9565-3f479af534e2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftError:
Invalid SMTP credentials. Please check your username and password.

" + ], + "text/plain": [ + "SyftError: Invalid SMTP credentials. Please check your username and password." + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smtp_username, smtp_password, smtp_server, smtp_port = (\n", + " secrets[\"smtp_token\"],\n", + " secrets[\"smtp_token\"],\n", + " \"smtp.postmarkapp.com\",\n", + " \"25\",\n", + ")\n", + "# configure email settings\n", + "client.api.services.settings.enable_notifications(\n", + " email_username=smtp_username,\n", + " email_password=smtp_password,\n", + " email_sender=\"madhava@openmined.org\",\n", + " email_server=smtp_server,\n", + " email_port=smtp_port,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "e0cbb4dc-3400-418e-ae06-fd3033e042a1", + "metadata": {}, + "outputs": [], + "source": [ + "ns = client.api.services.notifications.settings()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "1ec5f362-ecc1-44f6-99ac-ea8771ceb040", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ns.email_enabled" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "0731f6fb-82bc-4ee3-bce1-9988e38b842a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{: syft.service.notifier.notifier.EmailNotifier}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ns.notifiers" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "ace1e4a6-f1db-4506-8b5a-47d2940b510e", + "metadata": {}, + "outputs": [], + "source": [ + "s = client.api.services.settings.get()" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "c5556eee-1f9e-4eb0-b82a-ec4f523a3260", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + "

Settings

\n", + "

Id: 2baa8fe9deb94b6ead87ea7be4cb230e

\n", + "

Name: test-datasite-1

\n", + "

Organization: OpenMined

\n", + "

Description: This is the default description for a Datasite Server.

\n", + "

Deployed on: 07/15/2024

\n", + "

Signup enabled: False

\n", + "

Notifications enabled: True via email

\n", + "

Admin email: info@openmined.org

\n", + "
\n", + "\n", + " " + ], + "text/markdown": [ + "```python\n", + "class ServerSettings:\n", + " id: str = 2baa8fe9deb94b6ead87ea7be4cb230e\n", + " name: str = \"test-datasite-1\"\n", + " organization: str = \"OpenMined\"\n", + " description: str = \"This is the default description for a Datasite Server.\"\n", + " deployed_on: str = \"07/15/2024\"\n", + " signup_enabled: str = False\n", + " admin_email: str = \"info@openmined.org\"\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.settings.settings.ServerSettings" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "s" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "f81fd6f7-6e61-48d6-bf0c-e22718a109fb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftError:
Arg is `SyftError: Duplication Key Error for syft.service.settings.settings.ServerSettings.\n",
+       "The fields that should be unique are `id`.`. \n",
+       "It must be of type `ServerSettings`, not `SyftError`

" + ], + "text/plain": [ + "SyftError: Arg is `SyftError: Duplication Key Error for syft.service.settings.settings.ServerSettings.\n", + "The fields that should be unique are `id`.`. \n", + "It must be of type `ServerSettings`, not `SyftError`" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.set(s)" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "0ad33f86-3576-4dd7-9cac-5ccf7fefb943", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Settings updated successfully. You must call .refresh() to sync your client with the changes.

" + ], + "text/plain": [ + "SyftSuccess: Settings updated successfully. You must call .refresh() to sync your client with the changes." + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.update(\n", + " name=\"a\",\n", + " organization=\"b\",\n", + " description=\"c\",\n", + " admin_email=\"info2@openmined.org\",\n", + " association_request_auto_approval=True,\n", + " eager_execution_enabled=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "82eac269-4f51-45a8-8480-8a2e3a382862", + "metadata": {}, + "outputs": [], + "source": [ + "md = \"\"\"\n", + " An h1 header\n", + "============\n", + "\n", + "Paragraphs are separated by a blank line.\n", + "\n", + "2nd paragraph. *Italic*, **bold**, and `monospace`. Itemized lists\n", + "look like:\n", + "\n", + " * this one\n", + " * that one\n", + " * the other one\n", + "\n", + "Note that --- not considering the asterisk --- the actual text\n", + "content starts at 4-columns in.\n", + "\n", + "> Block quotes are\n", + "> written like so.\n", + ">\n", + "> They can span multiple paragraphs,\n", + "> if you like.\n", + "\n", + "Use 3 dashes for an em-dash. Use 2 dashes for ranges (ex., \"it's all\n", + "in chapters 12--14\"). Three dots ... will be converted to an ellipsis.\n", + "Unicode is supported. ☺\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "id": "8389d1a4-027e-4282-ad91-274c34e0011c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "\n", + " An h1 header\n", + "============\n", + "\n", + "Paragraphs are separated by a blank line.\n", + "\n", + "2nd paragraph. *Italic*, **bold**, and `monospace`. Itemized lists\n", + "look like:\n", + "\n", + " * this one\n", + " * that one\n", + " * the other one\n", + "\n", + "Note that --- not considering the asterisk --- the actual text\n", + "content starts at 4-columns in.\n", + "\n", + "> Block quotes are\n", + "> written like so.\n", + ">\n", + "> They can span multiple paragraphs,\n", + "> if you like.\n", + "\n", + "Use 3 dashes for an em-dash. Use 2 dashes for ranges (ex., \"it's all\n", + "in chapters 12--14\"). Three dots ... will be converted to an ellipsis.\n", + "Unicode is supported. ☺\n" + ], + "text/plain": [ + "syft.util.misc_objs.MarkdownDescription" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.welcome_preview(markdown=md)" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "53d16bdf-bcfa-4ca5-8b98-5124f480e81b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Welcome Markdown was successfully updated!

" + ], + "text/plain": [ + "SyftSuccess: Welcome Markdown was successfully updated!" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.welcome_customize(markdown=md)" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "d267ccf1-9cb0-4b02-80fe-2608d6c55549", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "\n", + " An h1 header\n", + "============\n", + "\n", + "Paragraphs are separated by a blank line.\n", + "\n", + "2nd paragraph. *Italic*, **bold**, and `monospace`. Itemized lists\n", + "look like:\n", + "\n", + " * this one\n", + " * that one\n", + " * the other one\n", + "\n", + "Note that --- not considering the asterisk --- the actual text\n", + "content starts at 4-columns in.\n", + "\n", + "> Block quotes are\n", + "> written like so.\n", + ">\n", + "> They can span multiple paragraphs,\n", + "> if you like.\n", + "\n", + "Use 3 dashes for an em-dash. Use 2 dashes for ranges (ex., \"it's all\n", + "in chapters 12--14\"). Three dots ... will be converted to an ellipsis.\n", + "Unicode is supported. ☺\n" + ], + "text/plain": [ + "syft.util.misc_objs.MarkdownDescription" + ] + }, + "execution_count": 54, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.welcome_show()" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "bf5318ef-0aa5-4bb5-b366-3f5fc06f00b6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n" + ] + }, + { + "data": { + "text/html": [ + "
An h1 header\n",
+       "
\n", + "

============

\n", + "

Paragraphs are separated by a blank line.

\n", + "

2nd paragraph. Italic, bold, and monospace. Itemized lists\n", + "look like:

\n", + "
    \n", + "
  • this one
  • \n", + "
  • that one
  • \n", + "
  • the other one
  • \n", + "
\n", + "

Note that --- not considering the asterisk --- the actual text\n", + "content starts at 4-columns in.

\n", + "
\n", + "

Block quotes are\n", + "written like so.

\n", + "

They can span multiple paragraphs,\n", + "if you like.

\n", + "
\n", + "

Use 3 dashes for an em-dash. Use 2 dashes for ranges (ex., \"it's all\n", + "in chapters 12--14\"). Three dots ... will be converted to an ellipsis.\n", + "Unicode is supported. ☺

" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 55, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "guest_client = client.login_as_guest()\n", + "guest_client" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "1abfe0af-9566-4f78-892e-0b7397ec081c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftError:
You don't have permission to create an account on the datasite: a. Please contact the Datasite Owner.

" + ], + "text/plain": [ + "SyftError: You don't have permission to create an account on the datasite: a. Please contact the Datasite Owner." + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "guest_client.register(name=\"a\", email=\"a@b.com\", password=\"c\", password_verify=\"c\")" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "a19d34bd-0033-4c90-9ef7-c88fe4d87dad", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Registration feature successfully enabled

" + ], + "text/plain": [ + "SyftSuccess: Registration feature successfully enabled" + ] + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.api.services.settings.allow_guest_signup(enable=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "bd7e84e7-8bd5-42cc-b5d0-86efbd6a56eb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
User 'a' successfully registered!

" + ], + "text/plain": [ + "SyftSuccess: User 'a' successfully registered!" + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "guest_client.register(name=\"a\", email=\"a@b.com\", password=\"c\", password_verify=\"c\")" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "id": "1270cfa4-c771-4e79-8744-6a8276db153d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
An h1 header\n",
+       "
\n", + "

============

\n", + "

Paragraphs are separated by a blank line.

\n", + "

2nd paragraph. Italic, bold, and monospace. Itemized lists\n", + "look like:

\n", + "
    \n", + "
  • this one
  • \n", + "
  • that one
  • \n", + "
  • the other one
  • \n", + "
\n", + "

Note that --- not considering the asterisk --- the actual text\n", + "content starts at 4-columns in.

\n", + "
\n", + "

Block quotes are\n", + "written like so.

\n", + "

They can span multiple paragraphs,\n", + "if you like.

\n", + "
\n", + "

Use 3 dashes for an em-dash. Use 2 dashes for ranges (ex., \"it's all\n", + "in chapters 12--14\"). Three dots ... will be converted to an ellipsis.\n", + "Unicode is supported. ☺

" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 60, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "guest_client.login(email=\"a@b.com\", password=\"c\")" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "id": "d8453c72-6095-44c5-82aa-6f11bb71599e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping test-datasite-1\n" + ] + } + ], + "source": [ + "server.land()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b3571ba3-709a-4d5b-8e8e-346bf840a19e", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/03-configuring-domain.ipynb b/notebooks/scenarios/getting-started/03-configuring-domain.ipynb deleted file mode 100644 index b152a87d872..00000000000 --- a/notebooks/scenarios/getting-started/03-configuring-domain.ipynb +++ /dev/null @@ -1,37 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "9278cdde-2ddc-493f-ab1a-7a224f2c2338", - "metadata": {}, - "outputs": [], - "source": [ - "# -- all the configuration and node settings options\n", - "# -- optional: adding email settings (smtp with sendgrid free account)\n", - "# -- changing passwords (importance of root account)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/scenarios/getting-started/04-uploading-data.ipynb b/notebooks/scenarios/getting-started/04-uploading-data.ipynb index 1ba607547bb..d6e0dc4ff22 100644 --- a/notebooks/scenarios/getting-started/04-uploading-data.ipynb +++ b/notebooks/scenarios/getting-started/04-uploading-data.ipynb @@ -17,9 +17,8300 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, + "id": "845fd5a5-03aa-44fb-b6aa-532936ffc941", + "metadata": {}, + "outputs": [], + "source": [ + "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", + "package_string = f'\"syft{SYFT_VERSION}\"'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "43db561b-b80c-447a-ae85-cc37496a9965", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "✅ The installed version of syft==0.8.7b14 matches the requirement >=0.8.7b0 and the requirement <0.9\n" + ] + } + ], + "source": [ + "# syft absolute\n", + "import syft as sy\n", + "\n", + "sy.requires(SYFT_VERSION)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, "id": "150862d6-0395-4fbb-ad19-9bdae91e33fa", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting test-datasite-1 server on 0.0.0.0:8081\n", + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftInfo:
You have launched a development server at http://0.0.0.0:8081. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# run a local webserver\n", + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=8081)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "86e27ecb-f661-4fe4-a939-da060d001be2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e2eeed1f-41d7-4e17-84db-e7897242bcec", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " % Total % Received % Xferd Average Speed Time Time Time Current\n", + " Dload Upload Total Spent Left Speed\n", + "100 157M 100 157M 0 0 2122k 0 0:01:16 0:01:16 --:--:-- 2137k48k 0 0 1694k 0 0:01:35 0:00:05 0:01:30 1921k 0 0:01:40 0:00:10 0:01:30 1516k 0:01:15 2333k 0 0 2257k 0 0:01:11 0:01:01 0:00:10 917k 0 2150k 0 0:01:15 0:01:08 0:00:07 1455k\n" + ] + } + ], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "if not os.path.exists(\"ages_dataset.csv\"):\n", + " !curl -O https://openminedblob.blob.core.windows.net/csvs/ages_dataset.csv" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "c25ffa3b-6415-4b63-b1ce-7ed3365eeeeb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
IdNameShort descriptionGenderCountryOccupationBirth yearDeath yearManner of deathAge of deathAssociated CountriesAssociated Country Coordinates (Lat/Lon)Associated Country Life Expectancy
0Q23George Washington1st president of the United States (1732–1799)MaleUnited States of America; Kingdom of Great Bri...Politician17321799.0natural causes67.0['United Kingdom', 'United States'][(55.378051, -3.435973), (37.09024, -95.712891)][81.3, 78.5]
1Q42Douglas AdamsEnglish writer and humoristMaleUnited KingdomArtist19522001.0natural causes49.0['United Kingdom'][(55.378051, -3.435973)][81.3]
2Q91Abraham Lincoln16th president of the United States (1809-1865)MaleUnited States of AmericaPolitician18091865.0homicide56.0['United States'][(37.09024, -95.712891)][78.5]
5Q260Jean-François ChampollionFrench classical scholarMaleKingdom of France; First French EmpireEgyptologist17901832.0natural causes42.0['France'][(46.227638, 2.213749)][82.5]
7Q296Claude MonetFrench impressionist painter (1840-1926)MaleFranceArtist18401926.0natural causes86.0['France'][(46.227638, 2.213749)][82.5]
\n", + "
" + ], + "text/plain": [ + " Id Name \\\n", + "0 Q23 George Washington \n", + "1 Q42 Douglas Adams \n", + "2 Q91 Abraham Lincoln \n", + "5 Q260 Jean-François Champollion \n", + "7 Q296 Claude Monet \n", + "\n", + " Short description Gender \\\n", + "0 1st president of the United States (1732–1799) Male \n", + "1 English writer and humorist Male \n", + "2 16th president of the United States (1809-1865) Male \n", + "5 French classical scholar Male \n", + "7 French impressionist painter (1840-1926) Male \n", + "\n", + " Country Occupation \\\n", + "0 United States of America; Kingdom of Great Bri... Politician \n", + "1 United Kingdom Artist \n", + "2 United States of America Politician \n", + "5 Kingdom of France; First French Empire Egyptologist \n", + "7 France Artist \n", + "\n", + " Birth year Death year Manner of death Age of death \\\n", + "0 1732 1799.0 natural causes 67.0 \n", + "1 1952 2001.0 natural causes 49.0 \n", + "2 1809 1865.0 homicide 56.0 \n", + "5 1790 1832.0 natural causes 42.0 \n", + "7 1840 1926.0 natural causes 86.0 \n", + "\n", + " Associated Countries \\\n", + "0 ['United Kingdom', 'United States'] \n", + "1 ['United Kingdom'] \n", + "2 ['United States'] \n", + "5 ['France'] \n", + "7 ['France'] \n", + "\n", + " Associated Country Coordinates (Lat/Lon) \\\n", + "0 [(55.378051, -3.435973), (37.09024, -95.712891)] \n", + "1 [(55.378051, -3.435973)] \n", + "2 [(37.09024, -95.712891)] \n", + "5 [(46.227638, 2.213749)] \n", + "7 [(46.227638, 2.213749)] \n", + "\n", + " Associated Country Life Expectancy \n", + "0 [81.3, 78.5] \n", + "1 [81.3] \n", + "2 [78.5] \n", + "5 [82.5] \n", + "7 [82.5] " + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# third party\n", + "import pandas as pd\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + "\n", + "age_df = pd.read_csv(\"ages_dataset.csv\")\n", + "age_df = age_df.dropna(how=\"any\")\n", + "age_df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "b0fdc5b3-2cc5-4084-8904-525079b7fe00", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " % Total % Received % Xferd Average Speed Time Time Time Current\n", + " Dload Upload Total Spent Left Speed\n", + "100 8217k 100 8217k 0 0 1501k 0 0:00:05 0:00:05 --:--:-- 1671k\n" + ] + } + ], + "source": [ + "# stdlib\n", + "# TODO: also move to dataset repo\n", + "import os\n", + "\n", + "if not os.path.exists(\"ages_mock_dataset.csv\"):\n", + " !curl -O https://openminedblob.blob.core.windows.net/csvs/ages_mock_dataset.csv" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "0e130662-73f4-43a4-afb6-c6ecc107c5a7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
IdGenderAge of deathAssociated CountriesAssociated Country Life ExpectancyManner of deathNameShort descriptionOccupationDeath yearBirth yearCountryAssociated Country Coordinates (Lat/Lon)
0Q19723Gender 153.0['United States'][78.5]homicideNorma FisherMagazine truth stop whose group through despite.Corporate treasurer1989.01936Not AvailableNot Available
1Q20057Gender 151.0['United Kingdom'][81.3]natural causesBrandon LloydTotal financial role together range line beyon...Chief Financial Officer2018.01967Not AvailableNot Available
2Q8791Gender 184.0['Sweden'][82.5]natural causesMichelle GloverPartner stock four. Region as true develop sou...Speech and language therapist2000.01916Not AvailableNot Available
3Q30567Gender 164.0['Belgium'][81.6]natural causesWillie GoldenFeeling fact by four. Data son natural explain...Financial controller1989.01925Not AvailableNot Available
4Q14013Gender 188.0['United Kingdom'][81.3]suicideRoberto JohnsonAttorney quickly candidate change although bag...Sound technician, broadcasting/film/video2016.01928Not AvailableNot Available
\n", + "
" + ], + "text/plain": [ + " Id Gender Age of death Associated Countries \\\n", + "0 Q19723 Gender 1 53.0 ['United States'] \n", + "1 Q20057 Gender 1 51.0 ['United Kingdom'] \n", + "2 Q8791 Gender 1 84.0 ['Sweden'] \n", + "3 Q30567 Gender 1 64.0 ['Belgium'] \n", + "4 Q14013 Gender 1 88.0 ['United Kingdom'] \n", + "\n", + " Associated Country Life Expectancy Manner of death Name \\\n", + "0 [78.5] homicide Norma Fisher \n", + "1 [81.3] natural causes Brandon Lloyd \n", + "2 [82.5] natural causes Michelle Glover \n", + "3 [81.6] natural causes Willie Golden \n", + "4 [81.3] suicide Roberto Johnson \n", + "\n", + " Short description \\\n", + "0 Magazine truth stop whose group through despite. \n", + "1 Total financial role together range line beyon... \n", + "2 Partner stock four. Region as true develop sou... \n", + "3 Feeling fact by four. Data son natural explain... \n", + "4 Attorney quickly candidate change although bag... \n", + "\n", + " Occupation Death year Birth year \\\n", + "0 Corporate treasurer 1989.0 1936 \n", + "1 Chief Financial Officer 2018.0 1967 \n", + "2 Speech and language therapist 2000.0 1916 \n", + "3 Financial controller 1989.0 1925 \n", + "4 Sound technician, broadcasting/film/video 2016.0 1928 \n", + "\n", + " Country Associated Country Coordinates (Lat/Lon) \n", + "0 Not Available Not Available \n", + "1 Not Available Not Available \n", + "2 Not Available Not Available \n", + "3 Not Available Not Available \n", + "4 Not Available Not Available " + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "age_mock_df = pd.read_csv(\"ages_mock_dataset.csv\")\n", + "age_mock_df = age_mock_df.dropna(how=\"any\")\n", + "age_mock_df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "0bbc6868-d66c-4a96-ad39-fb340e7037b3", + "metadata": {}, + "outputs": [], + "source": [ + "# How an asset for low side and high-side would be defined:\n", + "main_contributor = sy.Contributor(\n", + " name=\"Jeffrey Salazar\", role=\"Dataset Creator\", email=\"jsala@ailab.com\"\n", + ")\n", + "\n", + "asset = sy.Asset(\n", + " name=\"asset_name\",\n", + " data=age_df, # real dataframe\n", + " mock=age_mock_df, # mock dataframe\n", + " contributors=[main_contributor],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "43474e47-20dd-437a-9b16-324831f692f4", + "metadata": {}, + "outputs": [], + "source": [ + "description_template = \"\"\"### About the dataset\n", + "This extensive dataset provides a rich collection of demographic and life events records for individuals across multiple countries. It covers a wide range of indicators and attributes related to personal information, birth and death events, gender, occupation, and associated countries. The dataset offers valuable insights into population dynamics and various aspects of human life, enabling comprehensive analyses and cross-country comparisons. The dataset is the largest one on notable deceased people and includes individ- uals from a variety of social groups, including but not limited to 107k females, 90k researchers, and 124 non-binary indi- viduals, spread across more than 300 contemporary or histor- ical regions.\n", + "\n", + "### Dataset usage policy\n", + "This dataset is subject to compliance with internal data use and mis-use policies at our organisation. 
The following rules apply:\n", "- only aggregate statistics can be released from data computation\n", "- data subjects should never be identifiable through the data computation outcomes\n", "- a fixed privacy budget of eps=5 must be preserved by each researcher\n", "\n", "### Data collection and pre-processing\n", "The dataset is based on open data hosted by Wikimedia Foundation.\n", "\n", "**Age**\n", "Whenever possible, age was calculated based on the birth and death year mentioned in the description of the individual.\n", "\n", "**Gender**\n", "Gender was available in the original dataset for 50% of participants. For the remaining, it was added from predictions based on name, country and century in which they lived. (97.51% accuracy and 98.89% F1-score)\n", "\n", "**Occupation**\n", "The occupation was available in the original dataset for 66% of the individuals. For the remaining, it was added from the predictions of a multiclass text classification model. (93.4% accuracy for 84% of the dataset)\n", "\n", "More details about the features can be found by reading the paper.\n", "\n", "### Key features\n", "1. **Id**: Unique identifier for each individual.\n", "2. **Name**: Name of the person.\n", "3. **Short description**: Brief description or summary of the individual.\n", "4. **Gender**: Gender/s of the individual.\n", "5. **Country**: Countries/Kingdoms of residence and/or origin.\n", "6. **Occupation**: Occupation or profession of the individual.\n", "7. **Birth year**: Year of birth for the individual.\n", "8. **Death year**: Year of death for the individual.\n", "9. **Manner of death**: Details about the circumstances or manner of death.\n", "10. **Age of death**: Age at the time of death for the individual.\n", "11. **Associated Countries**: Modern Day Countries associated with the individual.\n", "12. **Associated Country Coordinates (Lat/Lon)**: Modern Day Latitude and longitude coordinates of the associated countries.\n", "13. 
**Associated Country Life Expectancy**: Life expectancy of the associated countries.\n", + "\n", + "### Use cases\n", + "- Analyze demographic trends and birth rates in different countries.\n", + "- Investigate factors affecting life expectancy and mortality rates.\n", + "- Study the relationship between gender and occupation across regions.\n", + "- Explore correlations between age of death and associated country attributes.\n", + "- Examine patterns of migration and associated countries' life expectancy.\n", + "\n", + "\n", + "### Getting started\n", + "\n", + "```\n", + "!curl -O https://openminedblob.blob.core.windows.net/csvs/ages_dataset.csv\n", + "\n", + "age_df = pd.read_csv(\"ages_dataset.csv\")\n", + "```\n", + "\n", + "### Execution environment\n", + "The data is hosted in a remote compute environment with the following specifications:\n", + "- X CPU cores\n", + "- 1 GPU of type Y\n", + "- Z RAM\n", + "- A additional available storage\n", + "\n", + "### Citation\n", + "Annamoradnejad, Issa; Annamoradnejad, Rahimberdi (2022), “Age dataset: A structured general-purpose dataset on life, work, and death of 1.22 million distinguished people”, In Workshop Proceedings of the 16th International AAAI Conference on Web and Social Media (ICWSM), doi: 10.36190/2022.82\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "ebfed140-db5b-4bd1-886a-dabeb52dbe9a", + "metadata": {}, + "outputs": [], + "source": [ + "dataset = sy.Dataset(\n", + " name=\"Dataset name\",\n", + " description=description_template,\n", + " asset_list=[asset],\n", + " contributors=[main_contributor],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6901ddcc-b72a-4e79-9071-a47531ff4eba", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "18b4056a-260e-4d61-9333-c2a2d3e50f21", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Uploading: asset_name: 100%|\u001b[32m████████████████\u001b[0m| 1/1 [00:00<00:00, 1.35it/s]\u001b[0m\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftSuccess:
Dataset uploaded to 'a'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`

" + ], + "text/plain": [ + "SyftSuccess: Dataset uploaded to 'a'. To see the datasets uploaded by a client on this server, use command `[your_client].datasets`" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Uploading the dataset\n", + "client.upload_dataset(dataset)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "0ade7507-77f7-450b-9b59-a34bad501609", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

Dataset Dicttuple

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "f19c0bc1-1466-4002-a23e-1f3afdd84f25", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + "

Dataset name

\n", + "

Summary

\n", + " \n", + " \n", + "

Description

\n", + "

About the dataset

\n", + "

This extensive dataset provides a rich collection of demographic and life events records for individuals across multiple countries. It covers a wide range of indicators and attributes related to personal information, birth and death events, gender, occupation, and associated countries. The dataset offers valuable insights into population dynamics and various aspects of human life, enabling comprehensive analyses and cross-country comparisons. The dataset is the largest one on notable deceased people and includes individuals from a variety of social groups, including but not limited to 107k females, 90k researchers, and 124 non-binary individuals, spread across more than 300 contemporary or historical regions.

\n", + "

Dataset usage policy

\n", + "

This dataset is subject to compliance with internal data use and mis-use policies at our organisation. The following rules apply:\n", + "- only aggregate statistics can be released from data computation\n", + "- data subjects should never be identifiable through the data computation outcomes\n", + "- a fixed privacy budget of eps=5 must be preserved by each researcher

\n", + "

Data collection and pre-processing

\n", + "

The dataset is based on open data hosted by Wikimedia Foundation.

\n", + "

Age\n", + "Whenever possible, age was calculated based on the birth and death year mentioned in the description of the individual.

\n", + "

Gender\n", + "Gender was available in the original dataset for 50% of participants. For the remaining, it was added from predictions based on name, country and century in which they lived. (97.51% accuracy and 98.89% F1-score)

\n", + "

Occupation\n", + "The occupation was available in the original dataset for 66% of the individuals. For the remaining, it was added from predictions from a multiclass text classificator model. (93.4% accuracy for 84% of the dataset)

\n", + "

More details about the features can be found by reading the paper.

\n", + "

Key features

\n", + "
    \n", + "
  1. Id: Unique identifier for each individual.
  2. \n", + "
  3. Name: Name of the person.
  4. \n", + "
  5. Short description: Brief description or summary of the individual.
  6. \n", + "
  7. Gender: Gender/s of the individual.
  8. \n", + "
  9. Country: Countries/Kingdoms of residence and/or origin.
  10. \n", + "
  11. Occupation: Occupation or profession of the individual.
  12. \n", + "
  13. Birth year: Year of birth for the individual.
  14. \n", + "
  15. Death year: Year of death for the individual.
  16. \n", + "
  17. Manner of death: Details about the circumstances or manner of death.
  18. \n", + "
  19. Age of death: Age at the time of death for the individual.
  20. \n", + "
  21. Associated Countries: Modern Day Countries associated with the individual.
  22. \n", + "
  23. Associated Country Coordinates (Lat/Lon): Modern Day Latitude and longitude coordinates of the associated countries.
  24. \n", + "
  25. Associated Country Life Expectancy: Life expectancy of the associated countries.
  26. \n", + "
\n", + "

Use cases

\n", + "
    \n", + "
  • Analyze demographic trends and birth rates in different countries.
  • \n", + "
  • Investigate factors affecting life expectancy and mortality rates.
  • \n", + "
  • Study the relationship between gender and occupation across regions.
  • \n", + "
  • Explore correlations between age of death and associated country attributes.
  • \n", + "
  • Examine patterns of migration and associated countries' life expectancy.
  • \n", + "
\n", + "

Getting started

\n", + "
!curl -O https://openminedblob.blob.core.windows.net/csvs/ages_dataset.csv\n",
+       "\n",
+       "age_df = pd.read_csv(\"ages_dataset.csv\")\n",
+       "
\n", + "

Execution environment

\n", + "

The data is hosted in a remote compute environment with the following specifications:\n", + "- X CPU cores\n", + "- 1 GPU of type Y\n", + "- Z RAM\n", + "- A additional available storage

\n", + "

Citation

\n", + "

Annamoradnejad, Issa; Annamoradnejad, Rahimberdi (2022), “Age dataset: A structured general-purpose dataset on life, work, and death of 1.22 million distinguished people”, In Workshop Proceedings of the 16th International AAAI Conference on Web and Social Media (ICWSM), doi: 10.36190/2022.82

\n", + " \n", + "

Dataset Details

\n", + "

Uploaded by: Jane Doe (info@openmined.org)

\n", + "

Created on: 2024-07-14 20:58:44

\n", + "

URL:\n", + " None

\n", + "

Contributors:\n", + " To see full details call dataset.contributors.

\n", + "

Assets

\n", + " \n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

Asset Dicttuple

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/markdown": [ + "Syft Dataset: Dataset name\n", + "\n", + "Assets:\n", + "\n", + "\tasset_name\n", + "\n", + "Description: \n", + "\n", + "### About the dataset\n", + "This extensive dataset provides a rich collection of demographic and life events records for individuals across multiple countries. It covers a wide range of indicators and attributes related to personal information, birth and death events, gender, occupation, and associated countries. The dataset offers valuable insights into population dynamics and various aspects of human life, enabling comprehensive analyses and cross-country comparisons. The dataset is the largest one on notable deceased people and includes individ- uals from a variety of social groups, including but not limited to 107k females, 90k researchers, and 124 non-binary indi- viduals, spread across more than 300 contemporary or histor- ical regions.\n", + "\n", + "### Dataset usage policy\n", + "This dataset is subject to compliance with internal data use and mis-use policies at our organisation. The following rules apply:\n", + "- only aggregate statistics can be released from data computation\n", + "- data subjects should never be identifiable through the data computation outcomes\n", + "- a fixed privacy budget of eps=5 must be preserved by each researcher\n", + "\n", + "### Data collection and pre-processing\n", + "The dataset is based on open data hosted by Wikimedia Foundation.\n", + "\n", + "**Age**\n", + "Whenever possible, age was calculated based on the birth and death year mentioned in the description of the individual.\n", + "\n", + "**Gender**\n", + "Gender was available in the original dataset for 50% of participants. For the remaining, it was added from predictions based on name, country and century in which they lived. (97.51% accuracy and 98.89% F1-score)\n", + "\n", + "**Occupation**\n", + "The occupation was available in the original dataset for 66% of the individuals. For the remaining, it was added from predictions from a multiclass text classificator model. (93.4% accuracy for 84% of the dataset)\n", + "\n", + "More details about the features can be found by reading the paper.\n", + "\n", + "### Key features\n", + "1. **Id**: Unique identifier for each individual.\n", + "2. **Name**: Name of the person.\n", + "3. **Short description**: Brief description or summary of the individual.\n", + "4. **Gender**: Gender/s of the individual.\n", + "5. **Country**: Countries/Kingdoms of residence and/or origin.\n", + "6. **Occupation**: Occupation or profession of the individual.\n", + "7. **Birth year**: Year of birth for the individual.\n", + "8. **Death year**: Year of death for the individual.\n", + "9. **Manner of death**: Details about the circumstances or manner of death.\n", + "10. **Age of death**: Age at the time of death for the individual.\n", + "11. **Associated Countries**: Modern Day Countries associated with the individual.\n", + "12. **Associated Country Coordinates (Lat/Lon)**: Modern Day Latitude and longitude coordinates of the associated countries.\n", + "13. 
**Associated Country Life Expectancy**: Life expectancy of the associated countries.\n", + "\n", + "### Use cases\n", + "- Analyze demographic trends and birth rates in different countries.\n", + "- Investigate factors affecting life expectancy and mortality rates.\n", + "- Study the relationship between gender and occupation across regions.\n", + "- Explore correlations between age of death and associated country attributes.\n", + "- Examine patterns of migration and associated countries' life expectancy.\n", + "\n", + "\n", + "### Getting started\n", + "\n", + "```\n", + "!curl -O https://openminedblob.blob.core.windows.net/csvs/ages_dataset.csv\n", + "\n", + "age_df = pd.read_csv(\"ages_dataset.csv\")\n", + "```\n", + "\n", + "### Execution environment\n", + "The data is hosted in a remote compute environment with the following specifications:\n", + "- X CPU cores\n", + "- 1 GPU of type Y\n", + "- Z RAM\n", + "- A additional available storage\n", + "\n", + "### Citation\n", + "Annamoradnejad, Issa; Annamoradnejad, Rahimberdi (2022), “Age dataset: A structured general-purpose dataset on life, work, and death of 1.22 million distinguished people”, In Workshop Proceedings of the 16th International AAAI Conference on Web and Social Media (ICWSM), doi: 10.36190/2022.82\n", + "\n", + "\n" + ], + "text/plain": [ + "syft.service.dataset.dataset.Dataset" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "d = client.datasets[0]\n", + "d" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "43e920d4-b400-4551-b4c2-a78b92f7f892", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + "\n", + "
\n", + "

asset_name

\n", + "

None

\n", + "

Asset ID: c8b177dd01d74fc4bde31cdcf3bafcc6

\n", + "

Action Object ID: 61d18e2bd00c4f4f8a284a4ddb2acbe0

\n", + "

Uploaded by: Jane Doe (info@openmined.org)

\n", + "

Created on: 2024-07-14 20:58:44

\n", + "

Data:

\n", + "
\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + "
IdNameShort descriptionGenderCountryOccupationBirth yearDeath yearManner of deathAge of deathAssociated CountriesAssociated Country Coordinates (Lat/Lon)Associated Country Life Expectancy
Loading... (need help?)
\n", + "\n", + "\n", + "
\n", + "\n", + "

Mock Data:

\n", + "
\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + "
IdGenderAge of deathAssociated CountriesAssociated Country Life ExpectancyManner of deathNameShort descriptionOccupationDeath yearBirth yearCountryAssociated Country Coordinates (Lat/Lon)
Loading... (need help?)
\n", + "\n", + "\n", + "
\n", + "\n", + "
" + ], + "text/markdown": [ + "```python\n", + "Asset: asset_name\n", + "Pointer Id: 61d18e2bd00c4f4f8a284a4ddb2acbe0\n", + "Description: None\n", + "Total Data Subjects: 0\n", + "Shape: (44211, 13)\n", + "Contributors: 2\n", + "\tJane Doe: info@openmined.org\n", + "\tJeffrey Salazar: jsala@ailab.com\n", + "\n", + "```" + ], + "text/plain": [ + "Asset(name='asset_name', server_uid='2baa8fe9deb94b6ead87ea7be4cb230e', action_id='61d18e2bd00c4f4f8a284a4ddb2acbe0')" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a = d.assets[0]\n", + "a" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "86eb6f5c-ac2e-4dee-bac7-dcaa0dd972cf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
IdGenderAge of deathAssociated CountriesAssociated Country Life ExpectancyManner of deathNameShort descriptionOccupationDeath yearBirth yearCountryAssociated Country Coordinates (Lat/Lon)
0Q19723Gender 153.0['United States'][78.5]homicideNorma FisherMagazine truth stop whose group through despite.Corporate treasurer1989.01936Not AvailableNot Available
1Q20057Gender 151.0['United Kingdom'][81.3]natural causesBrandon LloydTotal financial role together range line beyon...Chief Financial Officer2018.01967Not AvailableNot Available
2Q8791Gender 184.0['Sweden'][82.5]natural causesMichelle GloverPartner stock four. Region as true develop sou...Speech and language therapist2000.01916Not AvailableNot Available
3Q30567Gender 164.0['Belgium'][81.6]natural causesWillie GoldenFeeling fact by four. Data son natural explain...Financial controller1989.01925Not AvailableNot Available
4Q14013Gender 188.0['United Kingdom'][81.3]suicideRoberto JohnsonAttorney quickly candidate change although bag...Sound technician, broadcasting/film/video2016.01928Not AvailableNot Available
..........................................
44206Q21223Gender 187.0['United States'][78.5]natural causesSteven HillOccur site mean. None imagine social collectio...Television/film/video producer2014.01927Not AvailableNot Available
44207Q18681Gender 175.0['Austria'][81.6]natural causesLaura SmithFive help event as sort. Class training possib...Race relations officer2018.01943Not AvailableNot Available
44208Q34424Gender 156.0['France'][82.5]natural causesDiana JacobsMiddle style capital describe increase. Fly si...Civil Service fast streamer2009.01953Not AvailableNot Available
44209Q33102Gender 175.0['France'][82.5]natural causesLarry FosterWatch size character piece speak moment outsid...Speech and language therapist1982.01907Not AvailableNot Available
44210Q34422Gender 163.0['Poland'][77.6]unnatural deathThomas GonzalesFact thousand week professional.Biochemist, clinical2005.01942Not AvailableNot Available
\n", + "

44211 rows × 13 columns

\n", + "
" + ], + "text/plain": [ + " Id Gender Age of death Associated Countries \\\n", + "0 Q19723 Gender 1 53.0 ['United States'] \n", + "1 Q20057 Gender 1 51.0 ['United Kingdom'] \n", + "2 Q8791 Gender 1 84.0 ['Sweden'] \n", + "3 Q30567 Gender 1 64.0 ['Belgium'] \n", + "4 Q14013 Gender 1 88.0 ['United Kingdom'] \n", + "... ... ... ... ... \n", + "44206 Q21223 Gender 1 87.0 ['United States'] \n", + "44207 Q18681 Gender 1 75.0 ['Austria'] \n", + "44208 Q34424 Gender 1 56.0 ['France'] \n", + "44209 Q33102 Gender 1 75.0 ['France'] \n", + "44210 Q34422 Gender 1 63.0 ['Poland'] \n", + "\n", + " Associated Country Life Expectancy Manner of death Name \\\n", + "0 [78.5] homicide Norma Fisher \n", + "1 [81.3] natural causes Brandon Lloyd \n", + "2 [82.5] natural causes Michelle Glover \n", + "3 [81.6] natural causes Willie Golden \n", + "4 [81.3] suicide Roberto Johnson \n", + "... ... ... ... \n", + "44206 [78.5] natural causes Steven Hill \n", + "44207 [81.6] natural causes Laura Smith \n", + "44208 [82.5] natural causes Diana Jacobs \n", + "44209 [82.5] natural causes Larry Foster \n", + "44210 [77.6] unnatural death Thomas Gonzales \n", + "\n", + " Short description \\\n", + "0 Magazine truth stop whose group through despite. \n", + "1 Total financial role together range line beyon... \n", + "2 Partner stock four. Region as true develop sou... \n", + "3 Feeling fact by four. Data son natural explain... \n", + "4 Attorney quickly candidate change although bag... \n", + "... ... \n", + "44206 Occur site mean. None imagine social collectio... \n", + "44207 Five help event as sort. Class training possib... \n", + "44208 Middle style capital describe increase. Fly si... \n", + "44209 Watch size character piece speak moment outsid... \n", + "44210 Fact thousand week professional. \n", + "\n", + " Occupation Death year Birth year \\\n", + "0 Corporate treasurer 1989.0 1936 \n", + "1 Chief Financial Officer 2018.0 1967 \n", + "2 Speech and language therapist 2000.0 1916 \n", + "3 Financial controller 1989.0 1925 \n", + "4 Sound technician, broadcasting/film/video 2016.0 1928 \n", + "... ... ... ... \n", + "44206 Television/film/video producer 2014.0 1927 \n", + "44207 Race relations officer 2018.0 1943 \n", + "44208 Civil Service fast streamer 2009.0 1953 \n", + "44209 Speech and language therapist 1982.0 1907 \n", + "44210 Biochemist, clinical 2005.0 1942 \n", + "\n", + " Country Associated Country Coordinates (Lat/Lon) \n", + "0 Not Available Not Available \n", + "1 Not Available Not Available \n", + "2 Not Available Not Available \n", + "3 Not Available Not Available \n", + "4 Not Available Not Available \n", + "... ... ... \n", + "44206 Not Available Not Available \n", + "44207 Not Available Not Available \n", + "44208 Not Available Not Available \n", + "44209 Not Available Not Available \n", + "44210 Not Available Not Available \n", + "\n", + "[44211 rows x 13 columns]" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.mock" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "2fdd319f-8c51-4203-82f1-fbf29ad5d614", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
IdNameShort descriptionGenderCountryOccupationBirth yearDeath yearManner of deathAge of deathAssociated CountriesAssociated Country Coordinates (Lat/Lon)Associated Country Life Expectancy
0Q23George Washington1st president of the United States (1732–1799)MaleUnited States of America; Kingdom of Great Bri...Politician17321799.0natural causes67.0['United Kingdom', 'United States'][(55.378051, -3.435973), (37.09024, -95.712891)][81.3, 78.5]
1Q42Douglas AdamsEnglish writer and humoristMaleUnited KingdomArtist19522001.0natural causes49.0['United Kingdom'][(55.378051, -3.435973)][81.3]
2Q91Abraham Lincoln16th president of the United States (1809-1865)MaleUnited States of AmericaPolitician18091865.0homicide56.0['United States'][(37.09024, -95.712891)][78.5]
5Q260Jean-François ChampollionFrench classical scholarMaleKingdom of France; First French EmpireEgyptologist17901832.0natural causes42.0['France'][(46.227638, 2.213749)][82.5]
7Q296Claude MonetFrench impressionist painter (1840-1926)MaleFranceArtist18401926.0natural causes86.0['France'][(46.227638, 2.213749)][82.5]
..........................................
1215502Q75349931John SaxtonBritish diarist and settler in New ZealandMaleUnited Kingdom of Great Britain and IrelandSettler18071866.0suicide59.0['United Kingdom'][(55.378051, -3.435973)][81.3]
1217919Q75717629Antonín GrunclCzechoslovak musicianMaleCzechoslovakiaArtist19051942.0capital punishment37.0['Slovakia'][(48.669026, 19.699024)][77.2]
1221308Q76161186Sara ChampionBritish archaeologistFemaleUnited KingdomResearcher19462000.0natural causes54.0['United Kingdom'][(55.378051, -3.435973)][81.3]
1222916Q76857201Dolores Millanesbaaŋa ŋun nyɛ paɣaFemaleSpainArtist18591906.0shipwrecking47.0['Spain'][(40.463667, -3.74922)][83.3]
1222958Q76995470David Hillhouse BuelUnion Army officerMaleUnited States of AmericaMilitary personnel18391870.0homicide31.0['United States'][(37.09024, -95.712891)][78.5]
\n", + "

44211 rows × 13 columns

\n", + "
" + ], + "text/plain": [ + " Id Name \\\n", + "0 Q23 George Washington \n", + "1 Q42 Douglas Adams \n", + "2 Q91 Abraham Lincoln \n", + "5 Q260 Jean-François Champollion \n", + "7 Q296 Claude Monet \n", + "... ... ... \n", + "1215502 Q75349931 John Saxton \n", + "1217919 Q75717629 Antonín Gruncl \n", + "1221308 Q76161186 Sara Champion \n", + "1222916 Q76857201 Dolores Millanes \n", + "1222958 Q76995470 David Hillhouse Buel \n", + "\n", + " Short description Gender \\\n", + "0 1st president of the United States (1732–1799) Male \n", + "1 English writer and humorist Male \n", + "2 16th president of the United States (1809-1865) Male \n", + "5 French classical scholar Male \n", + "7 French impressionist painter (1840-1926) Male \n", + "... ... ... \n", + "1215502 British diarist and settler in New Zealand Male \n", + "1217919 Czechoslovak musician Male \n", + "1221308 British archaeologist Female \n", + "1222916 baaŋa ŋun nyɛ paɣa Female \n", + "1222958 Union Army officer Male \n", + "\n", + " Country \\\n", + "0 United States of America; Kingdom of Great Bri... \n", + "1 United Kingdom \n", + "2 United States of America \n", + "5 Kingdom of France; First French Empire \n", + "7 France \n", + "... ... \n", + "1215502 United Kingdom of Great Britain and Ireland \n", + "1217919 Czechoslovakia \n", + "1221308 United Kingdom \n", + "1222916 Spain \n", + "1222958 United States of America \n", + "\n", + " Occupation Birth year Death year Manner of death \\\n", + "0 Politician 1732 1799.0 natural causes \n", + "1 Artist 1952 2001.0 natural causes \n", + "2 Politician 1809 1865.0 homicide \n", + "5 Egyptologist 1790 1832.0 natural causes \n", + "7 Artist 1840 1926.0 natural causes \n", + "... ... ... ... ... \n", + "1215502 Settler 1807 1866.0 suicide \n", + "1217919 Artist 1905 1942.0 capital punishment \n", + "1221308 Researcher 1946 2000.0 natural causes \n", + "1222916 Artist 1859 1906.0 shipwrecking \n", + "1222958 Military personnel 1839 1870.0 homicide \n", + "\n", + " Age of death Associated Countries \\\n", + "0 67.0 ['United Kingdom', 'United States'] \n", + "1 49.0 ['United Kingdom'] \n", + "2 56.0 ['United States'] \n", + "5 42.0 ['France'] \n", + "7 86.0 ['France'] \n", + "... ... ... \n", + "1215502 59.0 ['United Kingdom'] \n", + "1217919 37.0 ['Slovakia'] \n", + "1221308 54.0 ['United Kingdom'] \n", + "1222916 47.0 ['Spain'] \n", + "1222958 31.0 ['United States'] \n", + "\n", + " Associated Country Coordinates (Lat/Lon) \\\n", + "0 [(55.378051, -3.435973), (37.09024, -95.712891)] \n", + "1 [(55.378051, -3.435973)] \n", + "2 [(37.09024, -95.712891)] \n", + "5 [(46.227638, 2.213749)] \n", + "7 [(46.227638, 2.213749)] \n", + "... ... \n", + "1215502 [(55.378051, -3.435973)] \n", + "1217919 [(48.669026, 19.699024)] \n", + "1221308 [(55.378051, -3.435973)] \n", + "1222916 [(40.463667, -3.74922)] \n", + "1222958 [(37.09024, -95.712891)] \n", + "\n", + " Associated Country Life Expectancy \n", + "0 [81.3, 78.5] \n", + "1 [81.3] \n", + "2 [78.5] \n", + "5 [82.5] \n", + "7 [82.5] \n", + "... ... 
\n", + "1215502 [81.3] \n", + "1217919 [77.2] \n", + "1221308 [81.3] \n", + "1222916 [83.3] \n", + "1222958 [78.5] \n", + "\n", + "[44211 rows x 13 columns]" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5aff9d42-9241-48f1-85c6-8a0dbc4ab83a", + "metadata": {}, "outputs": [], "source": [] } diff --git a/notebooks/scenarios/getting-started/05-adding-users.ipynb b/notebooks/scenarios/getting-started/05-adding-users.ipynb index 881fb6a5a01..6e6befee17b 100644 --- a/notebooks/scenarios/getting-started/05-adding-users.ipynb +++ b/notebooks/scenarios/getting-started/05-adding-users.ipynb @@ -14,9 +14,16101 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, + "id": "3de6bc42-4b2f-4902-9676-a36a0621fef3", + "metadata": {}, + "outputs": [], + "source": [ + "# syft absolute\n", + "import syft as sy" + ] + }, + { + "cell_type": "code", + "execution_count": 3, "id": "3ac0c932-601f-4424-ad80-f02da291b2a3", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting test-datasite-1 server on 0.0.0.0:8081\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [63035]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n", + "INFO: Uvicorn running on http://0.0.0.0:8081 (Press CTRL+C to quit)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:55426 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "Waiting for server to start Done.\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftInfo:
You have launched a development server at http://0.0.0.0:8081.It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:8081.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO: 127.0.0.1:55432 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55432 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55432 - \"GET /api/v2/api?verify_key=d2dfbeeef378bf4737a4aec6a9406cd8a674a7831ec56f4ca8899e52e9c827e3&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55442 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55432 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55456 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55472 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55474 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55490 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55500 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55516 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55518 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55534 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55542 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55542 - \"POST /api/v2/login HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:55542 - \"GET /api/v2/api?verify_key=d52fec1ad30b0934cf021e3925482276433f83c5943d5590a5298f4ce77fe8a5&communication_protocol=dev HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40602 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40604 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40620 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40632 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40642 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40652 - \"GET /api/v2/metadata HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40652 - \"POST /api/v2/register HTTP/1.1\" 200 OK\n", + "INFO: 127.0.0.1:40662 - \"POST /api/v2/api_call HTTP/1.1\" 200 OK\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [63035]\n" + ] + } + ], + "source": [ + "server = sy.orchestra.launch(name=\"test-datasite-1\", port=8081)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5ef99d40-8b1e-4be8-af42-0edb50df7250", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# logging in as root client with default credentials\n", + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "d970eaef-e27e-4d81-b959-da18a0af2f01", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
User 'John Doe' successfully registered! To see users, run `[your_client].users`

" + ], + "text/plain": [ + "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# register a new user\n", + "client.register(\n", + " name=\"John Doe\", email=\"john@email.com\", password=\"pass\", password_verify=\"pass\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "fd8f9701-1fe8-4a96-9d49-7ecb1a29adab", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Returns a list of all the existing users in the domain\n", + "client.users" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "db6af0e7-dceb-48ae-887d-614c41a38797", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'John Doe'" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Selecting an user by index\n", + "client.users[1].name" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "74bc93da-0c86-4397-9a2f-fb4aa414ad3a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[syft.service.user.user.UserView]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Selecting an user by filtering\n", + "search_results = client.users.search(email=\"john@email.com\")\n", + "search_results" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "68d7a572-b90c-44a1-9ae8-e41db39f8f23", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'John Doe'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "searched_user = search_results[0]\n", + "searched_user.name" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "859f20c6-21d2-4448-8f3b-097e6df98b90", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Successfully updated email for the user 'John Doe' to 'updatedjohn@email.com'.

" + ], + "text/plain": [ + "SyftSuccess: Successfully updated email for the user 'John Doe' to 'updatedjohn@email.com'." + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# update email\n", + "searched_user.set_email(email=\"updatedjohn@email.com\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "ef702a91-0674-49ba-80bc-aaa66f83f434", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class UserView:\n", + " id: str = e451c1c5c46e462dbc6e0484626559fc\n", + " name: str = \"John Doe\"\n", + " email: str = \"updatedjohn@email.com\"\n", + " institution: str = None\n", + " website: str = None\n", + " role: str = ServiceRole.DATA_SCIENTIST\n", + " notifications_enabled: str = {: True, : False, : False, : False}\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.user.user.UserView" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "searched_user" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "b8d5a650-2d4b-4fec-a45e-9f93f9e0f73e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Successfully updated password for user 'John Doe' with email 'updatedjohn@email.com'.

" + ], + "text/plain": [ + "SyftSuccess: Successfully updated password for user 'John Doe' with email 'updatedjohn@email.com'." + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# update password\n", + "searched_user.set_password(new_password=\"newpass\", confirm=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "83aba8a1-98a2-4dea-ae18-2f70f2df078e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.users" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "d4ca0ce8-4bb6-44a3-8e4b-6d89b91fd30d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
User details successfully updated.

" + ], + "text/plain": [ + "SyftSuccess: User details successfully updated." + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Update info of an existing user\n", + "searched_user.update(\n", + " name=\"Updated Jane Doe\",\n", + " institution=\"My institution\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "a4d7f854-3771-4dbd-94ad-ffa4a76272ee", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.users" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "b0d7b4fc-eaf3-45bf-b808-6caf91d74ccd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + " \n", + "
\n", + " \"Logo\"\n",\n", + "

Welcome to test-datasite-1

\n", + "
\n", + " URL: http://localhost:8081
\n", + " Server Description: This is the default description for a Datasite Server.
\n", + " Server Type: Datasite
\n", + " Server Side Type:High Side
\n", + " Syft Version: 0.8.7-beta.14
\n", + "\n", + "
\n", + "
\n", + " ⓘ \n", + " This datasite is run by the library PySyft to learn more about how it works visit\n", + " github.com/OpenMined/PySyft.\n", + "
\n", + "

Commands to Get Started

\n", + " \n", + "
    \n", + " \n", + "
  • <your_client>.datasets - list datasets
  • \n", + "
  • <your_client>.code - list code
  • \n", + "
  • <your_client>.projects - list projects
  • \n", + "\n", + "
\n", + " \n", + "

\n", + " " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# validate login for new user\n", + "server.login(email=\"updatedjohn@email.com\", password=\"newpass\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "4b4c078c-663f-437e-b103-92f76602eee1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
ID: e451c1c5c46e462dbc6e0484626559fc deleted

" + ], + "text/plain": [ + "SyftSuccess: ID: e451c1c5c46e462dbc6e0484626559fc deleted" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Delete user\n", + "client.users.delete(searched_user.id)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "3167816a-5efc-4ae5-be5d-389398b74e3c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.users" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "877d0945-0684-42b5-a5d4-4fd3e69f950b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Registration feature successfully enabled

" + ], + "text/plain": [ + "SyftSuccess: Registration feature successfully enabled" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Enable user registration\n", + "client.settings.allow_guest_signup(enable=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "c2260402-642f-41f5-8217-f0d44f75d62b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as GUEST\n" + ] + } + ], + "source": [ + "guest_user = server.login_as_guest()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "fa874373-9289-4531-94cc-9342f1e244ff", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
User 'Curious Scientist' successfully registered!

" + ], + "text/plain": [ + "SyftSuccess: User 'Curious Scientist' successfully registered!" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# register the account\n", + "guest_user.register(\n", + " email=\"scientist@test.com\",\n", + " password=\"123\",\n", + " password_verify=\"123\",\n", + " name=\"Curious Scientist\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "a2db0aee-a380-4bf2-85bf-65c2914d719a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

UserView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client.users" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "17749554-e971-401b-977f-6d1819b2194a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping test-datasite-1\n" + ] + } + ], + "source": [ + "server.land()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6f2c835-7a79-4331-9486-dfeb8acdfbf5", + "metadata": {}, "outputs": [], "source": [] } @@ -37,7 +16129,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb b/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb index 477148a7c33..9946fac52c7 100644 --- a/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb +++ b/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb @@ -1,37 +1,37 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "6296126b-2575-44b4-9f09-2d73f444ca96", - "metadata": {}, - "outputs": [], - "source": [ - "# -- ingress\n", - "# -- open port for rathole server\n", - "# -- nodeport vs websockets" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "6296126b-2575-44b4-9f09-2d73f444ca96", + "metadata": {}, + "outputs": [], + "source": [ + "# -- ingress\n", + "# -- open port for rathole server\n", + "# -- serverport vs websockets" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/04-setup-datasite-with-tunnel.ipynb similarity index 100% rename from notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb rename to notebooks/scenarios/reverse-tunnel/04-setup-datasite-with-tunnel.ipynb diff --git a/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb b/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb index 536dce15404..3c8d070e544 100644 --- a/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb +++ b/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb @@ -1,36 +1,36 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "1c3868bb-a140-451a-ac51-a4fd17bf2ab8", - "metadata": {}, - "outputs": [], - "source": [ - "# -- how to list domains on gateway\n", - "# -- getting a proxy client" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" 
+ "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "1c3868bb-a140-451a-ac51-a4fd17bf2ab8", + "metadata": {}, + "outputs": [], + "source": [ + "# -- how to list datasites on gateway\n", + "# -- getting a proxy client" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb index 3a1863c0bdd..1803bc2257c 100644 --- a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb +++ b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb @@ -50,8 +50,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"private-data-example-domain-1\", port=\"auto\", reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"private-data-example-datasite-1\", port=\"auto\", reset=True\n", ")" ] }, @@ -82,7 +82,7 @@ "source": [ "# syft absolute\n", "\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -322,9 +322,9 @@ "metadata": {}, "outputs": [], "source": [ - "# Cleanup local domain server\n", - "if node.node_type.value == \"python\":\n", - " node.land()" + "# Cleanup local datasite server\n", + "if server.server_type.value == \"python\":\n", + " server.land()" ] } ], diff --git a/notebooks/tutorials/data-owner/02-account-management.ipynb b/notebooks/tutorials/data-owner/02-account-management.ipynb index a4e64b74698..4f145816565 100644 --- a/notebooks/tutorials/data-owner/02-account-management.ipynb +++ b/notebooks/tutorials/data-owner/02-account-management.ipynb @@ -48,8 +48,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"account-management-example-domain-1\", port=8041, reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"account-management-example-datasite-1\", port=8041, reset=True\n", ")" ] }, @@ -78,10 +78,8 @@ "source": [ "# syft absolute\n", "from syft.service.user.user import ServiceRole\n", - "from syft.service.user.user import UserCreate\n", - "from syft.service.user.user import UserUpdate\n", "\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -107,9 +105,7 @@ "metadata": {}, "outputs": [], "source": [ - "client.users.create(\n", - " UserCreate(email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\")\n", - ")" + "client.users.create(email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\")" ] }, { @@ -209,7 +205,7 @@ "outputs": [], "source": [ "updated_user = client.users.update(\n", - " new_user.id, UserUpdate(role=ServiceRole.DATA_SCIENTIST, password=\"123\")\n", + " uid=new_user.id, 
role=ServiceRole.DATA_SCIENTIST, password=\"123\"\n", ")" ] }, @@ -238,7 +234,7 @@ "metadata": {}, "outputs": [], "source": [ - "ds_client = node.login(email=\"newuser@openmined.org\", password=\"123\")" + "ds_client = server.login(email=\"newuser@openmined.org\", password=\"123\")" ] }, { @@ -328,7 +324,7 @@ "metadata": {}, "outputs": [], "source": [ - "new_user = node.login(email=\"joker@test.com\", password=\"joker123\")" + "new_user = server.login(email=\"joker@test.com\", password=\"joker123\")" ] }, { @@ -482,7 +478,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.3" + "version": "3.11.5" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb index c05d6f7d556..398862601f2 100644 --- a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb +++ b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb @@ -48,8 +48,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"messages-requests-example-domain-1-do\", port=7021, reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"messages-requests-example-datasite-1-do\", port=7021, reset=True\n", ")" ] }, @@ -76,7 +76,7 @@ "metadata": {}, "outputs": [], "source": [ - "admin_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "admin_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -122,7 +122,7 @@ "metadata": {}, "outputs": [], "source": [ - "guest_client = node.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = server.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { diff --git a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb index 7c3f409105a..760ec6c481d 100644 --- a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb +++ b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb @@ -56,8 +56,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"services-api-example-domain-1\", port=\"auto\", reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"services-api-example-datasite-1\", port=\"auto\", reset=True\n", ")" ] }, @@ -78,7 +78,7 @@ "source": [ "# syft absolute\n", "\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { diff --git a/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb b/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb index 30dcf080d8c..bfae6dbdb2d 100644 --- a/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb +++ b/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb @@ -21,7 +21,7 @@ "id": "2", "metadata": {}, "source": [ - "## Connecting to a Domain" + "## Connecting to a Datasite" ] }, { diff --git a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb index acf4ec170df..731123c88d4 100644 --- a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb +++ b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb @@ -48,8 +48,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"private-datasets-example-domain-1\", port=8062, 
reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"private-datasets-example-datasite-1\", port=8062, reset=True\n", ")" ] }, @@ -76,7 +76,7 @@ "metadata": {}, "outputs": [], "source": [ - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -147,7 +147,7 @@ "metadata": {}, "outputs": [], "source": [ - "guest_client = node.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = server.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -437,7 +437,7 @@ "metadata": {}, "outputs": [], "source": [ - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { diff --git a/notebooks/tutorials/data-scientist/04-syft-functions.ipynb b/notebooks/tutorials/data-scientist/04-syft-functions.ipynb index cbee1755a3d..64812ef5333 100644 --- a/notebooks/tutorials/data-scientist/04-syft-functions.ipynb +++ b/notebooks/tutorials/data-scientist/04-syft-functions.ipynb @@ -48,8 +48,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"syft-functions-example-domain-1\", port=7022, reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"syft-functions-example-datasite-1\", port=7022, reset=True\n", ")" ] }, @@ -78,7 +78,7 @@ "source": [ "# syft absolute\n", "\n", - "admin_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "admin_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -140,7 +140,7 @@ "metadata": {}, "outputs": [], "source": [ - "guest_client = node.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = server.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -252,7 +252,7 @@ "id": "23", "metadata": {}, "source": [ - "You have probably noticed that in the last example we also specified the output policy. Its purpose has to do with the release of information for a given function and controlling the parameters that this release comes with. For example, if a data owner and a data scientist agree on the content of a function run on a domain and on what private data that can be run on, their work might not be done yet. They might negotiate how many times that function can be run, whether or not the data scientist can have access or what happens before releasing the output (maybe we add some noise like in the case of differential privacy). At the moment we have policies that allow data scientist to ask for a certain amount of runs on function, but the ones you will find most often is `SingleExecutionExactOutput` that ask for a single use on a function. We have used it so much that we came with the `syft_function_single_use` decorator that use by default that output policy. What is also cool is that you can pass the input for an input policy to this decorator to get a shorter version like this:" + "You have probably noticed that in the last example we also specified the output policy. Its purpose has to do with the release of information for a given function and controlling the parameters that this release comes with. For example, if a data owner and a data scientist agree on the content of a function run on a datasite and on what private data that can be run on, their work might not be done yet. 
They might negotiate how many times that function can be run, whether or not the data scientist can have access to the result, or what happens before the output is released (for example, adding noise, as in differential privacy). At the moment we have policies that let a data scientist request a certain number of runs of a function, but the one you will encounter most often is `SingleExecutionExactOutput`, which asks for a single use of a function. We use it so often that we added the `syft_function_single_use` decorator, which applies that output policy by default. What is also convenient is that you can pass the inputs for an input policy to this decorator to get a shorter version like this:" ] }, { @@ -435,7 +435,7 @@ "id": "42", "metadata": {}, "source": [ - "Right! Our code was not approved, so we should wait for the review from the data owner. As we also deployed the domain, we will do that quickly here, but for more details on what is happening check the data owner sections under tutorials:" + "Right! Our code was not approved, so we should wait for the review from the data owner. As we also deployed the datasite, we will do that quickly here, but for more details on what is happening check the data owner sections under tutorials:" ] }, { @@ -509,7 +509,7 @@ "id": "50", "metadata": {}, "source": [ - "Notice that the result we see is still `1.0` which looks like the result on the mock data. That is because it actually is! The object returned is an `ActionObject` which here behaves like a pointer for the data on the domain:" + "Notice that the result we see is still `1.0` which looks like the result on the mock data. That is because it actually is! The object returned is an `ActionObject` which here behaves like a pointer for the data on the datasite:" ] }, { diff --git a/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb b/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb index 5d7ff62fa94..fa8cfc445ba 100644 --- a/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb +++ b/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb @@ -48,8 +48,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"messages-requests-example-domain-1-ds\", port=7023, reset=True\n", + "server = sy.orchestra.launch(\n", + " name=\"messages-requests-example-datasite-1-ds\", port=7023, reset=True\n", ")" ] }, { @@ -76,7 +76,7 @@ "metadata": {}, "outputs": [], "source": [ - "admin_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "admin_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -122,7 +122,7 @@ "metadata": {}, "outputs": [], "source": [ - "guest_client = node.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = server.client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { diff --git a/notebooks/tutorials/deployments/01-deploy-python.ipynb b/notebooks/tutorials/deployments/01-deploy-python.ipynb index cd80e629126..0dedeb13ea5 100644 --- a/notebooks/tutorials/deployments/01-deploy-python.ipynb +++ b/notebooks/tutorials/deployments/01-deploy-python.ipynb @@ -59,9 +59,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Working with Python Domain\n", + "## Working with Python Datasite\n", "\n", - "`PySyft` makes it very easy to develop against a domain in a notebook by providing the `sy.orchestra` interface. 
It allows you to start a domain with a webserver in a notebook in the background, which is a lightweight version of a Domain that would be used in production. You can specify options such as what kind of database you are using, whether you want to use networking and how many processes you want to use. You can launch a Domain by simply executing:" + "`PySyft` makes it very easy to develop against a datasite in a notebook by providing the `sy.orchestra` interface. It allows you to start a datasite with a webserver in a notebook in the background, which is a lightweight version of a Datasite that would be used in production. You can specify options such as what kind of database you are using, whether you want to use networking and how many processes you want to use. You can launch a Datasite by simply executing:" ] }, { @@ -80,8 +80,8 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", - " name=\"dev-mode-example-domain-1\", port=8020, reset=True, dev_mode=True\n", + "server = sy.orchestra.launch(\n", + " name=\"dev-mode-example-datasite-1\", port=8020, reset=True, dev_mode=True\n", ")" ] }, @@ -92,12 +92,12 @@ "If we don't need a webserver (for development this is true in many cases), we can omit the port and instead use\n", "\n", "```python\n", - "node = sy.orchestra.launch(name=\"dev-mode-example-domain-1\", dev_mode=True, reset=True)\n", + "server = sy.orchestra.launch(name=\"dev-mode-example-datasite-1\", dev_mode=True, reset=True)\n", "```\n", "\n", "One of the benefits of not using a port is that you can use a debugger and set breakpoints within api calls. This makes debugging way faster in many cases.\n", "\n", - "Now, we are ready to start using the domain. The domain comes with test login credentials for the admin." + "Now, we are ready to start using the datasite. The datasite comes with test login credentials for the admin." ] }, { @@ -106,14 +106,14 @@ "metadata": {}, "outputs": [], "source": [ - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Once you are logged in, you are ready to start using the domain, for instance for creating a dataset (this one is empty, just as a example)." + "Once you are logged in, you are ready to start using the datasite, for instance for creating a dataset (this one is empty, just as an example)."
] }, { @@ -130,7 +130,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Lastly to stop or terminate your Domain, we can execute the following command:" + "Lastly to stop or terminate your Datasite, we can execute the following command:" ] }, { @@ -139,7 +139,7 @@ "metadata": {}, "outputs": [], "source": [ - "node.land()" + "server.land()" ] }, { @@ -157,7 +157,7 @@ "- [04-pytorch-example.ipynb](../../api/0.8/04-pytorch-example.ipynb)\n", "- [05-custom-policy.ipynb](../../api/0.8/05-custom-policy.ipynb)\n", "- [06-multiple-code-requests.ipynb](../../api/0.8/06-multiple-code-requests.ipynb)\n", - "- [07-domain-register-control-flow.ipynb](../../api/0.8/07-domain-register-control-flow.ipynb)\n", + "- [07-datasite-register-control-flow.ipynb](../../api/0.8/07-datasite-register-control-flow.ipynb)\n", "- [08-code-version.ipynb](../../api/0.8/08-code-version.ipynb)\n", "- [09-blob-storage.ipynb](../../api/0.8/09-blob-storage.ipynb)\n", "- [10-container-images.ipynb](../../api/0.8/10-container-images.ipynb)\n", diff --git a/notebooks/tutorials/deployments/02-deploy-container.ipynb b/notebooks/tutorials/deployments/02-deploy-container.ipynb index a8717135d3f..6c8b5bbfdb8 100644 --- a/notebooks/tutorials/deployments/02-deploy-container.ipynb +++ b/notebooks/tutorials/deployments/02-deploy-container.ipynb @@ -58,14 +58,14 @@ "source": [ "``` bash\n", "docker run -it \\\n", - " -e NODE_NAME=syft-example-domain-1 \\\n", - " -e NODE_TYPE=domain \\\n", + " -e SERVER_NAME=syft-example-datasite-1 \\\n", + " -e SERVER_TYPE=datasite \\\n", " -e N_CONSUMERS=1 \\\n", " -e SINGLE_CONTAINER_MODE=true \\\n", " -e CREATE_PRODUCER=true \\\n", " -e INMEMORY_WORKERS=true \\\n", " -p 8080:80 --add-host=host.docker.internal:host-gateway \\\n", - " --name syft-example-domain-1 openmined/grid-backend:$SYFT_VERSION\n", + " --name syft-example-datasite-1 openmined/syft-backend:$SYFT_VERSION\n", "```" ] }, @@ -75,7 +75,7 @@ "source": [ "## Working with the single container deployment\n", "\n", - "PySyft makes it very simple to connect to any existing Syft cluster by providing the `sy.orchestra` interface. You can connect to the domain by executing these steps in your jupyter notebook:" + "PySyft makes it very simple to connect to any existing Syft cluster by providing the `sy.orchestra` interface. You can connect to the datasite by executing these steps in your jupyter notebook:" ] }, { @@ -86,7 +86,7 @@ "# syft absolute\n", "import syft as sy\n", "\n", - "node = sy.orchestra.launch(name=\"syft-example-domain-1\", deploy_to=\"remote\")\n", + "server = sy.orchestra.launch(name=\"syft-example-datasite-1\", deploy_to=\"remote\")\n", "```" ] }, @@ -94,12 +94,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "This will return a node handle by connecting to `http://localhost:8080` which is the default host and port where your docker container will be running. You can connect to a different host and port by setting the environment variables `NODE_URL` and `NODE_PORT`.\n", + "This will return a server handle by connecting to `http://localhost:8080` which is the default host and port where your docker container will be running. 
You can connect to a different host and port by setting the environment variables `SERVER_URL` and `SERVER_PORT`.\n", "```python\n", "import os\n", "\n", - "os.environ[\"NODE_URL\"] = \"\"\n", - "os.environ[\"NODE_PORT\"] = \"\"\n", + "os.environ[\"SERVER_URL\"] = \"\"\n", + "os.environ[\"SERVER_PORT\"] = \"\"\n", "```" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Now, we are ready to start using the domain. The domain comes with default login credentials for the admin." + "Now, we are ready to start using the datasite. The datasite comes with default login credentials for the admin." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "```python3\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "```" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Once you are logged in, you are ready to start using the domain, for instance for creating a dataset (this one is empty, just as a example)." + "Once you are logged in, you are ready to start using the datasite, for instance for creating a dataset (this one is empty, just as an example)." ] }, { @@ -151,7 +151,7 @@ "- [04-pytorch-example.ipynb](../../api/0.8/04-pytorch-example.ipynb)\n", "- [05-custom-policy.ipynb](../../api/0.8/05-custom-policy.ipynb)\n", "- [06-multiple-code-requests.ipynb](../../api/0.8/06-multiple-code-requests.ipynb)\n", - "- [07-domain-register-control-flow.ipynb](../../api/0.8/07-domain-register-control-flow.ipynb)\n", + "- [07-datasite-register-control-flow.ipynb](../../api/0.8/07-datasite-register-control-flow.ipynb)\n", "- [08-code-version.ipynb](../../api/0.8/08-code-version.ipynb)\n", "- [09-blob-storage.ipynb](../../api/0.8/09-blob-storage.ipynb)\n", "- [10-container-images.ipynb](../../api/0.8/10-container-images.ipynb)\n", diff --git a/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb b/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb index 3ba644ee5dc..c64e6c40f4a 100644 --- a/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb +++ b/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb @@ -78,7 +78,7 @@ "If you want to deploy your Kubernetes cluster in a resource-constrained environment, use the following flags to override the default configurations. Please note that you will need at least 1 CPU and 2 GB of RAM on Docker, and some tests may not work in such low-resource environments:\n", "\n", "```sh\n", - "helm install my-syft openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className=\"traefik\" --set node.resourcesPreset=null --set seaweedfs.resourcesPreset=null --set mongo.resourcesPreset=null --set registry.resourcesPreset=null --set proxy.resourcesPreset=null --set frontend.resourcesPreset=null\n", + "helm install my-syft openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className=\"traefik\" --set server.resourcesPreset=null --set seaweedfs.resourcesPreset=null --set mongo.resourcesPreset=null --set registry.resourcesPreset=null --set proxy.resourcesPreset=null --set frontend.resourcesPreset=null\n", "```\n", "\n", "
\n", @@ -89,7 +89,7 @@ "If you would like to set your own default password even for the production style deployment, use the following command:\n", "\n", "```sh\n", - "helm install my-syft openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className=\"traefik\" --set global.randomizedSecrets=false --set node.secret.defaultRootPassword=\"changethis\" --set seaweedfs.secret.s3RootPassword=\"admin\" --set mongo.secret.rootPassword=\"example\"\n", + "helm install my-syft openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className=\"traefik\" --set global.randomizedSecrets=false --set server.secret.defaultRootPassword=\"changethis\" --set seaweedfs.secret.s3RootPassword=\"admin\" --set mongo.secret.rootPassword=\"example\"\n", "```\n", "\n" ] @@ -104,7 +104,7 @@ "source": [ "## Working with the local Kubernetes deployment\n", "\n", - "PySyft makes it very simple to connect to your existing Syft cluster by providing the `sy.orchestra` interface. You can connect to the domain by executing these steps in your jupyter notebook:" + "PySyft makes it very simple to connect to your existing Syft cluster by providing the `sy.orchestra` interface. You can connect to the datasite by executing these steps in your jupyter notebook:" ] }, { @@ -115,7 +115,7 @@ "# syft absolute\n", "import syft as sy\n", "\n", - "node = sy.orchestra.launch(name=\"syft-example-domain-1\", deploy_to=\"remote\")\n", + "server = sy.orchestra.launch(name=\"syft-example-datasite-1\", deploy_to=\"remote\")\n", "```" ] }, @@ -123,13 +123,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "This will return a node handle by connecting to `http://localhost:8080` which is the default host and port where your kubernetes cluster will be running. You can connect to a different host and port by setting the environment variables `NODE_URL` and `NODE_PORT`.\n", + "This will return a server handle by connecting to `http://localhost:8080` which is the default host and port where your kubernetes cluster will be running. You can connect to a different host and port by setting the environment variables `SERVER_URL` and `SERVER_PORT`.\n", "\n", "```python\n", "import os\n", "\n", - "os.environ[\"NODE_URL\"] = \"\"\n", - "os.environ[\"NODE_PORT\"] = \"\"\n", + "os.environ[\"SERVER_URL\"] = \"\"\n", + "os.environ[\"SERVER_PORT\"] = \"\"\n", "```" ] }, @@ -137,7 +137,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now, we are ready to start using the domain. Since helm is a product grade deployement stack, the domain comes with a randomized password for the default email credentials for the admin. Either run with Step 5 with your custom password or to extract the randomized password using `kubectl`, run the following command (in case you use a custom cluster name in step 1, replace `--context=k3d-$CLUSTER_NAME` appropriately): \n", + "Now, we are ready to start using the datasite. Since helm is a product grade deployement stack, the datasite comes with a randomized password for the default email credentials for the admin. 
Either run Step 5 with your custom password, or extract the randomized password using `kubectl` by running the following command (in case you use a custom cluster name in step 1, replace `--context=k3d-$CLUSTER_NAME` appropriately): \n", "\n", "```sh\n", "kubectl --context=k3d-syft get secret backend-secret -n syft -o jsonpath='{.data.defaultRootPassword}' | base64 --decode\n", "```\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "```python\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", "```" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Once you are logged in, you are ready to start using the domain, for instance for creating a dataset (this one is empty, just as a example)." + "Once you are logged in, you are ready to start using the datasite, for instance for creating a dataset (this one is empty, just as an example)." ] }, { @@ -192,7 +192,7 @@ "- [04-pytorch-example.ipynb](../../api/0.8/04-pytorch-example.ipynb)\n", "- [05-custom-policy.ipynb](../../api/0.8/05-custom-policy.ipynb)\n", "- [06-multiple-code-requests.ipynb](../../api/0.8/06-multiple-code-requests.ipynb)\n", - "- [07-domain-register-control-flow.ipynb](../../api/0.8/07-domain-register-control-flow.ipynb)\n", + "- [07-datasite-register-control-flow.ipynb](../../api/0.8/07-datasite-register-control-flow.ipynb)\n", "- [08-code-version.ipynb](../../api/0.8/08-code-version.ipynb)\n", "- [09-blob-storage.ipynb](../../api/0.8/09-blob-storage.ipynb)\n", "- [10-container-images.ipynb](../../api/0.8/10-container-images.ipynb)\n", diff --git a/notebooks/tutorials/deployments/07-deploy-devspace.ipynb b/notebooks/tutorials/deployments/07-deploy-devspace.ipynb index 29632c99dec..1033814036b 100644 --- a/notebooks/tutorials/deployments/07-deploy-devspace.ipynb +++ b/notebooks/tutorials/deployments/07-deploy-devspace.ipynb @@ -82,12 +82,12 @@ "You can use the command `k9s` or `lazydocker` (will require installing lazydocker) to view your cluster.\n", "\n", "\n", - "#### Launching Different Node Types\n", - "Alternatively, you can also use the following commands to launch different node types:\n", - "* Launching a Domain: `CLUSTER_NAME=testdomain1 CLUSTER_HTTP_PORT=9082 tox -e dev.k8s.launch.domain`\n", + "#### Launching Different Server Types\n", + "Alternatively, you can also use the following commands to launch different server types:\n", + "* Launching a Datasite: `CLUSTER_NAME=testdatasite1 CLUSTER_HTTP_PORT=9082 tox -e dev.k8s.launch.datasite`\n", "* Launching a Gateway: `CLUSTER_NAME=testgateway1 CLUSTER_HTTP_PORT=9081 tox -e dev.k8s.launch.gateway`\n", "* Launching a Enclave: `CLUSTER_NAME=testenclave1 CLUSTER_HTTP_PORT=9083 tox -e dev.k8s.launch.enclave`\n", - "* Launching Nodes with `hotreload` using tox posargs: `tox -e dev.k8s.launch.domain -- hotreload`" + "* Launching Servers with `hotreload` using tox posargs: `tox -e dev.k8s.launch.datasite -- hotreload`" ] }, { @@ -105,7 +105,7 @@ "- [04-jax-example.ipynb](../../api/0.8/04-jax-example.ipynb)\n", "- [05-custom-policy.ipynb](../../api/0.8/05-custom-policy.ipynb)\n", "- [06-multiple-code-requests.ipynb](../../api/0.8/06-multiple-code-requests.ipynb)\n", - "- [07-domain-register-control-flow.ipynb](../../api/0.8/07-domain-register-control-flow.ipynb)\n", + "- [07-datasite-register-control-flow.ipynb](../../api/0.8/07-datasite-register-control-flow.ipynb)\n", "- [08-code-version.ipynb](../../api/0.8/08-code-version.ipynb)\n",
"- [09-blob-storage.ipynb](../../api/0.8/09-blob-storage.ipynb)\n", "- [10-container-images.ipynb](../../api/0.8/10-container-images.ipynb)\n", diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb index e51018e2be0..b710b59a92f 100644 --- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb +++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb @@ -93,9 +93,9 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"hello-syft-usa-server\", port=9000, reset=True)\n", - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "root_domain_client.register(\n", + "server = sy.orchestra.launch(name=\"hello-syft-usa-server\", port=9000, reset=True)\n", + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"janedoe@caltech.edu\",\n", " password=\"abc123\",\n", @@ -104,7 +104,7 @@ " website=\"https://www.caltech.edu/\",\n", ")\n", "\n", - "ds_client = node.login(email=\"janedoe@caltech.edu\", password=\"abc123\")" + "ds_client = server.login(email=\"janedoe@caltech.edu\", password=\"abc123\")" ] }, { @@ -114,7 +114,7 @@ "source": [ "## Data owner - Part 1\n", "\n", - "### Upload Data to Domain" + "### Upload Data to Datasite" ] }, { @@ -162,7 +162,7 @@ " )\n", " ],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -319,7 +319,7 @@ "metadata": {}, "outputs": [], "source": [ - "root_domain_client.requests" + "root_datasite_client.requests" ] }, { @@ -329,7 +329,7 @@ "metadata": {}, "outputs": [], "source": [ - "request = root_domain_client.requests[0]" + "request = root_datasite_client.requests[0]" ] }, { @@ -578,7 +578,7 @@ "metadata": {}, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb index 6e1e438c04e..1fd95c66ebb 100644 --- a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb +++ b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb @@ -13,7 +13,7 @@ "id": "1", "metadata": {}, "source": [ - "In this tutorial, we show how external parties can audit internal AI systems without accessing them — mitigating privacy, security, and IP costs and risks. **This tutorial uses syft 0.8.2.b0, with a domain setup that does not use networking, to run the tutorial with networking read more in section 1.1.1**\n", + "In this tutorial, we show how external parties can audit internal AI systems without accessing them — mitigating privacy, security, and IP costs and risks. **This tutorial uses syft 0.8.2.b0, with a datasite setup that does not use networking, to run the tutorial with networking read more in section 1.1.1**\n", "\n", "You can read more about this tutorial and the follow up tutorials here on the [blog post](https://blog.openmined.org/)." ] @@ -70,7 +70,7 @@ "id": "6", "metadata": {}, "source": [ - "### Launch PySyft domain server" + "### Launch PySyft datasite server" ] }, { @@ -78,7 +78,7 @@ "id": "7", "metadata": {}, "source": [ - "To start we launch a `PySyft` domain server. This is the backend that stores the private data." + "To start we launch a `PySyft` datasite server. This is the backend that stores the private data." 
] }, { @@ -90,7 +90,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"syft-domain\", reset=True)" + "server = sy.orchestra.launch(name=\"syft-datasite\", reset=True)" ] }, { @@ -98,15 +98,15 @@ "id": "9", "metadata": {}, "source": [ - "There are 3 ways to launch a `PySyft` domain\n", + "There are 3 ways to launch a `PySyft` datasite:\n", "\n", "**A) From a notebook, with simulated networking \*\*THIS NOTEBOOK\*\***\n", " - Apart from the network calls, this uses exactly the same code as other setups\n", - " - run orchestra **without a port**: `sy.orchestra.launch(name=\"syft-domain\")`\n", + " - run orchestra **without a port**: `sy.orchestra.launch(name=\"syft-datasite\")`\n", " \n", "**B) From a notebook with networking (also supports docker)**\n", " - This spawns a separate process that starts a uvicorn webserver\n", - " - run orchestra **with a port**:`sy.orchestra.launch(name=\"syft-domain\", port=8080)`\n", + " - run orchestra **with a port**: `sy.orchestra.launch(name=\"syft-datasite\", port=8080)`\n", " \n", "**C) From the command line (supports docker/kubernetes)**\n", " - setup for production\n", @@ -129,7 +129,7 @@ "id": "11", "metadata": {}, "source": [ - "We can now login to our domain using the default admin credentials. In production we would change these." + "We can now log in to our datasite using the default admin credentials. In production we would change these." ] }, { @@ -141,7 +141,7 @@ }, "outputs": [], "source": [ - "mo_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "mo_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -149,7 +149,7 @@ "id": "13", "metadata": {}, "source": [ - "### Configure node to allow user registration" + "### Configure server to allow user registration" ] }, { @@ -157,7 +157,7 @@ "id": "14", "metadata": {}, "source": [ - "For this tutorial we allow other users to create their own account. New accounts will get limited permissions and will only be able to see the mock version of any datasets we upload to the domain." + "For this tutorial we allow other users to create their own account. New accounts will get limited permissions and will only be able to see the mock version of any datasets we upload to the datasite." ] }, { @@ -223,7 +223,7 @@ "id": "21", "metadata": {}, "source": [ - "To upload our dataset to the domain we need to wrap it in a `Syft Dataset` object. We can add some metadata to the object." + "To upload our dataset to the datasite we need to wrap it in a `Syft Dataset` object. We can add some metadata to the object." ] }, { @@ -296,13 +296,13 @@ }, "outputs": [], "source": [ - "auditor_client = node.register(\n", + "auditor_client = server.register(\n", " name=\"Peter Jones\",\n", " email=\"pjones@aisb.org\",\n", " password=\"password1234\",\n", " password_verify=\"password1234\",\n", ")\n", - "auditor_client = node.login(email=\"pjones@aisb.org\", password=\"password1234\")" + "auditor_client = server.login(email=\"pjones@aisb.org\", password=\"password1234\")" ] }, { diff --git a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb index 10287b41fe5..adca3805b12 100644 --- a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb +++ b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb @@ -26,7 +26,7 @@ "id": "1", "metadata": {}, "source": [ - "## 1. Launch the domain, upload the data" + "## 1. 
Launch the datasite, upload the data" ] }, { "cell_type": "code", "execution_count": null, "id": "2", "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"mnist-torch-domain\", dev_mode=True, reset=True)\n", - "root_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "server = sy.orchestra.launch(name=\"mnist-torch-datasite\", dev_mode=True, reset=True)\n", + "root_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { diff --git a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb index 7b65d033cbc..13e52c83015 100644 --- a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb +++ b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb @@ -4,7 +4,9 @@ "cell_type": "code", "execution_count": null, "id": "0", - "metadata": {}, + "metadata": { + "metadata": {} + }, "outputs": [], "source": [ "# third party\n", @@ -20,18 +22,20 @@ "id": "1", "metadata": {}, "source": [ - "## 1. DS logins to the domain with the credentials created by the DO" + "## 1. DS logs in to the datasite with the credentials created by the DO" ] }, { "cell_type": "code", "execution_count": null, "id": "2", - "metadata": {}, + "metadata": { + "metadata": {} + }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"mnist-torch-domain\", dev_mode=True)\n", - "ds_client = node.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" + 
"server = sy.orchestra.launch(name=\"mnist-torch-datasite\", dev_mode=True)\n", + "ds_client = server.login(email=\"sheldon@caltech.edu\", password=\"changethis\")" ] }, { diff --git a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb index 368d7090d57..df6b188c0c4 100644 --- a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb +++ b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-1\", port=7081, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-1\", port=7081, reset=True)" ] }, { @@ -74,7 +74,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -256,7 +256,7 @@ " name=\"test\",\n", " asset_list=[sy.Asset(name=\"bikes\", data=fixed_df, mock=mock, mock_is_real=False)],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -276,7 +276,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -307,7 +307,7 @@ }, "outputs": [], "source": [ - "guest_domain_client = node.client" + "guest_datasite_client = server.client" ] }, { @@ -319,7 +319,7 @@ }, "outputs": [], "source": [ - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -355,8 +355,8 @@ }, "outputs": [], "source": [ - "guest_domain_client = node.client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_datasite_client = server.client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -368,7 +368,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -632,7 +632,7 @@ }, "outputs": [], "source": [ - "domain_client = node.client.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.client.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -644,7 +644,7 @@ }, "outputs": [], "source": [ - "notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -750,7 +750,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { diff --git a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb index c7a8d83c7f6..bdb331d2f4e 100644 --- a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb +++ b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-2\", port=9082, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-2\", port=9082, reset=True)" ] }, { @@ -74,7 +74,7 @@ }, 
"outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -340,7 +340,7 @@ " sy.Asset(name=\"complaints\", data=complaints, mock=mock, mock_is_real=False)\n", " ],\n", ")\n", - "domain_client.upload_dataset(dataset)" + "datasite_client.upload_dataset(dataset)" ] }, { @@ -360,7 +360,7 @@ }, "outputs": [], "source": [ - "user = domain_client.register(\n", + "user = datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -371,9 +371,9 @@ "\n", "# todo: give user data scientist role\n", "\n", - "guest_domain_client = node.client\n", + "guest_datasite_client = server.client\n", "\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -409,7 +409,7 @@ }, "outputs": [], "source": [ - "guest_domain_client = node.client" + "guest_datasite_client = server.client" ] }, { @@ -421,8 +421,8 @@ }, "outputs": [], "source": [ - "# guest_domain_client = worker.guest_client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "# guest_datasite_client = worker.guest_client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -434,7 +434,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -830,7 +830,7 @@ }, "outputs": [], "source": [ - "domain_client = node.client.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.client.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -850,7 +850,7 @@ }, "outputs": [], "source": [ - "notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -925,7 +925,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { @@ -1020,7 +1020,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb index 407f6507c3a..7dc6a47b948 100644 --- a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb +++ b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb @@ -57,7 +57,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-3\", port=7083, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-3\", port=7083, reset=True)" ] }, { @@ -77,7 +77,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -355,7 +355,7 @@ " sy.Asset(name=\"complaints\", data=complaints, mock=mock, mock_is_real=False)\n", " ],\n", ")\n", - "domain_client.upload_dataset(dataset)" + "datasite_client.upload_dataset(dataset)" ] }, { @@ -375,7 +375,7 @@ }, "outputs": [], "source": [ - "user = domain_client.register(\n", + "user = datasite_client.register(\n", 
" name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -386,9 +386,9 @@ "\n", "# todo: give user data scientist role\n", "\n", - "guest_domain_client = node.client\n", + "guest_datasite_client = server.client\n", "\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -424,8 +424,8 @@ }, "outputs": [], "source": [ - "guest_domain_client = node.client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_datasite_client = server.client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -437,7 +437,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -955,7 +955,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -967,7 +967,7 @@ }, "outputs": [], "source": [ - "notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -1042,7 +1042,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { @@ -1137,7 +1137,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb index daed660972d..1d065baae95 100644 --- a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb +++ b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb @@ -46,7 +46,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-4\", port=9084, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-4\", port=9084, reset=True)" ] }, { @@ -66,7 +66,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -211,7 +211,7 @@ " name=\"bikes2\",\n", " asset_list=[sy.Asset(name=\"bikes\", data=df, mock=mock, mock_is_real=False)],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -231,7 +231,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -242,9 +242,9 @@ "\n", "# todo: give user data scientist role\n", "\n", - "guest_domain_client = node.client\n", + "guest_datasite_client = server.client\n", "\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -288,7 +288,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -716,7 +716,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", 
password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -736,7 +736,7 @@ }, "outputs": [], "source": [ - "notifications = root_domain_client.notifications.get_all_unread()" + "notifications = root_datasite_client.notifications.get_all_unread()" ] }, { @@ -811,7 +811,7 @@ }, "outputs": [], "source": [ - "real_data = root_domain_client.datasets[0].assets[0].data" + "real_data = root_datasite_client.datasets[0].assets[0].data" ] }, { @@ -906,7 +906,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb index 0bff86b5c06..8bece628ede 100644 --- a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-5\", port=9085, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-5\", port=9085, reset=True)" ] }, { @@ -74,7 +74,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -298,7 +298,7 @@ "outputs": [], "source": [ "dataset = sy.Dataset(name=\"test\", asset_list=assets)\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -330,7 +330,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -341,9 +341,9 @@ "\n", "# todo: give user data scientist role\n", "\n", - "guest_domain_client = node.client\n", + "guest_datasite_client = server.client\n", "\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -382,7 +382,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[-1]" + "ds = guest_datasite_client.datasets[-1]" ] }, { @@ -903,7 +903,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -923,7 +923,7 @@ }, "outputs": [], "source": [ - "notifications = root_domain_client.notifications.get_all_unread()" + "notifications = root_datasite_client.notifications.get_all_unread()" ] }, { @@ -999,8 +999,8 @@ "outputs": [], "source": [ "real_data1, real_data2 = (\n", - " root_domain_client.datasets[-1].assets[\"weather1\"].data,\n", - " root_domain_client.datasets[-1].assets[\"weather2\"].data,\n", + " root_datasite_client.datasets[-1].assets[\"weather1\"].data,\n", + " root_datasite_client.datasets[-1].assets[\"weather2\"].data,\n", ")" ] }, @@ -1070,7 +1070,7 @@ }, "outputs": [], "source": [ - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -1175,7 +1175,7 @@ }, "outputs": [], "source": [ - "node.land()" + 
"server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb index 896f842e6cb..2fc567f9be9 100644 --- a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb +++ b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-6\", port=9086, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-6\", port=9086, reset=True)" ] }, { @@ -74,7 +74,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -287,7 +287,7 @@ " sy.Asset(name=\"weather\", data=weather_2012_final, mock=mock, mock_is_real=False)\n", " ],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -319,7 +319,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -328,8 +328,8 @@ " website=\"https://www.caltech.edu/\",\n", ")\n", "# todo: give user data scientist role\n", - "guest_domain_client = node.client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_datasite_client = server.client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -368,7 +368,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -805,7 +805,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -826,7 +826,7 @@ }, "outputs": [], "source": [ - "notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -901,7 +901,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { @@ -958,7 +958,7 @@ }, "outputs": [], "source": [ - "guest_client = node.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_client = server.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -1072,7 +1072,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb index dab79f0c217..bbbfe46751e 100644 --- a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb @@ -54,7 +54,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-7\", port=9087, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-7\", port=9087, reset=True)" ] }, { @@ -74,7 +74,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = 
server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -307,7 +307,7 @@ " )\n", " ],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -327,7 +327,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -336,8 +336,8 @@ " website=\"https://www.caltech.edu/\",\n", ")\n", "# todo: give user data scientist role\n", - "guest_domain_client = node.client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_datasite_client = server.client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -860,7 +860,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -881,7 +881,7 @@ }, "outputs": [], "source": [ - "notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -956,7 +956,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { @@ -1086,7 +1086,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb index 32beba9af48..153ec4d16e0 100644 --- a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb +++ b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb @@ -56,7 +56,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"pandas-test-domain-8\", port=9088, reset=True)" + "server = sy.orchestra.launch(name=\"pandas-test-datasite-8\", port=9088, reset=True)" ] }, { @@ -77,7 +77,7 @@ }, "outputs": [], "source": [ - "root_domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "root_datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -328,7 +328,7 @@ " )\n", " ],\n", ")\n", - "root_domain_client.upload_dataset(dataset)" + "root_datasite_client.upload_dataset(dataset)" ] }, { @@ -361,7 +361,7 @@ }, "outputs": [], "source": [ - "user = root_domain_client.register(\n", + "user = root_datasite_client.register(\n", " name=\"Jane Doe\",\n", " email=\"jane@caltech.edu\",\n", " password=\"abc123\",\n", @@ -370,8 +370,8 @@ " website=\"https://www.caltech.edu/\",\n", ")\n", "# todo: give user data scientist role\n", - "guest_domain_client = node.client\n", - "guest_client = guest_domain_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" + "guest_datasite_client = server.client\n", + "guest_client = guest_datasite_client.login(email=\"jane@caltech.edu\", password=\"abc123\")" ] }, { @@ -425,7 +425,7 @@ }, "outputs": [], "source": [ - "ds = guest_domain_client.datasets[0]" + "ds = guest_datasite_client.datasets[0]" ] }, { @@ -811,7 +811,7 @@ }, "outputs": [], "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -832,7 +832,7 @@ }, "outputs": [], "source": [ - 
"notifications = domain_client.notifications.get_all_unread()" + "notifications = datasite_client.notifications.get_all_unread()" ] }, { @@ -907,7 +907,7 @@ }, "outputs": [], "source": [ - "real_data = domain_client.datasets[0].assets[0].data" + "real_data = datasite_client.datasets[0].assets[0].data" ] }, { @@ -1014,7 +1014,7 @@ }, "outputs": [], "source": [ - "node.land()" + "server.land()" ] } ], diff --git a/notebooks/tutorials/version-upgrades/0-prepare-migration-data.ipynb b/notebooks/tutorials/version-upgrades/0-prepare-migration-data.ipynb index 37be2b11305..ed8a496c407 100644 --- a/notebooks/tutorials/version-upgrades/0-prepare-migration-data.ipynb +++ b/notebooks/tutorials/version-upgrades/0-prepare-migration-data.ipynb @@ -55,7 +55,7 @@ "id": "4", "metadata": {}, "source": [ - "# Launch Node" + "# Launch Server" ] }, { @@ -65,7 +65,7 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", + "server = sy.orchestra.launch(\n", " name=\"test_upgradability\",\n", " dev_mode=True,\n", " reset=True,\n", @@ -82,7 +82,7 @@ "metadata": {}, "outputs": [], "source": [ - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -112,7 +112,7 @@ "metadata": {}, "outputs": [], "source": [ - "client_ds = node.login(email=\"ds@openmined.org\", password=\"pw\")" + "client_ds = server.login(email=\"ds@openmined.org\", password=\"pw\")" ] }, { @@ -149,7 +149,7 @@ "\n", "\n", "@sy.syft_function_single_use(data=data_low)\n", - "def compute_mean(domain, data) -> float:\n", + "def compute_mean(datasite, data) -> float:\n", " # launch another job\n", " print(\"Computing mean...\")\n", " return data" @@ -222,8 +222,8 @@ "metadata": {}, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/tutorials/version-upgrades/1-dump-database-to-file.ipynb b/notebooks/tutorials/version-upgrades/1-dump-database-to-file.ipynb index e5ecd841e8f..ad247d91a38 100644 --- a/notebooks/tutorials/version-upgrades/1-dump-database-to-file.ipynb +++ b/notebooks/tutorials/version-upgrades/1-dump-database-to-file.ipynb @@ -21,7 +21,7 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", + "server = sy.orchestra.launch(\n", " name=\"test_upgradability\",\n", " dev_mode=True,\n", " local_db=True,\n", @@ -30,7 +30,7 @@ " migrate=False,\n", ")\n", "\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -50,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Check if this node has data on it\n", + "# Check if this server has data on it\n", "assert len(client.users.get_all()) == 2" ] }, @@ -112,8 +112,8 @@ "metadata": {}, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/notebooks/tutorials/version-upgrades/2-migrate-from-file.ipynb b/notebooks/tutorials/version-upgrades/2-migrate-from-file.ipynb index 24329301c61..993894948d4 100644 --- a/notebooks/tutorials/version-upgrades/2-migrate-from-file.ipynb +++ b/notebooks/tutorials/version-upgrades/2-migrate-from-file.ipynb @@ -31,14 +31,14 @@ "metadata": {}, "outputs": [], "source": [ - "node = sy.orchestra.launch(\n", + "server = 
sy.orchestra.launch(\n", " name=\"test_upgradability\",\n", " dev_mode=True,\n", " reset=True,\n", " port=\"auto\",\n", ")\n", "\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + "client = server.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { @@ -48,7 +48,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Check if this is a new node\n", + "# Check if this is a new server\n", "migration_data = client.get_migration_data()\n", "\n", "assert len(migration_data.store_objects[User]) == 1\n", @@ -102,7 +102,7 @@ "metadata": {}, "outputs": [], "source": [ - "client_ds = node.login(email=\"ds@openmined.org\", password=\"pw\")" + "client_ds = server.login(email=\"ds@openmined.org\", password=\"pw\")" ] }, { @@ -258,8 +258,8 @@ "metadata": {}, "outputs": [], "source": [ - "if node.node_type.value == \"python\":\n", - " node.land()" + "if server.server_type.value == \"python\":\n", + " server.land()" ] }, { diff --git a/packages/grid/VERSION b/packages/grid/VERSION index df1dec5602a..8742f2ae357 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.13" +__version__ = "0.8.8-beta.1" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/backend.dockerfile b/packages/grid/backend/backend.dockerfile index 606569c49f4..256fdfd447b 100644 --- a/packages/grid/backend/backend.dockerfile +++ b/packages/grid/backend/backend.dockerfile @@ -60,7 +60,7 @@ WORKDIR /root/app/ # Copy pre-built syft dependencies COPY --from=syft_deps /root/app/.venv .venv -# copy grid +# copy server COPY grid/backend/grid ./grid/ # copy syft @@ -73,8 +73,8 @@ ENV \ VIRTUAL_ENV="/root/app/.venv" \ # Syft APPDIR="/root/app" \ - NODE_NAME="default_node_name" \ - NODE_TYPE="domain" \ + SERVER_NAME="default_server_name" \ + SERVER_TYPE="datasite" \ SERVICE_NAME="backend" \ RELEASE="production" \ DEV_MODE="False" \ diff --git a/packages/grid/backend/grid/api/new/new.py b/packages/grid/backend/grid/api/new/new.py index 2f0a6910bdf..3ff61bb041b 100644 --- a/packages/grid/backend/grid/api/new/new.py +++ b/packages/grid/backend/grid/api/new/new.py @@ -1,7 +1,7 @@ # syft absolute -from syft.node.routes import make_routes +from syft.server.routes import make_routes -# grid absolute -from grid.core.node import worker +# server absolute +from grid.core.server import worker router = make_routes(worker=worker) diff --git a/packages/grid/backend/grid/api/router.py b/packages/grid/backend/grid/api/router.py index 020491323a6..8412869db53 100644 --- a/packages/grid/backend/grid/api/router.py +++ b/packages/grid/backend/grid/api/router.py @@ -7,7 +7,7 @@ # third party from fastapi import APIRouter -# grid absolute +# server absolute from grid.api.new.new import router as new_router api_router = APIRouter() diff --git a/packages/grid/backend/grid/bootstrap.py b/packages/grid/backend/grid/bootstrap.py index 0e1878e2ee1..914411ec864 100644 --- a/packages/grid/backend/grid/bootstrap.py +++ b/packages/grid/backend/grid/bootstrap.py @@ -9,7 +9,7 @@ from nacl.encoding import HexEncoder from nacl.signing import SigningKey -# we want to bootstrap nodes with persistent uids and keys and allow a variety of ways +# we want to bootstrap servers with persistent uids and keys and allow a variety of ways # to resolve these at startup # first we check the environment variables @@ -27,8 +27,8 @@ def get_env(key: str, default: str = "") -> str | None: CREDENTIALS_PATH = 
str(get_env("CREDENTIALS_PATH", "credentials.json")) -NODE_PRIVATE_KEY = "NODE_PRIVATE_KEY" -NODE_UID = "NODE_UID" +SERVER_PRIVATE_KEY = "SERVER_PRIVATE_KEY" +SERVER_UID = "SERVER_UID" def get_credentials_file() -> dict[str, str]: @@ -64,7 +64,7 @@ def save_credential(key: str, value: str) -> str: return value -def generate_node_uid() -> str: +def generate_server_uid() -> str: return str(uuid.uuid4()) @@ -77,11 +77,11 @@ def generate_private_key() -> str: def get_private_key_env() -> str | None: - return get_env(NODE_PRIVATE_KEY) + return get_env(SERVER_PRIVATE_KEY) -def get_node_uid_env() -> str | None: - return get_env(NODE_UID) +def get_server_uid_env() -> str | None: + return get_env(SERVER_UID) def validate_private_key(private_key: str | bytes) -> str: @@ -95,17 +95,17 @@ def validate_private_key(private_key: str | bytes) -> str: return str_key except Exception: pass - raise Exception(f"{NODE_PRIVATE_KEY} is invalid") + raise Exception(f"{SERVER_PRIVATE_KEY} is invalid") -def validate_uid(node_uid: str) -> str: +def validate_uid(server_uid: str) -> str: try: - uid = uuid.UUID(node_uid) - if node_uid == uid.hex or node_uid == str(uid): + uid = uuid.UUID(server_uid) + if server_uid == uid.hex or server_uid == str(uid): return str(uid) except Exception: pass - raise Exception(f"{NODE_UID} is invalid") + raise Exception(f"{SERVER_UID} is invalid") def get_credential( @@ -140,11 +140,13 @@ def get_credential( def get_private_key() -> str: - return get_credential(NODE_PRIVATE_KEY, validate_private_key, generate_private_key) + return get_credential( + SERVER_PRIVATE_KEY, validate_private_key, generate_private_key + ) -def get_node_uid() -> str: - return get_credential(NODE_UID, validate_uid, generate_node_uid) +def get_server_uid() -> str: + return get_credential(SERVER_UID, validate_uid, generate_server_uid) def delete_credential_file() -> None: @@ -168,15 +170,15 @@ def delete_credential_file() -> None: if args.private_key: print(get_private_key()) elif args.uid: - print(get_node_uid()) + print(get_server_uid()) elif args.file: delete_credential_file() get_private_key() - get_node_uid() + get_server_uid() print(f"Generated credentials file at '{CREDENTIALS_PATH}'") elif args.debug: print("Credentials File", get_credentials_file()) - print(NODE_PRIVATE_KEY, "=", get_private_key_env()) - print(NODE_UID, "=", get_node_uid_env()) + print(SERVER_PRIVATE_KEY, "=", get_private_key_env()) + print(SERVER_UID, "=", get_server_uid_env()) else: parser.print_help() diff --git a/packages/grid/backend/grid/core/celery_config.py b/packages/grid/backend/grid/core/celery_config.py deleted file mode 100644 index 20fa3f55544..00000000000 --- a/packages/grid/backend/grid/core/celery_config.py +++ /dev/null @@ -1,13 +0,0 @@ -worker_send_task_event = False -task_ignore_result = True -task_time_limit = 1500 # Rasswanth: should modify after optimizing PC -task_acks_late = True -broker_pool_limit = 500 -worker_prefetch_multiplier = 1 -task_routes = { - "grid.worker.msg_without_reply": "main-queue", - "delivery_mode": "transient", -} -accept_content = ["application/syft"] -task_serializer = "syft" -result_serializer = "syft" diff --git a/packages/grid/backend/grid/core/celery_serde.py b/packages/grid/backend/grid/core/celery_serde.py deleted file mode 100644 index 005e4a4f514..00000000000 --- a/packages/grid/backend/grid/core/celery_serde.py +++ /dev/null @@ -1,46 +0,0 @@ -# stdlib -from typing import Any - -# third party -from kombu import serialization - -# syft absolute -import syft as sy -from syft.logger import 
error - - -def loads(data: bytes) -> Any: - # original payload might have nested bytes in the args - org_payload = sy.deserialize(data, from_bytes=True) - # original payload is found at org_payload[0][0] - if ( - len(org_payload) > 0 - and len(org_payload[0]) > 0 - and isinstance(org_payload[0][0], bytes) - ): - try: - nested_data = org_payload[0][0] - org_obj = sy.deserialize(nested_data, from_bytes=True) - org_payload[0][0] = org_obj - except Exception as e: - error(f"Unable to deserialize nested payload. {e}") - raise e - - return org_payload - - -def dumps(obj: Any) -> bytes: - # this is usually a Tuple of args where the first one is what we send to the task - # but it can also get other arbitrary data which we need to serde - # since we might get bytes directly from the web endpoint we can avoid double - # unserializing it by keeping it inside the nested args list org_payload[0][0] - return sy.serialize(obj, to_bytes=True) - - -serialization.register( - "syft", - dumps, - loads, - content_type="application/syft", - content_encoding="binary", -) diff --git a/packages/grid/backend/grid/core/config.py b/packages/grid/backend/grid/core/config.py index 619637c06c6..0d7d090743a 100644 --- a/packages/grid/backend/grid/core/config.py +++ b/packages/grid/backend/grid/core/config.py @@ -97,13 +97,13 @@ def get_emails_enabled(self) -> Self: DEFAULT_ROOT_PASSWORD: str = "changethis" USERS_OPEN_REGISTRATION: bool = False - NODE_NAME: str = "default_node_name" + SERVER_NAME: str = "default_server_name" STREAM_QUEUE: bool = False - NODE_TYPE: str = "domain" + SERVER_TYPE: str = "datasite" OPEN_REGISTRATION: bool = True - # DOMAIN_ASSOCIATION_REQUESTS_AUTOMATICALLY_ACCEPTED: bool = True + # DATASITE_ASSOCIATION_REQUESTS_AUTOMATICALLY_ACCEPTED: bool = True USE_BLOB_STORAGE: bool = ( True if os.getenv("USE_BLOB_STORAGE", "false").lower() == "true" else False ) @@ -124,7 +124,7 @@ def get_emails_enabled(self) -> Self: # STORE_DB_ID: int = int(os.getenv("STORE_DB_ID", 0)) # LEDGER_DB_ID: int = int(os.getenv("LEDGER_DB_ID", 1)) # NETWORK_CHECK_INTERVAL: int = int(os.getenv("NETWORK_CHECK_INTERVAL", 60)) - # DOMAIN_CHECK_INTERVAL: int = int(os.getenv("DOMAIN_CHECK_INTERVAL", 60)) + # DATASITE_CHECK_INTERVAL: int = int(os.getenv("DATASITE_CHECK_INTERVAL", 60)) CONTAINER_HOST: str = str(os.getenv("CONTAINER_HOST", "docker")) MONGO_HOST: str = str(os.getenv("MONGO_HOST", "")) MONGO_PORT: int = int(os.getenv("MONGO_PORT", 27017)) diff --git a/packages/grid/backend/grid/core/node.py b/packages/grid/backend/grid/core/server.py similarity index 75% rename from packages/grid/backend/grid/core/node.py rename to packages/grid/backend/grid/core/server.py index 926cbbc5556..9802eece8e8 100644 --- a/packages/grid/backend/grid/core/node.py +++ b/packages/grid/backend/grid/core/server.py @@ -1,15 +1,15 @@ # syft absolute -from syft.abstract_node import NodeType -from syft.node.domain import Domain -from syft.node.domain import Node -from syft.node.enclave import Enclave -from syft.node.gateway import Gateway -from syft.node.node import get_default_bucket_name -from syft.node.node import get_enable_warnings -from syft.node.node import get_node_name -from syft.node.node import get_node_side_type -from syft.node.node import get_node_type -from syft.node.node import get_node_uid_env +from syft.abstract_server import ServerType +from syft.server.datasite import Datasite +from syft.server.datasite import Server +from syft.server.enclave import Enclave +from syft.server.gateway import Gateway +from syft.server.server import 
get_default_bucket_name +from syft.server.server import get_enable_warnings +from syft.server.server import get_server_name +from syft.server.server import get_server_side_type +from syft.server.server import get_server_type +from syft.server.server import get_server_uid_env from syft.service.queue.zmq_queue import ZMQClientConfig from syft.service.queue.zmq_queue import ZMQQueueConfig from syft.store.blob_storage.seaweedfs import SeaweedFSClientConfig @@ -20,7 +20,7 @@ from syft.store.sqlite_document_store import SQLiteStoreConfig from syft.types.uid import UID -# grid absolute +# server absolute from grid.core.config import settings @@ -49,7 +49,7 @@ def mongo_store_config() -> MongoStoreConfig: def sql_store_config() -> SQLiteStoreConfig: client_config = SQLiteStoreClientConfig( - filename=f"{UID.from_string(get_node_uid_env())}.sqlite", + filename=f"{UID.from_string(get_server_uid_env())}.sqlite", path=settings.SQLITE_PATH, ) return SQLiteStoreConfig(client_config=client_config) @@ -72,28 +72,28 @@ def seaweedfs_config() -> SeaweedFSConfig: ) -node_type = NodeType(get_node_type()) -node_name = get_node_name() +server_type = ServerType(get_server_type()) +server_name = get_server_name() -node_side_type = get_node_side_type() +server_side_type = get_server_side_type() enable_warnings = get_enable_warnings() worker_classes = { - NodeType.DOMAIN: Domain, - NodeType.GATEWAY: Gateway, - NodeType.ENCLAVE: Enclave, + ServerType.DATASITE: Datasite, + ServerType.GATEWAY: Gateway, + ServerType.ENCLAVE: Enclave, } -worker_class = worker_classes[node_type] +worker_class = worker_classes[server_type] single_container_mode = settings.SINGLE_CONTAINER_MODE store_config = sql_store_config() if single_container_mode else mongo_store_config() blob_storage_config = None if single_container_mode else seaweedfs_config() queue_config = queue_config() -worker: Node = worker_class( - name=node_name, - node_side_type=node_side_type, +worker: Server = worker_class( + name=server_name, + server_side_type=server_side_type, action_store_config=store_config, document_store_config=store_config, enable_warnings=enable_warnings, diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index 94297fee0a9..ce0ed514280 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -2,11 +2,11 @@ # Build as-is to create a base worker image # Build with args to create a custom worker image -# NOTE: This dockerfile will be built inside a grid-backend container in PROD +# NOTE: This dockerfile will be built inside a syft-backend container in PROD # Hence COPY will not work the same way in DEV vs. 
PROD -ARG SYFT_VERSION_TAG="0.8.7-beta.13" -FROM openmined/grid-backend:${SYFT_VERSION_TAG} +ARG SYFT_VERSION_TAG="0.8.8-beta.1" +FROM openmined/syft-backend:${SYFT_VERSION_TAG} # should match base image python version ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/backend/grid/main.py b/packages/grid/backend/grid/main.py index 459448c5f01..497a2dd7a90 100644 --- a/packages/grid/backend/grid/main.py +++ b/packages/grid/backend/grid/main.py @@ -11,10 +11,10 @@ # syft absolute from syft.protocol.data_protocol import stage_protocol_changes -# grid absolute +# server absolute from grid.api.router import api_router from grid.core.config import settings -from grid.core.node import worker +from grid.core.server import worker class EndpointFilter(logging.Filter): diff --git a/packages/grid/backend/grid/start.sh b/packages/grid/backend/grid/start.sh index 4b3d5de4cf2..02194eb840b 100755 --- a/packages/grid/backend/grid/start.sh +++ b/packages/grid/backend/grid/start.sh @@ -7,7 +7,7 @@ APP_MODULE=grid.main:app LOG_LEVEL=${LOG_LEVEL:-info} HOST=${HOST:-0.0.0.0} PORT=${PORT:-80} -NODE_TYPE=${NODE_TYPE:-domain} +SERVER_TYPE=${SERVER_TYPE:-datasite} APPDIR=${APPDIR:-$HOME/app} RELOAD="" DEBUG_CMD="" @@ -26,11 +26,11 @@ then fi export CREDENTIALS_PATH=${CREDENTIALS_PATH:-$HOME/data/creds/credentials.json} -export NODE_PRIVATE_KEY=$(python $APPDIR/grid/bootstrap.py --private_key) -export NODE_UID=$(python $APPDIR/grid/bootstrap.py --uid) -export NODE_TYPE=$NODE_TYPE +export SERVER_PRIVATE_KEY=$(python $APPDIR/grid/bootstrap.py --private_key) +export SERVER_UID=$(python $APPDIR/grid/bootstrap.py --uid) +export SERVER_TYPE=$SERVER_TYPE -echo "NODE_UID=$NODE_UID" -echo "NODE_TYPE=$NODE_TYPE" +echo "SERVER_UID=$SERVER_UID" +echo "SERVER_TYPE=$SERVER_TYPE" exec $DEBUG_CMD uvicorn $RELOAD --host $HOST --port $PORT --log-config=$APPDIR/grid/logging.yaml --log-level $LOG_LEVEL "$APP_MODULE" diff --git a/packages/grid/default.env b/packages/grid/default.env index 0aae09f1026..e3edfdc38df 100644 --- a/packages/grid/default.env +++ b/packages/grid/default.env @@ -1,8 +1,8 @@ #!/bin/bash -DOMAIN=localhost -NODE_NAME=default_node_name -NODE_TYPE=domain -FRONTEND_TARGET=grid-ui-development +DATASITE=localhost +SERVER_NAME=default_server_name +SERVER_TYPE=datasite +FRONTEND_TARGET=syft-ui-development PORT=80 HTTP_PORT=80 HTTPS_PORT=443 @@ -15,26 +15,26 @@ IGNORE_TLS_ERRORS=False TRAEFIK_TLS_CONF=./traefik/dynamic-configurations TRAEFIK_TLS_CERTS=./traefik/certs TRAEFIK_PUBLIC_NETWORK=traefik-public -TRAEFIK_TAG=grid.openmined.org +TRAEFIK_TAG=syft.openmined.org TRAEFIK_PUBLIC_TAG=traefik-public -STACK_NAME=grid-openmined-org -DOCKER_IMAGE_BACKEND=openmined/grid-backend -DOCKER_IMAGE_FRONTEND=openmined/grid-frontend -DOCKER_IMAGE_RATHOLE=openmined/grid-rathole +STACK_NAME=syft-openmined-org +DOCKER_IMAGE_BACKEND=openmined/syft-backend +DOCKER_IMAGE_FRONTEND=openmined/syft-frontend +DOCKER_IMAGE_RATHOLE=openmined/syft-rathole DOCKER_IMAGE_TRAEFIK=traefik TRAEFIK_VERSION=v2.11.0 REDIS_VERSION=6.2 RABBITMQ_VERSION=3 -DOCKER_IMAGE_SEAWEEDFS=openmined/grid-seaweedfs +DOCKER_IMAGE_SEAWEEDFS=openmined/syft-seaweedfs VERSION=latest VERSION_HASH=unknown STACK_API_KEY="" # Backend -BACKEND_CORS_ORIGINS='["http://localhost","http://localhost:4200","http://localhost:3000","http://localhost:8080","https://localhost","https://localhost:4200","https://localhost:3000","https://localhost:8080","http://dev.grid.openmined.org","https://stag.grid.openmined.org","https://grid.openmined.org"]' 
+BACKEND_CORS_ORIGINS='["http://localhost","http://localhost:4200","http://localhost:3000","http://localhost:8080","https://localhost","https://localhost:4200","https://localhost:3000","https://localhost:8080","http://dev.syft.openmined.org","https://stag.syft.openmined.org","https://syft.openmined.org"]' SEAWEED_MOUNT_PORT=4001 -PROJECT_NAME=grid +PROJECT_NAME=syft SECRET_KEY=changethis DEFAULT_ROOT_EMAIL=info@openmined.org DEFAULT_ROOT_PASSWORD=changethis @@ -44,9 +44,9 @@ SMTP_HOST= SMTP_USERNAME= SMTP_PASSWORD= EMAIL_SENDER= -SERVER_HOST="https://${DOMAIN}" +SERVER_HOST="https://${DATASITE}" NETWORK_CHECK_INTERVAL=60 -DOMAIN_CHECK_INTERVAL=60 +DATASITE_CHECK_INTERVAL=60 ASSOCIATION_TIMEOUT=10 USERS_OPEN_REGISTRATION=False DEV_MODE=False @@ -101,7 +101,7 @@ JAEGER_PORT=14268 # Syft SYFT_TUTORIAL_MODE=False ENABLE_WARNINGS=True -NODE_SIDE_TYPE=high +SERVER_SIDE_TYPE=high # Worker USE_BLOB_STORAGE=False @@ -110,7 +110,7 @@ USE_BLOB_STORAGE=False ENABLE_SIGNUP=False # Enclave Attestation -DOCKER_IMAGE_ENCLAVE_ATTESTATION=openmined/grid-enclave-attestation +DOCKER_IMAGE_ENCLAVE_ATTESTATION=openmined/syft-enclave-attestation # Rathole Config RATHOLE_PORT=2333 \ No newline at end of file diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 4644e2d72e1..07985f06baa 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -22,13 +22,13 @@ pipelines: create_deployments --all vars: - DOCKER_IMAGE_BACKEND: openmined/grid-backend - DOCKER_IMAGE_FRONTEND: openmined/grid-frontend - DOCKER_IMAGE_SEAWEEDFS: openmined/grid-seaweedfs - DOCKER_IMAGE_RATHOLE: openmined/grid-rathole - DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/grid-enclave-attestation + DOCKER_IMAGE_BACKEND: openmined/syft-backend + DOCKER_IMAGE_FRONTEND: openmined/syft-frontend + DOCKER_IMAGE_SEAWEEDFS: openmined/syft-seaweedfs + DOCKER_IMAGE_RATHOLE: openmined/syft-rathole + DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/syft-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.8.7-beta.13" + VERSION: "0.8.8-beta.1" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project @@ -48,7 +48,7 @@ images: buildKit: args: ["--platform", "linux/${PLATFORM}"] dockerfile: ./frontend/frontend.dockerfile - target: "grid-ui-production" + target: "syft-ui-production" context: ./frontend tags: - dev-${DEVSPACE_TIMESTAMP} @@ -73,7 +73,7 @@ deployments: global: registry: ${CONTAINER_REGISTRY} version: dev-${DEVSPACE_TIMESTAMP} - node: {} + server: {} # anything that does not need templating should go in helm/examples/dev/base.yaml # or profile specific values files valuesFiles: @@ -117,23 +117,23 @@ dev: localPort: 3480 profiles: - - name: domain-low - description: "Deploy a low-side domain" + - name: datasite-low + description: "Deploy a low-side datasite" patches: - op: add - path: deployments.syft.helm.values.node + path: deployments.syft.helm.values.server value: side: low - - name: migrated-domain - description: "Deploy a migrated domain" + - name: migrated-datasite + description: "Deploy a migrated datasite" patches: - op: add path: deployments.syft.helm.valuesFiles value: ./helm/examples/dev/migration.yaml - - name: domain-tunnel - description: "Deploy a domain with tunneling enabled" + - name: datasite-tunnel + description: "Deploy a datasite with tunneling enabled" patches: # enable rathole image - op: add @@ -150,10 +150,10 @@ profiles: # use rathole client-specific chart values - op: add path: 
deployments.syft.helm.valuesFiles - value: ./helm/examples/dev/domain.tunnel.yaml + value: ./helm/examples/dev/datasite.tunnel.yaml - name: gateway - description: "Deploy a Gateway Node with tunnel enabled" + description: "Deploy a Gateway Server with tunnel enabled" patches: # enable rathole image - op: add @@ -201,7 +201,7 @@ profiles: value: 2334:2333 - name: gcp - description: "Deploy a high-side domain on GCP" + description: "Deploy a high-side datasite on GCP" patches: - op: replace path: deployments.syft.helm.valuesFiles @@ -209,7 +209,7 @@ profiles: - ./helm/examples/gcp/gcp.high.yaml - name: gcp-low - description: "Deploy a low-side domain on GCP" + description: "Deploy a low-side datasite on GCP" patches: - op: replace path: deployments.syft.helm.valuesFiles @@ -217,7 +217,7 @@ profiles: - ./helm/examples/gcp/gcp.low.yaml - name: azure - description: "Deploy a high-side domain on AKS" + description: "Deploy a high-side datasite on AKS" patches: - op: replace path: deployments.syft.helm.valuesFiles @@ -225,7 +225,7 @@ profiles: - ./helm/examples/azure/azure.high.yaml - name: enclave - description: "Deploy an enclave node" + description: "Deploy an enclave server" patches: # enable image build for enclave-attestation - op: add diff --git a/packages/grid/enclave/attestation/enclave-development.md b/packages/grid/enclave/attestation/enclave-development.md index 5a9a2526e54..9e1563504b6 100644 --- a/packages/grid/enclave/attestation/enclave-development.md +++ b/packages/grid/enclave/attestation/enclave-development.md @@ -95,9 +95,9 @@ from nv_attestation_sdk import attestation NRAS_URL="https://nras.attestation.nvidia.com/v1/attest/gpu" client = attestation.Attestation() -client.set_name("thisNode1") +client.set_name("thisServer1") client.set_nonce("931d8dd0add203ac3d8b4fbde75e115278eefcdceac5b87671a748f32364dfcb") -print ("[RemoteGPUTest] node name :", client.get_name()) +print ("[RemoteGPUTest] server name :", client.get_name()) client.add_verifier(attestation.Devices.GPU, attestation.Environment.REMOTE, NRAS_URL, "") client.attest() diff --git a/packages/grid/enclave/attestation/server/gpu_attestation.py b/packages/grid/enclave/attestation/server/gpu_attestation.py index 299ad663aeb..38eccc8a6df 100644 --- a/packages/grid/enclave/attestation/server/gpu_attestation.py +++ b/packages/grid/enclave/attestation/server/gpu_attestation.py @@ -23,10 +23,10 @@ def extract_token(captured_value: str) -> str: def attest_gpu() -> tuple[str, str]: # Fetch report from Nvidia Attestation SDK - client = attestation.Attestation("Attestation Node") + client = attestation.Attestation("Attestation Server") # TODO: Add the ability to generate nonce later. - logger.info("[RemoteGPUTest] node name : {}", client.get_name()) + logger.info("[RemoteGPUTest] server name : {}", client.get_name()) client.add_verifier( attestation.Devices.GPU, attestation.Environment.REMOTE, NRAS_URL, "" diff --git a/packages/grid/frontend/README.md b/packages/grid/frontend/README.md index e912f1dd172..6684d9fcba0 100644 --- a/packages/grid/frontend/README.md +++ b/packages/grid/frontend/README.md @@ -1,12 +1,12 @@ -# PyGrid UI +# Syft UI -The PyGrid UI is the user interface that allows data owners to manage their -**deployed** PyGrid domains and networks. +The Syft UI is the user interface that allows data owners to manage their +**deployed** Syft datasites and gateways. 
## Installation ```bash -cd /packages/grid/frontend +cd /packages/grid/frontend pnpm install ``` diff --git a/packages/grid/frontend/frontend.dockerfile b/packages/grid/frontend/frontend.dockerfile index d003cd86ca2..7dd46d4a888 100644 --- a/packages/grid/frontend/frontend.dockerfile +++ b/packages/grid/frontend/frontend.dockerfile @@ -22,16 +22,16 @@ FROM base AS dependencies RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile -FROM dependencies as grid-ui-tests +FROM dependencies as syft-ui-tests COPY vite.config.ts ./ COPY ./tests ./tests COPY ./src/ ./src CMD pnpm test:unit -FROM dependencies as grid-ui-development +FROM dependencies as syft-ui-development -ENV NODE_ENV=development +ENV SERVER_ENV=development COPY . . CMD pnpm dev @@ -41,9 +41,9 @@ FROM dependencies AS builder COPY . . RUN pnpm build -FROM base AS grid-ui-production +FROM base AS syft-ui-production -ENV NODE_ENV=production +ENV SERVER_ENV=production COPY --from=dependencies /app/node_modules ./node_modules COPY --from=builder /app ./ diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 15fddc2a6d2..06a5c96b4a7 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { - "name": "pygrid-ui", - "version": "0.8.7-beta.13", + "name": "syft-ui", + "version": "0.8.8-beta.1", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/frontend/src/_routes/(app)/account/+page.svelte b/packages/grid/frontend/src/_routes/(app)/account/+page.svelte index 10780f2c120..f9771b7d562 100644 --- a/packages/grid/frontend/src/_routes/(app)/account/+page.svelte +++ b/packages/grid/frontend/src/_routes/(app)/account/+page.svelte @@ -6,7 +6,7 @@ import GreenCheck from '$lib/components/icons/GreenCheck.svelte'; import Info from '$lib/components/icons/Info.svelte'; - let showDeleteNodeModal = false; + let showDeleteServerModal = false; let showDeleteAccountModal = false; let showDeleteConfirmModal = false; @@ -23,7 +23,7 @@ >

- Your profile information is public-facing information that other users and node owners can + Your profile information is public-facing information that other users and server owners can see.

@@ -85,8 +85,8 @@

When you delete your user account all information relating to you will be deleted as well - as any permissions and requests. If you are the domain owner the domain node will be - deleted as well and will be closed to all users. To transfer ownership of a domain node + as any permissions and requests. If you are the datasite owner, the datasite server will be + deleted as well and will be closed to all users. To transfer ownership of a datasite server before deleting your account you can follow the instructions here

@@ -127,27 +127,27 @@ {/if} - {#if showDeleteNodeModal} + {#if showDeleteServerModal}
-

Are you sure you want to delete your node?

+

Are you sure you want to delete your server?

- Because you are the domain owner, the domain node along with all uploaded datasets, user + Because you are the datasite owner, the datasite server along with all uploaded datasets, user accounts, and requests will be deleted. All network memberships will also be removed. If you - would like to keep this domain node but no longer want to be an owner press “cancel” and - follow the instructions here to transfer ownership of your domain node. + would like to keep this datasite server but no longer want to be an owner, press “cancel” and + follow the instructions here to transfer ownership of your datasite server.

Cancel @@ -165,7 +165,7 @@

To help us improve future experiences could you share with us any frustrations or - suggestions you have with or for the PyGridUI Platform? + suggestions you have with or for the Syft UI Platform?

diff --git a/packages/grid/frontend/src/_routes/(app)/config/+page.svelte b/packages/grid/frontend/src/_routes/(app)/config/+page.svelte index 03469b6e007..80b79071e63 100644 --- a/packages/grid/frontend/src/_routes/(app)/config/+page.svelte +++ b/packages/grid/frontend/src/_routes/(app)/config/+page.svelte @@ -19,7 +19,7 @@ let current_user = data.current_user let tabs = [ - { label: "Domain", id: "tab1" }, + { label: "Datasite", id: "tab1" }, // { label: 'Connection', id: 'tab2' } // { label: 'Permissions', id: 'tab3' }, ] @@ -27,7 +27,7 @@ let currentTab = tabs[0].id $: profileInformation = [ - { label: "Domain name", id: "domain_name", value: metadata?.name }, + { label: "Datasite name", id: "datasite_name", value: metadata?.name }, { label: "Organization", id: "organization", @@ -44,7 +44,7 @@ const handleUpdate = async () => { let newMetadata = { - name: openModal === "domain_name" ? name : null, + name: openModal === "datasite_name" ? name : null, organization: openModal === "organization" ? organization : null, description: openModal === "description" ? description : null, } @@ -69,7 +69,7 @@ $: current_user = data.current_user $: metadata = data.metadata - $: domainInitials = getInitials(metadata?.name) + $: datasiteInitials = getInitials(metadata?.name) $: userInitials = getInitials(current_user?.name) @@ -82,7 +82,7 @@ Loading... {:else} - +
@@ -121,7 +121,7 @@

ID#:

- {metadata.node_id} + {metadata.server_id}

Deployed on:

@@ -141,11 +141,11 @@ Turned off

- When remote execution is turned on for the domain, it means that you are allowing + When remote execution is turned on for the datasite, it means that you are allowing PySyft to execute code submitted by users as is against the real private data instead of the mock data when a request is approved. If this is a third-party user please review the function and policy code carefully before approving this request. You can - turn off "Remote Execution" for your domain by going to your "Domain Settings" or + turn off "Remote Execution" for your datasite by going to your "Datasite Settings" or clicking the link below.

@@ -167,8 +167,8 @@
- {#if openModal === "domain_name"} - + {#if openModal === "datasite_name"} + {:else if openModal === "organization"} { diff --git a/packages/grid/frontend/src/_routes/(app)/users/[slug]/+page.svelte b/packages/grid/frontend/src/_routes/(app)/users/[slug]/+page.svelte index f893eece0e9..a261ab5ebae 100644 --- a/packages/grid/frontend/src/_routes/(app)/users/[slug]/+page.svelte +++ b/packages/grid/frontend/src/_routes/(app)/users/[slug]/+page.svelte @@ -3,7 +3,7 @@ import Badge from "$lib/components/Badge.svelte" import CaretLeft from "$lib/components/icons/CaretLeft.svelte" import { getInitials, getUserRole } from "$lib/utils" - import type { UserView } from "../../../../types/domain/users" + import type { UserView } from "../../../../types/datasite/users" import type { PageData } from "./$types" export let data: PageData diff --git a/packages/grid/frontend/src/_routes/(auth)/login/+page.server.ts b/packages/grid/frontend/src/_routes/(auth)/login/+page.server.ts index 55ae3091828..38c9793b054 100644 --- a/packages/grid/frontend/src/_routes/(auth)/login/+page.server.ts +++ b/packages/grid/frontend/src/_routes/(auth)/login/+page.server.ts @@ -8,7 +8,7 @@ export const actions: Actions = { default: async ({ cookies, request }) => { try { const data = await request.formData() - const { email, password, node_id } = get_form_data_values(data) + const { email, password, server_id } = get_form_data_values(data) if ( !email || @@ -16,14 +16,14 @@ export const actions: Actions = { typeof email !== "string" || typeof password !== "string" ) { - throw new Error(`invalid form data: email:${email} node:${node_id}`) + throw new Error(`invalid form data: email:${email} server:${server_id}`) } const { signing_key, uid } = await login({ email, password }) const cookie_user = { uid, - node_id, + server_id, } cookies.set( diff --git a/packages/grid/frontend/src/_routes/(auth)/login/+page.svelte b/packages/grid/frontend/src/_routes/(auth)/login/+page.svelte index 1b6ddaf5b69..26b2438d9f2 100644 --- a/packages/grid/frontend/src/_routes/(auth)/login/+page.svelte +++ b/packages/grid/frontend/src/_routes/(auth)/login/+page.svelte @@ -2,10 +2,10 @@ import { enhance } from "$app/forms" import Button from "$lib/components/Button.svelte" import Modal from "$lib/components/Modal.svelte" - import DomainMetadataPanel from "$lib/components/authentication/DomainMetadataPanel.svelte" + import DatasiteMetadataPanel from "$lib/components/authentication/DatasiteMetadataPanel.svelte" import Input from "$lib/components/Input.svelte" - import DomainOnlineIndicator from "$lib/components/DomainOnlineIndicator.svelte" - import type { DomainOnlineStatus } from "../../../types/domain/onlineIndicator" + import DatasiteOnlineIndicator from "$lib/components/DatasiteOnlineIndicator.svelte" + import type { DatasiteOnlineStatus } from "../../../types/datasite/onlineIndicator" import type { PageData, ActionData } from "./$types" export let data: PageData @@ -13,13 +13,13 @@ const { metadata } = data - let status: DomainOnlineStatus = "online" + let status: DatasiteOnlineStatus = "online"
- +
@@ -38,16 +38,16 @@

{/if}
- +

{#if status === "pending"} Checking connection {:else} - Domain {status} + Datasite {status} {/if}
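A note for reviewers on the login hunks above: the page now imports `DatasiteOnlineStatus` from `types/datasite/onlineIndicator`, but the rename of the `types/domain` directory itself is not visible in this diff, so these imports assume it happens elsewhere in the PR. A minimal sketch of what the renamed type presumably looks like, with the union members inferred from usage ("online" and "pending" appear in this template; "offline" is an assumption):

```ts
// types/datasite/onlineIndicator.ts (sketch), renamed from
// types/domain/onlineIndicator.ts. Members inferred from usage in
// login/+page.svelte; "offline" is an assumption, not shown in this diff.
export type DatasiteOnlineStatus = "online" | "offline" | "pending"
```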

- + import { enhance } from "$app/forms" import Button from "$lib/components/Button.svelte" - import DomainMetadataPanel from "$lib/components/authentication/DomainMetadataPanel.svelte" + import DatasiteMetadataPanel from "$lib/components/authentication/DatasiteMetadataPanel.svelte" import Input from "$lib/components/Input.svelte" import Modal from "$lib/components/Modal.svelte" import type { ActionData, PageData } from "./$types" @@ -15,7 +15,7 @@
- +
{ try { const { page_size, page_index } = get_url_page_params(url) - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const dataset = await jsSyftCall({ path: "dataset.get_all", payload: { page_size, page_index }, - node_id, + server_id, signing_key, }) diff --git a/packages/grid/frontend/src/_routes/_syft_api/metadata/+server.ts b/packages/grid/frontend/src/_routes/_syft_api/metadata/+server.ts index c774c41899b..366cf4dba1a 100644 --- a/packages/grid/frontend/src/_routes/_syft_api/metadata/+server.ts +++ b/packages/grid/frontend/src/_routes/_syft_api/metadata/+server.ts @@ -17,9 +17,9 @@ export const GET: RequestHandler = async () => { highest_version: metadata_raw?.highest_version, lowest_version: metadata_raw?.lowest_version, name: metadata_raw?.name, - node_id: metadata_raw?.id?.value, - node_side: metadata_raw?.node_side_type, - node_type: metadata_raw?.node_type?.value, + server_id: metadata_raw?.id?.value, + server_side: metadata_raw?.server_side_type, + server_type: metadata_raw?.server_type?.value, organization: metadata_raw?.organization, signup_enabled: metadata_raw?.signup_enabled, syft_version: metadata_raw?.syft_version, @@ -32,7 +32,7 @@ export const GET: RequestHandler = async () => { export const PATCH: RequestHandler = async ({ cookies, request }) => { try { - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const metadata = await request.json() @@ -41,10 +41,10 @@ export const PATCH: RequestHandler = async ({ cookies, request }) => { payload: { settings: { ...metadata, - fqn: "syft.service.settings.settings.NodeSettingsUpdate", + fqn: "syft.service.settings.settings.ServerSettingsUpdate", }, }, - node_id, + server_id, signing_key, }) diff --git a/packages/grid/frontend/src/_routes/_syft_api/users/+server.ts b/packages/grid/frontend/src/_routes/_syft_api/users/+server.ts index f9351d70286..f757af854cf 100644 --- a/packages/grid/frontend/src/_routes/_syft_api/users/+server.ts +++ b/packages/grid/frontend/src/_routes/_syft_api/users/+server.ts @@ -8,12 +8,12 @@ export const GET: RequestHandler = async ({ cookies, url }) => { const page_size = parseInt(url.searchParams.get("page_size") || "10") const page_index = parseInt(url.searchParams.get("page_index") || "0") - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const users = await jsSyftCall({ path: "user.get_all", payload: { page_size, page_index }, - node_id, + server_id, signing_key, }) @@ -26,7 +26,7 @@ export const GET: RequestHandler = async ({ cookies, url }) => { export const POST: RequestHandler = async ({ cookies, request }) => { try { - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const new_user = await request.json() @@ -35,7 +35,7 @@ export const POST: RequestHandler = async ({ cookies, request }) => { payload: { user_create: { ...new_user, fqn: "syft.service.user.user.UserCreate" }, }, - node_id, + server_id, signing_key, }) diff --git a/packages/grid/frontend/src/_routes/_syft_api/users/[uid]/+server.ts b/packages/grid/frontend/src/_routes/_syft_api/users/[uid]/+server.ts index 491d773808a..2523a2776db 100644 --- a/packages/grid/frontend/src/_routes/_syft_api/users/[uid]/+server.ts +++ b/packages/grid/frontend/src/_routes/_syft_api/users/[uid]/+server.ts @@ -7,12 +7,12 @@ export const GET: RequestHandler = async ({ cookies, params 
}) => { try { const requested_uid = params.uid - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const user = await jsSyftCall({ path: "user.view", payload: { uid: { value: requested_uid, fqn: "syft.types.uid.UID" } }, - node_id, + server_id, signing_key, }) @@ -34,7 +34,7 @@ export const GET: RequestHandler = async ({ cookies, params }) => { export const PUT: RequestHandler = async ({ cookies, params, request }) => { try { - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const requested_uid = params.uid const body = await request.json() @@ -54,7 +54,7 @@ export const PUT: RequestHandler = async ({ cookies, params, request }) => { fqn: "syft.service.user.user.UserUpdate", }, }, - node_id, + server_id, signing_key, }) diff --git a/packages/grid/frontend/src/_routes/_syft_api/users/search/+server.ts b/packages/grid/frontend/src/_routes/_syft_api/users/search/+server.ts index 19678b3c292..7ec0322ff51 100644 --- a/packages/grid/frontend/src/_routes/_syft_api/users/search/+server.ts +++ b/packages/grid/frontend/src/_routes/_syft_api/users/search/+server.ts @@ -9,7 +9,7 @@ export const GET: RequestHandler = async ({ cookies, url }) => { const page_index = parseInt(url.searchParams.get("page_index") || "0") const name = url.searchParams.get("name") - const { signing_key, node_id } = unload_cookies(cookies) + const { signing_key, server_id } = unload_cookies(cookies) const users = await jsSyftCall({ path: "user.search", @@ -21,7 +21,7 @@ export const GET: RequestHandler = async ({ cookies, url }) => { page_index, page_size, }, - node_id, + server_id, signing_key, }) diff --git a/packages/grid/frontend/src/app.html b/packages/grid/frontend/src/app.html index 5ee015f0cf1..e1934dec53b 100644 --- a/packages/grid/frontend/src/app.html +++ b/packages/grid/frontend/src/app.html @@ -5,7 +5,7 @@ %sveltekit.head% - PyGrid + Syft UI
%sveltekit.body%
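Taken together, the `_syft_api` route hunks above apply the same mechanical substitution per file; a condensed sketch of the post-rename handler shape, assembled from the users and datasets routes (the surrounding try/catch and the `json` response helper are assumed to match the elided parts of those files):

```ts
// +server.ts (sketch): every _syft_api route now reads server_id
// (formerly node_id) from the session cookie and threads it into
// jsSyftCall unchanged.
import { json } from "@sveltejs/kit"
import { unload_cookies } from "$lib/utils"
import { jsSyftCall } from "$lib/api/syft_api"
import type { RequestHandler } from "./$types"

export const GET: RequestHandler = async ({ cookies, url }) => {
  const page_size = parseInt(url.searchParams.get("page_size") || "10")
  const page_index = parseInt(url.searchParams.get("page_index") || "0")

  // Cookie payload now carries server_id instead of node_id.
  const { signing_key, server_id } = unload_cookies(cookies)

  const dataset = await jsSyftCall({
    path: "dataset.get_all",
    payload: { page_size, page_index },
    server_id,
    signing_key,
  })
  return json(dataset)
}
```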
diff --git a/packages/grid/frontend/src/lib/api/syft_api.ts b/packages/grid/frontend/src/lib/api/syft_api.ts index f7ddec743f1..2261bf99455 100644 --- a/packages/grid/frontend/src/lib/api/syft_api.ts +++ b/packages/grid/frontend/src/lib/api/syft_api.ts @@ -7,7 +7,7 @@ const SYFT_MSG_URL = `${API_BASE_URL}/api_call` const FQN = { VERIFY_KEY: "nacl.signing.VerifyKey", - SYFT_VERIFY_KEY: "syft.node.credentials.SyftVerifyKey", + SYFT_VERIFY_KEY: "syft.server.credentials.SyftVerifyKey", UID: "syft.types.uid.UID", SYFT_API_CALL: "syft.client.api.SyftAPICall", SIGNED_SYFT_API_CALL: "syft.client.api.SignedSyftAPICall", @@ -17,7 +17,7 @@ interface SyftAPICall { path: string payload: Record signing_key: string | Uint8Array - node_id: string + server_id: string } const getKeyPair = (signing_key: string | Uint8Array) => @@ -28,12 +28,12 @@ const getKeyPair = (signing_key: string | Uint8Array) => const getSignedMessage = ({ path, payload, - node_id, + server_id, signing_key, }: Omit & { signing_key: Uint8Array }) => { const syftAPIPayload = { path, - node_uid: { value: node_id, fqn: FQN.UID }, + server_uid: { value: server_id, fqn: FQN.UID }, args: [], kwargs: new Map(Object.entries(payload)), fqn: FQN.SYFT_API_CALL, @@ -87,13 +87,13 @@ export const jsSyftCall = async ({ path, payload, signing_key, - node_id, + server_id, }: SyftAPICall): Promise => { const key = getKeyPair(signing_key) const signedMessage = getSignedMessage({ path, payload, - node_id, + server_id, signing_key: key, }) return await send(signedMessage) diff --git a/packages/grid/frontend/src/lib/client/jsPyClassMap.js b/packages/grid/frontend/src/lib/client/jsPyClassMap.js index 91bce7c022d..2cc055b774f 100644 --- a/packages/grid/frontend/src/lib/client/jsPyClassMap.js +++ b/packages/grid/frontend/src/lib/client/jsPyClassMap.js @@ -3,7 +3,7 @@ import { UUID } from "$lib/client/objects/uid.ts" import { APICall } from "$lib/client/messages/syftMessage.ts" export const classMapping = { - "syft.node.credentials.SyftVerifyKey": SyftVerifyKey, + "syft.server.credentials.SyftVerifyKey": SyftVerifyKey, "nacl.signing.VerifyKey": VerifyKey, "syft.types.uid.UID": UUID, "syft.client.api.SyftAPICall": APICall, diff --git a/packages/grid/frontend/src/lib/client/jsclient/jsClient.svelte b/packages/grid/frontend/src/lib/client/jsclient/jsClient.svelte index c68c8364ead..3baa59e61db 100644 --- a/packages/grid/frontend/src/lib/client/jsclient/jsClient.svelte +++ b/packages/grid/frontend/src/lib/client/jsclient/jsClient.svelte @@ -25,7 +25,7 @@ } this.userId = window.localStorage.getItem('id'); - this.nodeId = window.localStorage.getItem('nodeId'); + this.serverId = window.localStorage.getItem('serverId'); return this; } @@ -150,7 +150,7 @@ // Create a new object called 'newMetadata' with updated fields and a new property called 'fqn' with a value. const updateMetadata = { ...newMetadata, - fqn: 'syft.service.metadata.node_metadata.NodeMetadataUpdate' + fqn: 'syft.service.metadata.server_metadata.ServerMetadataUpdate' }; // Create a new object called 'reqFields' with one property: 'metadata', which is set to 'updateMetadata'. @@ -249,7 +249,7 @@ * @throws {Error} An error is thrown if the message signature and public key don't match. */ async send(args, kwargs, path) { - const signedCall = new APICall(this.nodeId, path, args, kwargs).sign(this.key, this.serde); + const signedCall = new APICall(this.serverId, path, args, kwargs).sign(this.key, this.serde); try { // Make a POST request to the server with the signed call. 
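Two side effects of the `syft_api.ts` and `jsClient.svelte` changes are worth flagging. First, this is a wire-format change, not just a source rename: the signed payload field becomes `server_uid` and the verify-key FQN becomes `syft.server.credentials.SyftVerifyKey`, so a frontend built from this branch can only talk to a backend carrying the matching rename. Second, the localStorage key flips from `nodeId` to `serverId`, which silently orphans sessions created before the upgrade; if that matters, a one-time shim along these lines (purely a sketch, not part of this diff) would carry them over:

```ts
// Hypothetical migration shim, not in this PR: copy the pre-rename
// localStorage key so existing sessions keep their server id.
const legacyId = window.localStorage.getItem("nodeId")
if (legacyId && !window.localStorage.getItem("serverId")) {
  window.localStorage.setItem("serverId", legacyId)
  window.localStorage.removeItem("nodeId")
}
```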
diff --git a/packages/grid/frontend/src/lib/client/messages/syftMessage.ts b/packages/grid/frontend/src/lib/client/messages/syftMessage.ts index 1dbb2a82285..24d5dabd56c 100644 --- a/packages/grid/frontend/src/lib/client/messages/syftMessage.ts +++ b/packages/grid/frontend/src/lib/client/messages/syftMessage.ts @@ -30,14 +30,14 @@ export class SignedAPICall { } export class APICall { - node_uid: UUID + server_uid: UUID path: string args: object kwargs: object blocking: boolean constructor(id, path, args, kwargs, blocking = true) { - this.node_uid = new UUID(id) + this.server_uid = new UUID(id) this.path = path this.args = args if (kwargs) { diff --git a/packages/grid/frontend/src/lib/client/objects/key.ts b/packages/grid/frontend/src/lib/client/objects/key.ts index a340db2a9c0..964d0debb5b 100644 --- a/packages/grid/frontend/src/lib/client/objects/key.ts +++ b/packages/grid/frontend/src/lib/client/objects/key.ts @@ -14,6 +14,6 @@ export class SyftVerifyKey { constructor(verify_key: Uint8Array) { this.verify_key = new VerifyKey(verify_key) - this.fqn = "syft.node.credentials.SyftVerifyKey" + this.fqn = "syft.server.credentials.SyftVerifyKey" } } diff --git a/packages/grid/frontend/src/lib/client/objects/userCode.ts b/packages/grid/frontend/src/lib/client/objects/userCode.ts index 733487eb1ed..49e4e96cc6e 100644 --- a/packages/grid/frontend/src/lib/client/objects/userCode.ts +++ b/packages/grid/frontend/src/lib/client/objects/userCode.ts @@ -39,7 +39,7 @@ interface InputObject { class InputPolicy { id: UUID inputs: Map - node_id?: UUID + server_id?: UUID /** * Creates a new instance of InputPolicy. diff --git a/packages/grid/frontend/src/lib/components/AccountSettings.svelte b/packages/grid/frontend/src/lib/components/AccountSettings.svelte index 0b08334b172..9aa88814ef4 100644 --- a/packages/grid/frontend/src/lib/components/AccountSettings.svelte +++ b/packages/grid/frontend/src/lib/components/AccountSettings.svelte @@ -296,8 +296,8 @@

When you delete your user account all information relating to you will be deleted as well as - any permissions and requests. If you are the domain owner the domain node will be deleted as - well and will be closed to all users. To transfer ownership of a domain node before deleting + any permissions and requests. If you are the datasite owner, the datasite server will be deleted as + well and will be closed to all users. To transfer ownership of a datasite server before deleting your account you can follow the instructions here.

diff --git a/packages/grid/frontend/src/lib/components/Datasets/DatasetListItem.svelte b/packages/grid/frontend/src/lib/components/Datasets/DatasetListItem.svelte index 606ebebce17..b4b1da788e0 100644 --- a/packages/grid/frontend/src/lib/components/Datasets/DatasetListItem.svelte +++ b/packages/grid/frontend/src/lib/components/Datasets/DatasetListItem.svelte @@ -3,7 +3,7 @@ import TableIcon from '$lib/components/icons/TableIcon.svelte'; import Tooltip from '$lib/components/Tooltip.svelte'; import TooltipText from '../TooltipText.svelte'; - import type { Dataset } from '../../../types/domain/dataset'; + import type { Dataset } from '../../../types/datasite/dataset'; export let dataset: Dataset; diff --git a/packages/grid/frontend/src/lib/components/Datasets/DatasetNoneFound.svelte b/packages/grid/frontend/src/lib/components/Datasets/DatasetNoneFound.svelte index fe02b87ef03..21977baa9ea 100644 --- a/packages/grid/frontend/src/lib/components/Datasets/DatasetNoneFound.svelte +++ b/packages/grid/frontend/src/lib/components/Datasets/DatasetNoneFound.svelte @@ -1,7 +1,7 @@

No Datasets Uploaded

- To begin adding datasets to this domain node please click the "+" button and follow + To begin adding datasets to this datasite server, please click the "+" button and follow instructions.

diff --git a/packages/grid/frontend/src/lib/components/DomainOnlineIndicator.svelte b/packages/grid/frontend/src/lib/components/DatasiteOnlineIndicator.svelte similarity index 68% rename from packages/grid/frontend/src/lib/components/DomainOnlineIndicator.svelte rename to packages/grid/frontend/src/lib/components/DatasiteOnlineIndicator.svelte index 1232283ac6c..b534a481813 100644 --- a/packages/grid/frontend/src/lib/components/DomainOnlineIndicator.svelte +++ b/packages/grid/frontend/src/lib/components/DatasiteOnlineIndicator.svelte @@ -1,6 +1,6 @@
diff --git a/packages/grid/frontend/src/lib/components/Navigation/SideNavDOHandbook.svelte b/packages/grid/frontend/src/lib/components/Navigation/SideNavDOHandbook.svelte index f72f9dddd45..081139b9cd8 100644 --- a/packages/grid/frontend/src/lib/components/Navigation/SideNavDOHandbook.svelte +++ b/packages/grid/frontend/src/lib/components/Navigation/SideNavDOHandbook.svelte @@ -27,7 +27,7 @@

Data Owner handbook

Check out the data owner handbook to learn more tips & tricks about how to manage your - domain node. + datasite server.

diff --git a/packages/grid/frontend/src/lib/components/Navigation/TopNav.svelte b/packages/grid/frontend/src/lib/components/Navigation/TopNav.svelte index 9e127766cf3..1a3dc762b4e 100644 --- a/packages/grid/frontend/src/lib/components/Navigation/TopNav.svelte +++ b/packages/grid/frontend/src/lib/components/Navigation/TopNav.svelte @@ -13,7 +13,7 @@ { href: "", icon: BellIcon, disabled: true }, ] - $: domainInitials = getInitials(metadata.name) + $: datasiteInitials = getInitials(metadata.name) $: userInitials = getInitials(user.name) @@ -21,7 +21,7 @@ class="w-full py-2 px-6 flex items-center justify-between tablet:justify-end shadow-topbar-1 tablet:shadow-none" >
- +
    {#each links as link} diff --git a/packages/grid/frontend/src/lib/components/OnboardingModal.svelte b/packages/grid/frontend/src/lib/components/OnboardingModal.svelte index 8e381c18630..9f2fe27871f 100644 --- a/packages/grid/frontend/src/lib/components/OnboardingModal.svelte +++ b/packages/grid/frontend/src/lib/components/OnboardingModal.svelte @@ -7,7 +7,7 @@ import Progress from "$lib/components/Progress.svelte" import Input from "$lib/components/Input.svelte" import ButtonGhost from "$lib/components/ButtonGhost.svelte" - import NodeIcon from "$lib/components/icons/NodeIcon.svelte" + import ServerIcon from "$lib/components/icons/ServerIcon.svelte" import CheckIcon from "$lib/components/icons/CheckIcon.svelte" export let metadata @@ -23,15 +23,15 @@ website: "", } - let domainSettings = { + let datasiteSettings = { name: "", description: "", organization: "", on_board: false, } - let checkRequiredDomainFields = () => { - return domainSettings.name !== "" ? true : false + let checkRequiredDatasiteFields = () => { + return datasiteSettings.name !== "" ? true : false } let checkRequiredUserFields = () => { @@ -49,7 +49,7 @@ headers: { "Content-Type": "application/json", }, - body: JSON.stringify(domainSettings), + body: JSON.stringify(datasiteSettings), }) await fetch(`/_syft_api/users/${userSettings.id}`, { @@ -86,7 +86,7 @@ website: "", } - domainSettings = { + datasiteSettings = { name: "", description: "", organization: "", @@ -105,13 +105,13 @@ Welcome to PyGrid

- Welcome to PyGrid Admin! + Welcome to Syft UI!

Step 1 of 4

@@ -125,7 +125,7 @@

- Congratulations on logging into {metadata?.name ?? ""} node. This wizard + Congratulations on logging into {metadata?.name ?? ""} server. This wizard will help get you started in setting up your user account. You can skip this wizard by pressing “Cancel” below. You can edit any of your responses later by going to "Account Settings" indicated by your avatar in the top @@ -148,10 +148,10 @@

- +
-

Domain Profile

+

Datasite Profile

Step 2 of 4

@@ -182,28 +182,28 @@

- Let's begin by describing some basic information about this domain - node. This information will be shown to outside users to help them - find and understand what your domain offers. + Let's begin by describing some basic information about this datasite + server. This information will be shown to outside users to help them + find and understand what your datasite offers.

@@ -232,7 +232,7 @@

- Now that we have described our domain, let's update our password and + Now that we have described our datasite, let's update our password and describe some basic information about ourselves for our "User Profile". User profile information will be shown to teammates and collaborators when working on studies together. diff --git a/packages/grid/frontend/src/lib/components/Users/UserCreateModal.svelte b/packages/grid/frontend/src/lib/components/Users/UserCreateModal.svelte index 77ebcc3fb70..05c4a8ee18c 100644 --- a/packages/grid/frontend/src/lib/components/Users/UserCreateModal.svelte +++ b/packages/grid/frontend/src/lib/components/Users/UserCreateModal.svelte @@ -16,19 +16,19 @@ roleId: 2, title: "Data Scientist", description: - "This role is for users who will be performing computations on your datasets. They may be users you know directly or those who found your domain through search and discovery. By default this user can see a list of your datasets and can request to get results.", + "This role is for users who will be performing computations on your datasets. They may be users you know directly or those who found your datasite through search and discovery. By default, this user can see a list of your datasets and can request to get results.", }, { roleId: 32, title: "Data Owner", description: - "This role is for users on your team who will be responsible for uploading data to the domain.", + "This role is for users on your team who will be responsible for uploading data to the datasite.", }, { roleId: 128, title: "Admin", description: - "This role is for users who will help you manage your node. This should be users you trust as they will be users who will have full permissions to the node.", + "This role is for users who will help you manage your server. This should be users you trust, as they will have full permissions to the server.", }, ] @@ -105,7 +105,7 @@

To begin let's select the role this user is going to have on your - domain node. + datasite server.

{#each cardsContent as { title, description, roleId }} @@ -221,8 +221,8 @@ Welcome to {metadata?.name} {name},
- You are formally invited you to join {metadata?.name} Domain. Below is - your login credentials and the URL to the domain. After logging in you + You are formally invited to join {metadata?.name} Datasite. Below are + your login credentials and the URL to the datasite. After logging in you will be prompted to customize your account.

{href} diff --git a/packages/grid/frontend/src/lib/components/Users/UserListItem.svelte b/packages/grid/frontend/src/lib/components/Users/UserListItem.svelte index b5ca6737d6f..ad34edaf1cb 100644 --- a/packages/grid/frontend/src/lib/components/Users/UserListItem.svelte +++ b/packages/grid/frontend/src/lib/components/Users/UserListItem.svelte @@ -1,6 +1,6 @@
{#if metadata}
- +
- +

{metadata.name}

{#if metadata.organization} @@ -29,15 +29,15 @@

Id#:

- {metadata.node_id} + {metadata.server_id}

Side:

-

{metadata.node_side}

+

{metadata.server_side}

Type:

-

{metadata.node_type}

+

{metadata.server_type}

{/if} diff --git a/packages/grid/frontend/src/lib/components/authentication/Nav.svelte b/packages/grid/frontend/src/lib/components/authentication/Nav.svelte index da591ae26f7..5b2ed997dfa 100644 --- a/packages/grid/frontend/src/lib/components/authentication/Nav.svelte +++ b/packages/grid/frontend/src/lib/components/authentication/Nav.svelte @@ -9,7 +9,7 @@
PyGrid logo {#if version} diff --git a/packages/grid/frontend/src/lib/components/icons/NodeIcon.svelte b/packages/grid/frontend/src/lib/components/icons/ServerIcon.svelte similarity index 100% rename from packages/grid/frontend/src/lib/components/icons/NodeIcon.svelte rename to packages/grid/frontend/src/lib/components/icons/ServerIcon.svelte diff --git a/packages/grid/frontend/src/lib/utils.ts b/packages/grid/frontend/src/lib/utils.ts index 2ffed30dec5..cd8dcc89b52 100644 --- a/packages/grid/frontend/src/lib/utils.ts +++ b/packages/grid/frontend/src/lib/utils.ts @@ -1,4 +1,4 @@ -import { ServiceRoles } from "../types/domain/users" +import { ServiceRoles } from "../types/datasite/users" import { COOKIES } from "./constants" import type { CookieSerializeOptions } from "cookie" import type { Cookies } from "@sveltejs/kit" @@ -33,7 +33,7 @@ export function getInitials(name: string) { export function logout() { window.localStorage.removeItem("id") - window.localStorage.removeItem("nodeId") + window.localStorage.removeItem("serverId") window.localStorage.removeItem("key") } @@ -51,7 +51,7 @@ export const default_cookie_config: CookieSerializeOptions = { interface CookieData { uid: string - node_id: string + server_id: string signing_key: string } diff --git a/packages/grid/frontend/src/routes/+page.svelte b/packages/grid/frontend/src/routes/+page.svelte index 3637adb7bd1..da607f84d1b 100644 --- a/packages/grid/frontend/src/routes/+page.svelte +++ b/packages/grid/frontend/src/routes/+page.svelte @@ -15,5 +15,5 @@
- PyGrid + Syft UI
diff --git a/packages/grid/frontend/src/routes/[...all]/+page.svelte b/packages/grid/frontend/src/routes/[...all]/+page.svelte index b1bb3b1cb01..0994b12a178 100644 --- a/packages/grid/frontend/src/routes/[...all]/+page.svelte +++ b/packages/grid/frontend/src/routes/[...all]/+page.svelte @@ -1,6 +1,6 @@