diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e1c50cd3b96..521e3f9b60c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,8 @@ repos: packages/syft/src/syft/proto.*| packages/syft/tests/syft/lib/python.*| packages/grid.*| - packages/syft/src/syft/federated/model_serialization/protos.py + packages/syft/src/syft/federated/model_serialization/protos.py| + packages/syft/src/syft/util/test_helpers/.*| )$ - repo: https://github.com/MarcoGorelli/absolufy-imports @@ -161,6 +162,7 @@ repos: "--non-interactive", "--config-file=tox.ini", ] + exclude: ^(packages/syft/src/syft/util/test_helpers) - repo: https://github.com/kynan/nbstripout rev: 0.7.1 diff --git a/justfile b/justfile new file mode 100644 index 00000000000..90feb202155 --- /dev/null +++ b/justfile @@ -0,0 +1,479 @@ +set dotenv-load + +# --------------------------------------------------------------------------------------------------------------------- + +cluster_default := "syft-dev" +cluster_high := "syft-high" +cluster_low := "syft-low" +cluster_gw := "syft-gw" +cluster_signoz := "signoz" +ns_default := "syft" +ns_high := "high" +ns_low := "low" +ns_gw := "gw" + +# --------------------------------------------------------------------------------------------------------------------- + +port_default := "8080" +port_high := port_default +port_low := "8081" +port_gw := "8082" +port_signoz_ui := "3301" +port_signoz_otel := "4317" +port_registry := "5800" + +registry_url := "k3d-registry.localhost:" + port_registry +signoz_otel_url := "http://host.k3d.internal:" + port_signoz_otel + +# --------------------------------------------------------------------------------------------------------------------- + +# devspace profiles (comma-separated) +profiles := "" + +# enable tracing by adding "tracing" profile in devspace +tracing := "true" + +_g_profiles := if tracing == "true" { profiles + ",tracing" } else { profiles } + +# --------------------------------------------------------------------------------------------------------------------- + +# this might break if you have alias python = python3 or either executable not pointing to the correct one +# just fix your system instead of making us fix this +python_path := `which python || which python3` + +# --------------------------------------------------------------------------------------------------------------------- + +@default: + just --list + +# --------------------------------------------------------------------------------------------------------------------- + +# Start a local registry on http://k3d-registry.localhost:{{port_registry}} +[group('registry')] +start-registry: + k3d --version + @-docker volume create k3d-registry-vol + @-k3d registry create registry.localhost --port {{ port_registry }} -v k3d-registry-vol:/var/lib/registry --no-help + + if ! 
grep -q k3d-registry.localhost /etc/hosts; then \ + sudo {{ python_path }} scripts/patch_hosts.py --add-k3d-registry --fix-docker-hosts; \ + fi + + @curl --silent --retry 5 --retry-all-errors http://k3d-registry.localhost:{{ port_registry }}/v2/_catalog | jq + @echo "\033[1;32mRegistry running at http://k3d-registry.localhost:{{ port_registry }}\033[0m" + +[group('registry')] +delete-registry: + -k3d registry delete registry.localhost + -docker volume rm k3d-registry-vol + +# --------------------------------------------------------------------------------------------------------------------- + +# Launch a Datasite high-side cluster on http://localhost:{{port_high}} +[group('highside')] +start-high: (delete-cluster cluster_high) (create-cluster cluster_high port_high) + +# Stop the Datasite high-side cluster +[group('highside')] +delete-high: (delete-cluster cluster_high) + +# Deploy Syft to the high-side cluster +[group('highside')] +deploy-high: (deploy-devspace cluster_high ns_default) + +# Reset Syft DB state in the high-side cluster +[group('highside')] +reset-high: (reset-syft cluster_high ns_default) + +# Remove devspace deployment + namespace from the high-side cluster +[group('highside')] +cleanup-high: (purge-devspace cluster_high ns_default) (delete-ns cluster_high ns_default) + +# --------------------------------------------------------------------------------------------------------------------- + +# Launch a Datasite low-side cluster on http://localhost:{{port_low}} +[group('lowside')] +start-low: (create-cluster cluster_low port_low) + +# Stop the Datasite low-side cluster +[group('lowside')] +delete-low: (delete-cluster cluster_low) + +# Deploy Syft to the low-side cluster +[group('lowside')] +deploy-low: (deploy-devspace cluster_low ns_default "-p datasite-low") + +# Reset Syft DB state in the low-side cluster +[group('lowside')] +reset-low: (reset-syft cluster_low ns_default) + +# Remove devspace deployment + namespace from the low-side cluster +[group('lowside')] +cleanup-low: (purge-devspace cluster_low ns_default) (delete-ns cluster_low ns_default) + +# --------------------------------------------------------------------------------------------------------------------- + +# Launch a Gateway cluster on http://localhost:{{port_gw}} +[group('gateway')] +start-gw: (create-cluster cluster_gw port_gw) + +# Delete the Gateway cluster +[group('gateway')] +delete-gw: (delete-cluster cluster_gw) + +# Deploy Syft to the gateway cluster +[group('gateway')] +deploy-gw: (deploy-devspace cluster_gw ns_default "-p gateway") + +# Reset Syft DB state in the gateway cluster +[group('gateway')] +reset-gw: (reset-syft cluster_gw ns_default) + +# Remove devspace deployment + namespace from the gateway cluster +[group('gateway')] +cleanup-gw: (purge-devspace cluster_gw ns_default) (delete-ns cluster_gw ns_default) + +# --------------------------------------------------------------------------------------------------------------------- + +# TODO - multi-namespace -> unique k3d ports +# # Launch a multi-agent cluster on http://localhost:{{port_default}} +# [group('shared')] +# start-shared: (create-cluster cluster_default port_default "--agents 2") + +# # Stop the multi-agent cluster +# [group('shared')] +# delete-shared: (delete-cluster cluster_default) + +# [group('shared')] +# deploy-ns-high: (deploy-devspace cluster_default ns_high) + +# [group('shared')] +# delete-ns-high: (delete-ns cluster_default ns_high) + +# [group('shared')] +# deploy-ns-low: (deploy-devspace cluster_default ns_low 
"-p datasite-low") + +# [group('shared')] +# delete-ns-low: (delete-ns cluster_default ns_low) + +# [group('shared')] +# deploy-ns-gw: (deploy-devspace cluster_default ns_gw "-p gateway") + +# [group('shared')] +# delete-ns-gw: (delete-ns cluster_default ns_gw) + +# --------------------------------------------------------------------------------------------------------------------- + +# Launch SigNoz on http://localhost:{{port_signoz_ui}} +[group('signoz')] +start-signoz: && apply-signoz setup-signoz + k3d cluster create {{ cluster_signoz }} \ + --port {{ port_signoz_ui }}:3301@loadbalancer \ + --port {{ port_signoz_otel }}:4317@loadbalancer \ + --k3s-arg "--disable=metrics-server@server:*" + + # Since k3d adds k3d- prefix to the cluster name + # we create a new context without the prefix + kubectl config set-context {{ cluster_signoz }} --cluster=k3d-{{ cluster_signoz }} \ + --user=admin@k3d-{{ cluster_signoz }} + + @printf "Started SigNoz\n\ + Dashboard: \033[1;36mhttp://localhost:{{ port_signoz_ui }}\033[0m\n\ + OTEL Endpoint: \033[1;36mhttp://localhost:{{ port_signoz_otel }}\033[0m\n" + +# Remove SigNoz from the cluster +[group('signoz')] +delete-signoz-agent: + helm uninstall k8s-infra + +# Remove SigNoz from the cluster +[group('signoz')] +delete-signoz: (delete-cluster cluster_signoz) + +[group('signoz')] +[private] +apply-signoz-agent cluster: + @echo "Installing SigNoz OTel Agent" + helm install k8s-infra k8s-infra \ + --repo https://charts.signoz.io \ + --kube-context {{ cluster }} \ + --set global.deploymentEnvironment=local \ + --set clusterName={{ cluster }} \ + --set otelCollectorEndpoint={{ signoz_otel_url }} \ + --set otelInsecure=true \ + --set presets.otlpExporter.enabled=true \ + --set presets.loggingExporter.enabled=true + +[group('signoz')] +[private] +apply-signoz: + @echo "Installing SigNoz on the cluster" + helm install signoz signoz \ + --repo https://charts.signoz.io \ + --kube-context {{ cluster_signoz }} \ + --namespace platform \ + --create-namespace \ + --version 0.52.0 \ + --set frontend.service.type=LoadBalancer \ + --set otelCollector.service.type=LoadBalancer \ + --set otelCollectorMetrics.service.type=LoadBalancer + +[group('signoz')] +[private] +setup-signoz: + @echo "Waiting for SigNoz frontend to be available..." 
+ @bash ./packages/grid/scripts/wait_for.sh service signoz-frontend \ + --namespace platform --context {{ cluster_signoz }} &> /dev/null + + @echo "Setting up SigNoz account" + @curl --retry 5 --retry-all-errors -X POST \ + -H "Content-Type: application/json" \ + --data '{"email":"admin@localhost","name":"admin","orgName":"openmined","password":"password"}' \ + http://localhost:3301/api/v1/register + + @printf '\nSigNoz is running on http://localhost:3301\n\ + Email: \033[1;36madmin@localhost\033[0m\n\ + Password: \033[1;36mpassword\033[0m\n' + +# --------------------------------------------------------------------------------------------------------------------- + +# List all clusters +[group('cluster')] +list-clusters: + k3d cluster list + +# Stop all clusters +[group('cluster')] +delete-clusters: + k3d cluster delete --all + +[group('cluster')] +[private] +create-cluster cluster port *args='': start-registry && (apply-coredns cluster) (apply-signoz-agent cluster) + #!/bin/bash + set -euo pipefail + + k3d cluster create {{ cluster }} \ + --port {{ port }}:80@loadbalancer \ + --registry-use k3d-registry.localhost:5800 {{ args }} + + # Since k3d adds k3d- prefix to the cluster name + # we create a new context without the prefix + kubectl config set-context {{ cluster }} --cluster=k3d-{{ cluster }} --user=admin@k3d-{{ cluster }} + +[group('cluster')] +[private] +delete-cluster *args='': + #!/bin/bash + set -euo pipefail + + k3d cluster delete {{ args }} + +[group('cluster')] +[private] +delete-ns context namespace: + kubectl delete ns {{ namespace }} --force --grace-period=0 --context {{ context }} + +[group('cluster')] +[private] +apply-coredns cluster: + @echo "Applying custom CoreDNS config" + + kubectl apply -f ./scripts/k8s-coredns-custom.yml --context {{ cluster }} + kubectl delete pod -n kube-system -l k8s-app=kube-dns --context {{ cluster }} + +# --------------------------------------------------------------------------------------------------------------------- + +[group('devspace')] +[private] +deploy-devspace cluster namespace *args='': + #!/bin/bash + set -euo pipefail + + cd packages/grid + + PROFILE="{{ _g_profiles }}" + PROFILE=$(echo "$PROFILE" | sed -E 's/^,*|,*$//g') + if [ -n "$PROFILE" ]; then + PROFILE="-p $PROFILE" + fi + + echo "Deploying to {{ cluster }}" + + devspace deploy -b \ + --no-warn \ + --kube-context {{ cluster }} \ + --namespace {{ namespace }} \ + $PROFILE \ + {{ args }} \ + --var CONTAINER_REGISTRY={{ registry_url }} + +[group('devspace')] +[private] +purge-devspace cluster namespace: + #!/bin/bash + set -euo pipefail + + cd packages/grid + devspace purge --force-purge --kube-context {{ cluster }} --no-warn --namespace {{ namespace }} + sleep 3 + +# --------------------------------------------------------------------------------------------------------------------- + +[group('cloud')] +[private] +check-platform: + #!/bin/bash + set -euo pipefail + + OSTYPE=$(uname -sm) + MSG="==================================================================================================\n\ + Deploying dev->cloud k8s (x64 nodes) requires images to be built with --platform=linux/amd64\n\ + On Apple Silicon, cross-platform images are unstable across providers\n\n\ + Current status:\n\ + ✅ | Docker Desktop | 4.34.0+ | *Enable* containerd and *uncheck* 'Use Rosetta for x86_64/amd64...'\n\ + ❌ | OrbStack | 1.7.2 | Rosetta: gets stuck & qemu: errors with 'illegal instruction'\n\ + ❌ | Lima VM/Colima | 0.23.2 | Rosetta: gets stuck & qemu: errors with 'illegal instruction'\n\ 
+ ==================================================================================================" + + if [[ "$OSTYPE" == "Darwin arm64" ]]; then + echo -e $MSG + fi + +[group('cloud')] +[private] +deploy-cloud cluster_ctx registry_url namespace profile: check-platform + #!/bin/bash + + CONTEXT_NAME=$(kubectl config get-contexts -o=name | grep "{{ cluster_ctx }}") + + if [ -z "$CONTEXT_NAME" ]; then + echo "Context not found: {{ cluster_ctx }}. Authorize with your cloud provider to get the relevant K8s cluster context" + exit 1 + fi + + set -euo pipefail + + # cloud deployments always have tracing false + platform=amd64 + just tracing=false registry_url={{ registry_url }} \ + deploy-devspace $CONTEXT_NAME {{ namespace }} "-p {{ profile }} --var PLATFORM=amd64" + +[group('cloud')] +[private] +purge-cloud cluster_ctx namespace: + #!/bin/bash + + CONTEXT_NAME=$(kubectl config get-contexts -o=name | grep "{{ cluster_ctx }}") + + if [ -z "$CONTEXT_NAME" ]; then + echo "Context not found: {{ cluster_ctx }}. Authorize with your cloud provider to get the relevant K8s cluster context" + exit 1 + fi + + set -euo pipefail + + just purge-devspace $CONTEXT_NAME {{ namespace }} + kubectl delete ns {{ namespace }} --force --grace-period=0 --context $CONTEXT_NAME + +# --------------------------------------------------------------------------------------------------------------------- + +# Auth all components required for deploying Syft to Google Cloud +[group('cloud-gcp')] +auth-gcloud: + #!/bin/bash + set -euo pipefail + + # login to gcloud + ACCOUNT=$(gcloud config get-value account) + if [ -z "$ACCOUNT" ]; then + gcloud auth login + fi + + echo "Logged in as \"$(gcloud config get-value account)\"" + + # install gke-gcloud-auth-plugin + gke_installed=$(gcloud components list --only-local-state --filter gke-gcloud-auth-plugin --format=list 2>/dev/null) + if [ -z "$gke_installed" ]; then + gcloud components install gke-gcloud-auth-plugin + echo "Installed gke-gcloud-auth-plugin" + fi + +# Deploy local code as datasite-high to Google Kubernetes Engine +[group('cloud-gcp')] +deploy-gcp-high gcp_cluster gcp_registry_url namespace="syft": (deploy-cloud gcp_cluster gcp_registry_url namespace "gcp") + +# Deploy local code as datasite-low to Google Kubernetes Engine +[group('cloud-gcp')] +deploy-gcp-low gcp_cluster gcp_registry_url namespace="syft": (deploy-cloud gcp_cluster gcp_registry_url namespace "gcp-low") + +# Purge deployment from a cluster +[group('cloud-gcp')] +purge-gcp gcp_cluster namespace="syft": (purge-cloud gcp_cluster namespace) + +# --------------------------------------------------------------------------------------------------------------------- + +[group('cloud-az')] +auth-az tenant="creditsopenmined.onmicrosoft.com": + #!/bin/bash + + # login to azure + ACCOUNT=$(az account show --query user.name) + if [ -z "$ACCOUNT" ]; then + az login --tenant {{ tenant }} + fi + + echo "Logged in as $(az account show --query user.name)" + +# Deploy local code as datasite-high to Azure Kubernetes Service +[group('cloud-az')] +deploy-az-high aks_cluster az_registry namespace="syft": (deploy-cloud aks_cluster az_registry namespace "azure") + +# --------------------------------------------------------------------------------------------------------------------- + +# Reset Syft state in a cluster +# TODO: make reset_k8s.sh take in context and namespace as args +[group('utils')] +[private] +reset-syft name namespace: + kubectl config use-context {{ name }} + scripts/reset_k8s.sh + +# K9s into the Datasite High 
cluster +[group('utils')] +k9s-high: + k9s --context {{ cluster_high }} + +# K9s into the Datasite Low cluster +[group('utils')] +k9s-low: + k9s --context {{ cluster_low }} + +# K9s into the Gateway cluster +[group('utils')] +k9s-gw: + k9s --context {{ cluster_gw }} + +# K9s into the SigNoz cluster +[group('utils')] +k9s-signoz: + k9s --context {{ cluster_signoz }} + +# Stop all Syft clusters + registry +[group('utils')] +delete-all: delete-clusters delete-registry + @echo "Stopped all Syft components" + +[confirm('Confirm prune all docker resources?')] +[group('utils')] +prune-docker: + -docker container prune -f + -docker volume prune -af + -docker image prune -af + -docker builder prune -af + -docker buildx prune -af + -docker system prune -af --volumes + +[group('utils')] +yank-ns namespace: + -kubectl delete ns {{ namespace }} --now --timeout=5s + kubectl get ns {{ namespace }} -o json | jq '.spec.finalizers = []' | kubectl replace --raw /api/v1/namespaces/{{ namespace }}/finalize -f - diff --git a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb index 8035f5e61e1..86cbdc836c6 100644 --- a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb +++ b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb @@ -20,24 +20,12 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "from os import environ as env\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", - "\n", - "# third party\n", - "from email_helpers import get_email_server\n", - "# isort: on" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Launch & login" + "from syft.util.test_helpers.email_helpers import get_email_server" ] }, { @@ -249,7 +237,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb index be9579059eb..a8299b5cdcd 100644 --- a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb +++ b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb @@ -20,18 +20,13 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "import os\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", - "\n", - "# third party\n", - "from email_helpers import Timeout\n", - "from email_helpers import get_email_server\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import Timeout\n", + "from syft.util.test_helpers.email_helpers import get_email_server" ] }, { @@ -40,14 +35,6 @@ "id": "2", "metadata": {}, "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], "source": [ "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", "\n", @@ -60,7 +47,7 @@ }, { "cell_type": "markdown", - "id": "4", + "id": "3", "metadata": {}, "source": [ "### Launch server & login" ] }, { "cell_type": "code", "execution_count": null, - "id": "5", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -86,7 +73,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -96,7 +83,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "7", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -108,7 +95,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -118,7 +105,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +115,7 @@ }, { "cell_type": "markdown", - "id": "10", + "id": "9", "metadata": {}, "source": [ "### Scale Worker pool" @@ -136,7 +123,7 @@ }, { "cell_type": "markdown", - "id": "11", + "id": "10", "metadata": {}, "source": [ "##### Scale up" @@ -145,7 +132,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -159,7 +146,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -169,7 +156,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -189,7 +176,7 @@ }, { "cell_type": "markdown", - "id": "15", + "id": "14", "metadata": {}, "source": [ "##### Scale down" @@ -198,7 +185,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -213,7 +200,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -232,7 +219,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -245,7 +232,7 @@ }, { "cell_type": "markdown", - "id": "19", + "id": "18", "metadata": {}, "source": [ "#### Delete Worker Pool" @@ -254,7 +241,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -267,7 +254,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -277,7 +264,7 @@ }, { "cell_type": "markdown", - "id": "22", + "id": "21", "metadata": {}, "source": [ "#### Re-launch the default worker pool" @@ -286,7 +273,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -296,7 +283,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -310,7 +297,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -324,7 +311,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -334,7 +321,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -344,7 +331,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "27", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb index 22f6dfaa977..d72a82f9eb1 100644 --- a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb +++ b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb @@ -18,18 +18,13 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "import os\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", "from syft import test_settings\n", - "\n", - "# 
third party\n", - "from email_helpers import get_email_server\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import get_email_server" ] }, { @@ -526,6 +521,11 @@ } ], "metadata": { + "kernelspec": { + "display_name": "syft", + "language": "python", + "name": "python3" + }, "language_info": { "codemirror_mode": { "name": "ipython", @@ -536,7 +536,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb index b87fd2a7731..9a8bfdcdf9c 100644 --- a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb +++ b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb @@ -22,21 +22,16 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "import os\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", - "\n", - "# third party\n", - "from email_helpers import SENDER\n", - "from email_helpers import create_user\n", - "from email_helpers import get_email_server\n", - "from email_helpers import make_user\n", - "from email_helpers import save_users\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import SENDER\n", + "from syft.util.test_helpers.email_helpers import create_user\n", + "from syft.util.test_helpers.email_helpers import get_email_server\n", + "from syft.util.test_helpers.email_helpers import make_user\n", + "from syft.util.test_helpers.email_helpers import save_users" ] }, { diff --git a/notebooks/scenarios/bigquery/020-configure-api.ipynb b/notebooks/scenarios/bigquery/020-configure-api.ipynb index 83abef20ff7..1371dfaf5c7 100644 --- a/notebooks/scenarios/bigquery/020-configure-api.ipynb +++ b/notebooks/scenarios/bigquery/020-configure-api.ipynb @@ -28,22 +28,17 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", "from syft import test_settings\n", - "\n", - "# third party\n", - "from apis import make_schema\n", - "from apis import make_submit_query\n", - "from apis import make_test_query\n", + "from syft.util.test_helpers.apis import make_schema\n", + "from syft.util.test_helpers.apis import make_submit_query\n", + "from syft.util.test_helpers.apis import make_test_query\n", "\n", "# run email server\n", - "from email_helpers import get_email_server\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import get_email_server" ] }, { diff --git a/notebooks/scenarios/bigquery/021-create-jobs.ipynb b/notebooks/scenarios/bigquery/021-create-jobs.ipynb index 5a14895133a..392103a751c 100644 --- a/notebooks/scenarios/bigquery/021-create-jobs.ipynb +++ b/notebooks/scenarios/bigquery/021-create-jobs.ipynb @@ -33,19 +33,14 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "from collections import Counter\n", "import os\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", "from syft.service.job.job_stash import JobStatus\n", - "\n", - "# third party\n", - "from email_helpers import get_email_server\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import get_email_server" ] }, { @@ -125,8 +120,8 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from email_helpers import load_users" + "# syft 
absolute\n", + "from syft.util.test_helpers.email_helpers import load_users" ] }, { @@ -154,10 +149,10 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from job_helpers import TestJob\n", - "from job_helpers import create_jobs\n", - "from job_helpers import extract_code_path" + "# syft absolute\n", + "from syft.util.test_helpers.job_helpers import TestJob\n", + "from syft.util.test_helpers.job_helpers import create_jobs\n", + "from syft.util.test_helpers.job_helpers import extract_code_path" ] }, { @@ -199,8 +194,8 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from job_helpers import save_jobs" + "# syft absolute\n", + "from syft.util.test_helpers.job_helpers import save_jobs" ] }, { diff --git a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb index aa4a7b0c2a1..8acc4e55274 100644 --- a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb @@ -18,21 +18,16 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "import random\n", "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", "from syft.service.job.job_stash import Job\n", - "\n", - "# third party\n", - "from email_helpers import get_email_server\n", - "from job_helpers import approve_by_running\n", - "from job_helpers import get_job_emails\n", - "from job_helpers import get_request_for_job_info\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import get_email_server\n", + "from syft.util.test_helpers.job_helpers import approve_by_running\n", + "from syft.util.test_helpers.job_helpers import get_job_emails\n", + "from syft.util.test_helpers.job_helpers import get_request_for_job_info" ] }, { @@ -100,10 +95,10 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from email_helpers import load_users\n", - "from job_helpers import load_jobs\n", - "from job_helpers import save_jobs" + "# syft absolute\n", + "from syft.util.test_helpers.email_helpers import load_users\n", + "from syft.util.test_helpers.job_helpers import load_jobs\n", + "from syft.util.test_helpers.job_helpers import save_jobs" ] }, { diff --git a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb index 9a9bc1ef588..35791771b2f 100644 --- a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb @@ -18,17 +18,12 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", - "\n", - "# third party\n", - "from email_helpers import get_email_server\n", - "from email_helpers import load_users\n", - "from job_helpers import load_jobs\n", - "from job_helpers import save_jobs\n", - "# isort: on" + "from syft.util.test_helpers.email_helpers import get_email_server\n", + "from syft.util.test_helpers.email_helpers import load_users\n", + "from syft.util.test_helpers.job_helpers import load_jobs\n", + "from syft.util.test_helpers.job_helpers import save_jobs" ] }, { diff --git a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb b/notebooks/scenarios/bigquery/sync/000-setup-high-low-datasites.ipynb similarity index 94% rename from notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb rename to 
notebooks/scenarios/bigquery/sync/000-setup-high-low-datasites.ipynb index 633a73c38e4..82e69b5b829 100644 --- a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb +++ b/notebooks/scenarios/bigquery/sync/000-setup-high-low-datasites.ipynb @@ -31,8 +31,8 @@ "source": [ "# stdlib\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", - "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"9081\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")\n", "print(environment, high_port, low_port)" ] }, @@ -42,15 +42,15 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", "from syft import test_settings\n", - "\n", - "from worker_helpers import build_and_launch_worker_pool_from_docker_str\n", - "from worker_helpers import launch_worker_pool_from_docker_tag_and_registry\n", - "# isort: on" + "from syft.util.test_helpers.worker_helpers import (\n", + " build_and_launch_worker_pool_from_docker_str,\n", + ")\n", + "from syft.util.test_helpers.worker_helpers import (\n", + " launch_worker_pool_from_docker_tag_and_registry,\n", + ")" ] }, { @@ -327,7 +327,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/001-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/sync/001-scale-delete-worker-pools.ipynb new file mode 100644 index 00000000000..d2ca74cd56f --- /dev/null +++ b/notebooks/scenarios/bigquery/sync/001-scale-delete-worker-pools.ipynb @@ -0,0 +1,374 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "0", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "# Testing works over 4 possibilities\n", + "# 1. (python/in-memory workers and using tox commands)\n", + "# 2. (python/in-memory workers and manually running notebooks)\n", + "# 3. (using k8s and using tox commands)\n", + "# 4. 
(using k8s and manually running notebooks)\n", + "# Uncomment the lines below if in the 4th possibility\n", + "\n", + "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + "# os.environ[\"DEV_MODE\"] = \"True\"\n", + "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", + "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"9081\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")\n", + "print(environment, high_port, low_port)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import os\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + "from syft.util.test_helpers.email_helpers import Timeout\n", + "from syft.util.test_helpers.email_helpers import get_email_server" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "num_workers = int(os.environ.get(\"NUM_TEST_WORKERS\", 1))\n", + "\n", + "# ROOT_EMAIL = \"admin@bigquery.org\"\n", + "# ROOT_PASSWORD = \"bqpw\"\n", + "environment" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "### Launch server & login" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "server_low = sy.orchestra.launch(\n", + " name=\"bigquery-low\",\n", + " server_side_type=\"low\",\n", + " dev_mode=True,\n", + " n_consumers=1,\n", + " create_producer=True,\n", + " port=low_port,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "email_server, smtp_server = get_email_server(reset=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "assert len(low_client.worker_pools.get_all()) == 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], + "source": [ + "default_worker_pool = low_client.worker_pools.get_by_name(\"default-pool\")\n", + "default_worker_pool" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "### Scale Worker pool" + ] + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": {}, + "source": [ + "##### Scale up" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale to 1\n", + "if environment == \"remote\":\n", + " low_client.api.worker_pool.scale(\n", + " number=num_workers, pool_name=default_worker_pool.name\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "low_client.api.services.worker_pool[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale up 
workers\n", + "if environment == \"remote\":\n", + " scale_up_result = low_client.api.worker_pool.scale(\n", + " number=5, pool_name=default_worker_pool.name\n", + " )\n", + " if environment == \"remote\":\n", + " assert scale_up_result, scale_up_result\n", + "\n", + " assert (\n", + " low_client.api.services.worker_pool[default_worker_pool.name].max_count == 5\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "##### Scale down" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16", + "metadata": {}, + "outputs": [], + "source": [ + "# Scale down workers, this gracefully shutdowns the consumers\n", + "if environment == \"remote\":\n", + " scale_down_result = low_client.api.worker_pool.scale(\n", + " number=num_workers, pool_name=default_worker_pool.name\n", + " )\n", + " assert scale_down_result, scale_down_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "if environment == \"remote\":\n", + "\n", + " def has_worker_scaled_down():\n", + " return (\n", + " low_client.api.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + " )\n", + "\n", + " worker_scale_timeout = Timeout(timeout_duration=20)\n", + " worker_scale_timeout.run_with_timeout(has_worker_scaled_down)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "if environment == \"remote\":\n", + " assert (\n", + " low_client.api.services.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "#### Delete Worker Pool" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "pool_delete_result = low_client.api.services.worker_pool.delete(\n", + " pool_name=default_worker_pool.name\n", + ")\n", + "pool_delete_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [ + "with sy.raises(KeyError):\n", + " _ = low_client.api.services.worker_pool[default_worker_pool.name]" + ] + }, + { + "cell_type": "markdown", + "id": "22", + "metadata": {}, + "source": [ + "#### Re-launch the default worker pool" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "default_worker_image = default_worker_pool.image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "launch_result = low_client.api.services.worker_pool.launch(\n", + " pool_name=default_worker_pool.name,\n", + " image_uid=default_worker_image.id,\n", + " num_workers=num_workers,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "assert low_client.api.services.worker_pool[default_worker_pool.name]\n", + "assert (\n", + " low_client.api.services.worker_pool[default_worker_pool.name].max_count\n", + " == num_workers\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "smtp_server.stop()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [ + "if environment != \"remote\":\n", + " 
server_low.land()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "syft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/020-configure-api-and-sync.ipynb similarity index 97% rename from notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb rename to notebooks/scenarios/bigquery/sync/020-configure-api-and-sync.ipynb index 094841ef58e..2f9658634e3 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/020-configure-api-and-sync.ipynb @@ -25,8 +25,8 @@ "# stdlib\n", "\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", - "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"9081\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")\n", "print(environment, high_port, low_port)" ] }, @@ -36,24 +36,21 @@ "metadata": {}, "outputs": [], "source": [ - "# isort: off\n", "# stdlib\n", "\n", - "# syft absolute\n", - "import syft as sy\n", - "from syft import test_helpers # noqa: F401\n", - "from syft import test_settings\n", - "from syft.client.syncing import compare_clients\n", - "\n", "# set to use the live APIs\n", "# import os\n", "# os.environ[\"TEST_BIGQUERY_APIS_LIVE\"] = \"True\"\n", "# third party\n", - "from apis import make_schema\n", - "from apis import make_submit_query\n", - "from apis import make_test_query\n", "import pandas as pd\n", - "# isort: on" + "\n", + "# syft absolute\n", + "import syft as sy\n", + "from syft import test_settings\n", + "from syft.client.syncing import compare_clients\n", + "from syft.util.test_helpers.apis import make_schema\n", + "from syft.util.test_helpers.apis import make_submit_query\n", + "from syft.util.test_helpers.apis import make_test_query" ] }, { @@ -617,7 +614,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb b/notebooks/scenarios/bigquery/sync/030-ds-submit-request.ipynb similarity index 98% rename from notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb rename to notebooks/scenarios/bigquery/sync/030-ds-submit-request.ipynb index a2759038134..21e8437f283 100644 --- a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb +++ b/notebooks/scenarios/bigquery/sync/030-ds-submit-request.ipynb @@ -24,7 +24,7 @@ "# stdlib\n", "\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")\n", "print(environment, low_port)" ] }, @@ -319,7 +319,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb 
b/notebooks/scenarios/bigquery/sync/040-do-review-requests.ipynb similarity index 97% rename from notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb rename to notebooks/scenarios/bigquery/sync/040-do-review-requests.ipynb index 4eec3d6e7b1..07b32abbc34 100644 --- a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/sync/040-do-review-requests.ipynb @@ -28,8 +28,8 @@ "outputs": [], "source": [ "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", - "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")" + "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"9081\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")" ] }, { @@ -300,7 +300,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb b/notebooks/scenarios/bigquery/sync/050-ds-get-results.ipynb similarity index 97% rename from notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb rename to notebooks/scenarios/bigquery/sync/050-ds-get-results.ipynb index 1e61e0d8587..531f0c8f410 100644 --- a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/sync/050-ds-get-results.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", - "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", + "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"9083\")\n", "print(environment, low_port)" ] }, @@ -141,7 +141,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index fb0fdfa69b1..2534f22077e 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -154,14 +154,6 @@ def _test_settings() -> Any: return test_settings() -@module_property -def _test_helpers() -> None: - # relative - from .util.util import add_helper_path_to_python_path - - add_helper_path_to_python_path() - - @module_property def hello_baby() -> None: print("Hello baby!") diff --git a/packages/syft/src/syft/util/test_helpers/__init__.py b/packages/syft/src/syft/util/test_helpers/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_helpers/apis/__init__.py b/packages/syft/src/syft/util/test_helpers/apis/__init__.py similarity index 90% rename from test_helpers/apis/__init__.py rename to packages/syft/src/syft/util/test_helpers/apis/__init__.py index 7231b580696..e8221857fba 100644 --- a/test_helpers/apis/__init__.py +++ b/packages/syft/src/syft/util/test_helpers/apis/__init__.py @@ -1,10 +1,8 @@ # stdlib import os -# syft absolute -from syft.util.util import str_to_bool - # relative +from ...util import str_to_bool from .submit_query import make_submit_query env_var = "TEST_BIGQUERY_APIS_LIVE" diff --git a/packages/syft/src/syft/util/test_helpers/apis/live/__init__.py b/packages/syft/src/syft/util/test_helpers/apis/live/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_helpers/apis/live/schema.py b/packages/syft/src/syft/util/test_helpers/apis/live/schema.py similarity index 99% rename from test_helpers/apis/live/schema.py rename to 
packages/syft/src/syft/util/test_helpers/apis/live/schema.py index 5b39d9d9066..7a63ab467d1 100644 --- a/test_helpers/apis/live/schema.py +++ b/packages/syft/src/syft/util/test_helpers/apis/live/schema.py @@ -3,9 +3,9 @@ # syft absolute import syft as sy -from syft import test_settings # relative +from ..... import test_settings from ..rate_limiter import is_within_rate_limit diff --git a/test_helpers/apis/live/test_query.py b/packages/syft/src/syft/util/test_helpers/apis/live/test_query.py similarity index 99% rename from test_helpers/apis/live/test_query.py rename to packages/syft/src/syft/util/test_helpers/apis/live/test_query.py index 344879dcb62..cca61eae533 100644 --- a/test_helpers/apis/live/test_query.py +++ b/packages/syft/src/syft/util/test_helpers/apis/live/test_query.py @@ -3,9 +3,9 @@ # syft absolute import syft as sy -from syft import test_settings # relative +from ..... import test_settings from ..rate_limiter import is_within_rate_limit diff --git a/packages/syft/src/syft/util/test_helpers/apis/mock/__init__.py b/packages/syft/src/syft/util/test_helpers/apis/mock/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_helpers/apis/mock/data.py b/packages/syft/src/syft/util/test_helpers/apis/mock/data.py similarity index 100% rename from test_helpers/apis/mock/data.py rename to packages/syft/src/syft/util/test_helpers/apis/mock/data.py diff --git a/test_helpers/apis/mock/schema.py b/packages/syft/src/syft/util/test_helpers/apis/mock/schema.py similarity index 100% rename from test_helpers/apis/mock/schema.py rename to packages/syft/src/syft/util/test_helpers/apis/mock/schema.py diff --git a/test_helpers/apis/mock/test_query.py b/packages/syft/src/syft/util/test_helpers/apis/mock/test_query.py similarity index 100% rename from test_helpers/apis/mock/test_query.py rename to packages/syft/src/syft/util/test_helpers/apis/mock/test_query.py diff --git a/test_helpers/apis/rate_limiter.py b/packages/syft/src/syft/util/test_helpers/apis/rate_limiter.py similarity index 100% rename from test_helpers/apis/rate_limiter.py rename to packages/syft/src/syft/util/test_helpers/apis/rate_limiter.py diff --git a/test_helpers/apis/submit_query.py b/packages/syft/src/syft/util/test_helpers/apis/submit_query.py similarity index 100% rename from test_helpers/apis/submit_query.py rename to packages/syft/src/syft/util/test_helpers/apis/submit_query.py diff --git a/test_helpers/email_helpers.py b/packages/syft/src/syft/util/test_helpers/email_helpers.py similarity index 98% rename from test_helpers/email_helpers.py rename to packages/syft/src/syft/util/test_helpers/email_helpers.py index f58d41a20f8..ddfee82fef3 100644 --- a/test_helpers/email_helpers.py +++ b/packages/syft/src/syft/util/test_helpers/email_helpers.py @@ -11,8 +11,8 @@ from aiosmtpd.controller import Controller from faker import Faker -# syft absolute -from syft.service.user.user_roles import ServiceRole +# relative +from ...service.user.user_roles import ServiceRole fake = Faker() @@ -162,7 +162,7 @@ def get_token(self) -> str: try: token = get_token(email) break - except Exception: + except Exception: # nosec pass self.reset_token = token return token @@ -220,7 +220,7 @@ def user_exists(root_client, email: str) -> bool: class SMTPTestServer: def __init__(self, email_server): self.port = 9025 - self.hostname = "0.0.0.0" + self.hostname = "0.0.0.0" # nosec: B104 self._stop_event = asyncio.Event() # Simple email handler class diff --git a/test_helpers/job_helpers.py 
b/packages/syft/src/syft/util/test_helpers/job_helpers.py similarity index 94% rename from test_helpers/job_helpers.py rename to packages/syft/src/syft/util/test_helpers/job_helpers.py index 78494d381e7..bac08bad5d6 100644 --- a/test_helpers/job_helpers.py +++ b/packages/syft/src/syft/util/test_helpers/job_helpers.py @@ -10,13 +10,11 @@ import textwrap from typing import Any -# third party -from email_helpers import TestUser +# relative +from ... import test_settings +from .email_helpers import TestUser -# syft absolute -from syft import test_settings - -from syft.client.client import SyftClient # noqa +from ...client.client import SyftClient # noqa dataset_1 = test_settings.get("dataset_1", default="dataset_1") dataset_2 = test_settings.get("dataset_2", default="dataset_2") @@ -87,7 +85,7 @@ def make_query(settings: dict) -> str: SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score FROM {settings['dataset']}.{settings['table']} GROUP BY {settings['groupby_col']} - LIMIT {settings['limit']}""".strip() + LIMIT {settings['limit']}""".strip() # nosec: B608 return textwrap.dedent(query) @@ -96,14 +94,14 @@ def create_simple_query_job(user: TestUser) -> TestJob: job_type = "simple_query" func_name = f"{job_type}_{secrets.token_hex(3)}" - dataset = random.choice([dataset_1, dataset_2]) - table, groupby_col, score_col = random.choice( + dataset = random.choice([dataset_1, dataset_2]) # nosec: B311 + table, groupby_col, score_col = random.choice( # nosec: B311 [ (table_1, table_1_col_id, table_1_col_score), (table_2, table_2_col_id, table_2_col_score), ] ) - limit = random.randint(1, 1_000_000) + limit = random.randint(1, 1_000_000) # nosec: B311 settings = { "dataset": dataset, @@ -133,7 +131,7 @@ def create_wrong_asset_query(user: TestUser) -> TestJob: valid_job = create_simple_query_job(user) settings = valid_job.settings - corrupted_asset = random.choice(["dataset", "table"]) + corrupted_asset = random.choice(["dataset", "table"]) # nosec: B311 settings[corrupted_asset] = "wrong_asset" query = make_query(settings) @@ -240,7 +238,7 @@ def create_job_many_columns(user: TestUser) -> TestJob: job.job_type = job_type job.func_name = func_name settings = job.settings - job.settings["num_extra_cols"] = random.randint(100, 1000) + job.settings["num_extra_cols"] = random.randint(100, 1000) # nosec: B311 new_columns_string = ", ".join( f"{settings['score_col']} as col_{i}" for i in range(settings["num_extra_cols"]) @@ -250,13 +248,13 @@ def create_job_many_columns(user: TestUser) -> TestJob: SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score, {new_columns_string} FROM {settings['dataset']}.{settings['table']} GROUP BY {settings['groupby_col']} - LIMIT {settings['limit']}""".strip() + LIMIT {settings['limit']}""".strip() # nosec: B608 return job def create_random_job(user: TestUser) -> TestJob: - job_func = random.choice(create_job_functions) + job_func = random.choice(create_job_functions) # nosec: B311 return job_func(user) diff --git a/test_helpers/sync_helpers.py b/packages/syft/src/syft/util/test_helpers/sync_helpers.py similarity index 92% rename from test_helpers/sync_helpers.py rename to packages/syft/src/syft/util/test_helpers/sync_helpers.py index e1d558016ba..7252b896ea2 100644 --- a/test_helpers/sync_helpers.py +++ b/packages/syft/src/syft/util/test_helpers/sync_helpers.py @@ -3,15 +3,17 @@ # syft absolute import syft as sy -from syft.client.datasite_client import DatasiteClient -from syft.client.syncing import compare_clients -from 
syft.service.code.user_code import UserCode -from syft.service.job.job_stash import Job -from syft.service.job.job_stash import JobStatus -from syft.service.request.request import Request -from syft.service.request.request import RequestStatus -from syft.service.sync.diff_state import ObjectDiffBatch -from syft.types.result import Err + +# relative +from ...client.datasite_client import DatasiteClient +from ...client.syncing import compare_clients +from ...service.code.user_code import UserCode +from ...service.job.job_stash import Job +from ...service.job.job_stash import JobStatus +from ...service.request.request import Request +from ...service.request.request import RequestStatus +from ...service.sync.diff_state import ObjectDiffBatch +from ...types.result import Err def deny_requests_without_autosync_tag(client_low: DatasiteClient): diff --git a/packages/syft/src/syft/util/test_helpers/worker_helpers.py b/packages/syft/src/syft/util/test_helpers/worker_helpers.py new file mode 100644 index 00000000000..3c2667fecc8 --- /dev/null +++ b/packages/syft/src/syft/util/test_helpers/worker_helpers.py @@ -0,0 +1,86 @@ +# syft absolute +import syft as sy + + +def build_and_launch_worker_pool_from_docker_str( + environment: str, + client: sy.DatasiteClient, + worker_pool_name: str, + custom_pool_pod_annotations: dict, + custom_pool_pod_labels: dict, + worker_dockerfile: str, + external_registry: str, + docker_tag: str, + scale_to: int, +): + result = client.api.services.image_registry.add(external_registry) + assert "success" in result.message # nosec: B101 + + # For some reason, when using k9s, result.value is empty so can't use the below line + # local_registry = result.value + local_registry = client.api.services.image_registry[0] + + docker_config = sy.DockerWorkerConfig(dockerfile=worker_dockerfile) + assert docker_config.dockerfile == worker_dockerfile # nosec: B101 + submit_result = client.api.services.worker_image.submit(worker_config=docker_config) + print(submit_result.message) + assert "success" in submit_result.message # nosec: B101 + + worker_image = submit_result.value + + if environment == "remote": + docker_build_result = client.api.services.worker_image.build( + image_uid=worker_image.id, + tag=docker_tag, + registry_uid=local_registry.id, + ) + print(docker_build_result) + + if environment == "remote": + push_result = client.api.services.worker_image.push(worker_image.id) + print(push_result) + + result = client.api.services.worker_pool.launch( + pool_name=worker_pool_name, + image_uid=worker_image.id, + num_workers=1, + pod_annotations=custom_pool_pod_annotations, + pod_labels=custom_pool_pod_labels, + ) + print(result) + # assert 'success' in str(result.message) + + if environment == "remote": + result = client.worker_pools.scale(number=scale_to, pool_name=worker_pool_name) + print(result) + + +def launch_worker_pool_from_docker_tag_and_registry( + environment: str, + client: sy.DatasiteClient, + worker_pool_name: str, + custom_pool_pod_annotations: dict, + custom_pool_pod_labels: dict, + docker_tag: str, + external_registry: str, + scale_to: int = 1, +): + res = client.api.services.image_registry.add(external_registry) + assert "success" in res.message # nosec: B101 + docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag) + image_result = client.api.services.worker_image.submit(worker_config=docker_config) + assert "success" in image_result.message # nosec: B101 + worker_image = image_result.value + + launch_result = client.api.services.worker_pool.launch( + 
pool_name=worker_pool_name, + image_uid=worker_image.id, + num_workers=1, + pod_annotations=custom_pool_pod_annotations, + pod_labels=custom_pool_pod_labels, + ) + if environment == "remote" and scale_to > 1: + result = client.worker_pools.scale(number=scale_to, pool_name=worker_pool_name) + print(result) + + return launch_result diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 83efaa196e7..fa20c3fc2c2 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -1143,21 +1143,6 @@ def test_settings() -> Any: return test_settings -def add_helper_path_to_python_path() -> None: - current_path = "." - - # jupyter uses "." which resolves to the notebook - if not is_interpreter_jupyter(): - # python uses the file which has from syft import test_settings in it - import_path = get_caller_file_path() - if import_path: - current_path = import_path - - base_dir = find_base_dir_with_tox_ini(current_path) - notebook_helper_path = os.path.join(base_dir, "test_helpers") - sys.path.append(notebook_helper_path) - - class CustomRepr(reprlib.Repr): def repr_str(self, obj: Any, level: int = 0) -> str: if len(obj) <= self.maxstring: diff --git a/tox.ini b/tox.ini index 15353917ac2..6a54b437098 100644 --- a/tox.ini +++ b/tox.ini @@ -380,6 +380,7 @@ deps = nbmake db-dtypes google-cloud-bigquery + aiosmtpd changedir = {toxinidir}/notebooks allowlist_externals = bash @@ -494,6 +495,7 @@ deps = nbmake db-dtypes google-cloud-bigquery + aiosmtpd changedir = {toxinidir}/notebooks allowlist_externals = bash
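A minimal usage sketch for the justfile introduced above, assuming Docker, k3d, kubectl, helm, and devspace are installed locally; every recipe name comes from the justfile itself:

    # start the local image registry, then a high-side datasite with Syft deployed
    just start-registry
    just start-high
    just deploy-high

    # inspect the cluster with k9s, then tear everything down
    just k9s-high
    just delete-all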