From 95d2f08a2a582c7e4fc2a8bae56f7641c547eb60 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Thu, 12 Sep 2024 16:32:37 +0200 Subject: [PATCH 1/7] refactor syncing notebooks --- .../sync/01-setup-high-low-datasites.ipynb | 19303 +++++++++++++++- .../sync/02-configure-api-and-sync.ipynb | 16025 ++++++++++++- .../bigquery/sync/03-ds-submit-request.ipynb | 1243 +- .../bigquery/sync/04-do-review-requests.ipynb | 2 +- .../bigquery/sync/05-ds-get-results.ipynb | 2 +- 5 files changed, 36240 insertions(+), 335 deletions(-) diff --git a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb index 4719ec98261..4d641ccb8f0 100644 --- a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb +++ b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb @@ -2,28 +2,37 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "# stdlib\n", "import os\n", "\n", - "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", - "# os.environ[\"DEV_MODE\"] = \"True\"\n", - "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", - "# os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", - "# os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" + "use_k8s_dev = False\n", + "if use_k8s_dev:\n", + " os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", + " os.environ[\"DEV_MODE\"] = \"True\"\n", + " os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", + " os.environ[\"CLUSTER_HTTP_PORT_HIGH\"] = \"9081\"\n", + " os.environ[\"CLUSTER_HTTP_PORT_LOW\"] = \"9083\"" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "python auto auto\n" + ] + } + ], "source": [ "# stdlib\n", - "\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", "high_port = os.environ.get(\"CLUSTER_HTTP_PORT_HIGH\", \"auto\")\n", "low_port = os.environ.get(\"CLUSTER_HTTP_PORT_LOW\", \"auto\")\n", @@ -32,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -41,11 +50,1785 @@ "from syft import test_settings" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Launch server & login" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Autoreload enabled\n", + "Starting bigquery-low server on 0.0.0.0:61875\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", + "INFO: Uvicorn running on http://0.0.0.0:61875 (Press CTRL+C to quit)\n", + "INFO: Started reloader process [13204] using WatchFiles\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found `reset=True` in the launch configuration. 
Resetting the server...\n", + "Waiting for server to start" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [13209]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftInfo:
You have launched a development server at http://0.0.0.0:61875. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:61875.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n", + "Autoreload enabled\n", + "Starting bigquery-high server on 0.0.0.0:61888\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", + "INFO: Uvicorn running on http://0.0.0.0:61888 (Press CTRL+C to quit)\n", + "INFO: Started reloader process [13214] using WatchFiles\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found `reset=True` in the launch configuration. Resetting the server...\n", + "Waiting for server to start." + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [13219]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftInfo:
You have launched a development server at http://0.0.0.0:61888. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:61888.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -70,25 +1853,1732 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], + "source": [ + "# start email server here" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "high_client = server_high.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -103,11 +3593,37 @@ "# Setup High First" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "- If you want to use the k8s registry, we submit and build an image, and we scale a worker pool with that image\n", + "- If you want to use the k8s registry, do ?????" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "- helper for launching worker pools" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "external_registry = test_settings.get(\"external_registry\", default=\"docker.io\")\n", "external_registry" @@ -115,9 +3631,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Image Registry ID: 0e55741d145444d487d8861fdf85dea5 created successfully

" + ], + "text/plain": [ + "SyftSuccess: Image Registry ID: 0e55741d145444d487d8861fdf85dea5 created successfully" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = high_client.api.services.image_registry.add(external_registry)\n", "result" @@ -125,9 +4484,2618 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftImageRegistry List (Total: 1)
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[SyftImageRegistry(url=us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us)]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "image_registry_list = high_client.api.services.image_registry.get_all()\n", "image_registry_list" @@ -135,9 +7103,28 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class SyftImageRegistry:\n", + " id: str = 0e55741d145444d487d8861fdf85dea5\n", + " url: str = \"us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us\"\n", + "\n", + "```" + ], + "text/plain": [ + "SyftImageRegistry(url=us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us)" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "local_registry = image_registry_list[0]\n", "local_registry" @@ -145,9 +7132,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class SyftWorkerImage:\n", + " id: str = 588c466cd5c34fe9a418f5197bde5458\n", + " image_identifier: str = docker.io/openmined/syft-backend:local-dev\n", + " image_hash: str = None\n", + " created_at: str = 2024-09-12 14:08:13\n", + " built_at: str = None\n", + " config: str = prebuilt tag='openmined/syft-backend:local-dev' description='Prebuilt default worker image'\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_image.SyftWorkerImage" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "base_worker_image = high_client.images.get_all()[0]\n", "base_worker_image" @@ -155,9 +7165,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'FROM docker.io/openmined/syft-backend:local-dev\\n\\nRUN uv pip install db-dtypes google-cloud-bigquery'" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "worker_dockerfile = f\"\"\"\n", "FROM {str(base_worker_image.image_identifier)}\n", @@ -170,7 +7191,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -180,9 +7201,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
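Before submitting a Dockerfile that will be pushed to this registry, a quick round-trip check can catch a mistyped registry URL early. A minimal sketch, assuming `SyftImageRegistry` exposes a `url` attribute as its repr above suggests:

```python
# Sketch (not part of the original flow): confirm the registry we just added
# is actually registered before building and pushing images against it.
registries = high_client.api.services.image_registry.get_all()
assert any(str(r.url) == external_registry for r in registries), (
    f"{external_registry} is not registered on the high datasite"
)
```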
SyftSuccess:
Dockerfile ID: 60ffba80397f4c29a238eee407450b76 successfully submitted.

" + ], + "text/plain": [ + "SyftSuccess: Dockerfile ID: 60ffba80397f4c29a238eee407450b76 successfully submitted." + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "submit_result = high_client.api.services.worker_image.submit(\n", " worker_config=docker_config\n", @@ -192,9 +8056,34 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class SyftWorkerImage:\n", + " id: str = 60ffba80397f4c29a238eee407450b76\n", + " image_identifier: str = None\n", + " image_hash: str = None\n", + " created_at: str = 2024-09-12 14:08:13\n", + " built_at: str = None\n", + " config: str = FROM docker.io/openmined/syft-backend:local-dev\n", + "\n", + "RUN uv pip install db-dtypes google-cloud-bigquery\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_image.SyftWorkerImage" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# get non prebuilt\n", "dockerfile_list = high_client.images.get_all()\n", @@ -211,9 +8100,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'docker.io/openmined/syft-worker-bigquery:local-dev'" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "docker_tag = str(base_worker_image.image_identifier).replace(\n", " \"backend\", \"worker-bigquery\"\n", @@ -223,7 +8123,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -238,7 +8138,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -249,9 +8149,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "PrebuiltWorkerConfig(tag='docker.io/openmined/syft-worker-bigquery:local-dev', description=None)" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", "docker_config" @@ -259,9 +8170,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Dockerfile ID: 11326a215aae4a949cd6032e99b6593c successfully submitted.

" + ], + "text/plain": [ + "SyftSuccess: Dockerfile ID: 11326a215aae4a949cd6032e99b6593c successfully submitted." + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = high_client.api.services.worker_image.submit(worker_config=docker_config)\n", "worker_image_id = result.value.id\n", @@ -270,7 +9024,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -280,9 +9034,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class SyftWorkerImage:\n", + " id: str = 11326a215aae4a949cd6032e99b6593c\n", + " image_identifier: str = docker.io/openmined/syft-worker-bigquery:local-dev\n", + " image_hash: str = None\n", + " created_at: str = 2024-09-12 14:08:13\n", + " built_at: str = None\n", + " config: str = prebuilt tag='docker.io/openmined/syft-worker-bigquery:local-dev'\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_image.SyftWorkerImage" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# get prebuilt\n", "# dockerfile_list = high_client.images.get_all()\n", @@ -308,7 +9085,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -317,7 +9094,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, "outputs": [], "source": [ @@ -328,9 +9105,2618 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
ContainerSpawnStatus List (Total: 1)
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[ContainerSpawnStatus(worker_name='bigquery-pool-1', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = high_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", @@ -344,7 +11730,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -355,7 +11741,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, "outputs": [], "source": [ @@ -364,7 +11750,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, "metadata": {}, "outputs": [], "source": [ @@ -373,16 +11759,859 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
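Rather than eyeballing the spawn widget, the launch result can be asserted on directly; a minimal sketch over the `ContainerSpawnStatus` entries returned by `worker_pool.launch` above:

```python
# Sketch: fail fast if any worker in the new pool reported a spawn error.
# Each entry carries worker_name, worker, and error, as the repr above shows.
for status in result:
    assert status.error is None, f"{status.worker_name} failed: {status.error}"
```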
SyftSuccess:
No batches to resolve

" + ], + "text/plain": [ + "SyftSuccess: No batches to resolve" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "widget" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 33, "metadata": {}, "outputs": [], "source": [ @@ -391,9 +12620,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 34, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Registration feature successfully disabled

" + ], + "text/plain": [ + "SyftSuccess: Registration feature successfully disabled" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "high_client.settings.allow_guest_signup(enable=False)" ] @@ -407,9 +13479,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 35, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Image Registry ID: 946570589ea140b298bdd783a830e497 created successfully

" + ], + "text/plain": [ + "SyftSuccess: Image Registry ID: 946570589ea140b298bdd783a830e497 created successfully" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = low_client.api.services.image_registry.add(external_registry)\n", "result" @@ -417,9 +14332,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "PrebuiltWorkerConfig(tag='docker.io/openmined/syft-worker-bigquery:local-dev', description=None)" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", "docker_config" @@ -427,9 +14353,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Dockerfile ID: 1663b0c6c28e43a5ac0efa2c8311a83d successfully submitted.

" + ], + "text/plain": [ + "SyftSuccess: Dockerfile ID: 1663b0c6c28e43a5ac0efa2c8311a83d successfully submitted." + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = low_client.api.services.worker_image.submit(worker_config=docker_config)\n", "result" @@ -437,9 +15206,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 38, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class SyftWorkerImage:\n", + " id: str = 1663b0c6c28e43a5ac0efa2c8311a83d\n", + " image_identifier: str = docker.io/openmined/syft-worker-bigquery:local-dev\n", + " image_hash: str = None\n", + " created_at: str = 2024-09-12 14:07:58\n", + " built_at: str = None\n", + " config: str = prebuilt tag='docker.io/openmined/syft-worker-bigquery:local-dev'\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.worker.worker_image.SyftWorkerImage" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# get prebuilt\n", "dockerfile_list = low_client.images.get_all()\n", @@ -461,9 +15253,2618 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
ContainerSpawnStatus List (Total: 1)
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[ContainerSpawnStatus(worker_name='bigquery-pool-1', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "result = low_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", @@ -477,7 +17878,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 40, "metadata": {}, "outputs": [], "source": [ @@ -486,9 +17887,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 41, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
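The low side deliberately reuses the prebuilt `docker_tag` built on the high side, so code that syncs between the datasites executes in identical worker environments. A minimal sketch of that invariant:

```python
# Sketch: both datasites should reference the same prebuilt worker image tag.
low_tags = {str(img.image_identifier) for img in low_client.images.get_all()}
assert docker_tag in low_tags, f"{docker_tag} missing on the low side"
```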
SyftSuccess:
User 'John Doe' successfully registered! To see users, run `[your_client].users`

" + ], + "text/plain": [ + "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low_client.register(\n", " email=\"data_scientist@openmined.org\",\n", @@ -500,43 +18744,926 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 44, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Registration feature successfully disabled

" + ], + "text/plain": [ + "SyftSuccess: Registration feature successfully disabled" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# widget = sy.sync(from_client=low_client, to_client=high_client, hide_usercode=False)" + "low_client.settings.allow_guest_signup(enable=False)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 45, "metadata": {}, "outputs": [], "source": [ - "# widget" + "assert (\n", + " len(low_client.api.services.user.get_all()) == 2\n", + "), \"Only DS and Admin should be at low side\"\n", + "assert (\n", + " len(high_client.api.services.user.get_all()) == 1\n", + "), \"Only Admin should be at high side\"" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 46, "metadata": {}, "outputs": [], "source": [ - "low_client.settings.allow_guest_signup(enable=False)" + "#TODO: close email client" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 47, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping bigquery-high\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [13219]\n", + "INFO: Stopping reloader process [13214]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "killed\n", + "Stopping bigquery-low\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [13209]\n", + "INFO: Stopping reloader process [13204]\n" + ] + } + ], "source": [ - "assert (\n", - " len(low_client.api.services.user.get_all()) == 2\n", - "), \"Only DS and Admin should be at low side\"\n", - "assert (\n", - " len(high_client.api.services.user.get_all()) == 1\n", - "), \"Only Admin should be at high side\"" + "if environment != \"remote\":\n", + " server_high.land()\n", + " server_low.land()" ] }, { @@ -544,11 +19671,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "if environment != \"remote\":\n", - " server_high.land()\n", - " server_low.land()" - ] + "source": [] } ], "metadata": { @@ -567,7 +19690,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.5" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index 576b74e526a..44610b168c5 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -2,13 +2,13 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "# stdlib\n", "import os\n", - "\n", + "# TODO: if\n", "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", "# os.environ[\"DEV_MODE\"] = \"True\"\n", "# os.environ[\"TEST_EXTERNAL_REGISTRY\"] = \"k3d-registry.localhost:5800\"\n", @@ -18,9 +18,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "python auto auto\n" + ] + } + ], "source": [ "# 
stdlib\n", "\n", @@ -30,11 +38,26 @@ "print(environment, high_port, low_port)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Launch server and login" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using Mock API Code, this will query BigQuery. $TEST_BIGQUERY_APIS_LIVE==False\n" + ] + } + ], "source": [ "# stdlib\n", "import os\n", @@ -60,7 +83,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -71,9 +94,1774 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Autoreload enabled\n", + "Starting bigquery-low server on 0.0.0.0:62045\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", + "INFO: Uvicorn running on http://0.0.0.0:62045 (Press CTRL+C to quit)\n", + "INFO: Started reloader process [13235] using WatchFiles\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for server to start" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [13236]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftInfo:
You have launched a development server at http://0.0.0.0:62045. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:62045.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n", + "Autoreload enabled\n", + "Starting bigquery-high server on 0.0.0.0:62058\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", + "INFO: Uvicorn running on http://0.0.0.0:62058 (Press CTRL+C to quit)\n", + "INFO: Started reloader process [13239] using WatchFiles\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for server to start" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [13243]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftInfo:
You have launched a development server at http://0.0.0.0:62058. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:62058.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -96,9 +1884,1706 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")\n", "high_client = server_high.login(email=\"info@openmined.org\", password=\"changethis\")" @@ -106,7 +3591,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -116,7 +3601,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -125,7 +3610,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ @@ -134,13 +3619,34 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "# !pip install db-dtypes google-cloud-bigquery" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# load schema data" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "dataset_1 = test_settings.get(\"dataset_1\", default=\"dataset_1\")\n", + "dataset_2 = test_settings.get(\"dataset_2\", default=\"dataset_2\")\n", + "table_1 = test_settings.get(\"table_1\", default=\"table_1\")\n", + "table_2 = test_settings.get(\"table_2\", default=\"table_2\")\n", + "table_2_col_id = test_settings.get(\"table_2_col_id\", default=\"table_id\")\n", + "table_2_col_score = test_settings.get(\"table_2_col_score\", default=\"colname\")" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -150,7 +3656,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ @@ -164,7 +3670,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ @@ -177,9 +3683,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully created.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully created." + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "new_endpoint = sy.TwinAPIEndpoint(\n", " path=\"bigquery.test_query\",\n", @@ -194,9 +4543,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully updated.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully updated." + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Here, we update the endpoint to timeout after 100s (rather the default of 60s)\n", "high_client.api.services.api.update(\n", @@ -206,9 +5398,852 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully updated.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully updated." + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "high_client.api.services.api.update(\n", " endpoint_path=\"bigquery.test_query\", hide_mock_definition=True\n", @@ -217,7 +6252,3234 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARN: private key is based on server name: bigquery-high in dev_mode. Don't run this in production.\n", + "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/fbdf5a287e58454cbbd3fac4ad744d37/db/fbdf5a287e58454cbbd3fac4ad744d37.sqlite\n", + "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/fbdf5a287e58454cbbd3fac4ad744d37/db/fbdf5a287e58454cbbd3fac4ad744d37.sqlite\n", + "Adding producer for queue: api_call on: tcp://localhost:62069\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
" + ], + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False \n" + ], + "text/plain": [ + "Pointer:\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False " + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Test mock version\n", + "result = high_client.api.services.bigquery.test_query.mock(\n", + " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", + ")\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
" + ], + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False \n" + ], + "text/plain": [ + "Pointer:\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False " + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Test mock version\n", + "result = high_client.api.services.bigquery.test_query.mock(\n", + " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", + ")\n", + "result" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "with sy.raises successfully caught the following exception:\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftException:
    Function failed to complete: An error was raised during the execution of the API endpoint call: *must be qualified with a dataset*. 400 BadRequest: 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/project-id/queries?prettyPrint=false: Table "invalid_table" must be qualified with a dataset (e.g. dataset.table).

Server Trace:
Traceback (most recent call last):
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/server/server.py", line 1271, in handle_api_call_with_unsigned_result
    result = method(context, *api_call.args, **api_call.kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py", line 490, in _decorator
    result = func(self, *args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py", line 321, in call_public_in_jobs
    ).unwrap()
      ^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py", line 90, in unwrap
    raise self.value
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py", line 113, in wrapper
    output = func(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py", line 376, in _call_in_jobs
    raise SyftException(
syft.types.errors.SyftException: Function failed to complete: An error was raised during the execution of the API endpoint call: *must be qualified with a dataset*. 400 BadRequest: 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/project-id/queries?prettyPrint=false: Table "invalid_table" must be qualified with a dataset (e.g. dataset.table).

Client Trace:
Traceback (most recent call last):
  File "/var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/ipykernel_13227/1716145944.py", line 7, in <module>
    high_client.api.services.bigquery.test_query.mock(
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/client/api.py", line 383, in __call__
    return remote_func.function_call(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/client/api.py", line 371, in function_call
    return post_process_result(result, self.unwrap_on_success)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
syft.types.errors.SyftException: Function failed to complete: An error was raised during the execution of the API endpoint call: *must be qualified with a dataset*. 400 BadRequest: 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/project-id/queries?prettyPrint=false: Table "invalid_table" must be qualified with a dataset (e.g. dataset.table).
server_trace: Traceback (most recent call last):
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/server/server.py", line 1271, in handle_api_call_with_unsigned_result
    result = method(context, *api_call.args, **api_call.kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py", line 490, in _decorator
    result = func(self, *args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py", line 321, in call_public_in_jobs
    ).unwrap()
      ^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py", line 90, in unwrap
    raise self.value
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py", line 113, in wrapper
    output = func(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^
  File "/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py", line 376, in _call_in_jobs
    raise SyftException(
syft.types.errors.SyftException: Function failed to complete: An error was raised during the execution of the API endpoint call: *must be qualified with a dataset*. 400 BadRequest: 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/project-id/queries?prettyPrint=false: Table "invalid_table" must be qualified with a dataset (e.g. dataset.table).
\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Bug with the new Error PR: message printed multiple times. TODO clean up the duplicate exception messages.\n", + "\n", + "# Test mock version for wrong queries\n", + "with sy.raises(\n", + " sy.SyftException(public_message=\"*must be qualified with a dataset*\"), show=True\n", + "):\n", + " high_client.api.services.bigquery.test_query.mock(\n", + " sql_query=\"SELECT * FROM invalid_table LIMIT 1\"\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
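Both halves of the twin endpoint share one signature, which is what makes mock-first development work: code validated against the mock runs unchanged against the private side once approved. A minimal sketch of that contract, assuming the returned pointers proxy DataFrame attributes such as `columns`:

```python
# Sketch: mock and private results should agree on shape, even though rows differ.
mock_res = high_client.api.services.bigquery.test_query.mock(
    sql_query=f"SELECT * FROM {dataset_1}.{table_1} LIMIT 10"
)
private_res = high_client.api.services.bigquery.test_query.private(
    sql_query=f"SELECT * FROM {dataset_1}.{table_1} LIMIT 10"
)
assert list(mock_res.columns) == list(private_res.columns)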
" + ], + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False \n" + ], + "text/plain": [ + "Pointer:\n", + " int64_field_0 id name subscribers_count \\\n", + "0 4 t5_via1x /channel/mylittlepony 4323081 \n", + "1 5 t5_cv9gn /channel/polyamory 2425929 \n", + "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", + "3 16 t5_8fcro /channel/cordcutters 7543226 \n", + "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", + "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", + "6 24 t5_hmqjk /channel/engineering 8766144 \n", + "7 25 t5_1flyj /channel/nottheonion 2580984 \n", + "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", + "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", + "\n", + " permalink nsfw spam \n", + "0 /channel//channel/mylittlepony NaN False \n", + "1 /channel//channel/polyamory NaN False \n", + "2 /channel//channel/Catholicism NaN False \n", + "3 /channel//channel/cordcutters NaN False \n", + "4 /channel//channel/stevenuniverse NaN False \n", + "5 /channel//channel/entitledbitch NaN False \n", + "6 /channel//channel/engineering NaN False \n", + "7 /channel//channel/nottheonion NaN False \n", + "8 /channel//channel/FoodPorn NaN False \n", + "9 /channel//channel/puppysmiles NaN False " + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Test private version\n", + "result = high_client.api.services.bigquery.test_query.private(\n", + " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", + ")\n", + "result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Create `biquery.schema` endpoint" + ] + }, + { + "cell_type": "code", + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -231,53 +9493,2080 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully created.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully created." + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "high_client.custom_api.add(endpoint=schema_function)\n", - "high_client.refresh()" + "# can we delete this?\n", + "# high_client.refresh()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
projectdataset_idtable_idschema_nameschema_fielddescriptionnum_rows
0example-projecttest_1gbpostsint64_field_0INTEGERNone2000000
1example-projecttest_1gbpostsidSTRINGNone2000000
2example-projecttest_1gbpostsnameSTRINGNone2000000
3example-projecttest_1gbpostssubscribers_countINTEGERNone2000000
4example-projecttest_1gbpostspermalinkSTRINGNone2000000
5example-projecttest_1gbpostsnsfwFLOATNone2000000
6example-projecttest_1gbpostsspamBOOLEANNone2000000
7example-projecttest_1gbcommentsint64_field_0INTEGERNone2000000
8example-projecttest_1gbcommentsidSTRINGNone2000000
9example-projecttest_1gbcommentsbodySTRINGNone2000000
10example-projecttest_1gbcommentsparent_idSTRINGNone2000000
11example-projecttest_1gbcommentscreated_atINTEGERNone2000000
12example-projecttest_1gbcommentslast_modified_atINTEGERNone2000000
13example-projecttest_1gbcommentsgildedBOOLEANNone2000000
14example-projecttest_1gbcommentspermalinkSTRINGNone2000000
15example-projecttest_1gbcommentsscoreINTEGERNone2000000
16example-projecttest_1gbcommentscomment_idSTRINGNone2000000
17example-projecttest_1gbcommentspost_idSTRINGNone2000000
18example-projecttest_1gbcommentsauthor_idSTRINGNone2000000
19example-projecttest_1gbcommentsspamBOOLEANNone2000000
20example-projecttest_1gbcommentsdeletedBOOLEANNone2000000
21example-projecttest_1gbcommentsupvote_raioFLOATNone2000000
22example-projecttest_1gbcommentscollapsed_in_crowd_controlBOOLEANNone2000000
\n", + "
" + ], + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + " project dataset_id table_id schema_name \\\n", + "0 example-project test_1gb posts int64_field_0 \n", + "1 example-project test_1gb posts id \n", + "2 example-project test_1gb posts name \n", + "3 example-project test_1gb posts subscribers_count \n", + "4 example-project test_1gb posts permalink \n", + "5 example-project test_1gb posts nsfw \n", + "6 example-project test_1gb posts spam \n", + "7 example-project test_1gb comments int64_field_0 \n", + "8 example-project test_1gb comments id \n", + "9 example-project test_1gb comments body \n", + "10 example-project test_1gb comments parent_id \n", + "11 example-project test_1gb comments created_at \n", + "12 example-project test_1gb comments last_modified_at \n", + "13 example-project test_1gb comments gilded \n", + "14 example-project test_1gb comments permalink \n", + "15 example-project test_1gb comments score \n", + "16 example-project test_1gb comments comment_id \n", + "17 example-project test_1gb comments post_id \n", + "18 example-project test_1gb comments author_id \n", + "19 example-project test_1gb comments spam \n", + "20 example-project test_1gb comments deleted \n", + "21 example-project test_1gb comments upvote_raio \n", + "22 example-project test_1gb comments collapsed_in_crowd_control \n", + "\n", + " schema_field description num_rows \n", + "0 INTEGER None 2000000 \n", + "1 STRING None 2000000 \n", + "2 STRING None 2000000 \n", + "3 INTEGER None 2000000 \n", + "4 STRING None 2000000 \n", + "5 FLOAT None 2000000 \n", + "6 BOOLEAN None 2000000 \n", + "7 INTEGER None 2000000 \n", + "8 STRING None 2000000 \n", + "9 STRING None 2000000 \n", + "10 STRING None 2000000 \n", + "11 INTEGER None 2000000 \n", + "12 INTEGER None 2000000 \n", + "13 BOOLEAN None 2000000 \n", + "14 STRING None 2000000 \n", + "15 INTEGER None 2000000 \n", + "16 STRING None 2000000 \n", + "17 STRING None 2000000 \n", + "18 STRING None 2000000 \n", + "19 BOOLEAN None 2000000 \n", + "20 BOOLEAN None 2000000 \n", + "21 FLOAT None 2000000 \n", + "22 BOOLEAN None 2000000 \n" + ], + "text/plain": [ + "Pointer:\n", + " project dataset_id table_id schema_name \\\n", + "0 example-project test_1gb posts int64_field_0 \n", + "1 example-project test_1gb posts id \n", + "2 example-project test_1gb posts name \n", + "3 example-project test_1gb posts subscribers_count \n", + "4 example-project test_1gb posts permalink \n", + "5 example-project test_1gb posts nsfw \n", + "6 example-project test_1gb posts spam \n", + "7 example-project test_1gb comments int64_field_0 \n", + "8 example-project test_1gb comments id \n", + "9 example-project test_1gb comments body \n", + "10 example-project test_1gb comments parent_id \n", + "11 example-project test_1gb comments created_at \n", + "12 example-project test_1gb comments last_modified_at \n", + "13 example-project test_1gb comments gilded \n", + "14 example-project test_1gb comments permalink \n", + "15 example-project test_1gb comments score \n", + "16 example-project test_1gb comments comment_id \n", + "17 example-project test_1gb comments post_id \n", + "18 example-project test_1gb comments author_id \n", + "19 example-project test_1gb comments spam \n", + "20 example-project test_1gb comments deleted \n", + "21 example-project test_1gb comments upvote_raio \n", + "22 example-project test_1gb comments collapsed_in_crowd_control \n", + "\n", + " schema_field description num_rows \n", + "0 INTEGER None 2000000 \n", + "1 STRING None 2000000 \n", + "2 STRING None 
2000000 \n", + "3 INTEGER None 2000000 \n", + "4 STRING None 2000000 \n", + "5 FLOAT None 2000000 \n", + "6 BOOLEAN None 2000000 \n", + "7 INTEGER None 2000000 \n", + "8 STRING None 2000000 \n", + "9 STRING None 2000000 \n", + "10 STRING None 2000000 \n", + "11 INTEGER None 2000000 \n", + "12 INTEGER None 2000000 \n", + "13 BOOLEAN None 2000000 \n", + "14 STRING None 2000000 \n", + "15 INTEGER None 2000000 \n", + "16 STRING None 2000000 \n", + "17 STRING None 2000000 \n", + "18 STRING None 2000000 \n", + "19 BOOLEAN None 2000000 \n", + "20 BOOLEAN None 2000000 \n", + "21 FLOAT None 2000000 \n", + "22 BOOLEAN None 2000000 " + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "dataset_1 = test_settings.get(\"dataset_1\", default=\"dataset_1\")\n", - "dataset_2 = test_settings.get(\"dataset_2\", default=\"dataset_2\")\n", - "table_1 = test_settings.get(\"table_1\", default=\"table_1\")\n", - "table_2 = test_settings.get(\"table_2\", default=\"table_2\")\n", - "table_2_col_id = test_settings.get(\"table_2_col_id\", default=\"table_id\")\n", - "table_2_col_score = test_settings.get(\"table_2_col_score\", default=\"colname\")" + "high_client.api.services.bigquery.schema()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ - "# Test mock version\n", - "result = high_client.api.services.bigquery.test_query.mock(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", - ")\n", - "result" + "# todo add tests" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "high_client.api.services.bigquery.schema()" + "# Create `submit_query` endpoint" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -288,18 +11577,1704 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully created.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully created." + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "high_client.custom_api.add(endpoint=submit_query_function)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftSuccess:
Endpoint successfully updated.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully updated." + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "high_client.api.services.api.update(\n", " endpoint_path=\"bigquery.submit_query\", hide_mock_definition=True\n", @@ -308,142 +13283,2760 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ - "high_client.custom_api.api_endpoints()" + "# Testing submit query\n", + "result = high_client.api.services.bigquery.submit_query(\n", + " func_name=\"my_func\",\n", + " query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 1\",\n", + ")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + "'Query submitted syft.service.request.request.Request. Use `client.code.my_func()` to run your query'\n" + ], + "text/plain": [ + "Pointer:\n", + "'Query submitted syft.service.request.request.Request. Use `client.code.my_func()` to run your query'" + ] + }, + "execution_count": 30, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "assert len(high_client.custom_api.api_endpoints()) == 3" + "assert \"Query submitted\" in result\n", + "result" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, "metadata": {}, "outputs": [], "source": [ - "assert (\n", - " high_client.api.services.bigquery.test_query\n", - " and high_client.api.services.bigquery.submit_query\n", - ")" + "job = high_client.code.my_func(blocking=False)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.\n" + ] + } + ], "source": [ - "# Test mock version\n", - "result = high_client.api.services.bigquery.test_query.mock(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", - ")\n", - "result" + "res = job.wait().get()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 33, "metadata": {}, "outputs": [], "source": [ - "# Bug with the new Error PR: message printed multiple times. TODO clean up the duplicate exception messages.\n", - "\n", - "# Test mock version for wrong queries\n", - "with sy.raises(\n", - " sy.SyftException(public_message=\"*must be qualified with a dataset*\"), show=True\n", - "):\n", - " high_client.api.services.bigquery.test_query.mock(\n", - " sql_query=\"SELECT * FROM invalid_table LIMIT 1\"\n", - " )" + "import pandas as pd\n", + "assert isinstance(res, pd.DataFrame)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# Test private version\n", - "result = high_client.api.services.bigquery.test_query.private(\n", - " sql_query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 10\"\n", - ")\n", - "result" + "# Test endpoints" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 34, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

TwinAPIEndpointView List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "[syft.service.api.api.TwinAPIEndpointView,\n", + " syft.service.api.api.TwinAPIEndpointView,\n", + " syft.service.api.api.TwinAPIEndpointView]" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# Testing submit query\n", - "result = high_client.api.services.bigquery.submit_query(\n", - " func_name=\"my_func\",\n", - " query=f\"SELECT * FROM {dataset_1}.{table_1} LIMIT 1\",\n", - ")" + "high_client.custom_api.api_endpoints()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 35, "metadata": {}, "outputs": [], "source": [ - "assert \"Query submitted\" in result\n", - "result" + "assert len(high_client.custom_api.api_endpoints()) == 3" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 36, "metadata": {}, "outputs": [], "source": [ - "job = high_client.code.my_func(blocking=False)" + "assert (\n", + " high_client.api.services.bigquery.test_query\n", + " and high_client.api.services.bigquery.submit_query\n", + ")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ - "job.result" + "# syft absolute\n", + "from syft.client.syncing import compare_clients\n", + "from syft.service.job.job_stash import Job\n", + "from syft.service.job.job_stash import JobStatus" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "job.wait()" + "# Syncing" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 38, "metadata": {}, "outputs": [], "source": [ - "# syft absolute\n", - "from syft.client.syncing import compare_clients\n", - "from syft.service.job.job_stash import Job\n", - "from syft.service.job.job_stash import JobStatus" + "# todo: move to helper" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ @@ -458,73 +16051,132 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ - "def sync_new_objects(\n", - " from_client, to_client, dry_run: bool = True, private_data: bool = False\n", - "):\n", - " sim = \"Simulating \" if dry_run else \"\"\n", - " priv = \"WITH PRIVATE DATA\" if private_data else \"\"\n", - " print(f\"{sim}Syncing from {from_client.name} to {to_client.name} {priv}\")\n", - " changes = []\n", - " diff = compare_clients(\n", - " from_client=from_client, to_client=to_client, hide_usercode=False\n", - " )\n", - " if isinstance(diff, sy.SyftError):\n", - " return diff\n", + "# Request\n", + "\n", + "# UserCode - UserCodeStatus\n", "\n", - " for batch in diff.batches:\n", - " try:\n", - " if is_job_to_sync(batch) or batch.status == \"NEW\":\n", - " w = batch.resolve(build_state=False)\n", - " if private_data:\n", - " w.click_share_all_private_data()\n", - " if not dry_run:\n", - " w.click_sync()\n", - " change_text = f\"Synced {batch.status} {batch.root_type.__name__}\"\n", - " if not dry_run:\n", - " changes.append(change_text)\n", - " else:\n", - " print(f\"Would have run: {change_text}\")\n", - " except Exception as e:\n", - " print(\"sync_new_objects\", e)\n", - " raise e\n", - " return changes" + "# Job - Log - Result - ExecutionOutput\n", + "\n", + "# TwinAPIEndpoint - EndPoint" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 41, "metadata": {}, "outputs": [], "source": [ - "result = 
sync_new_objects(high_client, low_client)\n", - "result" + "diff = compare_clients(\n", + " from_client=high_client, to_client=low_client, hide_usercode=False\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "- verify that jobs are actually filtered out\n", + "- we need to think about whether its possible for the admin to create more data here that would break sync" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "widget = diff.resolve()" + ] + }, + { + "cell_type": "code", + "execution_count": 48, "metadata": {}, "outputs": [], "source": [ - "result = sync_new_objects(high_client, low_client, dry_run=False)\n", - "result" + "# widget._share_all()\n", + "# widget._sync_all()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 50, "metadata": {}, "outputs": [], "source": [ - "assert [\n", - " \"Synced NEW TwinAPIEndpoint\",\n", - " \"Synced NEW TwinAPIEndpoint\",\n", - " \"Synced NEW TwinAPIEndpoint\",\n", - "] == result" + "# def sync_new_objects(\n", + "# from_client, to_client, dry_run: bool = True, private_data: bool = False\n", + "# ):\n", + "# sim = \"Simulating \" if dry_run else \"\"\n", + "# priv = \"WITH PRIVATE DATA\" if private_data else \"\"\n", + "# print(f\"{sim}Syncing from {from_client.name} to {to_client.name} {priv}\")\n", + "# changes = []\n", + "# diff = compare_clients(\n", + "# from_client=from_client, to_client=to_client, hide_usercode=False\n", + "# )\n", + "# if isinstance(diff, sy.SyftError):\n", + "# return diff\n", + "\n", + "# for batch in diff.batches:\n", + "# try:\n", + "# if is_job_to_sync(batch) or batch.status == \"NEW\":\n", + "# w = batch.resolve(build_state=False)\n", + "# if private_data:\n", + "# w.click_share_all_private_data()\n", + "# if not dry_run:\n", + "# w.click_sync()\n", + "# change_text = f\"Synced {batch.status} {batch.root_type.__name__}\"\n", + "# if not dry_run:\n", + "# changes.append(change_text)\n", + "# else:\n", + "# print(f\"Would have run: {change_text}\")\n", + "# except Exception as e:\n", + "# print(\"sync_new_objects\", e)\n", + "# raise e\n", + "# return changes" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [], + "source": [ + "# result = sync_new_objects(high_client, low_client)\n", + "# result\n", + "\n", + "# result = sync_new_objects(high_client, low_client, dry_run=False)\n", + "# result" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [], + "source": [ + "# assert [\n", + "# \"Synced NEW TwinAPIEndpoint\",\n", + "# \"Synced NEW TwinAPIEndpoint\",\n", + "# \"Synced NEW TwinAPIEndpoint\",\n", + "# ] == result" ] }, { @@ -571,24 +16223,85 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": {}, "outputs": [], "source": [ - "assert len(low_client.custom_api.api_endpoints()) == 3\n", - "assert len(high_client.custom_api.api_endpoints()) == 3" + "assert len(low_client.jobs.get_all()) ==0" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 53, "metadata": {}, "outputs": [], + "source": [ + "assert len(low_client.custom_api.api_endpoints()) == 3" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": {}, + "outputs": [], + "source": [ + "assert 
len(high_client.custom_api.api_endpoints()) == 3" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping bigquery-high\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [13243]\n", + "INFO: Stopping reloader process [13239]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping bigquery-low\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [13236]\n", + "INFO: Stopping reloader process [13235]\n" + ] + } + ], "source": [ "if environment != \"remote\":\n", " server_high.land()\n", " server_low.land()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -607,7 +16320,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.5" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb index 309e37755ea..226f205f9c8 100644 --- a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb +++ b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -17,9 +17,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "python auto\n" + ] + } + ], "source": [ "# stdlib\n", "\n", @@ -30,13 +38,14 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "# syft absolute\n", "import syft as sy\n", - "from syft import test_settings" + "from syft import test_settings\n", + "import pandas as pd" ] }, { @@ -48,9 +57,891 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Autoreload enabled\n", + "Starting bigquery-low server on 0.0.0.0:62820\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", + "INFO: Uvicorn running on http://0.0.0.0:62820 (Press CTRL+C to quit)\n", + "INFO: Started reloader process [13539] using WatchFiles\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for server to start" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Started server process [13541]\n", + "INFO: Waiting for application startup.\n", + "INFO: Application startup complete.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Done.\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
SyftInfo:
You have launched a development server at http://0.0.0.0:62820. It is intended only for local use.

" + ], + "text/plain": [ + "SyftInfo: You have launched a development server at http://0.0.0.0:62820.It is intended only for local use." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -64,9 +955,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + } + ], "source": [ "ds_client = server_low.login(\n", " email=\"data_scientist@openmined.org\", password=\"verysecurepassword\"\n", @@ -82,7 +981,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -91,7 +990,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -104,113 +1003,226 @@ ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "result = ds_client.api.services.bigquery.test_query.mock(\n", - " sql_query=f\"SELECT * from {dataset_2}.{table_2} limit 10\"\n", - ")\n", - "assert len(result) == 10" + "# Test the schema" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARN: private key is based on server name: bigquery-low in dev_mode. Don't run this in production.\n", + "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/4a471a09f56b4a1d809c0a7614074283/db/4a471a09f56b4a1d809c0a7614074283.sqlite\n", + "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/4a471a09f56b4a1d809c0a7614074283/db/4a471a09f56b4a1d809c0a7614074283.sqlite\n", + "Adding producer for queue: api_call on: tcp://localhost:62829\n" + ] + } + ], "source": [ - "with sy.raises(sy.SyftException, show=True):\n", - " ds_client.api.services.bigquery.test_query.private(\n", - " sql_query=f\"SELECT * from {dataset_2}.{table_2} limit 10\"\n", - " )" + "res = ds_client.api.services.bigquery.schema()\n", + "assert isinstance(res.get(), pd.DataFrame)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "res = ds_client.api.services.bigquery.schema()\n", - "# third party\n", - "import pandas as pd\n", - "\n", - "assert isinstance(res.get(), pd.DataFrame)" + "# Test the mock" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ - "FUNC_NAME = \"large_sample\"\n", - "LARGE_SAMPLE_QUERY = f\"SELECT * FROM {dataset_2}.{table_2} LIMIT 10000\"" + "result = ds_client.api.services.bigquery.test_query.mock(\n", + " sql_query=f\"SELECT * from {dataset_2}.{table_2} limit 10\"\n", + ")\n", + "assert len(result) == 10" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "with sy.raises successfully caught the following exception:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + " \n", + " \n", + " SyftException:\n", + "
Function failed to complete: No private code available
\n", + "
\n", + "
\n", + "
\n", + " \n", + "
Server Trace:
\n", + "
\n",
+       "      \n",
+       "        Traceback (most recent call last):\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/server/server.py\", line 1271, in handle_api_call_with_unsigned_result\n",
+       "    result = method(context, *api_call.args, **api_call.kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py\", line 490, in _decorator\n",
+       "    result = func(self, *args, **kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py\", line 304, in call_private_in_jobs\n",
+       "    ).unwrap()\n",
+       "      ^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py\", line 90, in unwrap\n",
+       "    raise self.value\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py\", line 113, in wrapper\n",
+       "    output = func(*args, **kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py\", line 376, in _call_in_jobs\n",
+       "    raise SyftException(\n",
+       "syft.types.errors.SyftException: Function failed to complete: No private code available\n",
+       "\n",
+       "      \n",
+       "    
\n", + "
\n", + "
\n", + " \n", + "
Client Trace:
\n", + "
\n",
+       "      \n",
+       "        Traceback (most recent call last):\n",
+       "  File \"/var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/ipykernel_13528/888537078.py\", line 2, in <module>\n",
+       "    ds_client.api.services.bigquery.test_query.private(\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/client/api.py\", line 403, in __call__\n",
+       "    return remote_func.function_call(\n",
+       "           ^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/client/api.py\", line 371, in function_call\n",
+       "    return post_process_result(result, self.unwrap_on_success)\n",
+       "           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "syft.types.errors.SyftException: Function failed to complete: No private code available\n",
+       "server_trace: Traceback (most recent call last):\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/server/server.py\", line 1271, in handle_api_call_with_unsigned_result\n",
+       "    result = method(context, *api_call.args, **api_call.kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py\", line 490, in _decorator\n",
+       "    result = func(self, *args, **kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py\", line 304, in call_private_in_jobs\n",
+       "    ).unwrap()\n",
+       "      ^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py\", line 90, in unwrap\n",
+       "    raise self.value\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/types/result.py\", line 113, in wrapper\n",
+       "    output = func(*args, **kwargs)\n",
+       "             ^^^^^^^^^^^^^^^^^^^^^\n",
+       "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/api/api_service.py\", line 376, in _call_in_jobs\n",
+       "    raise SyftException(\n",
+       "syft.types.errors.SyftException: Function failed to complete: No private code available\n",
+       "\n",
+       "\n",
+       "      \n",
+       "    
\n", + "
\n", + "
\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ - "mock_res = ds_client.api.services.bigquery.test_query(sql_query=LARGE_SAMPLE_QUERY)" + "with sy.raises(sy.SyftException, show=True):\n", + " ds_client.api.services.bigquery.test_query.private(\n", + " sql_query=f\"SELECT * from {dataset_2}.{table_2} limit 10\"\n", + " )" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ - "submission = ds_client.api.services.bigquery.submit_query(\n", - " func_name=FUNC_NAME, query=LARGE_SAMPLE_QUERY\n", - ")" + "FUNC_NAME = \"large_sample\"\n", + "LARGE_SAMPLE_QUERY = f\"SELECT * FROM {dataset_2}.{table_2} LIMIT 10000\"" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.\n" + ] + } + ], "source": [ - "def extract_code_path(response):\n", - " # stdlib\n", - " import re\n", - "\n", - " pattern = r\"client\\.code\\.(\\w+)\\(\\)\"\n", - " match = re.search(pattern, str(response))\n", - " if match:\n", - " extracted_code = match.group(1)\n", - " return extracted_code\n", - " return None" + "mock_res = ds_client.api.services.bigquery.test_query(sql_query=LARGE_SAMPLE_QUERY)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# why are we randomizing things here?\n", - "func_name = extract_code_path(submission)" + "# Submit a query" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ - "api_method = getattr(ds_client.code, func_name, None)\n", - "api_method" + "submission = ds_client.api.services.bigquery.submit_query(\n", + " func_name=FUNC_NAME, query=LARGE_SAMPLE_QUERY\n", + ")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -218,48 +1230,105 @@ "with sy.raises(\n", " sy.SyftException(\n", " public_message=\"*Please wait for the admin to allow the execution of this code*\"\n", - " ),\n", - " show=True,\n", + " )\n", "):\n", - " result = api_method(blocking=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "assert \"large_sample\" in func_name" + " ds_client.code.large_sample(blocking=False)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "AssertionError", + "evalue": "Expected to be raised, but got .", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mConnectionRefusedError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/urllib3/connection.py:196\u001b[0m, in \u001b[0;36mHTTPConnection._new_conn\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 195\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 196\u001b[0m sock \u001b[38;5;241m=\u001b[39m 
connection.create_connection((self._dns_host, self.port), self.timeout, ...)\n",
      "\u001b[0;31mConnectionRefusedError\u001b[0m: [Errno 61] Connection refused",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001b[0;31mNewConnectionError\u001b[0m: Failed to establish a new connection: [Errno 61] Connection refused",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001b[0;31mMaxRetryError\u001b[0m: HTTPConnectionPool(host='localhost', port=62820): Max retries exceeded with url: /api/v2/api_call",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mConnectionError\u001b[0m Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[29], line 4\u001b[0m: ds_client.code.large_sample() -> syft/client/api.py (RemoteFunction.__call__ -> function_call -> SyftAPI.make_call) -> syft/client/client.py (HTTPConnection.make_call) -> requests.post(url=http://localhost:62820/api/v2/api_call)",
      "\u001b[0;31mConnectionError\u001b[0m: HTTPConnectionPool(host='localhost', port=62820): Max retries exceeded with url: /api/v2/api_call (Caused by NewConnectionError: Failed to establish a new connection: [Errno 61] Connection refused)",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[29], line 1\u001b[0m: with sy.raises(sy.SyftException(public_message=\"*Your code is waiting for approval*\")): ds_client.code.large_sample()",
      "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/types/errors.py:214\u001b[0m, in raises.__exit__: raise AssertionError(f\"Expected {expected_exception_type} to be raised, but got {exc_type}.\")",
      "\u001b[0;31mAssertionError\u001b[0m: Expected <class 'syft.types.errors.SyftException'> to be raised, but got <class 'requests.exceptions.ConnectionError'>."
+ ] + } + ], "source": [ - "api_method_2 = getattr(ds_client.code, func_name, None)\n", - "api_method_2" + "with sy.raises(\n", + " sy.SyftException(public_message=\"*Your code is waiting for approval*\")\n", + "):\n", + " ds_client.code.large_sample()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, "outputs": [], "source": [ - "with sy.raises(\n", - " sy.SyftException(public_message=\"*Your code is waiting for approval*\"), show=True\n", - "):\n", - " result = api_method_2()" + "# todo: add email server shutdown" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stopping bigquery-low\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO: Shutting down\n", + "INFO: Waiting for application shutdown.\n", + "INFO: Application shutdown complete.\n", + "INFO: Finished server process [13541]\n", + "INFO: Stopping reloader process [13539]\n" + ] + } + ], "source": [ "if environment != \"remote\":\n", " server_low.land()" @@ -289,7 +1358,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.5" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb b/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb index aaf83dc0d01..b3ca7b71d53 100644 --- a/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/sync/04-do-review-requests.ipynb @@ -379,7 +379,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.5" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb b/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb index f4bddbd75d1..cc4e50a6306 100644 --- a/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/sync/05-ds-get-results.ipynb @@ -139,7 +139,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.5" } }, "nbformat": 4, From 0cbf0b539b5db30b6f84f3657e0903dc9e5f4f9b Mon Sep 17 00:00:00 2001 From: Sameer Wagh Date: Thu, 12 Sep 2024 13:14:07 -0400 Subject: [PATCH 2/7] Added a PoC for helper movement --- notebooks/notebook_helpers/email_helpers.py | 338 ++++++++++++++++++++ packages/syft/src/syft/__init__.py | 8 + packages/syft/src/syft/util/util.py | 15 + 3 files changed, 361 insertions(+) create mode 100644 notebooks/notebook_helpers/email_helpers.py diff --git a/notebooks/notebook_helpers/email_helpers.py b/notebooks/notebook_helpers/email_helpers.py new file mode 100644 index 00000000000..f58d41a20f8 --- /dev/null +++ b/notebooks/notebook_helpers/email_helpers.py @@ -0,0 +1,338 @@ +# stdlib +import asyncio +from dataclasses import dataclass +from dataclasses import field +import json +import re +import time +from typing import Any + +# third party +from aiosmtpd.controller import Controller +from faker import Faker + +# syft absolute +from syft.service.user.user_roles import ServiceRole + +fake = Faker() + + +@dataclass +class Email: + email_from: str + email_to: str + email_content: str + + def to_dict(self) -> dict: + output = {} + for k, v in self.__dict__.items(): + output[k] = v + return output + + def __iter__(self): + yield from self.to_dict().items() + + def __getitem__(self, key): + 
        return self.to_dict()[key]
+
+    def __repr__(self) -> str:
+        return f"{self.email_to}\n{self.email_from}\n\n{self.email_content}"
+
+
+class EmailServer:
+    def __init__(self, filepath="./emails.json"):
+        self.filepath = filepath
+        self._emails: dict[str, list[Email]] = self.load_emails()
+
+    def load_emails(self) -> dict[str, list[Email]]:
+        try:
+            with open(self.filepath) as f:
+                data = json.load(f)
+                return {k: [Email(**email) for email in v] for k, v in data.items()}
+        except Exception as e:
+            print("Issues reading email file", e)
+            return {}
+
+    def save_emails(self) -> None:
+        with open(self.filepath, "w") as f:
+            data = {
+                k: [email.to_dict() for email in v] for k, v in self._emails.items()
+            }
+            f.write(json.dumps(data))
+
+    def add_email_for_user(self, user_email: str, email: Email) -> None:
+        if user_email not in self._emails:
+            self._emails[user_email] = []
+        self._emails[user_email].append(email)
+        self.save_emails()
+
+    def get_emails_for_user(self, user_email: str) -> list[Email]:
+        self._emails: dict[str, list[Email]] = self.load_emails()
+        return self._emails.get(user_email, [])
+
+    def reset_emails(self) -> None:
+        self._emails = {}
+        self.save_emails()
+
+
+SENDER = "noreply@openmined.org"
+
+
+def get_token(email) -> str:
+    # stdlib
+    import re
+
+    pattern = r"syft_client\.reset_password\(token='(.*?)', new_password=.*?\)"
+    try:
+        token = re.search(pattern, email.email_content).group(1)
+    except Exception:
+        raise Exception(f"No token found in email: {email.email_content}")
+    return token
+
+
+@dataclass
+class TestUser:
+    name: str
+    email: str
+    password: str
+    role: ServiceRole
+    new_password: str | None = None
+    email_disabled: bool = False
+    reset_password: bool = False
+    reset_token: str | None = None
+    _client_cache: Any | None = field(default=None, repr=False, init=False)
+    _email_server: EmailServer | None = None
+
+    @property
+    def latest_password(self) -> str:
+        if self.new_password:
+            return self.new_password
+        return self.password
+
+    def make_new_password(self) -> str:
+        self.new_password = fake.password()
+        return self.new_password
+
+    @property
+    def client(self):
+        return self._client_cache
+
+    def relogin(self) -> None:
+        self.client = self.client
+
+    @client.setter
+    def client(self, client):
+        client = client.login(email=self.email, password=self.latest_password)
+        self._client_cache = client
+
+    def to_dict(self) -> dict:
+        output = {}
+        for k, v in self.__dict__.items():
+            if k.startswith("_"):
+                continue
+            if k == "role":
+                v = str(v)
+            output[k] = v
+        return output
+
+    def __iter__(self):
+        for key, val in self.to_dict().items():
+            if not key.startswith("_"):
+                yield key, val
+
+    def __getitem__(self, key):
+        if key.startswith("_"):
+            return None
+        return self.to_dict()[key]
+
+    def update_password(self):
+        self.password = self.new_password
+        self.new_password = None
+
+    @property
+    def emails(self) -> list[Email]:
+        if not self._email_server:
+            print("Not connected to email server object")
+            return []
+        return self._email_server.get_emails_for_user(self.email)
+
+    def get_token(self) -> str:
+        token = None  # stays None if no reset email has arrived yet
+        for email in reversed(self.emails):
+            try:
+                token = get_token(email)
+                break
+            except Exception:
+                pass
+        self.reset_token = token
+        return token
+
+
+def save_users(users):
+    user_dicts = []
+    for user in users:
+        user_dicts.append(user.to_dict())
+    print(user_dicts)
+    with open("./users.json", "w") as f:
+        f.write(json.dumps(user_dicts))
+
+
+def load_users(high_client=None, path="./users.json"):
+    users = []
+    with open(path) as f:
data = f.read() + user_dicts = json.loads(data) + for user in user_dicts: + test_user = TestUser(**user) + if high_client: + test_user.client = high_client + users.append(test_user) + return users + + +def make_user( + name: str | None = None, + email: str | None = None, + password: str | None = None, + role: ServiceRole = ServiceRole.DATA_SCIENTIST, +): + fake = Faker() + if name is None: + name = fake.name() + if email is None: + ascii_string = re.sub(r"[^a-zA-Z\s]", "", name).lower() + dashed_string = ascii_string.replace(" ", "-") + email = f"{dashed_string}-fake@openmined.org" + if password is None: + password = fake.password() + + return TestUser(name=name, email=email, password=password, role=role) + + +def user_exists(root_client, email: str) -> bool: + users = root_client.api.services.user + for user in users: + if user.email == email: + return True + return False + + +class SMTPTestServer: + def __init__(self, email_server): + self.port = 9025 + self.hostname = "0.0.0.0" + self._stop_event = asyncio.Event() + + # Simple email handler class + class SimpleHandler: + async def handle_DATA(self, server, session, envelope): + try: + print(f"> SMTPTestServer got an email for {envelope.rcpt_tos}") + email = Email( + email_from=envelope.mail_from, + email_to=envelope.rcpt_tos, + email_content=envelope.content.decode( + "utf-8", errors="replace" + ), + ) + email_server.add_email_for_user(envelope.rcpt_tos[0], email) + email_server.save_emails() + return "250 Message accepted for delivery" + except Exception as e: + print(f"> Error handling email: {e}") + return "550 Internal Server Error" + + try: + self.handler = SimpleHandler() + self.controller = Controller( + self.handler, hostname=self.hostname, port=self.port + ) + except Exception as e: + print(f"> Error initializing SMTPTestServer Controller: {e}") + + def start(self): + print(f"> Starting SMTPTestServer on: {self.hostname}:{self.port}") + asyncio.create_task(self.async_loop()) + + async def async_loop(self): + try: + print(f"> Starting SMTPTestServer on: {self.hostname}:{self.port}") + self.controller.start() + await ( + self._stop_event.wait() + ) # Wait until the event is set to stop the server + except Exception as e: + print(f"> Error with SMTPTestServer: {e}") + + def stop(self): + try: + print("> Stopping SMTPTestServer") + loop = asyncio.get_running_loop() + if loop.is_running(): + loop.create_task(self.async_stop()) + else: + asyncio.run(self.async_stop()) + except Exception as e: + print(f"> Error stopping SMTPTestServer: {e}") + + async def async_stop(self): + self.controller.stop() + self._stop_event.set() # Stop the server by setting the event + + +class TimeoutError(Exception): + pass + + +class Timeout: + def __init__(self, timeout_duration): + if timeout_duration > 60: + raise ValueError("Timeout duration cannot exceed 60 seconds.") + self.timeout_duration = timeout_duration + + def run_with_timeout(self, condition_func, *args, **kwargs): + start_time = time.time() + result = None + + while True: + elapsed_time = time.time() - start_time + if elapsed_time > self.timeout_duration: + raise TimeoutError( + f"Function execution exceeded {self.timeout_duration} seconds." 
+ ) + + # Check if the condition is met + try: + if condition_func(): + print("Condition met, exiting early.") + break + except Exception as e: + print(f"Exception in target function: {e}") + break # Exit the loop if an exception occurs in the function + time.sleep(1) + + return result + + +def get_email_server(reset=False): + email_server = EmailServer() + if reset: + email_server.reset_emails() + smtp_server = SMTPTestServer(email_server) + smtp_server.start() + return email_server, smtp_server + + +def create_user(root_client, test_user): + if not user_exists(root_client, test_user.email): + fake = Faker() + root_client.register( + name=test_user.name, + email=test_user.email, + password=test_user.password, + password_verify=test_user.password, + institution=fake.company(), + website=fake.url(), + ) + else: + print("User already exists", test_user) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 62655f0aed1..dd6b6fa7aff 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -154,6 +154,14 @@ def _test_settings() -> Any: return test_settings() +@module_property +def _get_helpers() -> Any: + # relative + from .util.util import add_helper_path + + return add_helper_path() + + @module_property def hello_baby() -> None: print("Hello baby!") diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index fa20c3fc2c2..07487c60bfe 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -1143,6 +1143,21 @@ def test_settings() -> Any: return test_settings +def add_helper_path_to_python_path() -> Any: + current_path = "." + + # jupyter uses "." which resolves to the notebook + if not is_interpreter_jupyter(): + # python uses the file which has from syft import test_settings in it + import_path = get_caller_file_path() + if import_path: + current_path = import_path + + base_dir = find_base_dir_with_tox_ini(current_path) + notebook_helper_path = os.path.join(base_dir, "notebooks/notebook_helpers") + sys.path.append(notebook_helper_path) + + class CustomRepr(reprlib.Repr): def repr_str(self, obj: Any, level: int = 0) -> str: if len(obj) <= self.maxstring: From 10384914b1cb7e6d640aaaa0d6474a98a9e21751 Mon Sep 17 00:00:00 2001 From: Sameer Wagh Date: Thu, 12 Sep 2024 13:22:18 -0400 Subject: [PATCH 3/7] Moved all helper files over --- notebooks/notebook_helpers/apis/__init__.py | 23 + .../notebook_helpers/apis/live/__init__.py | 0 .../notebook_helpers/apis/live/schema.py | 108 +++++ .../notebook_helpers/apis/live/test_query.py | 113 +++++ .../notebook_helpers/apis/mock/__init__.py | 0 notebooks/notebook_helpers/apis/mock/data.py | 268 ++++++++++++ .../notebook_helpers/apis/mock/schema.py | 52 +++ .../notebook_helpers/apis/mock/test_query.py | 138 ++++++ .../notebook_helpers/apis/rate_limiter.py | 16 + .../notebook_helpers/apis/submit_query.py | 42 ++ notebooks/notebook_helpers/job_helpers.py | 400 ++++++++++++++++++ notebooks/notebook_helpers/sync_helpers.py | 190 +++++++++ packages/syft/src/syft/__init__.py | 6 +- packages/syft/src/syft/util/util.py | 2 +- 14 files changed, 1354 insertions(+), 4 deletions(-) create mode 100644 notebooks/notebook_helpers/apis/__init__.py create mode 100644 notebooks/notebook_helpers/apis/live/__init__.py create mode 100644 notebooks/notebook_helpers/apis/live/schema.py create mode 100644 notebooks/notebook_helpers/apis/live/test_query.py create mode 100644 notebooks/notebook_helpers/apis/mock/__init__.py create mode 100644 
notebooks/notebook_helpers/apis/mock/data.py
 create mode 100644 notebooks/notebook_helpers/apis/mock/schema.py
 create mode 100644 notebooks/notebook_helpers/apis/mock/test_query.py
 create mode 100644 notebooks/notebook_helpers/apis/rate_limiter.py
 create mode 100644 notebooks/notebook_helpers/apis/submit_query.py
 create mode 100644 notebooks/notebook_helpers/job_helpers.py
 create mode 100644 notebooks/notebook_helpers/sync_helpers.py

diff --git a/notebooks/notebook_helpers/apis/__init__.py b/notebooks/notebook_helpers/apis/__init__.py
new file mode 100644
index 00000000000..7231b580696
--- /dev/null
+++ b/notebooks/notebook_helpers/apis/__init__.py
@@ -0,0 +1,23 @@
+# stdlib
+import os
+
+# syft absolute
+from syft.util.util import str_to_bool
+
+# relative
+from .submit_query import make_submit_query
+
+env_var = "TEST_BIGQUERY_APIS_LIVE"
+use_live = str_to_bool(str(os.environ.get(env_var, "False")))
+env_name = "Live" if use_live else "Mock"
+print(f"Using {env_name} API Code, this will query BigQuery. ${env_var}=={use_live}")
+
+
+if use_live:
+    # relative
+    from .live.schema import make_schema
+    from .live.test_query import make_test_query
+else:
+    # relative
+    from .mock.schema import make_schema
+    from .mock.test_query import make_test_query
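The switch above happens once, at import time. A minimal sketch of forcing the live backend, assuming the notebook_helpers directory is already on sys.path (which accessing sy.get_helpers, added in the previous commit, arranges):

# stdlib
import os

# must be set before `apis` is imported for the first time
os.environ["TEST_BIGQUERY_APIS_LIVE"] = "True"

from apis import make_schema, make_test_query  # now resolve to the live variants

With the variable unset, the mock variants load instead, so the same notebooks run with or without GCP credentials.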
diff --git a/notebooks/notebook_helpers/apis/live/__init__.py b/notebooks/notebook_helpers/apis/live/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/notebooks/notebook_helpers/apis/live/schema.py b/notebooks/notebook_helpers/apis/live/schema.py
new file mode 100644
index 00000000000..5b39d9d9066
--- /dev/null
+++ b/notebooks/notebook_helpers/apis/live/schema.py
@@ -0,0 +1,108 @@
+# stdlib
+from collections.abc import Callable
+
+# syft absolute
+import syft as sy
+from syft import test_settings
+
+# relative
+from ..rate_limiter import is_within_rate_limit
+
+
+def make_schema(settings: dict, worker_pool: str) -> Callable:
+    updated_settings = {
+        "calls_per_min": 5,
+        "rate_limiter_enabled": True,
+        "credentials": test_settings.gce_service_account.to_dict(),
+        "region": test_settings.gce_region,
+        "project_id": test_settings.gce_project_id,
+        "dataset_1": test_settings.dataset_1,
+        "table_1": test_settings.table_1,
+        "table_2": test_settings.table_2,
+    } | settings
+
+    @sy.api_endpoint(
+        path="bigquery.schema",
+        description="This endpoint allows for visualising the metadata of tables available in BigQuery.",
+        settings=updated_settings,
+        helper_functions=[
+            is_within_rate_limit
+        ],  # Adds rate limiting, since this is also a method available to data scientists
+        worker_pool=worker_pool,
+    )
+    def live_schema(
+        context,
+    ) -> str:
+        # stdlib
+        import datetime
+
+        # third party
+        from google.cloud import bigquery  # noqa: F811
+        from google.oauth2 import service_account
+        import pandas as pd
+
+        # syft absolute
+        from syft import SyftException
+
+        # Auth for BigQuery based on the workload identity
+        credentials = service_account.Credentials.from_service_account_info(
+            context.settings["credentials"]
+        )
+        scoped_credentials = credentials.with_scopes(
+            ["https://www.googleapis.com/auth/cloud-platform"]
+        )
+
+        client = bigquery.Client(
+            credentials=scoped_credentials,
+            location=context.settings["region"],
+        )
+
+        # Store a dict with the calltimes for each user, via the email.
+        if context.settings["rate_limiter_enabled"]:
+            if context.user.email not in context.state.keys():
+                context.state[context.user.email] = []
+
+            if not context.code.is_within_rate_limit(context):
+                raise SyftException(
+                    public_message="Rate limit of calls per minute has been reached."
+                )
+            context.state[context.user.email].append(datetime.datetime.now())
+
+        try:
+            # Formats the data schema in a data frame format
+            # Warning: the only supported format types are primitives, np.ndarrays and pd.DataFrames
+
+            data_schema = []
+            for table_id in [
+                f"{context.settings['dataset_1']}.{context.settings['table_1']}",
+                f"{context.settings['dataset_1']}.{context.settings['table_2']}",
+            ]:
+                table = client.get_table(table_id)
+                for schema in table.schema:
+                    data_schema.append(
+                        {
+                            "project": str(table.project),
+                            "dataset_id": str(table.dataset_id),
+                            "table_id": str(table.table_id),
+                            "schema_name": str(schema.name),
+                            "schema_field": str(schema.field_type),
+                            "description": str(table.description),
+                            "num_rows": str(table.num_rows),
+                        }
+                    )
+            return pd.DataFrame(data_schema)
+
+        except Exception as e:
+            # not a bigquery exception
+            if not hasattr(e, "_errors"):
+                output = f"got exception e: {type(e)} {str(e)}"
+                raise SyftException(
+                    public_message=f"An error occurred executing the API call {output}"
+                )
+
+            # Should add appropriate error handling for what should be exposed to the data scientists.
+            raise SyftException(
+                public_message="An error occurred executing the API call, please contact the domain owner."
+            )
+
+    return live_schema
diff --git a/notebooks/notebook_helpers/apis/live/test_query.py b/notebooks/notebook_helpers/apis/live/test_query.py
new file mode 100644
index 00000000000..344879dcb62
--- /dev/null
+++ b/notebooks/notebook_helpers/apis/live/test_query.py
@@ -0,0 +1,113 @@
+# stdlib
+from collections.abc import Callable
+
+# syft absolute
+import syft as sy
+from syft import test_settings
+
+# relative
+from ..rate_limiter import is_within_rate_limit
+
+
+def make_test_query(settings) -> Callable:
+    updated_settings = {
+        "calls_per_min": 10,
+        "rate_limiter_enabled": True,
+        "credentials": test_settings.gce_service_account.to_dict(),
+        "region": test_settings.gce_region,
+        "project_id": test_settings.gce_project_id,
+    } | settings
+
+    # these are the same if you allow the rate limiter to be turned on and off
+    @sy.api_endpoint_method(
+        settings=updated_settings,
+        helper_functions=[is_within_rate_limit],
+    )
+    def live_test_query(
+        context,
+        sql_query: str,
+    ) -> str:
+        # stdlib
+        import datetime
+
+        # third party
+        from google.cloud import bigquery  # noqa: F811
+        from google.oauth2 import service_account
+
+        # syft absolute
+        from syft import SyftException
+
+        # Auth for BigQuery based on the workload identity
+        credentials = service_account.Credentials.from_service_account_info(
+            context.settings["credentials"]
+        )
+        scoped_credentials = credentials.with_scopes(
+            ["https://www.googleapis.com/auth/cloud-platform"]
+        )
+
+        client = bigquery.Client(
+            credentials=scoped_credentials,
+            location=context.settings["region"],
+        )
+
+        # Store a dict with the calltimes for each user, via the email.
+        if context.settings["rate_limiter_enabled"]:
+            if context.user.email not in context.state.keys():
+                context.state[context.user.email] = []
+
+            if not context.code.is_within_rate_limit(context):
+                raise SyftException(
+                    public_message="Rate limit of calls per minute has been reached."
+ ) + context.state[context.user.email].append(datetime.datetime.now()) + + try: + rows = client.query_and_wait( + sql_query, + project=context.settings["project_id"], + ) + + if rows.total_rows > 1_000_000: + raise SyftException( + public_message="Please only write queries that gather aggregate statistics" + ) + + return rows.to_dataframe() + + except Exception as e: + # not a bigquery exception + if not hasattr(e, "_errors"): + output = f"got exception e: {type(e)} {str(e)}" + raise SyftException( + public_message=f"An error occured executing the API call {output}" + ) + + # Treat all errors that we would like to be forwarded to the data scientists + # By default, any exception is only visible to the data owner. + + if e._errors[0]["reason"] in [ + "badRequest", + "blocked", + "duplicate", + "invalidQuery", + "invalid", + "jobBackendError", + "jobInternalError", + "notFound", + "notImplemented", + "rateLimitExceeded", + "resourceInUse", + "resourcesExceeded", + "tableUnavailable", + "timeout", + ]: + raise SyftException( + public_message="Error occured during the call: " + + e._errors[0]["message"] + ) + else: + raise SyftException( + public_message="An error occured executing the API call, please contact the domain owner." + ) + + return live_test_query diff --git a/notebooks/notebook_helpers/apis/mock/__init__.py b/notebooks/notebook_helpers/apis/mock/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/notebooks/notebook_helpers/apis/mock/data.py b/notebooks/notebook_helpers/apis/mock/data.py new file mode 100644 index 00000000000..82262bf7a01 --- /dev/null +++ b/notebooks/notebook_helpers/apis/mock/data.py @@ -0,0 +1,268 @@ +# stdlib +from math import nan + +schema_dict = { + "project": { + 0: "example-project", + 1: "example-project", + 2: "example-project", + 3: "example-project", + 4: "example-project", + 5: "example-project", + 6: "example-project", + 7: "example-project", + 8: "example-project", + 9: "example-project", + 10: "example-project", + 11: "example-project", + 12: "example-project", + 13: "example-project", + 14: "example-project", + 15: "example-project", + 16: "example-project", + 17: "example-project", + 18: "example-project", + 19: "example-project", + 20: "example-project", + 21: "example-project", + 22: "example-project", + }, + "dataset_id": { + 0: "test_1gb", + 1: "test_1gb", + 2: "test_1gb", + 3: "test_1gb", + 4: "test_1gb", + 5: "test_1gb", + 6: "test_1gb", + 7: "test_1gb", + 8: "test_1gb", + 9: "test_1gb", + 10: "test_1gb", + 11: "test_1gb", + 12: "test_1gb", + 13: "test_1gb", + 14: "test_1gb", + 15: "test_1gb", + 16: "test_1gb", + 17: "test_1gb", + 18: "test_1gb", + 19: "test_1gb", + 20: "test_1gb", + 21: "test_1gb", + 22: "test_1gb", + }, + "table_id": { + 0: "posts", + 1: "posts", + 2: "posts", + 3: "posts", + 4: "posts", + 5: "posts", + 6: "posts", + 7: "comments", + 8: "comments", + 9: "comments", + 10: "comments", + 11: "comments", + 12: "comments", + 13: "comments", + 14: "comments", + 15: "comments", + 16: "comments", + 17: "comments", + 18: "comments", + 19: "comments", + 20: "comments", + 21: "comments", + 22: "comments", + }, + "schema_name": { + 0: "int64_field_0", + 1: "id", + 2: "name", + 3: "subscribers_count", + 4: "permalink", + 5: "nsfw", + 6: "spam", + 7: "int64_field_0", + 8: "id", + 9: "body", + 10: "parent_id", + 11: "created_at", + 12: "last_modified_at", + 13: "gilded", + 14: "permalink", + 15: "score", + 16: "comment_id", + 17: "post_id", + 18: "author_id", + 19: "spam", + 20: "deleted", + 21: "upvote_raio", + 
22: "collapsed_in_crowd_control", + }, + "schema_field": { + 0: "INTEGER", + 1: "STRING", + 2: "STRING", + 3: "INTEGER", + 4: "STRING", + 5: "FLOAT", + 6: "BOOLEAN", + 7: "INTEGER", + 8: "STRING", + 9: "STRING", + 10: "STRING", + 11: "INTEGER", + 12: "INTEGER", + 13: "BOOLEAN", + 14: "STRING", + 15: "INTEGER", + 16: "STRING", + 17: "STRING", + 18: "STRING", + 19: "BOOLEAN", + 20: "BOOLEAN", + 21: "FLOAT", + 22: "BOOLEAN", + }, + "description": { + 0: "None", + 1: "None", + 2: "None", + 3: "None", + 4: "None", + 5: "None", + 6: "None", + 7: "None", + 8: "None", + 9: "None", + 10: "None", + 11: "None", + 12: "None", + 13: "None", + 14: "None", + 15: "None", + 16: "None", + 17: "None", + 18: "None", + 19: "None", + 20: "None", + 21: "None", + 22: "None", + }, + "num_rows": { + 0: "2000000", + 1: "2000000", + 2: "2000000", + 3: "2000000", + 4: "2000000", + 5: "2000000", + 6: "2000000", + 7: "2000000", + 8: "2000000", + 9: "2000000", + 10: "2000000", + 11: "2000000", + 12: "2000000", + 13: "2000000", + 14: "2000000", + 15: "2000000", + 16: "2000000", + 17: "2000000", + 18: "2000000", + 19: "2000000", + 20: "2000000", + 21: "2000000", + 22: "2000000", + }, +} + + +query_dict = { + "int64_field_0": { + 0: 4, + 1: 5, + 2: 10, + 3: 16, + 4: 17, + 5: 23, + 6: 24, + 7: 25, + 8: 27, + 9: 40, + }, + "id": { + 0: "t5_via1x", + 1: "t5_cv9gn", + 2: "t5_8p2tq", + 3: "t5_8fcro", + 4: "t5_td5of", + 5: "t5_z01fv", + 6: "t5_hmqjk", + 7: "t5_1flyj", + 8: "t5_5rwej", + 9: "t5_uurcv", + }, + "name": { + 0: "/channel/mylittlepony", + 1: "/channel/polyamory", + 2: "/channel/Catholicism", + 3: "/channel/cordcutters", + 4: "/channel/stevenuniverse", + 5: "/channel/entitledbitch", + 6: "/channel/engineering", + 7: "/channel/nottheonion", + 8: "/channel/FoodPorn", + 9: "/channel/puppysmiles", + }, + "subscribers_count": { + 0: 4323081, + 1: 2425929, + 2: 4062607, + 3: 7543226, + 4: 2692168, + 5: 2709080, + 6: 8766144, + 7: 2580984, + 8: 7784809, + 9: 3715991, + }, + "permalink": { + 0: "/channel//channel/mylittlepony", + 1: "/channel//channel/polyamory", + 2: "/channel//channel/Catholicism", + 3: "/channel//channel/cordcutters", + 4: "/channel//channel/stevenuniverse", + 5: "/channel//channel/entitledbitch", + 6: "/channel//channel/engineering", + 7: "/channel//channel/nottheonion", + 8: "/channel//channel/FoodPorn", + 9: "/channel//channel/puppysmiles", + }, + "nsfw": { + 0: nan, + 1: nan, + 2: nan, + 3: nan, + 4: nan, + 5: nan, + 6: nan, + 7: nan, + 8: nan, + 9: nan, + }, + "spam": { + 0: False, + 1: False, + 2: False, + 3: False, + 4: False, + 5: False, + 6: False, + 7: False, + 8: False, + 9: False, + }, +} diff --git a/notebooks/notebook_helpers/apis/mock/schema.py b/notebooks/notebook_helpers/apis/mock/schema.py new file mode 100644 index 00000000000..a95e04f2f1d --- /dev/null +++ b/notebooks/notebook_helpers/apis/mock/schema.py @@ -0,0 +1,52 @@ +# stdlib +from collections.abc import Callable + +# syft absolute +import syft as sy + +# relative +from ..rate_limiter import is_within_rate_limit +from .data import schema_dict + + +def make_schema(settings, worker_pool) -> Callable: + updated_settings = { + "calls_per_min": 5, + "rate_limiter_enabled": True, + "schema_dict": schema_dict, + } | settings + + @sy.api_endpoint( + path="bigquery.schema", + description="This endpoint allows for visualising the metadata of tables available in BigQuery.", + settings=updated_settings, + helper_functions=[is_within_rate_limit], + worker_pool=worker_pool, + ) + def mock_schema( + context, + ) -> str: + # syft absolute + from 
syft import SyftException + + # Store a dict with the calltimes for each user, via the email. + if context.settings["rate_limiter_enabled"]: + # stdlib + import datetime + + if context.user.email not in context.state.keys(): + context.state[context.user.email] = [] + + if not context.code.is_within_rate_limit(context): + raise SyftException( + public_message="Rate limit of calls per minute has been reached." + ) + context.state[context.user.email].append(datetime.datetime.now()) + + # third party + import pandas as pd + + df = pd.DataFrame(context.settings["schema_dict"]) + return df + + return mock_schema diff --git a/notebooks/notebook_helpers/apis/mock/test_query.py b/notebooks/notebook_helpers/apis/mock/test_query.py new file mode 100644 index 00000000000..ae028a8cf36 --- /dev/null +++ b/notebooks/notebook_helpers/apis/mock/test_query.py @@ -0,0 +1,138 @@ +# stdlib +from collections.abc import Callable + +# syft absolute +import syft as sy + +# relative +from ..rate_limiter import is_within_rate_limit +from .data import query_dict + + +def extract_limit_value(sql_query: str) -> int: + # stdlib + import re + + limit_pattern = re.compile(r"\bLIMIT\s+(\d+)\b", re.IGNORECASE) + match = limit_pattern.search(sql_query) + if match: + return int(match.group(1)) + return None + + +def is_valid_sql(query: str) -> bool: + # stdlib + import sqlite3 + + # Prepare an in-memory SQLite database + conn = sqlite3.connect(":memory:") + cursor = conn.cursor() + + try: + # Use the EXPLAIN QUERY PLAN command to get the query plan + cursor.execute(f"EXPLAIN QUERY PLAN {query}") + except sqlite3.Error as e: + if "no such table" in str(e).lower(): + return True + return False + finally: + conn.close() + + +def adjust_dataframe_rows(df, target_rows: int): + # third party + import pandas as pd + + current_rows = len(df) + + if target_rows > current_rows: + # Repeat rows to match target_rows + repeat_times = (target_rows + current_rows - 1) // current_rows + df_expanded = pd.concat([df] * repeat_times, ignore_index=True).head( + target_rows + ) + else: + # Truncate rows to match target_rows + df_expanded = df.head(target_rows) + + return df_expanded + + +def make_test_query(settings: dict) -> Callable: + updated_settings = { + "calls_per_min": 10, + "rate_limiter_enabled": True, + "query_dict": query_dict, + } | settings + + # these are the same if you allow the rate limiter to be turned on and off + @sy.api_endpoint_method( + settings=updated_settings, + helper_functions=[ + is_within_rate_limit, + extract_limit_value, + is_valid_sql, + adjust_dataframe_rows, + ], + ) + def mock_test_query( + context, + sql_query: str, + ) -> str: + # stdlib + import datetime + + # third party + from google.api_core.exceptions import BadRequest + + # syft absolute + from syft import SyftException + + # Store a dict with the calltimes for each user, via the email. + if context.settings["rate_limiter_enabled"]: + if context.user.email not in context.state.keys(): + context.state[context.user.email] = [] + + if not context.code.is_within_rate_limit(context): + raise SyftException( + public_message="Rate limit of calls per minute has been reached." + ) + context.state[context.user.email].append(datetime.datetime.now()) + + bad_table = "invalid_table" + bad_post = ( + "BadRequest: 400 POST " + "https://bigquery.googleapis.com/bigquery/v2/projects/project-id/" + "queries?prettyPrint=false: " + ) + if bad_table in sql_query: + try: + raise BadRequest( + f'{bad_post} Table "{bad_table}" must be qualified ' + "with a dataset (e.g. 
dataset.table)." + ) + except Exception as e: + raise SyftException( + public_message=f"*must be qualified with a dataset*. {e}" + ) + + if not context.code.is_valid_sql(sql_query): + raise BadRequest( + f'{bad_post} Syntax error: Unexpected identifier "{sql_query}" at [1:1]' + ) + + # third party + import pandas as pd + + limit = context.code.extract_limit_value(sql_query) + if limit > 1_000_000: + raise SyftException( + public_message="Please only write queries that gather aggregate statistics" + ) + + base_df = pd.DataFrame(context.settings["query_dict"]) + + df = context.code.adjust_dataframe_rows(base_df, limit) + return df + + return mock_test_query diff --git a/notebooks/notebook_helpers/apis/rate_limiter.py b/notebooks/notebook_helpers/apis/rate_limiter.py new file mode 100644 index 00000000000..8ce319b61f4 --- /dev/null +++ b/notebooks/notebook_helpers/apis/rate_limiter.py @@ -0,0 +1,16 @@ +def is_within_rate_limit(context) -> bool: + """Rate limiter for custom API calls made by users.""" + # stdlib + import datetime + + state = context.state + settings = context.settings + email = context.user.email + + current_time = datetime.datetime.now() + calls_last_min = [ + 1 if (current_time - call_time).seconds < 60 else 0 + for call_time in state[email] + ] + + return sum(calls_last_min) < settings.get("calls_per_min", 5) diff --git a/notebooks/notebook_helpers/apis/submit_query.py b/notebooks/notebook_helpers/apis/submit_query.py new file mode 100644 index 00000000000..a0125ee009b --- /dev/null +++ b/notebooks/notebook_helpers/apis/submit_query.py @@ -0,0 +1,42 @@ +# syft absolute +import syft as sy + + +def make_submit_query(settings, worker_pool): + updated_settings = {"user_code_worker": worker_pool} | settings + + @sy.api_endpoint( + path="bigquery.submit_query", + description="API endpoint that allows you to submit SQL queries to run on the private data.", + worker_pool=worker_pool, + settings=updated_settings, + ) + def submit_query( + context, + func_name: str, + query: str, + ) -> str: + # syft absolute + import syft as sy + + @sy.syft_function( + name=func_name, + input_policy=sy.MixedInputPolicy( + endpoint=sy.Constant( + val=context.admin_client.api.services.bigquery.test_query + ), + query=sy.Constant(val=query), + client=context.admin_client, + ), + worker_pool_name=context.settings["user_code_worker"], + ) + def execute_query(query: str, endpoint): + res = endpoint(sql_query=query) + return res + + request = context.user_client.code.request_code_execution(execute_query) + context.admin_client.requests.set_tags(request, ["autosync"]) + + return f"Query submitted {request}. 
diff --git a/notebooks/notebook_helpers/apis/rate_limiter.py b/notebooks/notebook_helpers/apis/rate_limiter.py
new file mode 100644
index 00000000000..8ce319b61f4
--- /dev/null
+++ b/notebooks/notebook_helpers/apis/rate_limiter.py
@@ -0,0 +1,16 @@
+def is_within_rate_limit(context) -> bool:
+    """Rate limiter for custom API calls made by users."""
+    # stdlib
+    import datetime
+
+    state = context.state
+    settings = context.settings
+    email = context.user.email
+
+    current_time = datetime.datetime.now()
+    calls_last_min = [
+        1 if (current_time - call_time).seconds < 60 else 0
+        for call_time in state[email]
+    ]
+
+    return sum(calls_last_min) < settings.get("calls_per_min", 5)
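The limiter counts only the recorded calls from the last 60 seconds. A stand-in context (an assumption for illustration; Syft supplies the real one at call time) exercising that sliding window:

# stdlib
import datetime
from types import SimpleNamespace

from apis.rate_limiter import is_within_rate_limit

now = datetime.datetime.now()
ctx = SimpleNamespace(
    state={"ds@test.org": [now - datetime.timedelta(seconds=s) for s in (5, 30, 90)]},
    settings={"calls_per_min": 2},
    user=SimpleNamespace(email="ds@test.org"),
)
# Two of the three recorded calls fall inside the window, so the budget of 2
# is already spent and the next call is rejected.
assert is_within_rate_limit(ctx) is False

The helper indexes state[email] directly; each endpoint seeds that list for new users before delegating, which is why no KeyError handling is needed here.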
{e}") + return None + + +def make_query(settings: dict) -> str: + query = f""" + SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score + FROM {settings['dataset']}.{settings['table']} + GROUP BY {settings['groupby_col']} + LIMIT {settings['limit']}""".strip() + + return textwrap.dedent(query) + + +def create_simple_query_job(user: TestUser) -> TestJob: + job_type = "simple_query" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + dataset = random.choice([dataset_1, dataset_2]) + table, groupby_col, score_col = random.choice( + [ + (table_1, table_1_col_id, table_1_col_score), + (table_2, table_2_col_id, table_2_col_score), + ] + ) + limit = random.randint(1, 1_000_000) + + settings = { + "dataset": dataset, + "table": table, + "groupby_col": groupby_col, + "score_col": score_col, + "limit": limit, + } + query = make_query(settings) + + result = TestJob( + user_email=user.email, + func_name=func_name, + query=query, + job_type=job_type, + settings=settings, + should_succeed=True, + ) + + result.client = user.client + return result + + +def create_wrong_asset_query(user: TestUser) -> TestJob: + job_type = "wrong_asset_query" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + valid_job = create_simple_query_job(user) + settings = valid_job.settings + corrupted_asset = random.choice(["dataset", "table"]) + settings[corrupted_asset] = "wrong_asset" + query = make_query(settings) + + result = TestJob( + user_email=user.email, + func_name=func_name, + query=query, + job_type=job_type, + settings=settings, + should_succeed=False, + ) + + result.client = user.client + return result + + +def create_wrong_syntax_query(user: TestUser) -> TestJob: + job_type = "wrong_syntax_query" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + query = "SELECT * FROM table INCORRECT SYNTAX" + + result = TestJob( + user_email=user.email, + func_name=func_name, + query=query, + job_type=job_type, + settings={}, + should_succeed=False, + ) + + result.client = user.client + return result + + +def create_long_query_job(user: TestUser) -> TestJob: + job_type = "job_too_much_text" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + query = "a" * 1_000 + + result = TestJob( + user_email=user.email, + func_name=func_name, + query=query, + job_type=job_type, + settings={}, + should_succeed=False, + ) + + result.client = user.client + return result + + +def create_query_long_name(user: TestUser) -> TestJob: + job_type = "job_long_name" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + job = create_simple_query_job(user) + + job.job_type = job_type + job.func_name = func_name + "a" * 1_000 + + return job + + +def create_job_funcname_xss(user: TestUser) -> TestJob: + job_type = "job_funcname_xss" + func_name = f"{job_type}_{secrets.token_hex(3)}" + func_name += "" + + job = create_simple_query_job(user) + job.job_type = job_type + job.func_name = func_name + job.should_submit = False + return job + + +def get_request_for_job_info(requests, job): + job_requests = [r for r in requests if r.code.service_func_name == job.func_name] + if len(job_requests) != 1: + raise Exception(f"Too many or too few requests: {job} in requests: {requests}") + return job_requests[0] + + +def create_job_query_xss(user: TestUser) -> TestJob: + job_type = "job_query_xss" + func_name = f"{job_type}_{secrets.token_hex(3)}" + + job = create_simple_query_job(user) + job.job_type = job_type + job.func_name = func_name + job.query += "" + job.should_succeed = False + + return job + + +def 
diff --git a/notebooks/notebook_helpers/job_helpers.py b/notebooks/notebook_helpers/job_helpers.py
new file mode 100644
index 00000000000..f87cfdc7324
--- /dev/null
+++ b/notebooks/notebook_helpers/job_helpers.py
@@ -0,0 +1,400 @@
+# stdlib
+from collections import defaultdict
+from collections.abc import Callable
+from dataclasses import dataclass
+from dataclasses import field
+import json
+import random
+import re
+import secrets
+import textwrap
+from typing import Any
+
+# third party
+from helpers import TestUser
+
+# syft absolute
+from syft import test_settings
+
+from syft.client.client import SyftClient  # noqa
+
+dataset_1 = test_settings.get("dataset_1", default="dataset_1")
+dataset_2 = test_settings.get("dataset_2", default="dataset_2")
+table_1 = test_settings.get("table_1", default="table_1")
+table_2 = test_settings.get("table_2", default="table_2")
+table_1_col_id = test_settings.get("table_1_col_id", default="table_id")
+table_1_col_score = test_settings.get("table_1_col_score", default="colname")
+table_2_col_id = test_settings.get("table_2_col_id", default="table_id")
+table_2_col_score = test_settings.get("table_2_col_score", default="colname")
+
+
+@dataclass
+class TestJob:
+    user_email: str
+    func_name: str
+    query: str
+    job_type: str
+    settings: dict  # make a type so we can rely on attributes
+    should_succeed: bool
+    should_submit: bool = True
+    code_path: str | None = field(default=None)
+    admin_reviewed: bool = False
+    result_as_expected: bool | None = None
+
+    _client_cache: SyftClient | None = field(default=None, repr=False, init=False)
+
+    @property
+    def is_submitted(self) -> bool:
+        return self.code_path is not None
+
+    @property
+    def client(self):
+        return self._client_cache
+
+    @client.setter
+    def client(self, client):
+        self._client_cache = client
+
+    def to_dict(self) -> dict:
+        output = {}
+        for k, v in self.__dict__.items():
+            if k.startswith("_"):
+                continue
+            output[k] = v
+        return output
+
+    def __iter__(self):
+        for key, val in self.to_dict().items():
+            if not key.startswith("_"):
+                yield key, val
+
+    def __getitem__(self, key):
+        if key.startswith("_"):
+            return None
+        return self.to_dict()[key]
+
+    @property
+    def code_method(self) -> None | Callable:
+        try:
+            return getattr(self.client.code, self.func_name, None)
+        except Exception as e:
+            print(f"Can't find code method. {e}")
+            return None
+
+
+def make_query(settings: dict) -> str:
+    query = f"""
+    SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score
+    FROM {settings['dataset']}.{settings['table']}
+    GROUP BY {settings['groupby_col']}
+    LIMIT {settings['limit']}""".strip()
+
+    return textwrap.dedent(query)
+
+
+def create_simple_query_job(user: TestUser) -> TestJob:
+    job_type = "simple_query"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    dataset = random.choice([dataset_1, dataset_2])
+    table, groupby_col, score_col = random.choice(
+        [
+            (table_1, table_1_col_id, table_1_col_score),
+            (table_2, table_2_col_id, table_2_col_score),
+        ]
+    )
+    limit = random.randint(1, 1_000_000)
+
+    settings = {
+        "dataset": dataset,
+        "table": table,
+        "groupby_col": groupby_col,
+        "score_col": score_col,
+        "limit": limit,
+    }
+    query = make_query(settings)
+
+    result = TestJob(
+        user_email=user.email,
+        func_name=func_name,
+        query=query,
+        job_type=job_type,
+        settings=settings,
+        should_succeed=True,
+    )
+
+    result.client = user.client
+    return result
+
+
+def create_wrong_asset_query(user: TestUser) -> TestJob:
+    job_type = "wrong_asset_query"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    valid_job = create_simple_query_job(user)
+    settings = valid_job.settings
+    corrupted_asset = random.choice(["dataset", "table"])
+    settings[corrupted_asset] = "wrong_asset"
+    query = make_query(settings)
+
+    result = TestJob(
+        user_email=user.email,
+        func_name=func_name,
+        query=query,
+        job_type=job_type,
+        settings=settings,
+        should_succeed=False,
+    )
+
+    result.client = user.client
+    return result
+
+
+def create_wrong_syntax_query(user: TestUser) -> TestJob:
+    job_type = "wrong_syntax_query"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    query = "SELECT * FROM table INCORRECT SYNTAX"
+
+    result = TestJob(
+        user_email=user.email,
+        func_name=func_name,
+        query=query,
+        job_type=job_type,
+        settings={},
+        should_succeed=False,
+    )
+
+    result.client = user.client
+    return result
+
+
+def create_long_query_job(user: TestUser) -> TestJob:
+    job_type = "job_too_much_text"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    query = "a" * 1_000
+
+    result = TestJob(
+        user_email=user.email,
+        func_name=func_name,
+        query=query,
+        job_type=job_type,
+        settings={},
+        should_succeed=False,
+    )
+
+    result.client = user.client
+    return result
+
+
+def create_query_long_name(user: TestUser) -> TestJob:
+    job_type = "job_long_name"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    job = create_simple_query_job(user)
+
+    job.job_type = job_type
+    job.func_name = func_name + "a" * 1_000
+
+    return job
+
+
+def create_job_funcname_xss(user: TestUser) -> TestJob:
+    job_type = "job_funcname_xss"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+    func_name += "<script>alert(1)</script>"
+
+    job = create_simple_query_job(user)
+    job.job_type = job_type
+    job.func_name = func_name
+    job.should_submit = False
+    return job
+
+
+def get_request_for_job_info(requests, job):
+    job_requests = [r for r in requests if r.code.service_func_name == job.func_name]
+    if len(job_requests) != 1:
+        raise Exception(f"Too many or too few requests: {job} in requests: {requests}")
+    return job_requests[0]
+
+
+def create_job_query_xss(user: TestUser) -> TestJob:
+    job_type = "job_query_xss"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    job = create_simple_query_job(user)
+    job.job_type = job_type
+    job.func_name = func_name
+    job.query += "<script>alert(1)</script>"
+    job.should_succeed = False
+
+    return job
+
+
+def create_job_many_columns(user: TestUser) -> TestJob:
+    job_type = "job_many_columns"
+    func_name = f"{job_type}_{secrets.token_hex(3)}"
+
+    job = create_simple_query_job(user)
+    job.job_type = job_type
+    job.func_name = func_name
+    settings = job.settings
+    job.settings["num_extra_cols"] = random.randint(100, 1000)
+
+    new_columns_string = ", ".join(
+        f"{settings['score_col']} as col_{i}" for i in range(settings["num_extra_cols"])
+    )
+
+    job.query = f"""
+    SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score, {new_columns_string}
+    FROM {settings['dataset']}.{settings['table']}
+    GROUP BY {settings['groupby_col']}
+    LIMIT {settings['limit']}""".strip()
+
+    return job
+
+
+def create_random_job(user: TestUser) -> TestJob:
+    job_func = random.choice(create_job_functions)
+    return job_func(user)
+
+
+def create_jobs(users: list[TestUser], total_jobs: int = 10) -> list[TestJob]:
+    jobs = []
+    num_users = len(users)
+    user_index = 0
+    each_count = 0
+    # keep making jobs until we have enough
+    while len(jobs) < total_jobs:
+        # if we haven't used each job type yet, keep getting the next one
+        if each_count < len(create_job_functions):
+            job_func = create_job_functions[each_count]
+            each_count += 1
+        else:
+            # otherwise let's get a random one
+            job_func = create_random_job
+        # use the current index of user
+        jobs.append(job_func(users[user_index]))
+
+        # only go as high as the last user index
+        if user_index < num_users - 1:
+            user_index += 1
+        else:
+            # reset back to the first user
+            user_index = 0
+
+    # in case we stuffed up
+    if len(jobs) > total_jobs:
+        jobs = jobs[:total_jobs]
+    return jobs
+
+
+def submit_job(job: TestJob) -> Any:
+    client = job.client
+    response = client.api.services.bigquery.submit_query(
+        func_name=job.func_name, query=job.query
+    )
+    job.code_path = extract_code_path(response)
+    return response
+
+
+def extract_code_path(response) -> str | None:
+    pattern = r"client\.code\.(\w+)\(\)"
+    match = re.search(pattern, str(response))
+    if match:
+        extracted_code = match.group(1)
+        return extracted_code
+    return None
+
+
+def approve_by_running(request):
+    job = request.code(blocking=False)
+    result = job.wait()
+    print("got result of type", type(result), "bool", bool(result))
+    # got result of type bool False
+    # assert result won't work unless we know what type is coming back
+    job_info = job.info(result=True)
+    # need force when running multiple times
+    # todo: check and don't run if it's already done
+    response = request.deposit_result(job_info, approve=True, force=True)
+    return response
+
+
+def get_job_emails(jobs, client, email_server):
+    all_requests = client.requests
+    res = {}
+    for job in jobs:
+        request = get_request_for_job_info(all_requests, job)
+        emails = email_server.get_emails_for_user(request.requesting_user_email)
+        res[request.requesting_user_email] = emails
+    return res
+
+
+def resolve_request(request):
+    service_func_name = request.code.service_func_name
+    if service_func_name.startswith("simple_query"):
+        request.approve()  # approve because it is good
+    if service_func_name.startswith("wrong_asset_query"):
+        request.approve()  # approve because it is bad
+    if service_func_name.startswith("wrong_syntax_query"):
+        request.approve()  # approve because it is bad
+    if service_func_name.startswith("job_too_much_text"):
+        request.deny(reason="too long, boring!")  # deny because it is bad
+    if service_func_name.startswith("job_long_name"):
+        request.approve()
+    if service_func_name.startswith("job_funcname_xss"):
+        request.deny(reason="too long, boring!")  # never reached, doesn't matter
+    if service_func_name.startswith("job_query_xss"):
+        request.approve()  # approve because it is bad
+    if service_func_name.startswith("job_many_columns"):
+        request.approve()  # approve because it is bad
+
+    return (request.id, request.status)
+
+
+create_job_functions = [
+    create_simple_query_job,  # quick way to increase the odds
+    create_simple_query_job,
+    create_simple_query_job,
+    create_simple_query_job,
+    create_simple_query_job,
+    create_simple_query_job,
+    create_wrong_syntax_query,
+    create_long_query_job,
+    create_query_long_name,
+    create_job_funcname_xss,
+    create_job_query_xss,
+    create_job_many_columns,
+]
+
+
+def save_jobs(jobs, filepath="./jobs.json"):
+    user_jobs = defaultdict(list)
+    for job in jobs:
+        user_jobs[job.user_email].append(job.to_dict())
+    with open(filepath, "w") as f:
+        f.write(json.dumps(user_jobs))
+
+
+def load_jobs(users, high_client, filepath="./jobs.json"):
+    data = {}
+    try:
+        with open(filepath) as f:
+            data = json.loads(f.read())
+    except Exception as e:
+        print(f"can't read file: {filepath}: {e}")
+        data = {}
+    jobs_list = []
+    for user in users:
+        if user.email not in data:
+            print(f"{user.email} missing from jobs")
+            continue
+        user_jobs = data[user.email]
+        for user_job in user_jobs:
+            test_job = TestJob(**user_job)
+            if user._client_cache is None:
+                user.client = high_client
+            test_job.client = user.client
+            jobs_list.append(test_job)
+    return jobs_list
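Putting the helpers together; a sketch assuming `users` holds logged-in TestUser objects (for example from load_users) and `high_client` is an admin client on the high side:

jobs = create_jobs(users, total_jobs=len(create_job_functions))
for job in jobs:
    if job.should_submit:  # the funcname-XSS job is deliberately never submitted
        submit_job(job)
save_jobs(jobs)

# data-owner side: apply the canned decision for each pending request
for request in high_client.requests:
    print(resolve_request(request))

Sizing total_jobs to len(create_job_functions) guarantees the first pass walks every factory once, so each job type is represented before random choices kick in.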
diff --git a/notebooks/notebook_helpers/sync_helpers.py b/notebooks/notebook_helpers/sync_helpers.py
new file mode 100644
index 00000000000..e1d558016ba
--- /dev/null
+++ b/notebooks/notebook_helpers/sync_helpers.py
@@ -0,0 +1,190 @@
+# third party
+from tqdm import tqdm
+
+# syft absolute
+import syft as sy
+from syft.client.datasite_client import DatasiteClient
+from syft.client.syncing import compare_clients
+from syft.service.code.user_code import UserCode
+from syft.service.job.job_stash import Job
+from syft.service.job.job_stash import JobStatus
+from syft.service.request.request import Request
+from syft.service.request.request import RequestStatus
+from syft.service.sync.diff_state import ObjectDiffBatch
+from syft.types.result import Err
+
+
+def deny_requests_without_autosync_tag(client_low: DatasiteClient):
+    # Deny all requests that are not autosync
+    requests = client_low.requests.get_all()
+    if isinstance(requests, sy.SyftError):
+        print(requests)
+        return
+
+    denied_requests = []
+    for request in tqdm(requests):
+        if request.status != RequestStatus.PENDING:
+            continue
+        if "autosync" not in request.tags:
+            request.deny(
+                reason="This request has been denied automatically. "
+                "Please use the designated API to submit your request."
+ ) + denied_requests.append(request.id) + print(f"Denied {len(denied_requests)} requests without autosync tag") + + +def is_request_to_sync(batch: ObjectDiffBatch) -> bool: + # True if this is a new low-side request + # TODO add condition for sql requests/usercodes + low_request = batch.root.low_obj + return ( + isinstance(low_request, Request) + and batch.status == "NEW" + and "autosync" in low_request.tags + ) + + +def is_job_to_sync(batch: ObjectDiffBatch): + # True if this is a new high-side job that is either COMPLETED or ERRORED + if batch.status != "NEW": + return False + if not isinstance(batch.root.high_obj, Job): + return False + job = batch.root.high_obj + return job.status in (JobStatus.ERRORED, JobStatus.COMPLETED) + + +def execute_requests( + client_high: DatasiteClient, request_ids: list[sy.UID] +) -> dict[sy.UID, Job]: + jobs_by_request_id = {} + for request_id in request_ids: + request = client_high.requests.get_by_uid(request_id) + if not isinstance(request, Request): + continue + + code = request.code + if not isinstance(code, UserCode): + continue + + func_name = request.code.service_func_name + api_func = getattr(client_high.code, func_name, None) + if api_func is None: + continue + + job = api_func(blocking=False) + jobs_by_request_id[request_id] = job + + return jobs_by_request_id + + +def deny_failed_jobs( + client_low: DatasiteClient, + jobs: list[Job], +) -> None: + # NOTE no syncing is needed, requests are denied on the low side + denied_requests = [] + + for job in jobs: + if job.status != JobStatus.ERRORED: + continue + + error_result = job.result + if isinstance(error_result, Err): + error_msg = error_result.err_value + else: + error_msg = "An unknown error occurred, please check the Job logs for more information." + + code_id = job.user_code_id + if code_id is None: + continue + requests = client_low.requests.get_by_usercode_id(code_id) + if isinstance(requests, list) and len(requests) > 0: + request = requests[0] + request.deny(reason=f"Execution failed: {error_msg}") + denied_requests.append(request.id) + else: + print(f"Failed to deny request for job {job.id}") + + print(f"Denied {len(denied_requests)} failed requests") + + +def sync_finished_jobs( + client_low: DatasiteClient, + client_high: DatasiteClient, +) -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError: + sync_job_results = {} + synced_jobs = [] + diff = compare_clients( + from_client=client_high, to_client=client_low, include_types=["job"] + ) + if isinstance(diff, sy.SyftError): + print(diff) + return diff + + for batch in diff.batches: + if is_job_to_sync(batch): + job = batch.root.high_obj + + w = batch.resolve(build_state=False) + share_result = w.click_share_all_private_data() + if isinstance(share_result, sy.SyftError): + sync_job_results[job.id] = share_result + continue + sync_result = w.click_sync() + + synced_jobs.append(job) + sync_job_results[job.id] = sync_result + + print(f"Sharing {len(sync_job_results)} new results") + deny_failed_jobs(client_low, synced_jobs) + return sync_job_results + + +def sync_new_requests( + client_low: DatasiteClient, + client_high: DatasiteClient, +) -> dict[sy.UID, sy.SyftSuccess | sy.SyftError] | sy.SyftError: + sync_request_results = {} + diff = compare_clients( + from_client=client_low, to_client=client_high, include_types=["request"] + ) + if isinstance(diff, sy.SyftError): + print(diff) + return sync_request_results + print(f"{len(diff.batches)} request batches found") + for batch in tqdm(diff.batches): + if is_request_to_sync(batch): + 
request_id = batch.root.low_obj.id + w = batch.resolve(build_state=False) + result = w.click_sync() + sync_request_results[request_id] = result + return sync_request_results + + +def sync_and_execute_new_requests( + client_low: DatasiteClient, client_high: DatasiteClient +) -> None: + sync_results = sync_new_requests(client_low, client_high) + if isinstance(sync_results, sy.SyftError): + print(sync_results) + return + + request_ids = [ + uid for uid, res in sync_results.items() if isinstance(res, sy.SyftSuccess) + ] + print(f"Synced {len(request_ids)} new requests") + + jobs_by_request = execute_requests(client_high, request_ids) + print(f"Started {len(jobs_by_request)} new jobs") + + +def auto_sync(client_low: DatasiteClient, client_high: DatasiteClient) -> None: + print("Starting auto sync") + print("Denying non tagged jobs") + deny_requests_without_autosync_tag(client_low) + print("Syncing and executing") + sync_and_execute_new_requests(client_low, client_high) + sync_finished_jobs(client_low, client_high) + print("Finished auto sync") diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index dd6b6fa7aff..1f677851039 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -155,11 +155,11 @@ def _test_settings() -> Any: @module_property -def _get_helpers() -> Any: +def _get_helpers() -> None: # relative - from .util.util import add_helper_path + from .util.util import add_helper_path_to_python_path - return add_helper_path() + add_helper_path_to_python_path() @module_property diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 07487c60bfe..a2c392c09c7 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -1143,7 +1143,7 @@ def test_settings() -> Any: return test_settings -def add_helper_path_to_python_path() -> Any: +def add_helper_path_to_python_path() -> None: current_path = "." # jupyter uses "." 
which resolves to the notebook From e0d1f33d744af5d8d1e1aa58f0ce5b344bceb374 Mon Sep 17 00:00:00 2001 From: Sameer Wagh Date: Thu, 12 Sep 2024 14:02:01 -0400 Subject: [PATCH 4/7] Broken inbetween stage --- notebooks/notebook_helpers/job_helpers.py | 2 +- ...tart-and-configure-server-and-admins.ipynb | 19 +- .../001-scale-delete-worker-pools.ipynb | 78 +- .../bigquery/010-setup-bigquery-pool.ipynb | 11 +- .../bigquery/011-users-emails-passwords.ipynb | 27 +- .../bigquery/020-configure-api.ipynb | 15 +- .../scenarios/bigquery/021-create-jobs.ipynb | 13 +- .../bigquery/040-do-review-requests.ipynb | 15 +- .../bigquery/050-ds-get-results.ipynb | 13 +- notebooks/scenarios/bigquery/helpers.py | 338 - notebooks/scenarios/bigquery/job_helpers.py | 400 - .../sync/01-setup-high-low-datasites.ipynb | 19238 +--------------- .../sync/02-configure-api-and-sync.ipynb | 15755 +------------ .../bigquery/sync/03-ds-submit-request.ipynb | 1159 +- .../scenarios/bigquery/sync/sync_helpers.py | 190 - 15 files changed, 304 insertions(+), 36969 deletions(-) delete mode 100644 notebooks/scenarios/bigquery/helpers.py delete mode 100644 notebooks/scenarios/bigquery/job_helpers.py delete mode 100644 notebooks/scenarios/bigquery/sync/sync_helpers.py diff --git a/notebooks/notebook_helpers/job_helpers.py b/notebooks/notebook_helpers/job_helpers.py index f87cfdc7324..78494d381e7 100644 --- a/notebooks/notebook_helpers/job_helpers.py +++ b/notebooks/notebook_helpers/job_helpers.py @@ -11,7 +11,7 @@ from typing import Any # third party -from helpers import TestUser +from email_helpers import TestUser # syft absolute from syft import test_settings diff --git a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb index 444ba2fe199..5d20d0919d6 100644 --- a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb +++ b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb @@ -20,15 +20,17 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", "from os import environ as env\n", "\n", - "# third party\n", - "# run email server\n", - "from helpers import get_email_server\n", - "\n", "# syft absolute\n", - "import syft as sy" + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "\n", + "# third party\n", + "from email_helpers import get_email_server\n", + "# isort: on" ] }, { @@ -232,6 +234,11 @@ } ], "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, "language_info": { "codemirror_mode": { "name": "ipython", @@ -242,7 +249,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb index e46587d23e5..478935e614d 100644 --- a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb +++ b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb @@ -20,15 +20,18 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", "import os\n", "\n", - "# third party\n", - "from helpers import Timeout\n", - "from helpers import get_email_server\n", - "\n", "# syft absolute\n", - "import syft as sy" + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "\n", + "# third party\n", + "from email_helpers import 
Timeout\n", + "from email_helpers import get_email_server\n", + "# isort: on" ] }, { @@ -37,6 +40,14 @@ "id": "2", "metadata": {}, "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], "source": [ "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", "\n", @@ -49,7 +60,7 @@ }, { "cell_type": "markdown", - "id": "3", + "id": "4", "metadata": {}, "source": [ "### Launch server & login" @@ -58,7 +69,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -75,7 +86,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -85,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -97,7 +108,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -107,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +128,7 @@ }, { "cell_type": "markdown", - "id": "9", + "id": "10", "metadata": {}, "source": [ "### Scale Worker pool" @@ -125,7 +136,7 @@ }, { "cell_type": "markdown", - "id": "10", + "id": "11", "metadata": {}, "source": [ "##### Scale up" @@ -134,7 +145,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -148,7 +159,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -158,7 +169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -178,7 +189,7 @@ }, { "cell_type": "markdown", - "id": "14", + "id": "15", "metadata": {}, "source": [ "##### Scale down" @@ -187,7 +198,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -202,7 +213,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -221,7 +232,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -234,7 +245,7 @@ }, { "cell_type": "markdown", - "id": "18", + "id": "19", "metadata": {}, "source": [ "#### Delete Worker Pool" @@ -243,7 +254,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -256,7 +267,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -266,7 +277,7 @@ }, { "cell_type": "markdown", - "id": "21", + "id": "22", "metadata": {}, "source": [ "#### Re-launch the default worker pool" @@ -275,7 +286,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -285,7 +296,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -299,7 +310,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -313,7 +324,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -323,7 +334,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "26", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -333,13 +344,18 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "28", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { + "kernelspec": { + "display_name": "syft", + "language": "python", + "name": "python3" + }, "language_info": { "codemirror_mode": { "name": "ipython", @@ -350,7 +366,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb index 1280a4e19d7..38eb97f2219 100644 --- a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb +++ b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb @@ -18,15 +18,18 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", "import os\n", "\n", - "# third party\n", - "from helpers import get_email_server\n", - "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_settings" + "import syft as get_helpers # noqa: F401\n", + "from syft import test_settings\n", + "\n", + "# third party\n", + "from email_helpers import get_email_server\n", + "# isort: on" ] }, { diff --git a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb index 990f91183d8..4e67eaba372 100644 --- a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb +++ b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb @@ -22,19 +22,21 @@ "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", + "# isort: off\n", "# stdlib\n", "import os\n", "\n", - "# third party\n", - "from helpers import SENDER\n", - "from helpers import create_user\n", - "from helpers import get_email_server\n", - "from helpers import make_user\n", - "from helpers import save_users\n", - "\n", "# syft absolute\n", - "import syft as sy" + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "\n", + "# third party\n", + "from email_helpers import SENDER\n", + "from email_helpers import create_user\n", + "from email_helpers import get_email_server\n", + "from email_helpers import make_user\n", + "from email_helpers import save_users\n", + "# isort: on" ] }, { @@ -630,6 +632,11 @@ } ], "metadata": { + "kernelspec": { + "display_name": "syft", + "language": "python", + "name": "python3" + }, "language_info": { "codemirror_mode": { "name": "ipython", @@ -640,7 +647,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/020-configure-api.ipynb b/notebooks/scenarios/bigquery/020-configure-api.ipynb index c3ad678a00b..7f5ea6cec76 100644 --- a/notebooks/scenarios/bigquery/020-configure-api.ipynb +++ b/notebooks/scenarios/bigquery/020-configure-api.ipynb @@ -28,17 +28,22 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", + "# stdlib\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "from syft import test_settings\n", + "\n", "# third party\n", "from apis import make_schema\n", "from apis import make_submit_query\n", "from apis import make_test_query\n", "\n", "# run email server\n", - "from helpers import get_email_server\n", - "\n", - "# syft absolute\n", - "import syft as sy\n", - "from syft import 
test_settings" + "from email_helpers import get_email_server\n", + "# isort: on" ] }, { diff --git a/notebooks/scenarios/bigquery/021-create-jobs.ipynb b/notebooks/scenarios/bigquery/021-create-jobs.ipynb index 3625af65c88..113a0990876 100644 --- a/notebooks/scenarios/bigquery/021-create-jobs.ipynb +++ b/notebooks/scenarios/bigquery/021-create-jobs.ipynb @@ -33,16 +33,19 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", "from collections import Counter\n", "import os\n", "\n", - "# third party\n", - "from helpers import get_email_server\n", - "\n", "# syft absolute\n", "import syft as sy\n", - "from syft.service.job.job_stash import JobStatus" + "import syft as get_helpers # noqa: F401\n", + "from syft.service.job.job_stash import JobStatus\n", + "\n", + "# third party\n", + "from email_helpers import get_email_server\n", + "# isort: on" ] }, { @@ -123,7 +126,7 @@ "outputs": [], "source": [ "# third party\n", - "from helpers import load_users" + "from email_helpers import load_users" ] }, { diff --git a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb index 9f0a301f2e3..b32d155a962 100644 --- a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb @@ -18,18 +18,21 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", "import random\n", "\n", + "# syft absolute\n", + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "from syft.service.job.job_stash import Job\n", + "\n", "# third party\n", - "from helpers import get_email_server\n", + "from email_helpers import get_email_server\n", "from job_helpers import approve_by_running\n", "from job_helpers import get_job_emails\n", "from job_helpers import get_request_for_job_info\n", - "\n", - "# syft absolute\n", - "import syft as sy\n", - "from syft.service.job.job_stash import Job" + "# isort: on" ] }, { @@ -98,7 +101,7 @@ "outputs": [], "source": [ "# third party\n", - "from helpers import load_users\n", + "from email_helpers import load_users\n", "from job_helpers import load_jobs\n", "from job_helpers import save_jobs" ] diff --git a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb index 72ab42122c1..dc898382af0 100644 --- a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb @@ -18,14 +18,17 @@ "metadata": {}, "outputs": [], "source": [ + "# isort: off\n", + "# syft absolute\n", + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "\n", "# third party\n", - "from helpers import get_email_server\n", - "from helpers import load_users\n", + "from email_helpers import get_email_server\n", + "from email_helpers import load_users\n", "from job_helpers import load_jobs\n", "from job_helpers import save_jobs\n", - "\n", - "# syft absolute\n", - "import syft as sy" + "# isort: on" ] }, { diff --git a/notebooks/scenarios/bigquery/helpers.py b/notebooks/scenarios/bigquery/helpers.py deleted file mode 100644 index f58d41a20f8..00000000000 --- a/notebooks/scenarios/bigquery/helpers.py +++ /dev/null @@ -1,338 +0,0 @@ -# stdlib -import asyncio -from dataclasses import dataclass -from dataclasses import field -import json -import re -import time -from typing import Any - -# third party -from aiosmtpd.controller import Controller -from faker import Faker - -# syft absolute -from syft.service.user.user_roles 
import ServiceRole - -fake = Faker() - - -@dataclass -class Email: - email_from: str - email_to: str - email_content: str - - def to_dict(self) -> dict: - output = {} - for k, v in self.__dict__.items(): - output[k] = v - return output - - def __iter__(self): - yield from self.to_dict().items() - - def __getitem__(self, key): - return self.to_dict()[key] - - def __repr__(self) -> str: - return f"{self.email_to}\n{self.email_from}\n\n{self.email_content}" - - -class EmailServer: - def __init__(self, filepath="./emails.json"): - self.filepath = filepath - self._emails: dict[str, list[Email]] = self.load_emails() - - def load_emails(self) -> dict[str, list[Email]]: - try: - with open(self.filepath) as f: - data = json.load(f) - return {k: [Email(**email) for email in v] for k, v in data.items()} - except Exception as e: - print("Issues reading email file", e) - return {} - - def save_emails(self) -> None: - with open(self.filepath, "w") as f: - data = { - k: [email.to_dict() for email in v] for k, v in self._emails.items() - } - f.write(json.dumps(data)) - - def add_email_for_user(self, user_email: str, email: Email) -> None: - if user_email not in self._emails: - self._emails[user_email] = [] - self._emails[user_email].append(email) - self.save_emails() - - def get_emails_for_user(self, user_email: str) -> list[Email]: - self._emails: dict[str, list[Email]] = self.load_emails() - return self._emails.get(user_email, []) - - def reset_emails(self) -> None: - self._emails = {} - self.save_emails() - - -SENDER = "noreply@openmined.org" - - -def get_token(email) -> str: - # stdlib - import re - - pattern = r"syft_client\.reset_password\(token='(.*?)', new_password=.*?\)" - try: - token = re.search(pattern, email.email_content).group(1) - except Exception: - raise Exception(f"No token found in email: {email.email_content}") - return token - - -@dataclass -class TestUser: - name: str - email: str - password: str - role: ServiceRole - new_password: str | None = None - email_disabled: bool = False - reset_password: bool = False - reset_token: str | None = None - _client_cache: Any | None = field(default=None, repr=False, init=False) - _email_server: EmailServer | None = None - - @property - def latest_password(self) -> str: - if self.new_password: - return self.new_password - return self.password - - def make_new_password(self) -> str: - self.new_password = fake.password() - return self.new_password - - @property - def client(self): - return self._client_cache - - def relogin(self) -> None: - self.client = self.client - - @client.setter - def client(self, client): - client = client.login(email=self.email, password=self.latest_password) - self._client_cache = client - - def to_dict(self) -> dict: - output = {} - for k, v in self.__dict__.items(): - if k.startswith("_"): - continue - if k == "role": - v = str(v) - output[k] = v - return output - - def __iter__(self): - for key, val in self.to_dict().items(): - if not key.startswith("_"): - yield key, val - - def __getitem__(self, key): - if key.startswith("_"): - return None - return self.to_dict()[key] - - def update_password(self): - self.password = self.new_password - self.new_password = None - - @property - def emails(self) -> list[Email]: - if not self._email_server: - print("Not connected to email server object") - return [] - return self._email_server.get_emails_for_user(self.email) - - def get_token(self) -> str: - for email in reversed(self.emails): - token = None - try: - token = get_token(email) - break - except Exception: - pass - 
self.reset_token = token - return token - - -def save_users(users): - user_dicts = [] - for user in users: - user_dicts.append(user.to_dict()) - print(user_dicts) - with open("./users.json", "w") as f: - f.write(json.dumps(user_dicts)) - - -def load_users(high_client: None, path="./users.json"): - users = [] - with open(path) as f: - data = f.read() - user_dicts = json.loads(data) - for user in user_dicts: - test_user = TestUser(**user) - if high_client: - test_user.client = high_client - users.append(test_user) - return users - - -def make_user( - name: str | None = None, - email: str | None = None, - password: str | None = None, - role: ServiceRole = ServiceRole.DATA_SCIENTIST, -): - fake = Faker() - if name is None: - name = fake.name() - if email is None: - ascii_string = re.sub(r"[^a-zA-Z\s]", "", name).lower() - dashed_string = ascii_string.replace(" ", "-") - email = f"{dashed_string}-fake@openmined.org" - if password is None: - password = fake.password() - - return TestUser(name=name, email=email, password=password, role=role) - - -def user_exists(root_client, email: str) -> bool: - users = root_client.api.services.user - for user in users: - if user.email == email: - return True - return False - - -class SMTPTestServer: - def __init__(self, email_server): - self.port = 9025 - self.hostname = "0.0.0.0" - self._stop_event = asyncio.Event() - - # Simple email handler class - class SimpleHandler: - async def handle_DATA(self, server, session, envelope): - try: - print(f"> SMTPTestServer got an email for {envelope.rcpt_tos}") - email = Email( - email_from=envelope.mail_from, - email_to=envelope.rcpt_tos, - email_content=envelope.content.decode( - "utf-8", errors="replace" - ), - ) - email_server.add_email_for_user(envelope.rcpt_tos[0], email) - email_server.save_emails() - return "250 Message accepted for delivery" - except Exception as e: - print(f"> Error handling email: {e}") - return "550 Internal Server Error" - - try: - self.handler = SimpleHandler() - self.controller = Controller( - self.handler, hostname=self.hostname, port=self.port - ) - except Exception as e: - print(f"> Error initializing SMTPTestServer Controller: {e}") - - def start(self): - print(f"> Starting SMTPTestServer on: {self.hostname}:{self.port}") - asyncio.create_task(self.async_loop()) - - async def async_loop(self): - try: - print(f"> Starting SMTPTestServer on: {self.hostname}:{self.port}") - self.controller.start() - await ( - self._stop_event.wait() - ) # Wait until the event is set to stop the server - except Exception as e: - print(f"> Error with SMTPTestServer: {e}") - - def stop(self): - try: - print("> Stopping SMTPTestServer") - loop = asyncio.get_running_loop() - if loop.is_running(): - loop.create_task(self.async_stop()) - else: - asyncio.run(self.async_stop()) - except Exception as e: - print(f"> Error stopping SMTPTestServer: {e}") - - async def async_stop(self): - self.controller.stop() - self._stop_event.set() # Stop the server by setting the event - - -class TimeoutError(Exception): - pass - - -class Timeout: - def __init__(self, timeout_duration): - if timeout_duration > 60: - raise ValueError("Timeout duration cannot exceed 60 seconds.") - self.timeout_duration = timeout_duration - - def run_with_timeout(self, condition_func, *args, **kwargs): - start_time = time.time() - result = None - - while True: - elapsed_time = time.time() - start_time - if elapsed_time > self.timeout_duration: - raise TimeoutError( - f"Function execution exceeded {self.timeout_duration} seconds." 
- ) - - # Check if the condition is met - try: - if condition_func(): - print("Condition met, exiting early.") - break - except Exception as e: - print(f"Exception in target function: {e}") - break # Exit the loop if an exception occurs in the function - time.sleep(1) - - return result - - -def get_email_server(reset=False): - email_server = EmailServer() - if reset: - email_server.reset_emails() - smtp_server = SMTPTestServer(email_server) - smtp_server.start() - return email_server, smtp_server - - -def create_user(root_client, test_user): - if not user_exists(root_client, test_user.email): - fake = Faker() - root_client.register( - name=test_user.name, - email=test_user.email, - password=test_user.password, - password_verify=test_user.password, - institution=fake.company(), - website=fake.url(), - ) - else: - print("User already exists", test_user) diff --git a/notebooks/scenarios/bigquery/job_helpers.py b/notebooks/scenarios/bigquery/job_helpers.py deleted file mode 100644 index f87cfdc7324..00000000000 --- a/notebooks/scenarios/bigquery/job_helpers.py +++ /dev/null @@ -1,400 +0,0 @@ -# stdlib -from collections import defaultdict -from collections.abc import Callable -from dataclasses import dataclass -from dataclasses import field -import json -import random -import re -import secrets -import textwrap -from typing import Any - -# third party -from helpers import TestUser - -# syft absolute -from syft import test_settings - -from syft.client.client import SyftClient # noqa - -dataset_1 = test_settings.get("dataset_1", default="dataset_1") -dataset_2 = test_settings.get("dataset_2", default="dataset_2") -table_1 = test_settings.get("table_1", default="table_1") -table_2 = test_settings.get("table_2", default="table_2") -table_1_col_id = test_settings.get("table_1_col_id", default="table_id") -table_1_col_score = test_settings.get("table_1_col_score", default="colname") -table_2_col_id = test_settings.get("table_2_col_id", default="table_id") -table_2_col_score = test_settings.get("table_2_col_score", default="colname") - - -@dataclass -class TestJob: - user_email: str - func_name: str - query: str - job_type: str - settings: dict # make a type so we can rely on attributes - should_succeed: bool - should_submit: bool = True - code_path: str | None = field(default=None) - admin_reviewed: bool = False - result_as_expected: bool | None = None - - _client_cache: SyftClient | None = field(default=None, repr=False, init=False) - - @property - def is_submitted(self) -> bool: - return self.code_path is not None - - @property - def client(self): - return self._client_cache - - @client.setter - def client(self, client): - self._client_cache = client - - def to_dict(self) -> dict: - output = {} - for k, v in self.__dict__.items(): - if k.startswith("_"): - continue - output[k] = v - return output - - def __iter__(self): - for key, val in self.to_dict().items(): - if key.startswith("_"): - yield key, val - - def __getitem__(self, key): - if key.startswith("_"): - return None - return self.to_dict()[key] - - @property - def code_method(self) -> None | Callable: - try: - return getattr(self.client.code, self.func_name, None) - except Exception as e: - print(f"Cant find code method. 
{e}") - return None - - -def make_query(settings: dict) -> str: - query = f""" - SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score - FROM {settings['dataset']}.{settings['table']} - GROUP BY {settings['groupby_col']} - LIMIT {settings['limit']}""".strip() - - return textwrap.dedent(query) - - -def create_simple_query_job(user: TestUser) -> TestJob: - job_type = "simple_query" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - dataset = random.choice([dataset_1, dataset_2]) - table, groupby_col, score_col = random.choice( - [ - (table_1, table_1_col_id, table_1_col_score), - (table_2, table_2_col_id, table_2_col_score), - ] - ) - limit = random.randint(1, 1_000_000) - - settings = { - "dataset": dataset, - "table": table, - "groupby_col": groupby_col, - "score_col": score_col, - "limit": limit, - } - query = make_query(settings) - - result = TestJob( - user_email=user.email, - func_name=func_name, - query=query, - job_type=job_type, - settings=settings, - should_succeed=True, - ) - - result.client = user.client - return result - - -def create_wrong_asset_query(user: TestUser) -> TestJob: - job_type = "wrong_asset_query" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - valid_job = create_simple_query_job(user) - settings = valid_job.settings - corrupted_asset = random.choice(["dataset", "table"]) - settings[corrupted_asset] = "wrong_asset" - query = make_query(settings) - - result = TestJob( - user_email=user.email, - func_name=func_name, - query=query, - job_type=job_type, - settings=settings, - should_succeed=False, - ) - - result.client = user.client - return result - - -def create_wrong_syntax_query(user: TestUser) -> TestJob: - job_type = "wrong_syntax_query" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - query = "SELECT * FROM table INCORRECT SYNTAX" - - result = TestJob( - user_email=user.email, - func_name=func_name, - query=query, - job_type=job_type, - settings={}, - should_succeed=False, - ) - - result.client = user.client - return result - - -def create_long_query_job(user: TestUser) -> TestJob: - job_type = "job_too_much_text" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - query = "a" * 1_000 - - result = TestJob( - user_email=user.email, - func_name=func_name, - query=query, - job_type=job_type, - settings={}, - should_succeed=False, - ) - - result.client = user.client - return result - - -def create_query_long_name(user: TestUser) -> TestJob: - job_type = "job_long_name" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - job = create_simple_query_job(user) - - job.job_type = job_type - job.func_name = func_name + "a" * 1_000 - - return job - - -def create_job_funcname_xss(user: TestUser) -> TestJob: - job_type = "job_funcname_xss" - func_name = f"{job_type}_{secrets.token_hex(3)}" - func_name += "" - - job = create_simple_query_job(user) - job.job_type = job_type - job.func_name = func_name - job.should_submit = False - return job - - -def get_request_for_job_info(requests, job): - job_requests = [r for r in requests if r.code.service_func_name == job.func_name] - if len(job_requests) != 1: - raise Exception(f"Too many or too few requests: {job} in requests: {requests}") - return job_requests[0] - - -def create_job_query_xss(user: TestUser) -> TestJob: - job_type = "job_query_xss" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - job = create_simple_query_job(user) - job.job_type = job_type - job.func_name = func_name - job.query += "" - job.should_succeed = False - - return job - - -def 
create_job_many_columns(user: TestUser) -> TestJob: - job_type = "job_many_columns" - func_name = f"{job_type}_{secrets.token_hex(3)}" - - job = create_simple_query_job(user) - job.job_type = job_type - job.func_name = func_name - settings = job.settings - job.settings["num_extra_cols"] = random.randint(100, 1000) - - new_columns_string = ", ".join( - f"{settings['score_col']} as col_{i}" for i in range(settings["num_extra_cols"]) - ) - - job.query = f""" - SELECT {settings['groupby_col']}, AVG({settings['score_col']}) AS average_score, {new_columns_string} - FROM {settings['dataset']}.{settings['table']} - GROUP BY {settings['groupby_col']} - LIMIT {settings['limit']}""".strip() - - return job - - -def create_random_job(user: TestUser) -> TestJob: - job_func = random.choice(create_job_functions) - return job_func(user) - - -def create_jobs(users: list[TestUser], total_jobs: int = 10) -> list[TestJob]: - jobs = [] - num_users = len(users) - user_index = 0 - each_count = 0 - # keep making jobs until we have enough - while len(jobs) < total_jobs: - # if we havent used each job type yet keep getting the next one - if each_count < len(create_job_functions): - job_func = create_job_functions[each_count] - each_count += 1 - else: - # otherwise lets get a random one - job_func = create_random_job - # use the current index of user - jobs.append(job_func(users[user_index])) - - # only go as high as the last user index - if user_index < num_users - 1: - user_index += 1 - else: - # reset back to the first user - user_index = 0 - - # in case we stuffed up - if len(jobs) > total_jobs: - jobs = jobs[:total_jobs] - return jobs - - -def submit_job(job: TestJob) -> tuple[Any, str]: - client = job.client - response = client.api.services.bigquery.submit_query( - func_name=job.func_name, query=job.query - ) - job.code_path = extract_code_path(response) - return response - - -def extract_code_path(response) -> str | None: - pattern = r"client\.code\.(\w+)\(\)" - match = re.search(pattern, str(response)) - if match: - extracted_code = match.group(1) - return extracted_code - return None - - -def approve_by_running(request): - job = request.code(blocking=False) - result = job.wait() - print("got result of type", type(result), "bool", bool(result)) - # got result of type bool False - # assert result won't work unless we know what type is coming back - job_info = job.info(result=True) - # need force when running multiple times - # todo check and dont run if its already done - response = request.deposit_result(job_info, approve=True, force=True) - return response - - -def get_job_emails(jobs, client, email_server): - all_requests = client.requests - res = {} - for job in jobs: - request = get_request_for_job_info(all_requests, job) - emails = email_server.get_emails_for_user(request.requesting_user_email) - res[request.requesting_user_email] = emails - return res - - -def resolve_request(request): - service_func_name = request.code.service_func_name - if service_func_name.startswith("simple_query"): - request.approve() # approve because it is good - if service_func_name.startswith("wrong_asset_query"): - request.approve() # approve because it is bad - if service_func_name.startswith("wrong_syntax_query"): - request.approve() # approve because it is bad - if service_func_name.startswith("job_too_much_text"): - request.deny(reason="too long, boring!") # deny because it is bad - if service_func_name.startswith("job_long_name"): - request.approve() - if service_func_name.startswith("job_funcname_xss"): - 
request.deny(reason="too long, boring!") # never reach doesnt matter - if service_func_name.startswith("job_query_xss"): - request.approve() # approve because it is bad - if service_func_name.startswith("job_many_columns"): - request.approve() # approve because it is bad - - return (request.id, request.status) - - -create_job_functions = [ - create_simple_query_job, # quick way to increase the odds - create_simple_query_job, - create_simple_query_job, - create_simple_query_job, - create_simple_query_job, - create_simple_query_job, - create_wrong_syntax_query, - create_long_query_job, - create_query_long_name, - create_job_funcname_xss, - create_job_query_xss, - create_job_many_columns, -] - - -def save_jobs(jobs, filepath="./jobs.json"): - user_jobs = defaultdict(list) - for job in jobs: - user_jobs[job.user_email].append(job.to_dict()) - with open(filepath, "w") as f: - f.write(json.dumps(user_jobs)) - - -def load_jobs(users, high_client, filepath="./jobs.json"): - data = {} - try: - with open(filepath) as f: - data = json.loads(f.read()) - except Exception as e: - print(f"cant read file: {filepath}: {e}") - data = {} - jobs_list = [] - for user in users: - if user.email not in data: - print(f"{user.email} missing from jobs") - continue - user_jobs = data[user.email] - for user_job in user_jobs: - test_job = TestJob(**user_job) - if user._client_cache is None: - user.client = high_client - test_job.client = user.client - jobs_list.append(test_job) - return jobs_list diff --git a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb index 4d641ccb8f0..84c4238008a 100644 --- a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb +++ b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -20,17 +20,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "python auto auto\n" - ] - } - ], + "outputs": [], "source": [ "# stdlib\n", "environment = os.environ.get(\"ORCHESTRA_DEPLOYMENT_TYPE\", \"python\")\n", @@ -41,12 +33,13 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# syft absolute\n", "import syft as sy\n", + "from syft import get_helpers # noqa: F401\n", "from syft import test_settings" ] }, @@ -59,1776 +52,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Autoreload enabled\n", - "Starting bigquery-low server on 0.0.0.0:61875\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", - "INFO: Uvicorn running on http://0.0.0.0:61875 (Press CTRL+C to quit)\n", - "INFO: Started reloader process [13204] using WatchFiles\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found `reset=True` in the launch configuration. 
Resetting the server...\n", - "Waiting for server to start" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Started server process [13209]\n", - "INFO: Waiting for application startup.\n", - "INFO: Application startup complete.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Done.\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftInfo: You have launched a development server at http://0.0.0.0:61875.It is intended only for local use." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The autoreload extension is already loaded. To reload it, use:\n", - " %reload_ext autoreload\n", - "Autoreload enabled\n", - "Starting bigquery-high server on 0.0.0.0:61888\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", - "INFO: Uvicorn running on http://0.0.0.0:61888 (Press CTRL+C to quit)\n", - "INFO: Started reloader process [13214] using WatchFiles\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found `reset=True` in the launch configuration. Resetting the server...\n", - "Waiting for server to start." - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Started server process [13219]\n", - "INFO: Waiting for application startup.\n", - "INFO: Application startup complete.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Done.\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftInfo: You have launched a development server at http://0.0.0.0:61888.It is intended only for local use." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -1853,7 +79,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1862,1723 +88,25 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "high_client = server_high.login(email=\"info@openmined.org\", password=\"changethis\")" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3610,20 +138,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us'" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "external_registry = test_settings.get(\"external_registry\", default=\"docker.io\")\n", "external_registry" @@ -3631,852 +148,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Image Registry ID: 0e55741d145444d487d8861fdf85dea5 created successfully" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = high_client.api.services.image_registry.add(external_registry)\n", "result" @@ -4484,2618 +158,9 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "[SyftImageRegistry(url=us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us)]" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "image_registry_list = high_client.api.services.image_registry.get_all()\n", "image_registry_list" @@ -7103,28 +168,9 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class SyftImageRegistry:\n", - " id: str = 0e55741d145444d487d8861fdf85dea5\n", - " url: str = \"us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us\"\n", - "\n", - "```" - ], - "text/plain": [ - "SyftImageRegistry(url=us-central1-docker.pkg.dev/reddit-testing-415005/syft-registry-us)" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "local_registry = image_registry_list[0]\n", "local_registry" @@ -7132,32 +178,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class SyftWorkerImage:\n", - " id: str = 588c466cd5c34fe9a418f5197bde5458\n", - " image_identifier: str = docker.io/openmined/syft-backend:local-dev\n", - " image_hash: str = None\n", - " created_at: str = 2024-09-12 14:08:13\n", - " built_at: str = None\n", - " config: str = prebuilt tag='openmined/syft-backend:local-dev' description='Prebuilt default worker image'\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_image.SyftWorkerImage" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "base_worker_image = high_client.images.get_all()[0]\n", "base_worker_image" @@ -7165,20 +188,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'FROM docker.io/openmined/syft-backend:local-dev\\n\\nRUN uv pip install db-dtypes google-cloud-bigquery'" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "worker_dockerfile = f\"\"\"\n", "FROM {str(base_worker_image.image_identifier)}\n", @@ -7191,7 +203,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -7201,852 +213,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Dockerfile ID: 60ffba80397f4c29a238eee407450b76 successfully submitted." - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "submit_result = high_client.api.services.worker_image.submit(\n", " worker_config=docker_config\n", @@ -8056,34 +225,9 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class SyftWorkerImage:\n", - " id: str = 60ffba80397f4c29a238eee407450b76\n", - " image_identifier: str = None\n", - " image_hash: str = None\n", - " created_at: str = 2024-09-12 14:08:13\n", - " built_at: str = None\n", - " config: str = FROM docker.io/openmined/syft-backend:local-dev\n", - "\n", - "RUN uv pip install db-dtypes google-cloud-bigquery\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_image.SyftWorkerImage" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# get non prebuilt\n", "dockerfile_list = high_client.images.get_all()\n", @@ -8100,20 +244,9 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'docker.io/openmined/syft-worker-bigquery:local-dev'" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "docker_tag = str(base_worker_image.image_identifier).replace(\n", " \"backend\", \"worker-bigquery\"\n", @@ -8123,7 +256,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -8138,7 +271,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -8149,20 +282,9 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "PrebuiltWorkerConfig(tag='docker.io/openmined/syft-worker-bigquery:local-dev', description=None)" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", "docker_config" @@ -8170,852 +292,9 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Dockerfile ID: 11326a215aae4a949cd6032e99b6593c successfully submitted." - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = high_client.api.services.worker_image.submit(worker_config=docker_config)\n", "worker_image_id = result.value.id\n", @@ -9024,7 +303,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -9034,32 +313,9 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class SyftWorkerImage:\n", - " id: str = 11326a215aae4a949cd6032e99b6593c\n", - " image_identifier: str = docker.io/openmined/syft-worker-bigquery:local-dev\n", - " image_hash: str = None\n", - " created_at: str = 2024-09-12 14:08:13\n", - " built_at: str = None\n", - " config: str = prebuilt tag='docker.io/openmined/syft-worker-bigquery:local-dev'\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_image.SyftWorkerImage" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# get prebuilt\n", "# dockerfile_list = high_client.images.get_all()\n", @@ -9085,7 +341,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -9094,7 +350,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -9105,2618 +361,9 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "[ContainerSpawnStatus(worker_name='bigquery-pool-1', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = high_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", @@ -11730,7 +377,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -11741,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -11750,7 +397,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -11759,859 +406,16 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: No batches to resolve" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "widget" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -12620,852 +424,9 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Registration feature successfully disabled" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.settings.allow_guest_signup(enable=False)" ] @@ -13479,852 +440,9 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Image Registry ID: 946570589ea140b298bdd783a830e497 created successfully" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = low_client.api.services.image_registry.add(external_registry)\n", "result" @@ -14332,20 +450,9 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "PrebuiltWorkerConfig(tag='docker.io/openmined/syft-worker-bigquery:local-dev', description=None)" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "docker_config = sy.PrebuiltWorkerConfig(tag=docker_tag)\n", "docker_config" @@ -14353,852 +460,9 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Dockerfile ID: 1663b0c6c28e43a5ac0efa2c8311a83d successfully submitted." - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = low_client.api.services.worker_image.submit(worker_config=docker_config)\n", "result" @@ -15206,32 +470,9 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class SyftWorkerImage:\n", - " id: str = 1663b0c6c28e43a5ac0efa2c8311a83d\n", - " image_identifier: str = docker.io/openmined/syft-worker-bigquery:local-dev\n", - " image_hash: str = None\n", - " created_at: str = 2024-09-12 14:07:58\n", - " built_at: str = None\n", - " config: str = prebuilt tag='docker.io/openmined/syft-worker-bigquery:local-dev'\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.worker.worker_image.SyftWorkerImage" - ] - }, - "execution_count": 38, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# get prebuilt\n", "dockerfile_list = low_client.images.get_all()\n", @@ -15253,2618 +494,9 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "[ContainerSpawnStatus(worker_name='bigquery-pool-1', worker=syft.service.worker.worker_pool.SyftWorker, error=None)]" - ] - }, - "execution_count": 39, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "result = low_client.api.services.worker_pool.launch(\n", " pool_name=worker_pool_name,\n", @@ -17878,7 +510,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -17887,852 +519,9 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low_client.register(\n", " email=\"data_scientist@openmined.org\",\n", @@ -18744,859 +533,16 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Registration feature successfully disabled" - ] - }, - "execution_count": 44, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low_client.settings.allow_guest_signup(enable=False)" ] }, { "cell_type": "code", - "execution_count": 45, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -19610,56 +556,18 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "#TODO: close email client" + "# TODO: close email client" ] }, { "cell_type": "code", - "execution_count": 47, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Stopping bigquery-high\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Shutting down\n", - "INFO: Waiting for application shutdown.\n", - "INFO: Application shutdown complete.\n", - "INFO: Finished server process [13219]\n", - "INFO: Stopping reloader process [13214]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "killed\n", - "Stopping bigquery-low\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Shutting down\n", - "INFO: Waiting for application shutdown.\n", - "INFO: Application shutdown complete.\n", - "INFO: Finished server process [13209]\n", - "INFO: Stopping reloader process [13204]\n" - ] - } - ], + "outputs": [], "source": [ "if environment != \"remote\":\n", " server_high.land()\n", diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index 44610b168c5..2d66e0d2464 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -2,12 +2,13 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# stdlib\n", "import os\n", + "\n", "# TODO: if\n", "# os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"remote\"\n", "# os.environ[\"DEV_MODE\"] = \"True\"\n", @@ -18,17 +19,9 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "python auto auto\n" - ] - } - ], + "outputs": [], "source": [ "# stdlib\n", "\n", @@ -38,39 +31,19 @@ "print(environment, high_port, low_port)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Launch server and login" - ] - }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Using Mock API Code, this will query BigQuery. 
$TEST_BIGQUERY_APIS_LIVE==False\n" - ] - } - ], + "outputs": [], "source": [ + "# isort: off\n", "# stdlib\n", - "import os\n", - "import sys\n", - "\n", - "# Get the current script's directory\n", - "current_dir = os.path.dirname(os.path.abspath(\".\"))\n", "\n", - "# Get the parent directory (one level up)\n", - "parent_dir = os.path.abspath(os.path.join(current_dir, os.pardir))\n", - "\n", - "# Add the parent directory to the system path\n", - "sys.path.insert(0, current_dir)\n", + "# syft absolute\n", + "import syft as sy\n", + "import syft as get_helpers # noqa: F401\n", + "from syft import test_settings\n", "\n", "# set to use the live APIs\n", "# import os\n", @@ -78,1790 +51,22 @@ "# third party\n", "from apis import make_schema\n", "from apis import make_submit_query\n", - "from apis import make_test_query" + "from apis import make_test_query\n", + "# isort: on" ] }, { - "cell_type": "code", - "execution_count": 4, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# syft absolute\n", - "import syft as sy\n", - "from syft import test_settings" + "# Launch server and login" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Autoreload enabled\n", - "Starting bigquery-low server on 0.0.0.0:62045\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", - "INFO: Uvicorn running on http://0.0.0.0:62045 (Press CTRL+C to quit)\n", - "INFO: Started reloader process [13235] using WatchFiles\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for server to start" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Started server process [13236]\n", - "INFO: Waiting for application startup.\n", - "INFO: Application startup complete.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Done.\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
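The banner above comes from the bundled `apis` test helpers, which serve canned results by default rather than querying BigQuery. A minimal sketch of the opt-in toggle (the same commented-out line appears in this cell), assuming the helpers read the variable at import time:

# switch the helpers to the live BigQuery-backed implementations;
# this must run before the `from apis import ...` lines execute
os.environ["TEST_BIGQUERY_APIS_LIVE"] = "True"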
" - ], - "text/plain": [ - "SyftInfo: You have launched a development server at http://0.0.0.0:62045.It is intended only for local use." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The autoreload extension is already loaded. To reload it, use:\n", - " %reload_ext autoreload\n", - "Autoreload enabled\n", - "Starting bigquery-high server on 0.0.0.0:62058\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", - "INFO: Uvicorn running on http://0.0.0.0:62058 (Press CTRL+C to quit)\n", - "INFO: Started reloader process [13239] using WatchFiles\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for server to start" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Started server process [13243]\n", - "INFO: Waiting for application startup.\n", - "INFO: Application startup complete.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Done.\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftInfo: You have launched a development server at http://0.0.0.0:62058.It is intended only for local use." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -1884,1706 +89,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].account.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "low_client = server_low.login(email=\"info@openmined.org\", password=\"changethis\")\n", "high_client = server_high.login(email=\"info@openmined.org\", password=\"changethis\")" @@ -3591,7 +99,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3601,7 +109,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3610,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3619,7 +127,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3635,7 +143,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3656,7 +164,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3670,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -3683,852 +191,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully created." - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "new_endpoint = sy.TwinAPIEndpoint(\n", " path=\"bigquery.test_query\",\n", @@ -4543,852 +208,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully updated." - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Here, we update the endpoint to timeout after 100s (rather the default of 60s)\n", "high_client.api.services.api.update(\n", @@ -5398,852 +220,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully updated." - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.api.services.api.update(\n", " endpoint_path=\"bigquery.test_query\", hide_mock_definition=True\n", @@ -6252,1031 +231,9 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "WARN: private key is based on server name: bigquery-high in dev_mode. Don't run this in production.\n", - "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/fbdf5a287e58454cbbd3fac4ad744d37/db/fbdf5a287e58454cbbd3fac4ad744d37.sqlite\n", - "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/fbdf5a287e58454cbbd3fac4ad744d37/db/fbdf5a287e58454cbbd3fac4ad744d37.sqlite\n", - "Adding producer for queue: api_call on: tcp://localhost:62069\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/markdown": [ - "\n", - "**Pointer**\n", - "\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False \n" - ], - "text/plain": [ - "Pointer:\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False " - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Test mock version\n", "result = high_client.api.services.bigquery.test_query.mock(\n", @@ -7287,1021 +244,9 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/markdown": [ - "\n", - "**Pointer**\n", - "\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False \n" - ], - "text/plain": [ - "Pointer:\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False " - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Test mock version\n", "result = high_client.api.services.bigquery.test_query.mock(\n", @@ -8312,127 +257,9 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "with sy.raises successfully caught the following exception:\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftException: Function failed to complete: An error was raised during the execution of the API endpoint call: 400 BadRequest: Table "invalid_table" must be qualified with a dataset (e.g. dataset.table).
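The failure above is BigQuery's own validation surfacing through the endpoint: bare table names are rejected. A well-formed query qualifies the table with its dataset; a sketch using the names defined earlier in this notebook:

# table references must be dataset-qualified ("dataset.table")
valid_query = f"SELECT * FROM {dataset_2}.{table_2} LIMIT 10"
result = high_client.api.services.bigquery.test_query.mock(sql_query=valid_query)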
" - ], - "text/markdown": [ - "\n", - "**Pointer**\n", - "\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False \n" - ], - "text/plain": [ - "Pointer:\n", - " int64_field_0 id name subscribers_count \\\n", - "0 4 t5_via1x /channel/mylittlepony 4323081 \n", - "1 5 t5_cv9gn /channel/polyamory 2425929 \n", - "2 10 t5_8p2tq /channel/Catholicism 4062607 \n", - "3 16 t5_8fcro /channel/cordcutters 7543226 \n", - "4 17 t5_td5of /channel/stevenuniverse 2692168 \n", - "5 23 t5_z01fv /channel/entitledbitch 2709080 \n", - "6 24 t5_hmqjk /channel/engineering 8766144 \n", - "7 25 t5_1flyj /channel/nottheonion 2580984 \n", - "8 27 t5_5rwej /channel/FoodPorn 7784809 \n", - "9 40 t5_uurcv /channel/puppysmiles 3715991 \n", - "\n", - " permalink nsfw spam \n", - "0 /channel//channel/mylittlepony NaN False \n", - "1 /channel//channel/polyamory NaN False \n", - "2 /channel//channel/Catholicism NaN False \n", - "3 /channel//channel/cordcutters NaN False \n", - "4 /channel//channel/stevenuniverse NaN False \n", - "5 /channel//channel/entitledbitch NaN False \n", - "6 /channel//channel/engineering NaN False \n", - "7 /channel//channel/nottheonion NaN False \n", - "8 /channel//channel/FoodPorn NaN False \n", - "9 /channel//channel/puppysmiles NaN False " - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Test private version\n", "result = high_client.api.services.bigquery.test_query.private(\n", @@ -9479,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -9493,852 +308,9 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully created." - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.custom_api.add(endpoint=schema_function)\n", "# can we delete this?\n", @@ -10347,1210 +319,16 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
2000000 \n", - "3 INTEGER None 2000000 \n", - "4 STRING None 2000000 \n", - "5 FLOAT None 2000000 \n", - "6 BOOLEAN None 2000000 \n", - "7 INTEGER None 2000000 \n", - "8 STRING None 2000000 \n", - "9 STRING None 2000000 \n", - "10 STRING None 2000000 \n", - "11 INTEGER None 2000000 \n", - "12 INTEGER None 2000000 \n", - "13 BOOLEAN None 2000000 \n", - "14 STRING None 2000000 \n", - "15 INTEGER None 2000000 \n", - "16 STRING None 2000000 \n", - "17 STRING None 2000000 \n", - "18 STRING None 2000000 \n", - "19 BOOLEAN None 2000000 \n", - "20 BOOLEAN None 2000000 \n", - "21 FLOAT None 2000000 \n", - "22 BOOLEAN None 2000000 " - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.api.services.bigquery.schema()" ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -11566,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -11577,1704 +355,18 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully created." - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.custom_api.add(endpoint=submit_query_function)" ] }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully updated." - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "high_client.api.services.api.update(\n", " endpoint_path=\"bigquery.submit_query\", hide_mock_definition=True\n", @@ -13283,7 +375,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -13296,27 +388,9 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "\n", - "**Pointer**\n", - "\n", - "'Query submitted syft.service.request.request.Request. Use `client.code.my_func()` to run your query'\n" - ], - "text/plain": [ - "Pointer:\n", - "'Query submitted syft.service.request.request.Request. Use `client.code.my_func()` to run your query'" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "assert \"Query submitted\" in result\n", "result" @@ -13324,7 +398,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -13333,28 +407,22 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.\n" - ] - } - ], + "outputs": [], "source": [ "res = job.wait().get()" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# third party\n", "import pandas as pd\n", + "\n", "assert isinstance(res, pd.DataFrame)" ] }, @@ -13367,2627 +435,16 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
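The endpoint list renders as an interactive widget in a live notebook; for scripted runs the plain list is easier to assert against. A sketch consistent with the three endpoints created in this notebook:

endpoints = high_client.custom_api.api_endpoints()
assert len(endpoints) == 3  # bigquery.test_query, bigquery.schema, bigquery.submit_query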
[13236]\n", - "INFO: Stopping reloader process [13235]\n" - ] - } - ], + "outputs": [], "source": [ "if environment != \"remote\":\n", " server_high.land()\n", diff --git a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb index 226f205f9c8..fe6bf1b1166 100644 --- a/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb +++ b/notebooks/scenarios/bigquery/sync/03-ds-submit-request.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -17,17 +17,9 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "python auto\n" - ] - } - ], + "outputs": [], "source": [ "# stdlib\n", "\n", @@ -38,14 +30,16 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# third party\n", + "import pandas as pd\n", + "\n", "# syft absolute\n", "import syft as sy\n", - "from syft import test_settings\n", - "import pandas as pd" + "from syft import test_settings" ] }, { @@ -57,891 +51,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Autoreload enabled\n", - "Starting bigquery-low server on 0.0.0.0:62820\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Will watch for changes in these directories: ['/Users/koen/workspace/PySyft/packages/syft/src/syft']\n", - "INFO: Uvicorn running on http://0.0.0.0:62820 (Press CTRL+C to quit)\n", - "INFO: Started reloader process [13539] using WatchFiles\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for server to start" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Started server process [13541]\n", - "INFO: Waiting for application startup.\n", - "INFO: Application startup complete.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Done.\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
" - ], - "text/plain": [ - "SyftInfo: You have launched a development server at http://0.0.0.0:62820.It is intended only for local use." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "server_low = sy.orchestra.launch(\n", " name=\"bigquery-low\",\n", @@ -955,17 +67,9 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], + "outputs": [], "source": [ "ds_client = server_low.login(\n", " email=\"data_scientist@openmined.org\", password=\"verysecurepassword\"\n", @@ -981,7 +85,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -990,7 +94,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1011,20 +115,9 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "WARN: private key is based on server name: bigquery-low in dev_mode. Don't run this in production.\n", - "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/4a471a09f56b4a1d809c0a7614074283/db/4a471a09f56b4a1d809c0a7614074283.sqlite\n", - "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/4a471a09f56b4a1d809c0a7614074283/db/4a471a09f56b4a1d809c0a7614074283.sqlite\n", - "Adding producer for queue: api_call on: tcp://localhost:62829\n" - ] - } - ], + "outputs": [], "source": [ "res = ds_client.api.services.bigquery.schema()\n", "assert isinstance(res.get(), pd.DataFrame)" @@ -1039,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1051,123 +144,9 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "with sy.raises successfully caught the following exception:\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftException: Function failed to complete: No private code available
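The rejected `.private` call above is the intended contract for a data-scientist account. The supported path is to inspect the mock output and then submit the query for data-owner review; a sketch, where the function name is an arbitrary label and `LARGE_SAMPLE_QUERY` is the query string defined in the cells that follow:

# runs against the mock data only
mock_res = ds_client.api.services.bigquery.test_query(sql_query=LARGE_SAMPLE_QUERY)
# registers a code request for the data owner to approve on the high side
submission = ds_client.api.services.bigquery.submit_query(
    func_name="large_sample",
    query=LARGE_SAMPLE_QUERY,
)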
\u001b[0;36mHTTPAdapter.send\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 666\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 667\u001b[0m resp \u001b[38;5;241m=\u001b[39m \u001b[43mconn\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43murlopen\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 668\u001b[0m \u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 669\u001b[0m \u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 670\u001b[0m \u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 671\u001b[0m \u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 672\u001b[0m \u001b[43m \u001b[49m\u001b[43mredirect\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 673\u001b[0m \u001b[43m \u001b[49m\u001b[43massert_same_host\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 674\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreload_content\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 675\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_content\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 676\u001b[0m \u001b[43m \u001b[49m\u001b[43mretries\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax_retries\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 677\u001b[0m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 678\u001b[0m \u001b[43m \u001b[49m\u001b[43mchunked\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mchunked\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 679\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 681\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (ProtocolError, \u001b[38;5;167;01mOSError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m err:\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/urllib3/connectionpool.py:843\u001b[0m, in \u001b[0;36mHTTPConnectionPool.urlopen\u001b[0;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, preload_content, decode_content, **response_kw)\u001b[0m\n\u001b[1;32m 841\u001b[0m new_e \u001b[38;5;241m=\u001b[39m ProtocolError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mConnection aborted.\u001b[39m\u001b[38;5;124m\"\u001b[39m, new_e)\n\u001b[0;32m--> 843\u001b[0m retries \u001b[38;5;241m=\u001b[39m \u001b[43mretries\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mincrement\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 844\u001b[0m \u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43merror\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnew_e\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m_pool\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m_stacktrace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msys\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexc_info\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m2\u001b[39;49m\u001b[43m]\u001b[49m\n\u001b[1;32m 845\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 846\u001b[0m retries\u001b[38;5;241m.\u001b[39msleep()\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/urllib3/util/retry.py:519\u001b[0m, in \u001b[0;36mRetry.increment\u001b[0;34m(self, method, url, response, error, _pool, _stacktrace)\u001b[0m\n\u001b[1;32m 518\u001b[0m reason \u001b[38;5;241m=\u001b[39m error \u001b[38;5;129;01mor\u001b[39;00m ResponseError(cause)\n\u001b[0;32m--> 519\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m MaxRetryError(_pool, url, reason) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mreason\u001b[39;00m \u001b[38;5;66;03m# type: ignore[arg-type]\u001b[39;00m\n\u001b[1;32m 521\u001b[0m log\u001b[38;5;241m.\u001b[39mdebug(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mIncremented Retry for (url=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m): \u001b[39m\u001b[38;5;132;01m%r\u001b[39;00m\u001b[38;5;124m\"\u001b[39m, url, new_retry)\n", - "\u001b[0;31mMaxRetryError\u001b[0m: HTTPConnectionPool(host='localhost', port=62820): Max retries exceeded with url: /api/v2/api_call (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 61] Connection refused'))", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mConnectionError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[29], line 4\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m sy\u001b[38;5;241m.\u001b[39mraises(\n\u001b[1;32m 2\u001b[0m sy\u001b[38;5;241m.\u001b[39mSyftException(public_message\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m*Your code is waiting for approval*\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 3\u001b[0m ):\n\u001b[0;32m----> 4\u001b[0m \u001b[43mds_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlarge_sample\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/api.py:374\u001b[0m, in \u001b[0;36mRemoteFunction.__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 373\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs: Any, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Any:\n\u001b[0;32m--> 374\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfunction_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/api.py:357\u001b[0m, in \u001b[0;36mRemoteFunction.function_call\u001b[0;34m(self, path, cache_result, *args, **kwargs)\u001b[0m\n\u001b[1;32m 356\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m\n\u001b[0;32m--> 357\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmake_call\u001b[49m\u001b[43m(\u001b[49m\u001b[43mapi_call\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_call\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcache_result\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_result\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 359\u001b[0m \u001b[38;5;66;03m# TODO: annotate this on the service method decorator\u001b[39;00m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/api.py:1068\u001b[0m, in \u001b[0;36mSyftAPI.make_call\u001b[0;34m(self, api_call, cache_result)\u001b[0m\n\u001b[1;32m 1067\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconnection \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1068\u001b[0m signed_result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconnection\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmake_call\u001b[49m\u001b[43m(\u001b[49m\u001b[43msigned_call\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1069\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:463\u001b[0m, in \u001b[0;36mHTTPConnection.make_call\u001b[0;34m(self, signed_call)\u001b[0m\n\u001b[1;32m 461\u001b[0m api_url \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapi_url\n\u001b[0;32m--> 463\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[43mrequests\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpost\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# nosec\u001b[39;49;00m\n\u001b[1;32m 464\u001b[0m \u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_url\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 465\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmsg_bytes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 466\u001b[0m \u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 467\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 469\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m200\u001b[39m:\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/requests/api.py:115\u001b[0m, in \u001b[0;36mpost\u001b[0;34m(url, data, json, **kwargs)\u001b[0m\n\u001b[1;32m 104\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"Sends a POST request.\u001b[39;00m\n\u001b[1;32m 105\u001b[0m \n\u001b[1;32m 106\u001b[0m \u001b[38;5;124;03m:param url: URL for the new :class:`Request` object.\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 112\u001b[0m 
\u001b[38;5;124;03m:rtype: requests.Response\u001b[39;00m\n\u001b[1;32m 113\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m--> 115\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpost\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mjson\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjson\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/requests/api.py:59\u001b[0m, in \u001b[0;36mrequest\u001b[0;34m(method, url, **kwargs)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m sessions\u001b[38;5;241m.\u001b[39mSession() \u001b[38;5;28;01mas\u001b[39;00m session:\n\u001b[0;32m---> 59\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43msession\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmethod\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/requests/sessions.py:589\u001b[0m, in \u001b[0;36mSession.request\u001b[0;34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[0m\n\u001b[1;32m 588\u001b[0m send_kwargs\u001b[38;5;241m.\u001b[39mupdate(settings)\n\u001b[0;32m--> 589\u001b[0m resp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprep\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43msend_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m resp\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/requests/sessions.py:703\u001b[0m, in \u001b[0;36mSession.send\u001b[0;34m(self, request, **kwargs)\u001b[0m\n\u001b[1;32m 702\u001b[0m \u001b[38;5;66;03m# Send the request\u001b[39;00m\n\u001b[0;32m--> 703\u001b[0m r \u001b[38;5;241m=\u001b[39m \u001b[43madapter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 705\u001b[0m \u001b[38;5;66;03m# Total elapsed time of the request (approximately)\u001b[39;00m\n", - "File \u001b[0;32m~/miniconda3/envs/syft/lib/python3.12/site-packages/requests/adapters.py:700\u001b[0m, in \u001b[0;36mHTTPAdapter.send\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 698\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SSLError(e, 
request\u001b[38;5;241m=\u001b[39mrequest)\n\u001b[0;32m--> 700\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mConnectionError\u001b[39;00m(e, request\u001b[38;5;241m=\u001b[39mrequest)\n\u001b[1;32m 702\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ClosedPoolError \u001b[38;5;28;01mas\u001b[39;00m e:\n", - "\u001b[0;31mConnectionError\u001b[0m: HTTPConnectionPool(host='localhost', port=62820): Max retries exceeded with url: /api/v2/api_call (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 61] Connection refused'))", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[29], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;43;01mwith\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mraises\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mSyftException\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpublic_message\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m*Your code is waiting for approval*\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3\u001b[0m \u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[1;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[43mds_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlarge_sample\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/types/errors.py:214\u001b[0m, in \u001b[0;36mraises.__exit__\u001b[0;34m(self, exc_type, exc_value, traceback)\u001b[0m\n\u001b[1;32m 209\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAssertionError\u001b[39;00m(\n\u001b[1;32m 210\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExpected \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mexpected_exception\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m to be raised, \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 211\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbut no exception was raised.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 212\u001b[0m )\n\u001b[1;32m 213\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28missubclass\u001b[39m(exc_type, expected_exception_type):\n\u001b[0;32m--> 214\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAssertionError\u001b[39;00m(\n\u001b[1;32m 215\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExpected \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mexpected_exception_type\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m to be raised, but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mexc_type\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 216\u001b[0m )\n\u001b[1;32m 217\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m message \u001b[38;5;129;01mand\u001b[39;00m message \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m exc_value\u001b[38;5;241m.\u001b[39mpublic_message:\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAssertionError\u001b[39;00m(\n\u001b[1;32m 219\u001b[0m 
\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExpected \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mexpected_exception_type\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m to be raised, \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 220\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdid not contain \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmessage\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 221\u001b[0m )\n", - "\u001b[0;31mAssertionError\u001b[0m: Expected to be raised, but got ." - ] - } - ], + "outputs": [], "source": [ - "with sy.raises(\n", - " sy.SyftException(public_message=\"*Your code is waiting for approval*\")\n", - "):\n", + "with sy.raises(sy.SyftException(public_message=\"*Your code is waiting for approval*\")):\n", " ds_client.code.large_sample()" ] }, { "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -1307,28 +227,9 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Stopping bigquery-low\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: Shutting down\n", - "INFO: Waiting for application shutdown.\n", - "INFO: Application shutdown complete.\n", - "INFO: Finished server process [13541]\n", - "INFO: Stopping reloader process [13539]\n" - ] - } - ], + "outputs": [], "source": [ "if environment != \"remote\":\n", " server_low.land()" diff --git a/notebooks/scenarios/bigquery/sync/sync_helpers.py b/notebooks/scenarios/bigquery/sync/sync_helpers.py deleted file mode 100644 index e1d558016ba..00000000000 --- a/notebooks/scenarios/bigquery/sync/sync_helpers.py +++ /dev/null @@ -1,190 +0,0 @@ -# third party -from tqdm import tqdm - -# syft absolute -import syft as sy -from syft.client.datasite_client import DatasiteClient -from syft.client.syncing import compare_clients -from syft.service.code.user_code import UserCode -from syft.service.job.job_stash import Job -from syft.service.job.job_stash import JobStatus -from syft.service.request.request import Request -from syft.service.request.request import RequestStatus -from syft.service.sync.diff_state import ObjectDiffBatch -from syft.types.result import Err - - -def deny_requests_without_autosync_tag(client_low: DatasiteClient): - # Deny all requests that are not autosync - requests = client_low.requests.get_all() - if isinstance(requests, sy.SyftError): - print(requests) - return - - denied_requests = [] - for request in tqdm(requests): - if request.status != RequestStatus.PENDING: - continue - if "autosync" not in request.tags: - request.deny( - reason="This request has been denied automatically. " - "Please use the designated API to submit your request." 
- ) - denied_requests.append(request.id) - print(f"Denied {len(denied_requests)} requests without autosync tag") - - -def is_request_to_sync(batch: ObjectDiffBatch) -> bool: - # True if this is a new low-side request - # TODO add condition for sql requests/usercodes - low_request = batch.root.low_obj - return ( - isinstance(low_request, Request) - and batch.status == "NEW" - and "autosync" in low_request.tags - ) - - -def is_job_to_sync(batch: ObjectDiffBatch): - # True if this is a new high-side job that is either COMPLETED or ERRORED - if batch.status != "NEW": - return False - if not isinstance(batch.root.high_obj, Job): - return False - job = batch.root.high_obj - return job.status in (JobStatus.ERRORED, JobStatus.COMPLETED) - - -def execute_requests( - client_high: DatasiteClient, request_ids: list[sy.UID] -) -> dict[sy.UID, Job]: - jobs_by_request_id = {} - for request_id in request_ids: - request = client_high.requests.get_by_uid(request_id) - if not isinstance(request, Request): - continue - - code = request.code - if not isinstance(code, UserCode): - continue - - func_name = request.code.service_func_name - api_func = getattr(client_high.code, func_name, None) - if api_func is None: - continue - - job = api_func(blocking=False) - jobs_by_request_id[request_id] = job - - return jobs_by_request_id - - -def deny_failed_jobs( - client_low: DatasiteClient, - jobs: list[Job], -) -> None: - # NOTE no syncing is needed, requests are denied on the low side - denied_requests = [] - - for job in jobs: - if job.status != JobStatus.ERRORED: - continue - - error_result = job.result - if isinstance(error_result, Err): - error_msg = error_result.err_value - else: - error_msg = "An unknown error occurred, please check the Job logs for more information." - - code_id = job.user_code_id - if code_id is None: - continue - requests = client_low.requests.get_by_usercode_id(code_id) - if isinstance(requests, list) and len(requests) > 0: - request = requests[0] - request.deny(reason=f"Execution failed: {error_msg}") - denied_requests.append(request.id) - else: - print(f"Failed to deny request for job {job.id}") - - print(f"Denied {len(denied_requests)} failed requests") - - -def sync_finished_jobs( - client_low: DatasiteClient, - client_high: DatasiteClient, -) -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError: - sync_job_results = {} - synced_jobs = [] - diff = compare_clients( - from_client=client_high, to_client=client_low, include_types=["job"] - ) - if isinstance(diff, sy.SyftError): - print(diff) - return diff - - for batch in diff.batches: - if is_job_to_sync(batch): - job = batch.root.high_obj - - w = batch.resolve(build_state=False) - share_result = w.click_share_all_private_data() - if isinstance(share_result, sy.SyftError): - sync_job_results[job.id] = share_result - continue - sync_result = w.click_sync() - - synced_jobs.append(job) - sync_job_results[job.id] = sync_result - - print(f"Sharing {len(sync_job_results)} new results") - deny_failed_jobs(client_low, synced_jobs) - return sync_job_results - - -def sync_new_requests( - client_low: DatasiteClient, - client_high: DatasiteClient, -) -> dict[sy.UID, sy.SyftSuccess | sy.SyftError] | sy.SyftError: - sync_request_results = {} - diff = compare_clients( - from_client=client_low, to_client=client_high, include_types=["request"] - ) - if isinstance(diff, sy.SyftError): - print(diff) - return sync_request_results - print(f"{len(diff.batches)} request batches found") - for batch in tqdm(diff.batches): - if is_request_to_sync(batch): - 
request_id = batch.root.low_obj.id - w = batch.resolve(build_state=False) - result = w.click_sync() - sync_request_results[request_id] = result - return sync_request_results - - -def sync_and_execute_new_requests( - client_low: DatasiteClient, client_high: DatasiteClient -) -> None: - sync_results = sync_new_requests(client_low, client_high) - if isinstance(sync_results, sy.SyftError): - print(sync_results) - return - - request_ids = [ - uid for uid, res in sync_results.items() if isinstance(res, sy.SyftSuccess) - ] - print(f"Synced {len(request_ids)} new requests") - - jobs_by_request = execute_requests(client_high, request_ids) - print(f"Started {len(jobs_by_request)} new jobs") - - -def auto_sync(client_low: DatasiteClient, client_high: DatasiteClient) -> None: - print("Starting auto sync") - print("Denying non tagged jobs") - deny_requests_without_autosync_tag(client_low) - print("Syncing and executing") - sync_and_execute_new_requests(client_low, client_high) - sync_finished_jobs(client_low, client_high) - print("Finished auto sync") From 05e97d9e3257be6d6109f93051ff282dbd5f2388 Mon Sep 17 00:00:00 2001 From: Brendan Schell Date: Thu, 12 Sep 2024 14:17:11 -0400 Subject: [PATCH 5/7] fix import nb2 Co-authored-by: Sameer Wagh --- .../sync/02-configure-api-and-sync.ipynb | 34 ++++++++++++++----- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index 2d66e0d2464..6e5e0f453b5 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -19,9 +19,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "python auto auto\n" + ] + } + ], "source": [ "# stdlib\n", "\n", @@ -33,16 +41,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using Mock API Code, this will query BigQuery. 
$TEST_BIGQUERY_APIS_LIVE==False\n" + ] + } + ], "source": [ "# isort: off\n", "# stdlib\n", "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "from syft import test_settings\n", "\n", "# set to use the live APIs\n", @@ -555,8 +571,8 @@ "metadata": {}, "outputs": [], "source": [ - "# widget._share_all()\n", - "# widget._sync_all()" + "#widget._share_all()\n", + "#widget._sync_all()" ] }, { @@ -727,7 +743,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.4" } }, "nbformat": 4, From ea445e4b8fe67e0d73a118dfe99756d6be35dabb Mon Sep 17 00:00:00 2001 From: Brendan Schell Date: Thu, 12 Sep 2024 14:36:08 -0400 Subject: [PATCH 6/7] fix import statements --- ...tart-and-configure-server-and-admins.ipynb | 4 +-- .../001-scale-delete-worker-pools.ipynb | 2 +- .../bigquery/010-setup-bigquery-pool.ipynb | 2 +- .../bigquery/011-users-emails-passwords.ipynb | 2 +- .../bigquery/020-configure-api.ipynb | 2 +- .../scenarios/bigquery/021-create-jobs.ipynb | 2 +- .../bigquery/040-do-review-requests.ipynb | 2 +- .../bigquery/050-ds-get-results.ipynb | 2 +- .../sync/02-configure-api-and-sync.ipynb | 30 +++++-------------- 9 files changed, 16 insertions(+), 32 deletions(-) diff --git a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb index 5d20d0919d6..f89a05e61aa 100644 --- a/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb +++ b/notebooks/scenarios/bigquery/000-start-and-configure-server-and-admins.ipynb @@ -26,7 +26,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "\n", "# third party\n", "from email_helpers import get_email_server\n", @@ -249,7 +249,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb index 478935e614d..6c5f07a1c19 100644 --- a/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb +++ b/notebooks/scenarios/bigquery/001-scale-delete-worker-pools.ipynb @@ -26,7 +26,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "\n", "# third party\n", "from email_helpers import Timeout\n", diff --git a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb index 38eb97f2219..79fc982462b 100644 --- a/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb +++ b/notebooks/scenarios/bigquery/010-setup-bigquery-pool.ipynb @@ -24,7 +24,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "from syft import test_settings\n", "\n", "# third party\n", diff --git a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb index 4e67eaba372..0bad0701e14 100644 --- a/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb +++ b/notebooks/scenarios/bigquery/011-users-emails-passwords.ipynb @@ -28,7 +28,7 @@ "\n", "# syft absolute\n", 
"import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "\n", "# third party\n", "from email_helpers import SENDER\n", diff --git a/notebooks/scenarios/bigquery/020-configure-api.ipynb b/notebooks/scenarios/bigquery/020-configure-api.ipynb index 7f5ea6cec76..718bdf352ca 100644 --- a/notebooks/scenarios/bigquery/020-configure-api.ipynb +++ b/notebooks/scenarios/bigquery/020-configure-api.ipynb @@ -33,7 +33,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "from syft import test_settings\n", "\n", "# third party\n", diff --git a/notebooks/scenarios/bigquery/021-create-jobs.ipynb b/notebooks/scenarios/bigquery/021-create-jobs.ipynb index 113a0990876..8224177b4cf 100644 --- a/notebooks/scenarios/bigquery/021-create-jobs.ipynb +++ b/notebooks/scenarios/bigquery/021-create-jobs.ipynb @@ -40,7 +40,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "from syft.service.job.job_stash import JobStatus\n", "\n", "# third party\n", diff --git a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb index b32d155a962..b42e79d2048 100644 --- a/notebooks/scenarios/bigquery/040-do-review-requests.ipynb +++ b/notebooks/scenarios/bigquery/040-do-review-requests.ipynb @@ -24,7 +24,7 @@ "\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "from syft.service.job.job_stash import Job\n", "\n", "# third party\n", diff --git a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb index dc898382af0..d82e2efff39 100644 --- a/notebooks/scenarios/bigquery/050-ds-get-results.ipynb +++ b/notebooks/scenarios/bigquery/050-ds-get-results.ipynb @@ -21,7 +21,7 @@ "# isort: off\n", "# syft absolute\n", "import syft as sy\n", - "import syft as get_helpers # noqa: F401\n", + "from syft import get_helpers # noqa: F401\n", "\n", "# third party\n", "from email_helpers import get_email_server\n", diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index 6e5e0f453b5..84e130581e8 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -19,17 +19,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "python auto auto\n" - ] - } - ], + "outputs": [], "source": [ "# stdlib\n", "\n", @@ -41,17 +33,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Using Mock API Code, this will query BigQuery. 
$TEST_BIGQUERY_APIS_LIVE==False\n" - ] - } - ], + "outputs": [], "source": [ "# isort: off\n", "# stdlib\n", @@ -571,8 +555,8 @@ "metadata": {}, "outputs": [], "source": [ - "#widget._share_all()\n", - "#widget._sync_all()" + "# widget._share_all()\n", + "# widget._sync_all()" ] }, { From e3aa3d986fbb90dab4ed4f7e02af3dd51ab77291 Mon Sep 17 00:00:00 2001 From: Sameer Wagh Date: Thu, 12 Sep 2024 15:38:39 -0400 Subject: [PATCH 7/7] fixing sync bug --- .../bigquery/sync/01-setup-high-low-datasites.ipynb | 2 +- .../scenarios/bigquery/sync/02-configure-api-and-sync.ipynb | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb index 84c4238008a..2b9b3b68201 100644 --- a/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb +++ b/notebooks/scenarios/bigquery/sync/01-setup-high-low-datasites.ipynb @@ -598,7 +598,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.5" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb index 84e130581e8..3e9a5700d72 100644 --- a/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb +++ b/notebooks/scenarios/bigquery/sync/02-configure-api-and-sync.ipynb @@ -555,8 +555,8 @@ "metadata": {}, "outputs": [], "source": [ - "# widget._share_all()\n", - "# widget._sync_all()" + "widget._share_all()\n", + "widget._sync_all()" ] }, { @@ -727,7 +727,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "3.12.3" } }, "nbformat": 4,