diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml index 1f9dff53a15..705a95ac16b 100644 --- a/.github/workflows/pr-tests-stack-arm64.yml +++ b/.github/workflows/pr-tests-stack-arm64.yml @@ -94,7 +94,7 @@ jobs: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - name: Run integration tests - uses: nick-fields/retry@v2 + uses: nick-fields/retry@v3 with: timeout_seconds: 36000 max_attempts: 3 diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml index 7a173000d02..2d8d8798dee 100644 --- a/.github/workflows/pr-tests-stack-public.yml +++ b/.github/workflows/pr-tests-stack-public.yml @@ -148,7 +148,7 @@ jobs: # tox -e stack.test.integration - name: Run integration tests - uses: nick-fields/retry@v2 + uses: nick-fields/retry@v3 if: steps.changes.outputs.stack == 'true' env: HAGRID_ART: false diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 0852db9269b..b83f12b5cb5 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -174,7 +174,7 @@ jobs: pip install --upgrade tox packaging wheel --default-timeout=60 - name: Run notebook tests - uses: nick-fields/retry@v2 + uses: nick-fields/retry@v3 if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' env: ORCHESTRA_DEPLOYMENT_TYPE: "${{ matrix.deployment-type }}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ad05bb12a3e..78d7205afb6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -172,15 +172,15 @@ repos: - id: mypy name: "mypy: syft" always_run: true - files: "^packages/syft/src/syft/serde|^packages/syft/src/syft/util/env.py|^packages/syft/src/syft/util/logger.py|^packages/syft/src/syft/util/markdown.py|^packages/syft/src/syft/util/notebook_ui/notebook_addons.py|^packages/syft/src/syft/img/base64.py|^packages/syft/src/syft/store/mongo_codecs.py|^packages/syft/src/syft/service/warnings.py|^packages/syft/src/syft/util/util.py|^packages/syft/src/syft/client/api.py|^packages/syft/src/syft/service/worker|^packages/syft/src/syft/service/user|^packages/syft/src/syft/service/dataset" - #files: "^packages/syft/src/syft/serde" + files: "^packages/syft/src/syft/serde|^packages/syft/src/syft/util|^packages/syft/src/syft/service" + # files: "^packages/syft/src/syft/" args: [ "--follow-imports=skip", "--ignore-missing-imports", "--scripts-are-modules", "--disallow-incomplete-defs", "--no-implicit-optional", - "--warn-unused-ignores", + # "--warn-unused-ignores", "--warn-redundant-casts", "--strict-equality", "--warn-unreachable", diff --git a/docs/requirements.txt b/docs/requirements.txt index d9247032aa1..6f3176dae92 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,6 @@ certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerability ipython==8.10.0 +jinja2>=3.1.3 # not directly required, pinned by Snyk to avoid a vulnerability markupsafe==2.0.1 pydata-sphinx-theme==0.7.2 pygments>=2.15.0 # not directly required, pinned by Snyk to avoid a vulnerability diff --git a/notebooks/Experimental/Madhava/veilid_1.ipynb b/notebooks/Experimental/Madhava/veilid_1.ipynb deleted file mode 100644 index c7497907c73..00000000000 --- a/notebooks/Experimental/Madhava/veilid_1.ipynb +++ /dev/null @@ -1,516 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "6920a2bb-3561-4c6a-ab98-93ef9f8e329d", - "metadata": {}, - "outputs": [], - 
"source": [ - "# stdlib\n", - "\n", - "# third party\n", - "import veilid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "904cae54", - "metadata": {}, - "outputs": [], - "source": [ - "NONCE_LENGTH = 24\n", - "QUIT = \"QUIT\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1db013a8-680f-4006-a0ce-6fd397839a02", - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 5959" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d98401b9-f3d1-48f8-8a2d-ed8550a56a90", - "metadata": {}, - "outputs": [], - "source": [ - "async def connect(host: str, port: int):\n", - " async def noop_callback(*args, **kwargs):\n", - " # print(\"got callback\", args, kwargs)\n", - " return\n", - "\n", - " conn = await veilid.json_api_connect(host, port, noop_callback)\n", - " return conn" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dadca923-6526-4d08-8952-0cd65edc6a2e", - "metadata": {}, - "outputs": [], - "source": [ - "conn = await connect(host, port)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ba09dc27-6599-4d96-8663-d59da66dce41", - "metadata": {}, - "outputs": [], - "source": [ - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "async with crypto_system:\n", - " print(\"keygen:Generating a keypair\")\n", - " my_keypair = await crypto_system.generate_key_pair()\n", - " print(f\"keygen:Got {my_keypair=}\")\n", - "\n", - "# await config.store_self_key(conn, my_keypair)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "96fab6ed-5578-4f5a-852f-b724d932e82a", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3dd03cf0-5627-48a7-aa51-6f7d455ae940", - "metadata": {}, - "outputs": [], - "source": [ - "def load_keys():\n", - " key_pair = \"eLaCWyV-XoCKbRKx4k_2XxJoiGbDLY9S4y_wfVQ4Bzs:rlWN82ncYWg8uHt_OPfa8n0IXlqd24D8gneVncCyclY\"\n", - " my_keypair = veilid.KeyPair(key_pair)\n", - " print(\"Own keypair:\")\n", - " print(\" Public: \", my_keypair.key())\n", - " print(\" Private: \", my_keypair.secret())\n", - " their_key = veilid.PublicKey(\"7HQ3RmvFLN1Q2Rv_PbVDRvYAOX9Te4VypjglS3g018A\")\n", - " return my_keypair, their_key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a12f9ef0-c473-4005-a757-93ab293d43cd", - "metadata": {}, - "outputs": [], - "source": [ - "my_keypair, their_key = load_keys()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ead0d3c8-2b45-4957-a332-4e30686e2a38", - "metadata": {}, - "outputs": [], - "source": [ - "their_key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f57189e2-aa3f-48eb-ae51-ec72e8661fc4", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b7931251-5051-4510-a900-f694a512e6db", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f14a2cbb-0c1e-45ee-add4-3f7ee60c4021", - "metadata": {}, - "outputs": [], - "source": [ - "async def create_sender(\n", - " router: veilid.api.RoutingContext,\n", - " crypto_system: veilid.CryptoSystem,\n", - " key: veilid.TypedKey,\n", - " secret: veilid.SharedSecret,\n", - " send_subkey: veilid.ValueSubkey,\n", - "):\n", - " \"\"\"Read input and write it to the DHT.\"\"\"\n", - "\n", - " async def encrypt(cleartext: str) -> bytes:\n", - " \"\"\"Encrypt the 
message with the shared secret and a random nonce.\"\"\"\n", - "\n", - " print(\"encrypt:Getting a nonce\")\n", - " nonce = await crypto_system.random_nonce()\n", - " print(f\"encrypt:Received {nonce=}\")\n", - " print(f\"encrypt:Encrypting {cleartext=}, {nonce=}, {secret=}\")\n", - " encrypted = await crypto_system.crypt_no_auth(cleartext.encode(), nonce, secret)\n", - " ciphertext = nonce.to_bytes() + encrypted\n", - " print(f\"encrypt:{ciphertext=}\")\n", - " return ciphertext\n", - "\n", - " async def send(cleartext: str):\n", - " \"\"\"Write the encrypted version of the text to the DHT.\"\"\"\n", - "\n", - " ciphertext = await encrypt(cleartext)\n", - " print(f\"send:Setting DHT {key=}, {send_subkey=}, to {ciphertext=}\")\n", - " await router.set_dht_value(key, send_subkey, ciphertext)\n", - "\n", - " # Prime the pumps. Especially when starting the conversation, this\n", - " # causes the DHT key to propagate to the network.\n", - " return send" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "329f5324-990f-41a3-9e5d-798e311682f4", - "metadata": {}, - "outputs": [], - "source": [ - "async def receiver(\n", - " router: veilid.api.RoutingContext,\n", - " crypto_system: veilid.CryptoSystem,\n", - " key: veilid.TypedKey,\n", - " secret: veilid.SharedSecret,\n", - " recv_subkey: veilid.ValueSubkey,\n", - " name: str,\n", - "):\n", - " \"\"\"Wait for new data from the DHT and write it to the screen.\"\"\"\n", - "\n", - " async def decrypt(payload: bytes) -> str:\n", - " \"\"\"Decrypt the payload with the shared secret and the payload's nonce.\"\"\"\n", - "\n", - " print(f\"decrypt:Decrypting {payload!r}\")\n", - " nonce = veilid.Nonce.from_bytes(payload[:NONCE_LENGTH])\n", - " print(f\"decrypt:Found {nonce=}\")\n", - " ciphertext = payload[NONCE_LENGTH:]\n", - " print(f\"decrypt:Decrypting {ciphertext=}, {nonce=}, {secret=}\")\n", - " cleartext = await crypto_system.crypt_no_auth(ciphertext, nonce, secret)\n", - " print(f\"decrypt:{cleartext=}\")\n", - " return cleartext.decode()\n", - "\n", - " last_seq = -1\n", - " while True:\n", - " # In the real world, don't do this. People may tease you for it.\n", - " # This is meant to be easy to understand for demonstration\n", - " # purposes, not a great pattern. 
Instead, you'd want to use the\n", - " # callback function to handle events asynchronously.\n", - "\n", - " # Try to get an updated version of the receiving subkey.\n", - " print(f\"receiver:Getting the DHT value of {key=}, {recv_subkey=}\")\n", - " resp = await router.get_dht_value(key, recv_subkey, True)\n", - " if resp is None:\n", - " print(\"receiver:Didn't find a value\")\n", - " continue\n", - "\n", - " # If the other party hasn't sent a newer message, try again.\n", - " if resp.seq == last_seq:\n", - " print(f\"receiver:DHT {key=} is still at {resp.seq=}\")\n", - " continue\n", - "\n", - " msg = await decrypt(resp.data)\n", - " if msg == QUIT:\n", - " print(\"receiver:Received the QUIT signal from the other end.\")\n", - " print(\"Other end closed the chat.\")\n", - " return\n", - "\n", - " print(\"receiver:Got new {msg=} at DHT {key=}, {resp.seq=}\")\n", - " print(f\"\\n{name}> {msg}\")\n", - " last_seq = resp.seq" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f149ea8b-0379-44fb-9f6a-7c49aece27fb", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "89740953-4783-42c9-be69-e41da5443bc7", - "metadata": {}, - "outputs": [], - "source": [ - "def create_members(my_keypair, their_key):\n", - " members = [\n", - " veilid.DHTSchemaSMPLMember(my_keypair.key(), 1),\n", - " veilid.DHTSchemaSMPLMember(their_key, 1),\n", - " ]\n", - " return members" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7eb4c147-6063-427b-8c6b-0605c19794bd", - "metadata": {}, - "outputs": [], - "source": [ - "members = create_members(my_keypair, their_key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9615df55-c28e-4e50-a00b-6798f3f66ec3", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "602aa1f6-2d42-422b-8887-7622706aa89d", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " print(\"start:Getting a DH shared secret\")\n", - " secret = await crypto_system.cached_dh(their_key, my_keypair.secret())\n", - " print(f\"start:Got {secret=}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3bd06829-41a4-4e10-95d1-286a18d93b32", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " print(\"start:Getting a DH shared secret\")\n", - " secret = await crypto_system.cached_dh(their_key, my_keypair.secret())\n", - " print(f\"start:Got {secret=}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27179b5f-62d4-43ec-97e8-4bb86fb3c2ae", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4bb2e0dd-f97a-4cab-a0bb-cb22cf6428a8", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, 
router:\n", - " print(\"start:Creating a new DHT record\")\n", - " record = await router.create_dht_record(veilid.DHTSchema.smpl(0, members))\n", - " print(f\"New chat key: {record.key}\")\n", - " print(\"Give that to your friend!\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1c028545-72dd-48de-8889-98dea2778d37", - "metadata": {}, - "outputs": [], - "source": [ - "record.key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d8ef5b96-79e3-45cf-b545-8d027117017e", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " print(f\"start:Reopening the DHT record {record.key=} with {my_keypair=}\")\n", - " await router.close_dht_record(record.key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "be3945e6-4d34-4fb1-943a-a64ba9feca30", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " print(f\"start:Reopening the DHT record {record.key=} with {my_keypair=}\")\n", - " await router.open_dht_record(record.key, my_keypair)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c6210ce0-14a8-4097-ae2d-d96bb0b13471", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6cddc089-7711-4fd2-afb4-210f348aa1c3", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " # The party initiating the chat writes to subkey 0 and reads from subkey 1.\n", - " # send_task = asyncio.create_task(sender(router, crypto_system, record.key, secret, 0))\n", - " sender = await create_sender(router, crypto_system, record.key, secret, 0)\n", - " await sender(\"testtesttest\")\n", - "\n", - " # send = await send_task\n", - " # await send(\"hello world\")\n", - " # recv_task = asyncio.create_task(receiver(router, crypto_system, record.key, secret, 1, name))\n", - "\n", - " # try:\n", - " # print(\"start:Starting the chat\")\n", - " # await asyncio.wait([send_task, recv_task], return_when=asyncio.FIRST_COMPLETED)\n", - " # finally:\n", - " # print(f\"start:Closing the DHT record {record.key=}\")\n", - " # await router.close_dht_record(record.key)\n", - " # print(f\"start:Deleting the DHT record {record.key=}\")\n", - " # await router.delete_dht_record(record.key)\n", - "\n", - " # print(\"start:Cleaning up\")\n", - " # recv_task.cancel()\n", - " # send_task.cancel()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c9d70f55-d451-49dd-af5e-eb6bbe5ef61e", - "metadata": {}, - "outputs": [], - "source": [ - "record.key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f208817b-dada-4c20-9cdf-48d19ab5a27a", - "metadata": {}, - "outputs": [], - "source": [ - "# await send(\"hello world2\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"0bf04175-67ac-4ede-bd42-92f60c188671", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import time" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a846437e-783c-477e-a693-b7f9f05891ba", - "metadata": {}, - "outputs": [], - "source": [ - "message = f\"Hello from the world! {time.time()}\"\n", - "message" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e4429621-0af4-4aab-b0db-28a148e87773", - "metadata": {}, - "outputs": [], - "source": [ - "# await asyncio.create_task(sender(router, crypto_system, record.key, secret, 0, message))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24890cde-dbc2-4462-b4c5-ccaff9bcb598", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/Experimental/Madhava/veilid_2.ipynb b/notebooks/Experimental/Madhava/veilid_2.ipynb deleted file mode 100644 index b7d1a9dca00..00000000000 --- a/notebooks/Experimental/Madhava/veilid_2.ipynb +++ /dev/null @@ -1,541 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "170613d5-d4cc-42dc-ba32-8ac241830b5b", - "metadata": {}, - "outputs": [], - "source": [ - "# import syft as sy\n", - "\n", - "# node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4c796510-742b-4621-a1b1-24f3c45eb419", - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install veilid==0.2.5" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1a8daffc-338d-4c7d-a0a4-40aa6df03829", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6920a2bb-3561-4c6a-ab98-93ef9f8e329d", - "metadata": {}, - "outputs": [], - "source": [ - "# third party\n", - "import veilid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d432b10b-9fb2-4a74-b4b9-f93ba1614179", - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 5960" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28956832-eae6-4ba7-a03c-b306a37659ea", - "metadata": {}, - "outputs": [], - "source": [ - "async def noop_callback(*args, **kwargs):\n", - " # print(\"got callback\", args, kwargs)\n", - " return" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5092eaf4-8724-44b3-b023-405d33abd06b", - "metadata": {}, - "outputs": [], - "source": [ - "conn = await veilid.json_api_connect(host, port, noop_callback)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "96fab6ed-5578-4f5a-852f-b724d932e82a", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8933f388-c256-4d6b-8f3d-93f646f91a0b", - "metadata": {}, - "outputs": [], - "source": [ - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "async with crypto_system:\n", - " print(\"keygen:Generating a keypair\")\n", - " my_keypair = await crypto_system.generate_key_pair()\n", - " 
print(f\"keygen:Got {my_keypair=}\")\n", - "\n", - "# await config.store_self_key(conn, my_keypair)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "50db9b83-bebb-407d-8690-12d9194574a7", - "metadata": {}, - "outputs": [], - "source": [ - "key_pair = \"7HQ3RmvFLN1Q2Rv_PbVDRvYAOX9Te4VypjglS3g018A:Lt5Yf-IRUUrH73Tlvyf1QR2fn9salvnHjEm0zBnQK0Q\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25464cf0-ac13-4733-9dbb-f74f2a1e79d6", - "metadata": {}, - "outputs": [], - "source": [ - "my_keypair = veilid.KeyPair(key_pair)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "701f4ad1-216f-4b12-ba36-d440db911ca8", - "metadata": {}, - "outputs": [], - "source": [ - "print(\"Own keypair:\")\n", - "print(\" Public: \", my_keypair.key())\n", - "print(\" Private: \", my_keypair.secret())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a12f9ef0-c473-4005-a757-93ab293d43cd", - "metadata": {}, - "outputs": [], - "source": [ - "their_key = veilid.PublicKey(\"eLaCWyV-XoCKbRKx4k_2XxJoiGbDLY9S4y_wfVQ4Bzs\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ead0d3c8-2b45-4957-a332-4e30686e2a38", - "metadata": {}, - "outputs": [], - "source": [ - "their_key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f57189e2-aa3f-48eb-ae51-ec72e8661fc4", - "metadata": {}, - "outputs": [], - "source": [ - "members = [\n", - " veilid.DHTSchemaSMPLMember(my_keypair.key(), 1),\n", - " veilid.DHTSchemaSMPLMember(their_key, 1),\n", - "]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8806c1ee-1924-4b17-8ab5-10bc0ac4915a", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cc0e6ab9-f4c2-42f1-9265-116d37fbaa45", - "metadata": {}, - "outputs": [], - "source": [ - "key = \"VLD0:r6NoOmRrZxPHVZkpmbC32ob05jOQNBf445kxZwEAF7o\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1ca6fa71-d94c-422d-a7b2-b80aacf8d2c1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2fb0184d-cda6-4472-8309-71494fea39e1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "061d3bae-449c-44a3-8baf-578560659dcf", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f14a2cbb-0c1e-45ee-add4-3f7ee60c4021", - "metadata": {}, - "outputs": [], - "source": [ - "# async def sender(\n", - "# router: veilid.api.RoutingContext,\n", - "# crypto_system: veilid.CryptoSystem,\n", - "# key: veilid.TypedKey,\n", - "# secret: veilid.SharedSecret,\n", - "# send_subkey: veilid.ValueSubkey,\n", - "# ):\n", - "# \"\"\"Read input and write it to the DHT.\"\"\"\n", - "\n", - "# async def encrypt(cleartext: str) -> bytes:\n", - "# \"\"\"Encrypt the message with the shared secret and a random nonce.\"\"\"\n", - "\n", - "# print(\"encrypt:Getting a nonce\")\n", - "# nonce = await crypto_system.random_nonce()\n", - "# print(f\"encrypt:Received {nonce=}\")\n", - "# print(f\"encrypt:Encrypting {cleartext=}, {nonce=}, {secret=}\")\n", - "# encrypted = await crypto_system.crypt_no_auth(cleartext.encode(), nonce, secret)\n", - "# ciphertext = nonce.to_bytes() + encrypted\n", - "# print(f\"encrypt:{ciphertext=}\")\n", - "# return ciphertext\n", - "\n", - "# async def send(cleartext: str):\n", - "# \"\"\"Write the encrypted version 
of the text to the DHT.\"\"\"\n", - "\n", - "# ciphertext = await encrypt(cleartext)\n", - "# print(f\"send:Setting DHT {key=}, {send_subkey=}, to {ciphertext=}\")\n", - "# await router.set_dht_value(key, send_subkey, ciphertext)\n", - "\n", - "# # Prime the pumps. Especially when starting the conversation, this\n", - "# # causes the DHT key to propagate to the network.\n", - "# await send(\"Hello from the world!\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b9904281-3aea-4f72-8338-21db4ad292ca", - "metadata": {}, - "outputs": [], - "source": [ - "NONCE_LENGTH = 24\n", - "QUIT = \"QUIT\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "329f5324-990f-41a3-9e5d-798e311682f4", - "metadata": {}, - "outputs": [], - "source": [ - "async def receiver(\n", - " router: veilid.api.RoutingContext,\n", - " crypto_system: veilid.CryptoSystem,\n", - " key: veilid.TypedKey,\n", - " secret: veilid.SharedSecret,\n", - " recv_subkey: veilid.ValueSubkey,\n", - " name: str,\n", - "):\n", - " \"\"\"Wait for new data from the DHT and write it to the screen.\"\"\"\n", - "\n", - " async def decrypt(payload: bytes) -> str:\n", - " \"\"\"Decrypt the payload with the shared secret and the payload's nonce.\"\"\"\n", - "\n", - " print(f\"decrypt:Decrypting {payload!r}\")\n", - " nonce = veilid.Nonce.from_bytes(payload[:NONCE_LENGTH])\n", - " print(f\"decrypt:Found {nonce=}\")\n", - " ciphertext = payload[NONCE_LENGTH:]\n", - " print(f\"decrypt:Decrypting {ciphertext=}, {nonce=}, {secret=}\")\n", - " cleartext = await crypto_system.crypt_no_auth(ciphertext, nonce, secret)\n", - " print(f\"decrypt:{cleartext=}\")\n", - " return cleartext.decode()\n", - "\n", - " last_seq = -1\n", - " while True:\n", - " # In the real world, don't do this. People may tease you for it.\n", - " # This is meant to be easy to understand for demonstration\n", - " # purposes, not a great pattern. 
Instead, you'd want to use the\n", - " # callback function to handle events asynchronously.\n", - "\n", - " # Try to get an updated version of the receiving subkey.\n", - " print(f\"receiver:Getting the DHT value of {key=}, {recv_subkey=}\")\n", - " resp = await router.get_dht_value(key, recv_subkey, True)\n", - " if resp is None:\n", - " print(\"receiver:Didn't find a value\")\n", - " continue\n", - "\n", - " # If the other party hasn't sent a newer message, try again.\n", - " if resp.seq == last_seq:\n", - " print(f\"receiver:DHT {key=} is still at {resp.seq=}\")\n", - " continue\n", - "\n", - " msg = await decrypt(resp.data)\n", - " if msg == QUIT:\n", - " print(\"receiver:Received the QUIT signal from the other end.\")\n", - " print(\"Other end closed the chat.\")\n", - " return\n", - "\n", - " print(\"receiver:Got new {msg=} at DHT {key=}, {resp.seq=}\")\n", - " print(f\"\\n{name}> {msg}\")\n", - " last_seq = resp.seq" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f149ea8b-0379-44fb-9f6a-7c49aece27fb", - "metadata": {}, - "outputs": [], - "source": [ - "name = \"friendsname\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6cddc089-7711-4fd2-afb4-210f348aa1c3", - "metadata": {}, - "outputs": [], - "source": [ - "# router = await (await conn.new_routing_context()).with_default_safety()\n", - "# crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "# async with crypto_system, router:\n", - "# print(\"start:Getting a DH shared secret\")\n", - "# secret = await crypto_system.cached_dh(their_key, my_keypair.secret())\n", - "# print(f\"start:Got {secret=}\")\n", - "\n", - "# print(\"start:Creating a new DHT record\")\n", - "# record = await router.create_dht_record(veilid.DHTSchema.smpl(0, members))\n", - "# print(f\"New chat key: {record.key}\")\n", - "# print(\"Give that to your friend!\")\n", - "\n", - "# # Close this key first. 
We'll reopen it for writing with our saved key.\n", - "# print(f\"start:Closing the DHT record {record.key=}\")\n", - "# await router.close_dht_record(record.key)\n", - "\n", - "# print(f\"start:Reopening the DHT record {record.key=} with {my_keypair=}\")\n", - "# await router.open_dht_record(record.key, my_keypair)\n", - "\n", - "# # The party initiating the chat writes to subkey 0 and reads from subkey 1.\n", - "# send_task = asyncio.create_task(sender(router, crypto_system, record.key, secret, 0))\n", - "# recv_task = asyncio.create_task(receiver(router, crypto_system, record.key, secret, 1, name))\n", - "\n", - "# try:\n", - "# print(\"start:Starting the chat\")\n", - "# await asyncio.wait([send_task, recv_task], return_when=asyncio.FIRST_COMPLETED)\n", - "# finally:\n", - "# print(f\"start:Closing the DHT record {record.key=}\")\n", - "# await router.close_dht_record(record.key)\n", - "# print(f\"start:Deleting the DHT record {record.key=}\")\n", - "# await router.delete_dht_record(record.key)\n", - "\n", - "# print(\"start:Cleaning up\")\n", - "# recv_task.cancel()\n", - "# send_task.cancel()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c911ff07-f18f-44d5-ae07-47012481f017", - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "\n", - "name = \"friendsname\"\n", - "\n", - "async with crypto_system, router:\n", - " print(f\"start:Reopening the DHT record {key=} with {my_keypair=}\")\n", - " await router.close_dht_record(key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1abe5f0f-e77a-44e7-b991-9b526629ffcf", - "metadata": {}, - "outputs": [], - "source": [ - "print(\"respond:Opening a private routing context\")\n", - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "print(\"respond:Getting a crypto system\")\n", - "crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "async with crypto_system, router:\n", - " print(\"respond:Getting a DH shared secret\")\n", - " secret = await crypto_system.cached_dh(their_key, my_keypair.secret())\n", - " print(f\"respond:Got {secret=}\")\n", - "\n", - " print(f\"respond:Opening the DHT record {key=} with {my_keypair=}\")\n", - " await router.open_dht_record(key, my_keypair)\n", - "\n", - " # The party responding to the chat writes to subkey 1 and reads from subkey 0.\n", - " # send_task = asyncio.create_task(sender(router, crypto_system, key, secret, 1))\n", - " # recv_task = asyncio.create_task(receiver(router, crypto_system, key, secret, 0, name))\n", - " await receiver(router, crypto_system, key, secret, 0, name)\n", - "\n", - " # try:\n", - " # LOG.debug(\"respond:Starting the chat\")\n", - " # await asyncio.wait([send_task, recv_task], return_when=asyncio.FIRST_COMPLETED)\n", - " # finally:\n", - " # LOG.debug(f\"respond:Closing the DHT record {key=}\")\n", - " # await router.close_dht_record(key)\n", - " # LOG.debug(f\"respond:Deleting the DHT record {key=}\")\n", - " # await router.delete_dht_record(key)\n", - "\n", - " # LOG.debug(\"respond:Cleaning up\")\n", - " # recv_task.cancel()\n", - " # send_task.cancel()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "95bed690-4a13-400f-a6f9-3fb49696ea03", - "metadata": {}, - "outputs": [], - "source": [ - "await receiver(router, crypto_system, key, secret, 0, name)" - ] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "4fbf15e3-f2dc-4670-aacf-4fee37672c83", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8e478b86-4ab8-4f20-af50-b8d669de8697", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e3e090c0-7058-4996-adeb-0f4f3962d084", - "metadata": {}, - "outputs": [], - "source": [ - "# if await config.load_self_key(conn):\n", - "# print(\"You already have a keypair.\")\n", - "\n", - "# # print the keypair\n", - "# print(await dump_keystore(host, port))\n", - "\n", - "# sys.exit(1)\n", - "\n", - "# LOG.debug(\"keygen:Getting a crypto system\")\n", - "# crypto_system = await conn.get_crypto_system(veilid.CryptoKind.CRYPTO_KIND_VLD0)\n", - "# async with crypto_system:\n", - "# LOG.debug(\"keygen:Generating a keypair\")\n", - "# my_keypair = await crypto_system.generate_key_pair()\n", - "# LOG.debug(f\"keygen:Got {my_keypair=}\")\n", - "\n", - "# await config.store_self_key(conn, my_keypair)\n", - "\n", - "# print(f\"Your new public key is: {my_keypair.key()}\")\n", - "# print(\"Share it with your friends!\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61bbe160-2328-4ce0-9622-88db1ca594a8", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b9f05403-1b8d-437f-8ca8-8defaf4091c7", - "metadata": {}, - "outputs": [], - "source": [ - "# await config.store_friend_key(conn, name, veilid.PublicKey(pubkey))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "07680800-cb9a-4b65-8335-9f4b2ff3b6f8", - "metadata": {}, - "outputs": [], - "source": [ - "# poetry run chat add-friend MyFriend L0nGkEyStR1ng\n", - "# async def store_friend_key(\n", - "# conn: veilid.json_api._JsonVeilidAPI, name: str, pubkey: veilid.PublicKey\n", - "# ):\n", - "# \"\"\"Write a friend's public key to the keystore.\"\"\"\n", - "\n", - "# await store_key(conn, f\"{FRIEND_PREFIX}{name}\", str(pubkey))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.5" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/packages/grid/frontend/pnpm-lock.yaml b/packages/grid/frontend/pnpm-lock.yaml index 3d7b6901efe..087164903a7 100644 --- a/packages/grid/frontend/pnpm-lock.yaml +++ b/packages/grid/frontend/pnpm-lock.yaml @@ -552,7 +552,7 @@ packages: sirv: 2.0.3 svelte: 3.59.2 tiny-glob: 0.2.9 - undici: 5.27.0 + undici: 6.6.2 vite: 4.5.2(@types/node@20.8.2) transitivePeerDependencies: - supports-color @@ -2519,9 +2519,9 @@ packages: resolution: {integrity: sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw==} dev: true - /undici@5.27.0: - resolution: {integrity: sha512-l3ydWhlhOJzMVOYkymLykcRRXqbUaQriERtR70B9LzNkZ4bX52Fc8wbTDneMiwo8T+AemZXvXaTx+9o5ROxrXg==} - engines: {node: '>=14.0'} + /undici@6.6.2: + resolution: {integrity: sha512-vSqvUE5skSxQJ5sztTZ/CdeJb1Wq0Hf44hlYMciqHghvz+K88U0l7D6u1VsndoFgskDcnU+nG3gYmMzJVzd9Qg==} + engines: {node: '>=18.0'} dependencies: '@fastify/busboy': 2.0.0 dev: true diff --git a/packages/grid/seaweedfs/app.py b/packages/grid/seaweedfs/app.py index 
130c32c360d..f84981da2ae 100644 --- a/packages/grid/seaweedfs/app.py +++ b/packages/grid/seaweedfs/app.py @@ -1,4 +1,3 @@ -# type: ignore # stdlib import json import subprocess diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 1a0afd7d28c..16c23faac6b 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -34,7 +34,8 @@ syft = networkx==2.8 packaging>=23.0 pyarrow==14.0.1 - pycapnp==1.3.0 + # pycapnp is beta version, update to stable version when available + pycapnp==2.0.0b2 pydantic[email]==1.10.13 pymongo==4.6.1 pynacl==1.5.0 @@ -42,7 +43,7 @@ syft = redis==4.6.0 requests==2.31.0 RestrictedPython==7.0 - result==0.10.0 + result==0.16.0 tqdm==4.66.1 typeguard==2.13.3 typing_extensions==4.8.0 @@ -91,7 +92,7 @@ data_science = dev = %(test_plugins)s %(telemetry)s - bandit==1.7.5 + bandit==1.7.7 ruff==0.1.6 importlib-metadata==6.8.0 isort==5.12.0 diff --git a/packages/syft/src/syft/abstract_node.py b/packages/syft/src/syft/abstract_node.py index f162b8a4201..2341d6e4926 100644 --- a/packages/syft/src/syft/abstract_node.py +++ b/packages/syft/src/syft/abstract_node.py @@ -35,6 +35,7 @@ class AbstractNode: name: Optional[str] node_type: Optional[NodeType] node_side_type: Optional[NodeSideType] + in_memory_workers: bool def get_service(self, path_or_func: Union[str, Callable]) -> Callable: raise NotImplementedError diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 2e7c7fc25e3..cb4e2d8aa2f 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -510,7 +510,7 @@ def debox_signed_syftapicall_response( signed_result: SignedSyftAPICall, ) -> Union[Any, SyftError]: if not isinstance(signed_result, SignedSyftAPICall): - return SyftError(message="The result is not signed") # type: ignore + return SyftError(message="The result is not signed") if not signed_result.is_valid: return SyftError(message="The result signature is invalid") diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py index 95a8c470cb3..c23d21b7b8e 100644 --- a/packages/syft/src/syft/client/registry.py +++ b/packages/syft/src/syft/client/registry.py @@ -7,7 +7,6 @@ from typing import Dict from typing import List from typing import Optional -from typing import TYPE_CHECKING from typing import Tuple from typing import Union @@ -23,12 +22,9 @@ from ..util.constants import DEFAULT_TIMEOUT from ..util.logger import error from ..util.logger import warning +from .client import SyftClient as Client from .enclave_client import EnclaveClient -if TYPE_CHECKING: - # relative - from .client import Client - NETWORK_REGISTRY_URL = ( "https://raw.githubusercontent.com/OpenMined/NetworkRegistry/main/gateways.json" ) @@ -112,7 +108,7 @@ def __repr__(self) -> str: return pd.DataFrame(on).to_string() @staticmethod - def create_client(network: Dict[str, Any]) -> Client: # type: ignore + def create_client(network: Dict[str, Any]) -> Client: # relative from ..client.client import connect @@ -127,7 +123,7 @@ def create_client(network: Dict[str, Any]) -> Client: # type: ignore error(f"Failed to login with: {network}. {e}") raise SyftException(f"Failed to login with: {network}. 
{e}") - def __getitem__(self, key: Union[str, int]) -> Client: # type: ignore + def __getitem__(self, key: Union[str, int]) -> Client: if isinstance(key, int): return self.create_client(network=self.online_networks[key]) else: @@ -266,14 +262,14 @@ def __repr__(self) -> str: return "(no domains online - try syft.domains.all_domains to see offline domains)" return pd.DataFrame(on).to_string() - def create_client(self, peer: NodePeer) -> Client: # type: ignore + def create_client(self, peer: NodePeer) -> Client: try: return peer.guest_client except Exception as e: error(f"Failed to login to: {peer}. {e}") raise SyftException(f"Failed to login to: {peer}. {e}") - def __getitem__(self, key: Union[str, int]) -> Client: # type: ignore + def __getitem__(self, key: Union[str, int]) -> Client: if isinstance(key, int): return self.create_client(self.online_domains[key][0]) else: @@ -360,7 +356,7 @@ def __repr__(self) -> str: return pd.DataFrame(on).to_string() @staticmethod - def create_client(enclave: Dict[str, Any]) -> Client: # type: ignore + def create_client(enclave: Dict[str, Any]) -> Client: # relative from ..client.client import connect @@ -375,7 +371,7 @@ def create_client(enclave: Dict[str, Any]) -> Client: # type: ignore error(f"Failed to login with: {enclave}. {e}") raise SyftException(f"Failed to login with: {enclave}. {e}") - def __getitem__(self, key: Union[str, int]) -> EnclaveClient: # type: ignore + def __getitem__(self, key: Union[str, int]) -> EnclaveClient: if isinstance(key, int): return self.create_client(enclave=self.online_enclaves[key]) else: diff --git a/packages/syft/src/syft/gevent_patch.py b/packages/syft/src/syft/gevent_patch.py index 2265a2ae713..d96abf5be2c 100644 --- a/packages/syft/src/syft/gevent_patch.py +++ b/packages/syft/src/syft/gevent_patch.py @@ -20,8 +20,11 @@ def str_to_bool(bool_str: Optional[str]) -> bool: def is_notebook() -> bool: + # third party + from IPython import get_ipython + try: - shell = get_ipython().__class__.__name__ # type: ignore + shell = get_ipython().__class__.__name__ if shell == "ZMQInteractiveShell": return True # Jupyter notebook or qtconsole elif shell == "TerminalInteractiveShell": diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index a9b161bc63f..fe5872f1aba 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -1151,14 +1151,14 @@ def handle_api_call_with_unsigned_result( api_call: Union[SyftAPICall, SignedSyftAPICall], job_id: Optional[UID] = None, check_call_location=True, - ) -> Result[Union[QueueItem, SyftObject], Err]: + ) -> Union[Result, QueueItem, SyftObject, SyftError]: if self.required_signed_calls and isinstance(api_call, SyftAPICall): return SyftError( - message=f"You sent a {type(api_call)}. This node requires SignedSyftAPICall." # type: ignore + message=f"You sent a {type(api_call)}. This node requires SignedSyftAPICall." 
) else: if not api_call.is_valid: - return SyftError(message="Your message signature is invalid") # type: ignore + return SyftError(message="Your message signature is invalid") if api_call.message.node_uid != self.id and check_call_location: return self.forward_message(api_call=api_call) @@ -1190,11 +1190,11 @@ def handle_api_call_with_unsigned_result( return SyftError( message=f"As a `{role}`," f"you have has no access to: {api_call.path}" - ) # type: ignore + ) else: return SyftError( message=f"API call not in registered services: {api_call.path}" - ) # type: ignore + ) _private_api_path = user_config_registry.private_path_for(api_call.path) method = self.get_service_method(_private_api_path) @@ -1213,11 +1213,11 @@ def handle_api_call_with_unsigned_result( def add_action_to_queue( self, action, - credentials, + credentials: SyftVerifyKey, parent_job_id=None, has_execute_permissions: bool = False, worker_pool_name: Optional[str] = None, - ): + ) -> Union[Job, SyftError]: job_id = UID() task_uid = UID() worker_settings = WorkerSettings.from_node(node=self) @@ -1267,8 +1267,12 @@ def add_action_to_queue( ) def add_queueitem_to_queue( - self, queue_item, credentials, action=None, parent_job_id=None - ): + self, + queue_item: ActionQueueItem, + credentials: SyftVerifyKey, + action=None, + parent_job_id=None, + ) -> Union[Job, SyftError]: log_id = UID() role = self.get_role_for_credentials(credentials=credentials) context = AuthedServiceContext(node=self, credentials=credentials, role=role) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 814140d8d98..1baa64af0a8 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -7,5 +7,16 @@ }, "3": { "release_name": "0.8.4.json" + }, + "dev": { + "object_versions": { + "UserCode": { + "4": { + "version": 4, + "hash": "09e2c6a119246d8beb14f34a365c6e016947f854db86658bbef99c8970bf7e27", + "action": "add" + } + } + } } } diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 83d414d16b5..eeb1236b749 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -269,7 +269,7 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: # clean this mess, Tudor module_parts = proto.fullyQualifiedName.split(".") klass = module_parts.pop() - class_type: Type = type(None) + class_type: Union[Type, Any] = type(None) if klass != "NoneType": try: diff --git a/packages/syft/src/syft/service/action/action_data_empty.py b/packages/syft/src/syft/service/action/action_data_empty.py index b9c3960fad3..e32f4e339bb 100644 --- a/packages/syft/src/syft/service/action/action_data_empty.py +++ b/packages/syft/src/syft/service/action/action_data_empty.py @@ -19,7 +19,7 @@ class ActionDataEmpty(SyftObject): __canonical_name__ = "ActionDataEmpty" __version__ = SYFT_OBJECT_VERSION_1 - syft_internal_type: Optional[Type] = NoneType + syft_internal_type: Optional[Type] = NoneType # type: ignore def __repr__(self) -> str: return f"{type(self).__name__} <{self.syft_internal_type}>" diff --git a/packages/syft/src/syft/service/action/action_graph.py b/packages/syft/src/syft/service/action/action_graph.py index c3e25ac098b..8456f6852a1 100644 --- a/packages/syft/src/syft/service/action/action_graph.py +++ b/packages/syft/src/syft/service/action/action_graph.py @@ -78,37 +78,39 @@ class NodeActionData(SyftObject): def make_created_at(cls, v: 
Optional[DateTime]) -> DateTime: return DateTime.now() if v is None else v - @staticmethod - def from_action(action: Action, credentials: SyftVerifyKey): + @classmethod + def from_action(cls, action: Action, credentials: SyftVerifyKey) -> Self: is_mutagen = action.remote_self is not None and ( action.remote_self == action.result_id ) - return NodeActionData( + return cls( id=action.id, type=NodeType.ACTION, user_verify_key=credentials, is_mutagen=is_mutagen, ) - @staticmethod - def from_action_obj(action_obj: ActionObject, credentials: SyftVerifyKey): - return NodeActionData( + @classmethod + def from_action_obj( + cls, action_obj: ActionObject, credentials: SyftVerifyKey + ) -> Self: + return cls( id=action_obj.id, type=NodeType.ACTION_OBJECT, user_verify_key=credentials, ) - def __hash__(self): + def __hash__(self) -> int: return hash(self.id) - def __eq__(self, other: Self): + def __eq__(self, other: Any) -> bool: if not isinstance(other, NodeActionData): raise NotImplementedError( "Comparisions can be made with NodeActionData type objects only." ) return hash(self) == hash(other) - def __repr__(self): + def __repr__(self) -> str: return self._repr_debug_() @@ -148,7 +150,7 @@ def get(self, uid: Any) -> Any: def delete(self, uid: Any) -> None: raise NotImplementedError - def find_neighbors(self, uid: Any) -> List[Any]: + def find_neighbors(self, uid: Any) -> Optional[List]: raise NotImplementedError def update(self, uid: Any, data: Any) -> None: @@ -229,7 +231,9 @@ def lock(self) -> SyftLock: def db(self) -> nx.Graph: return self._db - def _thread_safe_cbk(self, cbk: Callable, *args, **kwargs): + def _thread_safe_cbk( + self, cbk: Callable, *args: Any, **kwargs: Any + ) -> Result[Any, str]: # TODO copied method from document_store, have it in one place and reuse? 
locked = self.lock.acquire(blocking=True) if not locked: @@ -267,10 +271,11 @@ def _delete(self, uid: UID) -> None: self.db.remove_node(uid) self.save() - def find_neighbors(self, uid: UID) -> Optional[Iterable]: + def find_neighbors(self, uid: UID) -> Optional[List]: if self.exists(uid=uid): neighbors = self.db.neighbors(uid) return neighbors + return None def update(self, uid: UID, data: Any) -> None: self._thread_safe_cbk(self._update, uid=uid, data=data) @@ -294,7 +299,7 @@ def _remove_edge(self, parent: Any, child: Any) -> None: self.db.remove_edge(parent, child) self.save() - def visualize(self, seed: int = 3113794652, figsize=(20, 10)) -> None: + def visualize(self, seed: int = 3113794652, figsize: tuple = (20, 10)) -> None: plt.figure(figsize=figsize) pos = nx.spring_layout(self.db, seed=seed) return nx.draw_networkx(self.db, pos=pos, with_labels=True) @@ -305,10 +310,10 @@ def nodes(self) -> Iterable: def edges(self) -> Iterable: return self.db.edges() - def get_predecessors(self, uid: UID) -> Iterable: + def get_predecessors(self, uid: UID) -> List: return self.db.predecessors(uid) - def get_successors(self, uid: UID) -> Iterable: + def get_successors(self, uid: UID) -> List: return self.db.successors(uid) def is_parent(self, parent: Any, child: Any) -> bool: @@ -362,7 +367,7 @@ class InMemoryActionGraphStore(ActionGraphStore): def __init__(self, store_config: StoreConfig, reset: bool = False): self.store_config: StoreConfig = store_config - self.graph: Type[BaseGraphStore] = self.store_config.store_type( + self.graph: BaseGraphStore = self.store_config.store_type( self.store_config, reset ) diff --git a/packages/syft/src/syft/service/action/action_graph_service.py b/packages/syft/src/syft/service/action/action_graph_service.py index 23c2b579401..6e06a9c84a0 100644 --- a/packages/syft/src/syft/service/action/action_graph_service.py +++ b/packages/syft/src/syft/service/action/action_graph_service.py @@ -1,6 +1,6 @@ # stdlib from typing import List -from typing import Tuple +from typing import Optional from typing import Union # third party @@ -39,7 +39,7 @@ def __init__(self, store: ActionGraphStore): @service_method(path="graph.add_action", name="add_action") def add_action( self, context: AuthedServiceContext, action: Action - ) -> Union[NodeActionData, SyftError]: + ) -> Union[tuple[NodeActionData, NodeActionData], SyftError]: # Create a node for the action input_uids, output_uid = self._extract_input_and_output_from_action( action=action @@ -60,6 +60,8 @@ def add_action( if action_node.is_mutagen: # updated non-mutated successor for all nodes between # node_id and nm_successor_id + if action.remote_self is None: + return SyftError(message=f"action {action}'s remote_self is None") result = self.store.update_non_mutated_successor( node_id=action.remote_self.id, nm_successor_id=action_node.id, @@ -102,7 +104,9 @@ def add_action_obj( return result.ok() - def _extract_input_and_output_from_action(self, action: Action) -> Tuple[UID]: + def _extract_input_and_output_from_action( + self, action: Action + ) -> tuple[set[UID], Optional[UID]]: input_uids = set() if action.remote_self is not None: @@ -114,7 +118,10 @@ def _extract_input_and_output_from_action(self, action: Action) -> Tuple[UID]: for _, kwarg in action.kwargs.items(): input_uids.add(kwarg.id) - output_uid = action.result_id.id + if action.result_id is not None: + output_uid = action.result_id.id + else: + output_uid = None return input_uids, output_uid diff --git a/packages/syft/src/syft/service/action/action_object.py 
b/packages/syft/src/syft/service/action/action_object.py index 5b21513f27b..bbff783a49a 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -77,7 +77,7 @@ class ActionType(Enum): SYFTFUNCTION = 32 -def repr_cls(c): +def repr_cls(c: Any) -> str: return f"{c.__module__}.{c.__name__}" @@ -196,7 +196,9 @@ def syft_history_hash(self) -> int: return hashes @classmethod - def syft_function_action_from_kwargs_and_id(cls, kwargs, user_code_id): + def syft_function_action_from_kwargs_and_id( + cls, kwargs: dict[str, Any], user_code_id: UID + ) -> Self: kwarg_ids = {} for k, v in kwargs.items(): kwarg_ids[k] = LineageID(v) @@ -230,8 +232,8 @@ def from_api_call(cls, api_call: SyftAPICall) -> Action: ) return action - def __repr__(self): - def repr_uid(_id): + def __repr__(self) -> str: + def repr_uid(_id: LineageID) -> str: return f"{str(_id)[:3]}..{str(_id)[-1]}" arg_repr = ", ".join([repr_uid(x) for x in self.args]) @@ -247,7 +249,7 @@ def repr_uid(_id): @migrate(Action, ActionV1) -def downgrade_action_v2_to_v1(): +def downgrade_action_v2_to_v1() -> list[Callable]: return [ drop("user_code_id"), make_set_default("op", ""), @@ -256,7 +258,7 @@ def downgrade_action_v2_to_v1(): @migrate(ActionV1, Action) -def upgrade_action_v1_to_v2(): +def upgrade_action_v1_to_v2() -> list[Callable]: return [make_set_default("user_code_id", None)] @@ -390,20 +392,20 @@ def make_action_side_effect( action_type=context.action_type, ) context.action = action - except Exception as e: - raise e + except Exception: print(f"make_action_side_effect failed with {traceback.format_exc()}") return Err(f"make_action_side_effect failed with {traceback.format_exc()}") + return Ok((context, args, kwargs)) class TraceResult: - result = [] - _client = None - is_tracing = False + result: list = [] + _client: SyftClient = None + is_tracing: bool = False @classmethod - def reset(cls): + def reset(cls) -> None: cls.result = [] cls._client = None @@ -431,7 +433,7 @@ def convert_to_pointers( if args is not None: for arg in args: if not isinstance(arg, (ActionObject, Asset, UID)): - arg = ActionObject.from_obj( + arg = ActionObject.from_obj( # type: ignore[unreachable] syft_action_data=arg, syft_client_verify_key=api.signing_key.verify_key, syft_node_location=api.node_uid, @@ -446,7 +448,7 @@ def convert_to_pointers( if kwargs is not None: for k, arg in kwargs.items(): if not isinstance(arg, (ActionObject, Asset, UID)): - arg = ActionObject.from_obj( + arg = ActionObject.from_obj( # type: ignore[unreachable] syft_action_data=arg, syft_client_verify_key=api.signing_key.verify_key, syft_node_location=api.node_uid, @@ -646,7 +648,7 @@ def syft_action_data(self) -> Any: return self.syft_action_data_cache - def reload_cache(self): + def reload_cache(self) -> Optional[SyftError]: # If ActionDataEmpty then try to fetch it from store. 
if isinstance(self.syft_action_data_cache, ActionDataEmpty): blob_storage_read_method = from_api_or_context( @@ -659,33 +661,30 @@ def reload_cache(self): blob_retrieval_object = blob_storage_read_method( uid=self.syft_blob_storage_entry_id ) - if isinstance(blob_retrieval_object, SyftError): - print( - "Detached action object, object exists but is not linked to data in the blob storage", - blob_retrieval_object, - ) - return blob_retrieval_object # relative from ...store.blob_storage import BlobRetrieval if isinstance(blob_retrieval_object, SyftError): - raise SyftException( - message=f"Failed to retrieve object from blob storage: {blob_retrieval_object.message}" - ) + return blob_retrieval_object elif isinstance(blob_retrieval_object, BlobRetrieval): # TODO: This change is temporary to for gateway to be compatible with the new blob storage self.syft_action_data_cache = blob_retrieval_object.read() self.syft_action_data_type = type(self.syft_action_data) + return None else: # In the case of gateway, we directly receive the actual object # TODO: The ideal solution would be to stream the object from the domain through the gateway # Currently , we are just passing the object as it is, which would be fixed later. self.syft_action_data_cache = blob_retrieval_object self.syft_action_data_type = type(self.syft_action_data) + return None else: print("cannot reload cache") + return None + + return None - def _save_to_blob_storage_(self, data: Any) -> None: + def _save_to_blob_storage_(self, data: Any) -> Optional[SyftError]: # relative from ...types.blob_storage import BlobFile from ...types.blob_storage import CreateBlobStorageEntry @@ -737,6 +736,8 @@ def _save_to_blob_storage_(self, data: Any) -> None: self.syft_action_data_cache = data + return None + def _save_to_blob_storage(self) -> Optional[SyftError]: data = self.syft_action_data if isinstance(data, SyftError): @@ -748,6 +749,7 @@ def _save_to_blob_storage(self) -> Optional[SyftError]: return result if not TraceResult.is_tracing: self.syft_action_data_cache = self.as_empty_data() + return None @property def is_pointer(self) -> bool: @@ -777,7 +779,7 @@ def __check_action_data(cls, values: dict) -> dict: else: values["syft_action_data_repr_"] = ( v._repr_markdown_() - if hasattr(v, "_repr_markdown_") + if v is not None and hasattr(v, "_repr_markdown_") else v.__repr__() ) values["syft_action_data_str_"] = str(v) @@ -785,15 +787,15 @@ def __check_action_data(cls, values: dict) -> dict: return values @property - def is_mock(self): + def is_mock(self) -> bool: return self.syft_twin_type == TwinMode.MOCK @property - def is_real(self): + def is_real(self) -> bool: return self.syft_twin_type == TwinMode.PRIVATE @property - def is_twin(self): + def is_twin(self) -> bool: return self.syft_twin_type != TwinMode.NONE # @pydantic.validator("syft_action_data", pre=True, always=True) @@ -857,7 +859,7 @@ def syft_execute_action( ) return api.make_call(api_call) - def request(self, client): + def request(self, client: SyftClient) -> Union[Any, SyftError]: # relative from ..request.request import ActionStoreChange from ..request.request import SubmitRequest @@ -873,7 +875,7 @@ def request(self, client): ) return client.api.services.request.submit(submit_request) - def _syft_try_to_save_to_store(self, obj) -> None: + def _syft_try_to_save_to_store(self, obj: SyftObject) -> None: if self.syft_node_uid is None or self.syft_client_verify_key is None: return elif obj.syft_node_uid is not None: @@ -924,7 +926,7 @@ def _syft_try_to_save_to_store(self, obj) -> 
None: if isinstance(res, SyftError): print(f"Failed to to store (arg) {obj} to store, {res}") - def _syft_prepare_obj_uid(self, obj) -> LineageID: + def _syft_prepare_obj_uid(self, obj: Any) -> LineageID: # We got the UID if isinstance(obj, (UID, LineageID)): return LineageID(obj.id) @@ -1045,7 +1047,11 @@ def syft_make_action_with_self( def syft_get_path(self) -> str: """Get the type path of the underlying object""" - if isinstance(self, AnyActionObject) and self.syft_internal_type: + if ( + isinstance(self, AnyActionObject) + and self.syft_internal_type + and self.syft_action_data_type is not None + ): # avoids AnyActionObject errors return f"{self.syft_action_data_type.__name__}" return f"{type(self).__name__}" @@ -1112,7 +1118,7 @@ def get(self, block: bool = False) -> Any: nested_res.syft_client_verify_key = res.syft_client_verify_key return nested_res - def as_empty(self): + def as_empty(self) -> ActionObject: id = self.id # TODO: fix if isinstance(id, LineageID): @@ -1128,7 +1134,7 @@ def from_path( syft_lineage_id: Optional[LineageID] = None, syft_client_verify_key: Optional[SyftVerifyKey] = None, syft_node_location: Optional[UID] = None, - ): + ) -> ActionObject: """Create an Action Object from a file.""" # relative from ...types.blob_storage import BlobFile @@ -1204,20 +1210,20 @@ def from_obj( return action_object @classmethod - def add_trace_hook(cls): + def add_trace_hook(cls) -> bool: return True # if trace_action_side_effect not in self._syft_pre_hooks__[HOOK_ALWAYS]: # self._syft_pre_hooks__[HOOK_ALWAYS].append(trace_action_side_effect) @classmethod - def remove_trace_hook(cls): + def remove_trace_hook(cls) -> bool: return True # self._syft_pre_hooks__[HOOK_ALWAYS].pop(trace_action_side_effct, None) def as_empty_data(self) -> ActionDataEmpty: return ActionDataEmpty(syft_internal_type=self.syft_internal_type) - def wait(self): + def wait(self) -> ActionObject: # relative from ...client.api import APIRegistry @@ -1232,6 +1238,7 @@ def wait(self): while not api.services.action.is_resolved(obj_id): time.sleep(1) + return self @staticmethod @@ -1260,7 +1267,8 @@ def obj_not_ready( @staticmethod def empty( - syft_internal_type: Type[Any] = NoneType, + # TODO: fix the mypy issue + syft_internal_type: Optional[Type[Any]] = None, id: Optional[UID] = None, syft_lineage_id: Optional[LineageID] = None, syft_resolved: Optional[bool] = True, @@ -1276,6 +1284,9 @@ def empty( Which LineageID to use for the ActionObject. 
Optional """ + syft_internal_type = ( + type(None) if syft_internal_type is None else syft_internal_type + ) empty = ActionDataEmpty(syft_internal_type=syft_internal_type) res = ActionObject.from_obj( id=id, @@ -1399,7 +1410,7 @@ def _syft_output_action_object( constructor = action_type_for_type(result) syft_twin_type = TwinMode.NONE - if context.result_twin_type is not None: + if context is not None and context.result_twin_type is not None: syft_twin_type = context.result_twin_type result = constructor( syft_twin_type=syft_twin_type, @@ -1426,11 +1437,13 @@ def _syft_get_attr_context(self, name: str) -> Any: # use the custom defined version context_self = self if not defined_on_self: - context_self = self.syft_action_data # type: ignore + context_self = self.syft_action_data return context_self - def _syft_attr_propagate_ids(self, context, name: str, result: Any) -> Any: + def _syft_attr_propagate_ids( + self, context: PreHookContext, name: str, result: Any + ) -> Any: """Patch the results with the syft_history_hash, node_uid, and result_id.""" if name in self._syft_dont_wrap_attrs(): return result @@ -1574,7 +1587,7 @@ def _base_wrapper(*args: Any, **kwargs: Any) -> Any: if inspect.ismethod(original_func) or inspect.ismethoddescriptor(original_func): debug("Running method: ", name) - def wrapper(_self: Any, *args: Any, **kwargs: Any): + def wrapper(_self: Any, *args: Any, **kwargs: Any) -> Any: return _base_wrapper(*args, **kwargs) wrapper = types.MethodType(wrapper, type(self)) @@ -1599,9 +1612,9 @@ def wrapper(_self: Any, *args: Any, **kwargs: Any): # third party return wrapper - def _syft_setattr(self, name, value): + def _syft_setattr(self, name: str, value: Any) -> Any: args = (name, value) - kwargs = {} + kwargs: dict = {} op_name = "__setattr__" def fake_func(*args: Any, **kwargs: Any) -> Any: @@ -1685,7 +1698,7 @@ def __setattr__(self, name: str, value: Any) -> Any: return value else: self._syft_setattr(name, value) - context_self = self.syft_action_data # type: ignore + context_self = self.syft_action_data return context_self.__setattr__(name, value) # def keys(self) -> KeysView[str]: @@ -1712,7 +1725,10 @@ def _repr_markdown_(self) -> str: else: data_repr_ = ( self.syft_action_data_cache._repr_markdown_() - if hasattr(self.syft_action_data_cache, "_repr_markdown_") + if ( + self.syft_action_data_cache is not None + and hasattr(self.syft_action_data_cache, "_repr_markdown_") + ) else self.syft_action_data_cache.__repr__() ) @@ -1842,10 +1858,10 @@ def __lshift__(self, other: Any) -> Any: def __rshift__(self, other: Any) -> Any: return self._syft_output_action_object(self.__rshift__(other)) - def __iter__(self): + def __iter__(self) -> Any: return self._syft_output_action_object(self.__iter__()) - def __next__(self): + def __next__(self) -> Any: return self._syft_output_action_object(self.__next__()) # r ops @@ -1891,14 +1907,14 @@ def __rrshift__(self, other: Any) -> Any: @migrate(ActionObject, ActionObjectV1) -def downgrade_actionobject_v2_to_v1(): +def downgrade_actionobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(ActionObjectV1, ActionObject) -def upgrade_actionobject_v1_to_v2(): +def upgrade_actionobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] @@ -1932,7 +1948,7 @@ def __int__(self) -> float: @migrate(AnyActionObject, AnyActionObjectV1) -def downgrade_anyactionobject_v2_to_v1(): +def downgrade_anyactionobject_v2_to_v1() -> list[Callable]: return [ drop("syft_action_data_str"), drop("syft_resolved"), 
@@ -1940,7 +1956,7 @@ def downgrade_anyactionobject_v2_to_v1(): @migrate(AnyActionObjectV1, AnyActionObject) -def upgrade_anyactionobject_v1_to_v2(): +def upgrade_anyactionobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_action_data_str", ""), make_set_default("syft_resolved", True), diff --git a/packages/syft/src/syft/service/action/action_permissions.py b/packages/syft/src/syft/service/action/action_permissions.py index e358de3be1a..1ce4552dc12 100644 --- a/packages/syft/src/syft/service/action/action_permissions.py +++ b/packages/syft/src/syft/service/action/action_permissions.py @@ -46,7 +46,9 @@ def permission_string(self) -> str: if self.permission in COMPOUND_ACTION_PERMISSION: return f"{self.permission.name}" else: - return f"{self.credentials.verify}_{self.permission.name}" + if self.credentials is not None: + return f"{self.credentials.verify}_{self.permission.name}" + return f"{self.permission.name}" def __repr__(self) -> str: if self.credentials is not None: diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 0c46b9871e9..056cb90e7af 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -4,7 +4,9 @@ from typing import Dict from typing import List from typing import Optional +from typing import Tuple from typing import Union +from typing import cast # third party import numpy as np @@ -13,6 +15,8 @@ from result import Result # relative +from ...abstract_node import AbstractNode +from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable from ...types.datetime import DateTime from ...types.syft_object import SyftObject @@ -22,6 +26,7 @@ from ..code.user_code import UserCode from ..code.user_code import execute_byte_code from ..context import AuthedServiceContext +from ..policy.policy import OutputPolicy from ..policy.policy import retrieve_from_db from ..response import SyftError from ..response import SyftSuccess @@ -57,6 +62,8 @@ def __init__(self, store: ActionStore) -> None: def np_array(self, context: AuthedServiceContext, data: Any) -> Any: if not isinstance(data, np.ndarray): data = np.array(data) + # cast here since we are sure that AuthedServiceContext has a node + context.node = cast(AbstractNode, context.node) np_obj = NumpyArrayObject( dtype=data.dtype, shape=data.shape, @@ -116,6 +123,7 @@ def _set( action_object = action_object.private else: action_object = action_object.mock + context.node = cast(AbstractNode, context.node) action_object.syft_point_to(context.node.id) return Ok(action_object) return result.err() @@ -202,7 +210,7 @@ def _get( context: AuthedServiceContext, uid: UID, twin_mode: TwinMode = TwinMode.PRIVATE, - has_permission=False, + has_permission: bool = False, resolve_nested: bool = True, ) -> Result[ActionObject, str]: """Get an object from the action store""" @@ -214,7 +222,7 @@ def _get( result = self.store.get( uid=uid, credentials=context.credentials, has_permission=has_permission ) - if result.is_ok(): + if result.is_ok() and context.node is not None: obj: Union[TwinObject, ActionObject] = result.ok() obj._set_obj_location_( context.node.id, @@ -255,6 +263,7 @@ def get_pointer( self, context: AuthedServiceContext, uid: UID ) -> Result[ActionObjectPointer, str]: """Get a pointer from the action store""" + context.node = cast(AbstractNode, context.node) result = self.store.get_pointer( uid=uid, credentials=context.credentials, 
node_uid=context.node.id ) @@ -291,6 +300,10 @@ def _user_code_execute( if not override_execution_permission: input_policy = code_item.input_policy + if input_policy is None: + if not code_item.output_policy_approved: + return Err("Execution denied: Your code is waiting for approval") + return Err(f"No input poliicy defined for user code: {code_item.id}") filtered_kwargs = input_policy.filter_kwargs( kwargs=kwargs, context=context, code_item_id=code_item.id ) @@ -302,7 +315,7 @@ def _user_code_execute( # update input policy to track any input state # code_item.input_policy = input_policy - if not override_execution_permission: + if not override_execution_permission and code_item.input_policy is not None: expected_input_kwargs = set() for _inp_kwarg in code_item.input_policy.inputs.values(): keys = _inp_kwarg.keys() @@ -382,7 +395,12 @@ def _user_code_execute( return Err(f"_user_code_execute failed. {e}") return Ok(result_action_object) - def set_result_to_store(self, result_action_object, context, output_policy=None): + def set_result_to_store( + self, + result_action_object: ActionObject, + context: AuthedServiceContext, + output_policy: Optional[OutputPolicy] = None, + ) -> Union[Result[ActionObject, str], SyftError]: result_id = result_action_object.id # result_blob_id = result_action_object.syft_blob_storage_entry_id @@ -396,7 +414,7 @@ def set_result_to_store(self, result_action_object, context, output_policy=None) output_readers = [] read_permission = ActionPermission.READ - + context.node = cast(AbstractNode, context.node) result_action_object._set_obj_location_( context.node.id, context.credentials, @@ -423,10 +441,14 @@ def set_result_to_store(self, result_action_object, context, output_policy=None) BlobStorageService ) - def store_permission(x): + def store_permission( + x: Optional[SyftVerifyKey] = None, + ) -> ActionObjectPermission: return ActionObjectPermission(result_id, read_permission, x) - def blob_permission(x): + def blob_permission( + x: Optional[SyftVerifyKey] = None, + ) -> ActionObjectPermission: return ActionObjectPermission(result_blob_id, read_permission, x) if len(output_readers) > 0: @@ -439,8 +461,11 @@ def blob_permission(x): return set_result def execute_plan( - self, plan, context: AuthedServiceContext, plan_kwargs: Dict[str, ActionObject] - ): + self, + plan: Any, + context: AuthedServiceContext, + plan_kwargs: Dict[str, ActionObject], + ) -> Union[Result[ActionObject, str], SyftError]: id2inpkey = {v.id: k for k, v in plan.inputs.items()} for plan_action in plan.actions: @@ -466,7 +491,9 @@ def execute_plan( result_id = plan.outputs[0].id return self._get(context, result_id, TwinMode.MOCK, has_permission=True) - def call_function(self, context: AuthedServiceContext, action: Action): + def call_function( + self, context: AuthedServiceContext, action: Action + ) -> Union[Result[ActionObject, str], Err]: # run function/class init _user_lib_config_registry = UserLibConfigRegistry.from_user(context.credentials) absolute_path = f"{action.path}.{action.op}" @@ -484,7 +511,7 @@ def set_attribute( context: AuthedServiceContext, action: Action, resolved_self: Union[ActionObject, TwinObject], - ): + ) -> Result[Union[TwinObject, ActionObject], str]: args, _ = resolve_action_args(action, context, self) if args.is_err(): return Err( @@ -536,7 +563,7 @@ def set_attribute( def get_attribute( self, action: Action, resolved_self: Union[ActionObject, TwinObject] - ): + ) -> Ok[Union[TwinObject, ActionObject]]: if isinstance(resolved_self, TwinObject): private_result = 
getattr(resolved_self.private.syft_action_data, action.op) mock_result = getattr(resolved_self.mock.syft_action_data, action.op) @@ -558,7 +585,7 @@ def call_method( context: AuthedServiceContext, action: Action, resolved_self: Union[ActionObject, TwinObject], - ): + ) -> Result[Union[TwinObject, Any], str]: if isinstance(resolved_self, TwinObject): # method private_result = execute_object( @@ -603,6 +630,7 @@ def execute( # relative from .plan import Plan + context.node = cast(AbstractNode, context.node) if action.action_type == ActionType.CREATEOBJECT: result_action_object = Ok(action.create_object) # print(action.create_object, "already in blob storage") @@ -612,7 +640,7 @@ def execute( for k, v in action.kwargs.items(): # transform lineage ids into ids kwarg_ids[k] = v.id - result_action_object: Result[ActionObject, Err] = usercode_service._call( + result_action_object = usercode_service._call( context, action.user_code_id, action.result_id, **kwarg_ids ) return result_action_object @@ -707,7 +735,7 @@ def exists( def resolve_action_args( action: Action, context: AuthedServiceContext, service: ActionService -): +) -> Tuple[Ok[Dict], bool]: has_twin_inputs = False args = [] for arg_id in action.args: @@ -724,7 +752,7 @@ def resolve_action_args( def resolve_action_kwargs( action: Action, context: AuthedServiceContext, service: ActionService -): +) -> Tuple[Ok[Dict], bool]: has_twin_inputs = False kwargs = {} for key, arg_id in action.kwargs.items(): @@ -761,7 +789,7 @@ def execute_callable( # stdlib # TODO: get from CMPTree is probably safer - def _get_target_callable(path: str, op: str): + def _get_target_callable(path: str, op: str) -> Any: path_elements = path.split(".") res = importlib.import_module(path_elements[0]) for p in path_elements[1:]: @@ -861,15 +889,15 @@ def execute_object( private_obj=result_action_object_private, mock_obj=result_action_object_mock, ) - elif twin_mode == twin_mode.PRIVATE: # type: ignore + elif twin_mode == twin_mode.PRIVATE: # type:ignore # twin private path - private_args = filter_twin_args(args, twin_mode=twin_mode) + private_args = filter_twin_args(args, twin_mode=twin_mode) # type:ignore[unreachable] private_kwargs = filter_twin_kwargs(kwargs, twin_mode=twin_mode) result = target_method(*private_args, **private_kwargs) result_action_object = wrap_result(action.result_id, result) - elif twin_mode == twin_mode.MOCK: # type: ignore + elif twin_mode == twin_mode.MOCK: # type:ignore # twin mock path - mock_args = filter_twin_args(args, twin_mode=twin_mode) + mock_args = filter_twin_args(args, twin_mode=twin_mode) # type:ignore[unreachable] mock_kwargs = filter_twin_kwargs(kwargs, twin_mode=twin_mode) target_method = getattr(unboxed_resolved_self, action.op, None) result = target_method(*mock_args, **mock_kwargs) diff --git a/packages/syft/src/syft/service/action/action_store.py b/packages/syft/src/syft/service/action/action_store.py index b939de6aada..d44e5181498 100644 --- a/packages/syft/src/syft/service/action/action_store.py +++ b/packages/syft/src/syft/service/action/action_store.py @@ -65,7 +65,7 @@ def __init__( self.root_verify_key = root_verify_key def get( - self, uid: UID, credentials: SyftVerifyKey, has_permission=False + self, uid: UID, credentials: SyftVerifyKey, has_permission: bool = False ) -> Result[SyftObject, str]: uid = uid.id # We only need the UID from LineageID or UID @@ -212,7 +212,10 @@ def has_permission(self, permission: ActionObjectPermission) -> bool: if not isinstance(permission.permission, ActionPermission): raise 
Exception(f"ObjectPermission type: {permission.permission} not valid") - if self.root_verify_key.verify == permission.credentials.verify: + if ( + permission.credentials is not None + and self.root_verify_key.verify == permission.credentials.verify + ): return True if ( @@ -241,7 +244,7 @@ def add_permission(self, permission: ActionObjectPermission) -> None: permissions.add(permission.permission_string) self.permissions[permission.uid] = permissions - def remove_permission(self, permission: ActionObjectPermission): + def remove_permission(self, permission: ActionObjectPermission) -> None: permissions = self.permissions[permission.uid] permissions.remove(permission.permission_string) self.permissions[permission.uid] = permissions @@ -250,7 +253,9 @@ def add_permissions(self, permissions: List[ActionObjectPermission]) -> None: for permission in permissions: self.add_permission(permission) - def migrate_data(self, to_klass: SyftObject, credentials: SyftVerifyKey): + def migrate_data( + self, to_klass: SyftObject, credentials: SyftVerifyKey + ) -> Result[bool, str]: has_root_permission = credentials == self.root_verify_key if has_root_permission: diff --git a/packages/syft/src/syft/service/action/action_types.py b/packages/syft/src/syft/service/action/action_types.py index bb9fa98504b..3fbe4b9c9f5 100644 --- a/packages/syft/src/syft/service/action/action_types.py +++ b/packages/syft/src/syft/service/action/action_types.py @@ -6,7 +6,7 @@ from ...util.logger import debug from .action_data_empty import ActionDataEmpty -action_types = {} +action_types: dict = {} def action_type_for_type(obj_or_type: Any) -> Type: diff --git a/packages/syft/src/syft/service/action/numpy.py b/packages/syft/src/syft/service/action/numpy.py index 45c778b58ab..dfd43907b92 100644 --- a/packages/syft/src/syft/service/action/numpy.py +++ b/packages/syft/src/syft/service/action/numpy.py @@ -1,10 +1,13 @@ # stdlib from typing import Any +from typing import Callable from typing import ClassVar from typing import Type +from typing import Union # third party import numpy as np +from typing_extensions import Self # relative from ...serde.serializable import serializable @@ -14,6 +17,7 @@ from ...types.transforms import drop from ...types.transforms import make_set_default from .action_object import ActionObject +from .action_object import ActionObjectPointer from .action_object import ActionObjectV1 from .action_object import BASE_PASSTHROUGH_ATTRS from .action_types import action_types @@ -28,7 +32,7 @@ # return domain_client.api.services.action.get(self.id).syft_action_data -class NumpyArrayObjectPointer: +class NumpyArrayObjectPointer(ActionObjectPointer): pass @@ -74,7 +78,9 @@ class NumpyArrayObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): # ) # return self == other - def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): + def __array_ufunc__( + self, ufunc: Any, method: str, *inputs: Any, **kwargs: Any + ) -> Union[Self, tuple[Self, ...]]: inputs = tuple( np.array(x.syft_action_data, dtype=x.dtype) if isinstance(x, NumpyArrayObject) @@ -95,14 +101,14 @@ def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): @migrate(NumpyArrayObject, NumpyArrayObjectV1) -def downgrade_numpyarrayobject_v2_to_v1(): +def downgrade_numpyarrayobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(NumpyArrayObjectV1, NumpyArrayObject) -def upgrade_numpyarrayobject_v1_to_v2(): +def upgrade_numpyarrayobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] @@ 
-132,14 +138,14 @@ def __float__(self) -> float: @migrate(NumpyScalarObject, NumpyScalarObjectV1) -def downgrade_numpyscalarobject_v2_to_v1(): +def downgrade_numpyscalarobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(NumpyScalarObjectV1, NumpyScalarObject) -def upgrade_numpyscalarobject_v1_to_v2(): +def upgrade_numpyscalarobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] @@ -166,14 +172,14 @@ class NumpyBoolObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): @migrate(NumpyBoolObject, NumpyBoolObjectV1) -def downgrade_numpyboolobject_v2_to_v1(): +def downgrade_numpyboolobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(NumpyBoolObjectV1, NumpyBoolObject) -def upgrade_numpyboolobject_v1_to_v2(): +def upgrade_numpyboolobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] diff --git a/packages/syft/src/syft/service/action/pandas.py b/packages/syft/src/syft/service/action/pandas.py index a466545b363..2dac63f3b46 100644 --- a/packages/syft/src/syft/service/action/pandas.py +++ b/packages/syft/src/syft/service/action/pandas.py @@ -1,5 +1,6 @@ # stdlib from typing import Any +from typing import Callable from typing import ClassVar from typing import Type @@ -56,14 +57,14 @@ def syft_is_property(self, obj: Any, method: str) -> bool: @migrate(PandasDataFrameObject, PandasDataFrameObjectV1) -def downgrade_pandasdataframeobject_v2_to_v1(): +def downgrade_pandasdataframeobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(PandasDataFrameObjectV1, PandasDataFrameObject) -def upgrade_pandasdataframeobject_v1_to_v2(): +def upgrade_pandasdataframeobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] @@ -105,14 +106,14 @@ def syft_is_property(self, obj: Any, method: str) -> bool: @migrate(PandasSeriesObject, PandasSeriesObjectV1) -def downgrade_pandasseriesframeobject_v2_to_v1(): +def downgrade_pandasseriesframeobject_v2_to_v1() -> list[Callable]: return [ drop("syft_resolved"), ] @migrate(PandasSeriesObjectV1, PandasSeriesObject) -def upgrade_pandasseriesframeobject_v1_to_v2(): +def upgrade_pandasseriesframeobject_v1_to_v2() -> list[Callable]: return [ make_set_default("syft_resolved", True), ] diff --git a/packages/syft/src/syft/service/action/plan.py b/packages/syft/src/syft/service/action/plan.py index 298f34693bc..a2a81b6f473 100644 --- a/packages/syft/src/syft/service/action/plan.py +++ b/packages/syft/src/syft/service/action/plan.py @@ -1,9 +1,11 @@ # stdlib import inspect +from typing import Any from typing import Callable from typing import Dict from typing import List from typing import Optional +from typing import Union # relative from ... 
import ActionObject @@ -45,17 +47,19 @@ def __repr__(self) -> str: return f"{obj_str}\n{inp_str}\n{act_str}\n{out_str}\n\n{plan_str}" - def remap_actions_to_inputs(self, **new_inputs): + def remap_actions_to_inputs(self, **new_inputs: Any) -> None: pass - def __call__(self, *args, **kwargs): + def __call__( + self, *args: Any, **kwargs: Any + ) -> Union[ActionObject, list[ActionObject]]: if len(self.outputs) == 1: return self.outputs[0] else: return self.outputs -def planify(func): +def planify(func: Callable) -> ActionObject: TraceResult.reset() ActionObject.add_trace_hook() TraceResult.is_tracing = True diff --git a/packages/syft/src/syft/service/blob_storage/service.py b/packages/syft/src/syft/service/blob_storage/service.py index d1ef73aab9f..1a6bb94c932 100644 --- a/packages/syft/src/syft/service/blob_storage/service.py +++ b/packages/syft/src/syft/service/blob_storage/service.py @@ -3,11 +3,13 @@ from typing import List from typing import Optional from typing import Union +from typing import cast # third party import requests # relative +from ...abstract_node import AbstractNode from ...serde.serializable import serializable from ...service.action.action_object import ActionObject from ...store.blob_storage import BlobRetrieval @@ -64,10 +66,8 @@ def mount_azure( account_key: str, container_name: str, bucket_name: str, - use_direct_connections=True, - ): - # stdlib - + use_direct_connections: bool = True, + ) -> Union[SyftSuccess, SyftError]: # TODO: fix arguments remote_name = f"{account_name}{container_name}" @@ -90,6 +90,9 @@ def mount_azure( if res.is_err(): return SyftError(message=res.value) remote_profile = res.ok() + + context.node = cast(AbstractNode, context.node) + seaweed_config = context.node.blob_storage_client.config # we cache this here such that we can use it when reading a file from azure # from the remote_name @@ -139,7 +142,9 @@ def mount_azure( @service_method( path="blob_storage.get_files_from_bucket", name="get_files_from_bucket" ) - def get_files_from_bucket(self, context: AuthedServiceContext, bucket_name: str): + def get_files_from_bucket( + self, context: AuthedServiceContext, bucket_name: str + ) -> Union[list, SyftError]: result = self.stash.find_all(context.credentials, bucket_name=bucket_name) if result.is_err(): return result @@ -159,7 +164,8 @@ def get_files_from_bucket(self, context: AuthedServiceContext, bucket_name: str) blob_file = ActionObject.empty() blob_file.syft_blob_storage_entry_id = bse.id blob_file.syft_client_verify_key = context.credentials - blob_file.syft_node_location = context.node.id + if context.node is not None: + blob_file.syft_node_location = context.node.id blob_file.reload_cache() blob_files.append(blob_file.syft_action_data) @@ -198,7 +204,7 @@ def read( obj: BlobStorageEntry = result.ok() if obj is None: return SyftError(message=f"No blob storage entry exists for uid: {uid}") - + context.node = cast(AbstractNode, context.node) with context.node.blob_storage_client.connect() as conn: res: BlobRetrieval = conn.read( obj.location, obj.type_, bucket_name=obj.bucket_name @@ -216,6 +222,7 @@ def read( def allocate( self, context: AuthedServiceContext, obj: CreateBlobStorageEntry ) -> Union[BlobDepositType, SyftError]: + context.node = cast(AbstractNode, context.node) with context.node.blob_storage_client.connect() as conn: secure_location = conn.allocate(obj) @@ -294,7 +301,7 @@ def mark_write_complete( ) if result.is_err(): return SyftError(message=f"{result.err()}") - + context.node = cast(AbstractNode, context.node) with 
context.node.blob_storage_client.connect() as conn: result = conn.complete_multipart_upload(obj, etags) @@ -310,6 +317,9 @@ def delete( if obj is None: return SyftError(message=f"No blob storage entry exists for uid: {uid}") + + context.node = cast(AbstractNode, context.node) + try: with context.node.blob_storage_client.connect() as conn: file_unlinked_result = conn.delete(obj.location) diff --git a/packages/syft/src/syft/service/code/code_parse.py b/packages/syft/src/syft/service/code/code_parse.py index 6e985e35010..1f04fb786c7 100644 --- a/packages/syft/src/syft/service/code/code_parse.py +++ b/packages/syft/src/syft/service/code/code_parse.py @@ -5,7 +5,7 @@ class GlobalsVisitor(ast.NodeVisitor): - def generic_visit(self, node): + def generic_visit(self, node: Any) -> None: if isinstance(node, ast.Global): raise Exception("No Globals allowed!") ast.NodeVisitor.generic_visit(self, node) @@ -13,10 +13,10 @@ def generic_visit(self, node): class LaunchJobVisitor(ast.NodeVisitor): def visit_Module(self, node: Module) -> Any: - self.nested_calls = [] + self.nested_calls: list = [] self.generic_visit(node) - def visit_Call(self, node): + def visit_Call(self, node: Any) -> None: if isinstance(node.func, ast.Attribute): if ( getattr(node.func.value, "id", None) == "domain" diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index a449845115d..6954e2c0dec 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -18,11 +18,13 @@ from typing import Any from typing import Callable from typing import Dict +from typing import Generator from typing import List from typing import Optional from typing import Tuple from typing import Type from typing import Union +from typing import cast from typing import final # third party @@ -31,6 +33,7 @@ from typing_extensions import Self # relative +from ...abstract_node import AbstractNode from ...abstract_node import NodeType from ...client.api import APIRegistry from ...client.api import NodeIdentity @@ -46,6 +49,7 @@ from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import SyftHashableObject from ...types.syft_object import SyftObject from ...types.transforms import TransformContext @@ -70,7 +74,6 @@ from ..policy.policy import ExactMatch from ..policy.policy import InputPolicy from ..policy.policy import OutputPolicy -from ..policy.policy import Policy from ..policy.policy import SingleExecutionExactOutput from ..policy.policy import SubmitUserPolicy from ..policy.policy import UserPolicy @@ -137,10 +140,10 @@ class UserCodeStatusCollection(SyftHashableObject): def __init__(self, status_dict: Dict): self.status_dict = status_dict - def __repr__(self): + def __repr__(self) -> str: return str(self.status_dict) - def _repr_html_(self): + def _repr_html_(self) -> str: string = f"""