diff --git a/notebooks/helm/Untitled.ipynb b/notebooks/helm/Untitled.ipynb deleted file mode 100644 index 339179d2e33..00000000000 --- a/notebooks/helm/Untitled.ipynb +++ /dev/null @@ -1,283 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "e85480a1", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ed90f23e", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5aa49887", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ffdcc5b2", - "metadata": {}, - "outputs": [], - "source": [ - "client.start_workers(n=5, configs=[{ram=16, cpu=8}.....], config=(ram=16, cpu=8))\n", - "\n", - "\n", - "\n", - "\n", - "def execute_bytecode():\n", - "    context\n", - "    \n", - "    def LocalDomainClient():\n", - "        \n", - "        \n", - "        def logs(str):\n", - "            context.write.logs(str)\n", - "    \n", - "    \n", - "    byte_code:str\n", - "    \n", - "    \n", - "    def my_syft_function():\n", - "        \n", - "        print(locals(), globals())\n", - "    \n", - "    execute_byte_code(byte_code, globals={domain: LocalDomainClient()})\n", - "\n", - "\n", - "\n", - "@syft_function(ram=16, cpus=8)\n", - "def x():\n", - "    pass\n", - "\n", - "METHOD 1 vs METHOD 3\n", - "1. building container\n", - "2. spinning up the container that does the work\n", - "3. get inputs into container, get outputs out\n", - "4. get some controls into the container\n", - "5. monitoring (logs, cpu usage, ram usage)\n", - "6. UX\n", - "\n", - "\n", - "The work that needs to be done to complete 1 vs 3 is completely different; is it a good stepping stone?\n", - "\n", - "1.\n", - "scenario 1: big container with Domain + syft_function specific libs\n", - "scenario 3: small container with only syft function specific libs\n", - "\n", - "2. spinning up the container that does the work\n", - "scenario 1: not spinning up a container\n", - "scenario 3: spinning up with k8s vs spinning up in docker\n", - "\n", - "3. get inputs into container, get outputs out\n", - "inputs: in scenario 1 you would pass in credentials, in scenario 3 we would use single-use URLs\n", - "outputs: in scenario 1 you would pass in credentials, in scenario 3 single-use URLs\n", - "\n", - "4. get some controls into container (launch_job / write logs / set progress)\n", - "Can be done naively by basically including a context object into the container.\n", - "In scenario 1 we are not using a worker container (only manager container), so this is essentially already\n", - "implemented. In scenario 3 we have to implement this\n", - "\n", - "5. monitoring (logs, cpu usage, ram usage)\n", - "1: already implemented to some extent\n", - "3a/3b: docker vs kubernetes\n", - "\n", - "6. 
UX\n", - "scenario 1: spin up containers with fixed image and fixed number of workers\n", - " \n", - "scenario 3: spin up worker managers: fixed number of worker managers, but possibly flexible number of workers\n", - "scenario 3: in scenario 3 the hardware would be decoupled from the image\n", - "scenario 3: in scenario 3 we would possibly have some kind of automatic syft function placement (on which hardware) \n", - " \n", - " \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4457efb8", - "metadata": {}, - "outputs": [], - "source": [ - "n=5,9,13" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4cca82a3", - "metadata": {}, - "outputs": [], - "source": [ - "Question:\n", - " \n", - "do we want work on worker managers to be always scheduled on the same hardware OR\n", - "\n", - "should it be scheduled with hardware constraints, but not necessarily on the same hardware. This means that you\n", - "will always meet constraints on a function level, but you have no control over distribution of the functions over\n", - "the hardware\n", - "\n", - " \n" - ] - }, - { - "cell_type": "markdown", - "id": "d24ee09c", - "metadata": {}, - "source": [ - "3A" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "de4f304f", - "metadata": {}, - "outputs": [], - "source": [ - "client.start_worker_managers(n=5, configs=[{ram=16, cpu=8}.....],)\n", - "\n", - "\n", - "image = \"\"\n", - "client.upload(image)\n", - "\n", - "syft_function(image=\"\")\n", - "def abc():\n", - "    pass\n", - "\n", - "\n", - "def main_job():\n", - "    for n in [5,9,13]:\n", - "        if n in [5,9]:\n", - "            abc(worker=1)\n", - "        if n == 13:\n", - "            abc(worker=2)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "521192a0", - "metadata": {}, - "outputs": [], - "source": [ - "worker = client.jobs[\"abc\"].worker" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9cde3100", - "metadata": {}, - "outputs": [], - "source": [ - "worker.cpu_history()" - ] - }, - { - "cell_type": "markdown", - "id": "8cfbd697", - "metadata": {}, - "source": [ - "3B" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "007ba97d", - "metadata": {}, - "outputs": [], - "source": [ - "client.allocate_resources(n=5, configs=[{ram=16, cpu=8}.....],\n", - "                          config=(ram=16, cpu=8))\n", - "\n", - "image = \"\"\n", - "client.upload(image)\n", - "\n", - "syft_function(image=\"\")\n", - "def abc():\n", - "    pass\n", - "\n", - "\n", - "def main_job():\n", - "    for n in [5,9,13]:\n", - "        if n in [5,9]:\n", - "            abc(ram=16)\n", - "        if n == 13:\n", - "            abc(ram=32)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4bc3b5e5", - "metadata": {}, - "outputs": [], - "source": [ - "worker = client.jobs[\"abc\"].worker" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "43fac896", - "metadata": {}, - "outputs": [], - "source": [ - "worker.cpu_history()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - 
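The 3A/3B sketches and the scheduling question above both reduce to the same mechanism: matching a function-level resource request (ram/cpu) against whatever worker hardware is available. Below is a minimal, self-contained sketch of that constraint-based placement idea; `WorkerConfig` and `place_job` are hypothetical names used only for illustration and are not part of the Syft API shown in these notebooks.

```python
from dataclasses import dataclass

@dataclass
class WorkerConfig:
    # Hypothetical description of one worker's hardware (scenario 3B style).
    name: str
    ram: int  # GB
    cpu: int  # cores

def place_job(ram: int, cpu: int, pool: list) -> WorkerConfig:
    """Return any worker that satisfies the function-level constraints.

    This is the 'constraints, not pinning' option from the question above:
    the caller states ram/cpu requirements but has no control over which
    machine actually runs the job.
    """
    for worker in pool:
        if worker.ram >= ram and worker.cpu >= cpu:
            return worker
    raise RuntimeError("no worker satisfies the requested constraints")

pool = [WorkerConfig("w1", ram=16, cpu=8), WorkerConfig("w2", ram=32, cpu=8)]
print(place_job(ram=16, cpu=8, pool=pool).name)  # any 16GB+ worker, e.g. w1
print(place_job(ram=32, cpu=8, pool=pool).name)  # only w2 qualifies
```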
"toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/consumer node.ipynb b/notebooks/helm/consumer node.ipynb deleted file mode 100644 index 0448e1aaa35..00000000000 --- a/notebooks/helm/consumer node.ipynb +++ /dev/null @@ -1,134 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "a196017f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft import ActionObject\n", - "from syft import syft_function, syft_function_single_use\n", - "from time import sleep\n", - "from syft.service.queue.zmq_queue import ZMQQueueConfig, ZMQClientConfig" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "830cb5cf", - "metadata": {}, - "outputs": [], - "source": [ - "worker_node = sy.orchestra.launch(name=\"worker-node-helm1\", dev_mode=True,\n", - " reset=True,\n", - " n_consumers=3,\n", - " create_producer=False,\n", - " queue_port=62249)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "cf3a5520", - "metadata": {}, - "outputs": [], - "source": [ - "# worker_node.python_node.queue_manager.producers" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "88fb4211", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'tcp://localhost:62249'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "worker_node.python_node.queue_manager.consumers[\"api_call\"][0].address" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d1f13284", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n" - ] - } - ], - "source": [ - "#make sure to run this\n", - "sleep(1000000)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "146b67f5", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/dataset_ux.ipynb b/notebooks/helm/dataset_ux.ipynb deleted file mode 100644 index 9fde2a68205..00000000000 --- a/notebooks/helm/dataset_ux.ipynb +++ /dev/null @@ -1,1379 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: 
warning: PWD environment variable doesn't match current directory; pwd = /home/teo/OpenMined/PySyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft.store.blob_storage import BlobStorageConfig, BlobStorageClientConfig\n", - "from syft.store.blob_storage.seaweedfs import SeaweedFSClient, SeaweedFSClientConfig\n", - "from syft import ActionObject\n", - "from syft.service.action.action_data_empty import ActionFileData\n", - "from syft.service.queue.zmq_queue import ZMQQueueConfig, ZMQClientConfig\n", - "from collections import defaultdict" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "INITIALIZING CONSUMER\n", - "ABCDEFINITIALIZING CONSUMER\n", - "\n", - "ABCDEF\n", - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\", dev_mode=True,\n", - " reset=True,\n", - " n_consumers=4,\n", - " create_producer=True)\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```bash\n", - "docker run --entrypoint /bin/sh -p 8333:8333 -p 8888:8888 chrislusf/seaweedfs -c \"echo 's3.configure -access_key admin -secret_key admin -user iam -actions Read,Write,List,Tagging,Admin -apply' | weed shell > /dev/null 2>&1 & weed server -s3 -s3.port=8333 -master.volumeSizeLimitMB=2048\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "blob_config = BlobStorageConfig(client_type=SeaweedFSClient,\n", - " client_config=SeaweedFSClientConfig(host=\"http://0.0.0.0\",\n", - " port=\"8333\",\n", - " access_key=\"admin\",\n", - " secret_key=\"admin\",\n", - " default_bucket_name=\"test_bucket\",\n", - " region=\"us-east-1\")\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "node.python_node.init_blob_storage(blob_config)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "node.python_node.blob_storage_client.connect().client" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# client.mount(\n", - "# account_name=,\n", - "# account_key=,\n", - "# container_name=,\n", - "# remote_name=,\n", - "# bucket_name=,\n", - "# )" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "b'{\"end_time\":1697805356.8281186,\"error\":\"I1020 12:35:56.387512 masterclient.go:210 master localhost:9333 redirected to leader 172.17.0.2:9333\\\\n.I1020 12:35:56.606281 masterclient.go:210 master localhost:9333 redirected to leader 172.17.0.2:9333\\\\n.create bucket under /buckets\\\\ncreated bucket azurebucket\\\\nI1020 12:35:56.735374 masterclient.go:210 master localhost:9333 redirected to leader 172.17.0.2:9333\\\\n.error: pull metadata: dir /buckets/azurebucket is not empty\\\\n\",\"key\":\"3ab48a65\",\"process_time\":0.4712975025177002,\"report\":\"master: localhost:9333 filers: [172.17.0.2:8888]\\\\n> > master: localhost:9333 filers: [172.17.0.2:8888]\\\\n> > master: localhost:9333 filers: [172.17.0.2:8888]\\\\n> > \",\"returncode\":0,\"start_time\":1697805356.356821}\\n'\n", - "{'ResponseMetadata': {'RequestId': '1697805356836708549', 'HostId': '', 'HTTPStatusCode': 200, 'HTTPHeaders': {'accept-ranges': 'bytes', 'content-length': '719', 'content-type': 'application/xml', 'server': 'SeaweedFS S3', 'x-amz-request-id': '1697805356836708549', 'date': 'Fri, 20 Oct 2023 12:35:56 GMT'}, 'RetryAttempts': 0}, 'IsTruncated': False, 'Marker': '', 'Contents': [{'Key': '0266f72a-edae-4812-8ce2-ea2a57b52529.txt', 'LastModified': datetime.datetime(2023, 9, 13, 12, 8, 7, tzinfo=tzutc()), 'ETag': '\"d41d8cd98f00b204e9800998ecf8427e-0\"', 'Size': 13, 'StorageClass': 'STANDARD', 'Owner': 
{'ID': '0'}}, {'Key': 'example.txt', 'LastModified': datetime.datetime(2023, 9, 14, 16, 20, 32, tzinfo=tzutc()), 'ETag': '\"d41d8cd98f00b204e9800998ecf8427e-0\"', 'Size': 13, 'StorageClass': 'STANDARD', 'Owner': {'ID': '0'}}], 'Name': 'azurebucket', 'Prefix': '', 'MaxKeys': 10000}\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "azurebucket\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Mounting Azure Successful!

" - ], - "text/plain": [ - "SyftSuccess: Mounting Azure Successful!" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.api.services.blob_storage.mount_azure(\n", - " account_name='azure',\n", - " account_key='t7Y5enmCiG2k8o5rvItSn3Ak9tHaVTXQUTn1LQ74jQ1g5bjvs0ui/O2FXJeDaCsfI6xMPz0txtoH+AStss/Xmg==',\n", - " container_name='manual-test',\n", - " remote_name='azure',\n", - " bucket_name='azurebucket',\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[syft.types.blob_storage.BlobStorageEntry, syft.types.blob_storage.BlobStorageEntry]\n" - ] - } - ], - "source": [ - "blob_files = client.api.services.blob_storage.get_files_from_bucket(bucket_name='azurebucket')" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "```\n", - "```python\n", - "class BlobFile:\n", - " id: str = 5d07536ca0084074a7b44aa9c76a5a42\n", - "\n", - "```" - ], - "text/plain": [ - "Pointer:\n", - "syft.types.blob_storage.BlobFile" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "blob_files[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "obj = sy.ActionObject.from_obj(blob_files)" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "```\n", - "```python\n", - "class BlobFile:\n", - " id: str = 5d07536ca0084074a7b44aa9c76a5a42\n", - "\n", - "```" - ], - "text/plain": [ - "Pointer:\n", - "syft.types.blob_storage.BlobFile" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "obj[0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### TODO: client.mount(account_name=..., account_key=..., ) -> [BlobStorageEntry]\n", - "#### TODO: client.create_blob_files([BlobStorageEntry])" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "ename": "AssertionError", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m/home/teo/OpenMined/PySyft/notebooks/helm/dataset_ux.ipynb Cell 11\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> 1\u001b[0m \u001b[39massert\u001b[39;00m \u001b[39mFalse\u001b[39;00m\n", - "\u001b[0;31mAssertionError\u001b[0m: " - ] - } - ], - "source": [ - "assert False" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "{'ResponseMetadata': {'RequestId': '1697804467822687223', 'HostId': '', 'HTTPStatusCode': 200, 'HTTPHeaders': {'accept-ranges': 'bytes', 'content-length': '238', 'content-type': 'application/xml', 'server': 'SeaweedFS S3', 'x-amz-request-id': '1697804467822687223', 'date': 'Fri, 20 Oct 2023 12:21:07 GMT'}, 'RetryAttempts': 0}, 'IsTruncated': False, 'Marker': '', 'Name': 'azurebucket', 'Prefix': '', 'MaxKeys': 10000}" - ], - "text/plain": [ - "{'ResponseMetadata': {'RequestId': '1697804467822687223',\n", - " 'HostId': '',\n", - " 'HTTPStatusCode': 200,\n", - " 
'HTTPHeaders': {'accept-ranges': 'bytes',\n", - " 'content-length': '238',\n", - " 'content-type': 'application/xml',\n", - " 'server': 'SeaweedFS S3',\n", - " 'x-amz-request-id': '1697804467822687223',\n", - " 'date': 'Fri, 20 Oct 2023 12:21:07 GMT'},\n", - " 'RetryAttempts': 0},\n", - " 'IsTruncated': False,\n", - " 'Marker': '',\n", - " 'Name': 'azurebucket',\n", - " 'Prefix': '',\n", - " 'MaxKeys': 10000}" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "node.python_node.blob_storage_client.connect().client.list_objects(Bucket='azurebucket')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "{'ResponseMetadata': {'RequestId': '1697804408254563768', 'HostId': '', 'HTTPStatusCode': 200, 'HTTPHeaders': {'accept-ranges': 'bytes', 'content-length': '238', 'content-type': 'application/xml', 'server': 'SeaweedFS S3', 'x-amz-request-id': '1697804408254563768', 'date': 'Fri, 20 Oct 2023 12:20:08 GMT'}, 'RetryAttempts': 0}, 'IsTruncated': False, 'Marker': '', 'Name': 'azurebucket', 'Prefix': '', 'MaxKeys': 10000}" - ], - "text/plain": [ - "{'ResponseMetadata': {'RequestId': '1697804408254563768',\n", - " 'HostId': '',\n", - " 'HTTPStatusCode': 200,\n", - " 'HTTPHeaders': {'accept-ranges': 'bytes',\n", - " 'content-length': '238',\n", - " 'content-type': 'application/xml',\n", - " 'server': 'SeaweedFS S3',\n", - " 'x-amz-request-id': '1697804408254563768',\n", - " 'date': 'Fri, 20 Oct 2023 12:20:08 GMT'},\n", - " 'RetryAttempts': 0},\n", - " 'IsTruncated': False,\n", - " 'Marker': '',\n", - " 'Name': 'azurebucket',\n", - " 'Prefix': '',\n", - " 'MaxKeys': 10000}" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "node.python_node.blob_storage_client.connect().client.list_objects(Bucket='azurebucket')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "[{'Key': '0266f72a-edae-4812-8ce2-ea2a57b52529.txt', 'LastModified': datetime.datetime(2023, 9, 13, 12, 8, 7, tzinfo=tzutc()), 'ETag': '"e1de79d74ebc"', 'Size': 13, 'StorageClass': 'STANDARD', 'Owner': {'ID': '0'}}, {'Key': 'example.txt', 'LastModified': datetime.datetime(2023, 9, 14, 16, 20, 32, tzinfo=tzutc()), 'ETag': '"f5a69a6dae5e"', 'Size': 13, 'StorageClass': 'STANDARD', 'Owner': {'ID': '0'}}, {'Key': 'test/example.txt', 'LastModified': datetime.datetime(2023, 10, 20, 9, 11, 35, tzinfo=tzutc()), 'ETag': '"59ca0efa9f5633cb0371bbc0355478d8"', 'Size': 13, 'StorageClass': 'STANDARD', 'Owner': {'ID': '0'}}]" - ], - "text/plain": [ - "[{'Key': '0266f72a-edae-4812-8ce2-ea2a57b52529.txt',\n", - " 'LastModified': datetime.datetime(2023, 9, 13, 12, 8, 7, tzinfo=tzutc()),\n", - " 'ETag': '\"e1de79d74ebc\"',\n", - " 'Size': 13,\n", - " 'StorageClass': 'STANDARD',\n", - " 'Owner': {'ID': '0'}},\n", - " {'Key': 'example.txt',\n", - " 'LastModified': datetime.datetime(2023, 9, 14, 16, 20, 32, tzinfo=tzutc()),\n", - " 'ETag': '\"f5a69a6dae5e\"',\n", - " 'Size': 13,\n", - " 'StorageClass': 'STANDARD',\n", - " 'Owner': {'ID': '0'}},\n", - " {'Key': 'test/example.txt',\n", - " 'LastModified': datetime.datetime(2023, 10, 20, 9, 11, 35, tzinfo=tzutc()),\n", - " 'ETag': '\"59ca0efa9f5633cb0371bbc0355478d8\"',\n", - " 'Size': 13,\n", - " 'StorageClass': 'STANDARD',\n", - " 'Owner': {'ID': '0'}}]" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } 
- ], - "source": [ - "objects" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "[13, 13, 13]" - ], - "text/plain": [ - "[13, 13, 13]" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file_sizes = [object['Size'] for object in objects]\n", - "file_sizes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "file_paths = [object['Key'] for object in objects]\n", - "from syft.types.blob_storage import SecureFilePathLocation, BlobStorageEntry, BlobFileType\n", - "secure_file_paths = [SecureFilePathLocation(path=file_path) for file_path in file_paths]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "47f8c8f3db3a30695a28e4a51e44916669ac3d111924cb614181c64b2c3b8323" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.credentials.verify_key" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "bse_list = [BlobStorageEntry(location=sfp, uploaded_by=client.credentials.verify_key, file_size=13, type_=BlobFileType, bucket_name=\"azurebucket\") for sfp in secure_file_paths]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "for bse in bse_list:\n", - " blob_storage = node.python_node.get_service(\"BlobStorageService\")\n", - " blob_storage.stash.set(obj=bse, credentials=client.credentials)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
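The registration flow in the cells just above reads more easily as one sequence: list the objects in the mounted bucket, wrap each key in a SecureFilePathLocation, and stash one BlobStorageEntry per file so Syft can later hand out blob-backed pointers. The sketch below strings together the same calls used in this notebook, assuming the `node` and `client` objects from the cells above; the only deviation is that it takes the file size from the S3 listing instead of the hard-coded 13.

```python
from syft.types.blob_storage import SecureFilePathLocation, BlobStorageEntry, BlobFileType

# List every object in the mounted bucket via the SeaweedFS S3 client.
s3 = node.python_node.blob_storage_client.connect().client
objects = s3.list_objects(Bucket="azurebucket")["Contents"]

# Register one BlobStorageEntry per object in the blob storage stash.
blob_storage = node.python_node.get_service("BlobStorageService")
for obj in objects:
    entry = BlobStorageEntry(
        location=SecureFilePathLocation(path=obj["Key"]),
        uploaded_by=client.credentials.verify_key,
        file_size=obj["Size"],  # the notebook hard-codes 13; the listed size is used here
        type_=BlobFileType,
        bucket_name="azurebucket",
    )
    blob_storage.stash.set(obj=entry, credentials=client.credentials)
```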
\n", - "
\n", - "
\n", - "

BlobStorageEntry List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.types.blob_storage.BlobStorageEntry,\n", - " syft.types.blob_storage.BlobStorageEntry,\n", - " syft.types.blob_storage.BlobStorageEntry]" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "blob_storage.stash.get_all(credentials=client.credentials).ok()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "syft.node.credentials.SyftVerifyKey" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(client.credentials.verify_key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.id" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.api.node_uid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "action_object = sy.ActionObject.empty()\n", - "\n", - "action_object.syft_blob_storage_entry_id = bse_list[0].id\n", - "action_object.syft_client_verify_key = client.credentials.verify_key \n", - "action_object.syft_node_location = client.id" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class ActionDataEmpty:\n", - " id: str = 1954a89a6e174b5ea8cee97246fd88d4\n", - "\n", - "```" - ], - "text/plain": [ - "ActionDataEmpty UID: 1954a89a6e174b5ea8cee97246fd88d4 <>" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "action_object.syft_action_data_cache" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "syft.service.action.action_data_empty.ActionDataEmpty" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(action_object.syft_action_data_cache)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "action_object.reload_cache()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "http://0.0.0.0:8333/azurebucket/0266f72a-edae-4812-8ce2-ea2a57b52529.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=admin%2F20231020%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20231020T095949Z&X-Amz-Expires=1800&X-Amz-SignedHeaders=host&X-Amz-Signature=567949c25c2b95b2269e9ac331eeeba5b5b71c9a180d3d4096645c1d6214fc35\n" - ] - }, - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "```\n", - "b'Hello, World!'" - ], - "text/plain": [ - "Pointer:\n", - "b'Hello, World!'" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res = next(action_object.iter_lines())\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"http://0.0.0.0:8333/azurebucket/0266f72a-edae-4812-8ce2-ea2a57b52529.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=admin%2F20231020%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20231020T095012Z&X-Amz-Expires=1800&X-Amz-SignedHeaders=host&X-Amz-Signature=c1b86f6c1a4c2bf0f22864286ed7b5551419fc0378a1c93f6307f331b86cfde2\n" - ] - }, - { - "data": { - "text/plain": [ - "b'Hello, World!'" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "action_object.syft_action_data_cache.read()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "action_objects = []" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client.get_bucket_files(bucket_name='azurebucket') # -> List of BlobFile" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/helm/debug_nested_syft_functions.ipynb b/notebooks/helm/debug_nested_syft_functions.ipynb deleted file mode 100644 index 3cc53084740..00000000000 --- a/notebooks/helm/debug_nested_syft_functions.ipynb +++ /dev/null @@ -1,435 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "a196017f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft import ActionObject\n", - "from syft import syft_function, syft_function_single_use\n", - "from time import sleep\n", - "import os\n", - "import psutil\n", - "import inspect" - ] - }, - { - "cell_type": "markdown", - "id": "cb2d07de", - "metadata": {}, - "source": [ - "with server" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "9b31c627", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Data Migrated to latest version !!!\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\", dev_mode=True,\n", - " reset=True, \n", - " n_consumers=3,\n", - " create_producer=True,\n", - " queue_port=3322)\n", - " \n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "507740d2", - "metadata": {}, - "outputs": [], - "source": [ - "res = client.register(name=\"a\", email=\"aa@b.org\", password=\"c\", password_verify=\"c\")" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "0c33d096", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], - "source": [ - "ds_client = node.login(email=\"aa@b.org\", password=\"c\")" - ] - }, - { - "cell_type": "markdown", - "id": "176addfb", - "metadata": {}, - "source": [ - "setup: compute train-test overlap between a very large train set and a smaller test set. Small test is still to big for memory, so we split it into 54 parts. We keep 1 of those parts in memory. We dont keep the train set in memory, but read and compare with 1/54 parts line by line. Each part takes ~30 hours, but we can run 54 processes in parallel." - ] - }, - { - "cell_type": "markdown", - "id": "a0cea81b", - "metadata": {}, - "source": [ - "# Setup syft functions" - ] - }, - { - "cell_type": "markdown", - "id": "da2b114a", - "metadata": {}, - "source": [ - "## Dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "83307a2f", - "metadata": {}, - "outputs": [], - "source": [ - "x = ActionObject.from_obj([1, 2])\n", - "x_ptr = x.send(ds_client)" - ] - }, - { - "cell_type": "markdown", - "id": "31bbb3ff", - "metadata": {}, - "source": [ - "## Batch function" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "5d2fd248", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function()\n", - "def process_batch(batch):\n", - " # takes 30 hours normally\n", - " print(f\"starting batch {batch}\")\n", - " from time import sleep\n", - " sleep(1)\n", - " print(\"done\")\n", - " return batch+1" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "9ba22655", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_client.code.submit(process_batch)" - ] - }, - { - "cell_type": "markdown", - "id": "01319f1f", - "metadata": {}, - "source": [ - "## Main function" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "ca1b95ee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function_single_use(x=x_ptr)\n", - "def process_all(domain, x):\n", - " jobs = []\n", - " print(\"Launching jobs\")\n", - " for elem in x:\n", - " # We inject a domain object in the scope\n", - " batch_job = domain.launch_job(process_batch, batch=elem)\n", - " jobs += [batch_job]\n", - " print(\"starting aggregation\")\n", - " print(\"Done\")\n", - "# results = [x.wait().get() for x in jobs]\n", - " return 3\n", - "# return sum(results)" - ] - }, - { - "cell_type": "markdown", - "id": "1e77c5db", - "metadata": {}, - "source": [ - "# Approve & run" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "0ab572f9", - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request f19e660afac5467d8b9a3401978d68da changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request f19e660afac5467d8b9a3401978d68da changes applied" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "r = ds_client.code.request_code_execution(process_all)\n", - "client.requests[-1].approve(approve_nested=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "375ed965", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n", - "28/11/23 16:53:22 FUNCTION LOG (e4f4db893879494d9b0b570f728753fe): Launching jobs\n", - "28/11/23 16:53:22 FUNCTION LOG (e4f4db893879494d9b0b570f728753fe): starting aggregation\n", - "28/11/23 16:53:22 FUNCTION LOG (e4f4db893879494d9b0b570f728753fe): Done\n" - ] - } - ], - "source": [ - "job = ds_client.code.process_all(x=x_ptr, blocking=False)\n", - "sleep(5)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "2db04ddd", - "metadata": {}, - "outputs": [], - "source": [ - "# job.subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "c3d71844", - "metadata": {}, - "outputs": [], - "source": [ - "# job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "7d8a2f95", - "metadata": {}, - "outputs": [], - "source": [ - "# client.jobs[0].subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "cc0db669", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "28/11/23 16:53:26 FUNCTION LOG (38062e3886b64aa59f5bcab139ed4544): starting batch 1\n", - "28/11/23 16:53:26 FUNCTION LOG (97293057bba2494a83be5cbd1553e8cf): starting batch 2\n", - "28/11/23 16:53:27 FUNCTION LOG (38062e3886b64aa59f5bcab139ed4544): done\n", - "28/11/23 16:53:27 FUNCTION LOG (97293057bba2494a83be5cbd1553e8cf): done\n" - ] - }, - { - "data": { - "text/plain": [ - "Pointer:\n", - "None" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "5bf0974f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "5" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sum([j.wait().get() for j in job.subjobs])" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "948d9162", - "metadata": {}, - "outputs": [], - "source": [ - "node.land()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/docker helm syft.ipynb b/notebooks/helm/docker-helm-syft.ipynb similarity index 93% rename from 
notebooks/helm/docker helm syft.ipynb rename to notebooks/helm/docker-helm-syft.ipynb index 739a1cc6f6d..e6d32b32774 100644 --- a/notebooks/helm/docker helm syft.ipynb +++ b/notebooks/helm/docker-helm-syft.ipynb @@ -41,27 +41,10 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "7cda8c72", "metadata": {}, - "outputs": [ - { - "ename": "ConnectionError", - "evalue": "Failed to fetch http://localhost:8080/api/v2/metadata. Response returned with code 502", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mConnectionError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[2], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m client \u001b[38;5;241m=\u001b[39m \u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlogin\u001b[49m\u001b[43m(\u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhttp://localhost:8080\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43memail\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43minfo@openmined.org\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpassword\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mchangethis\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:922\u001b[0m, in \u001b[0;36mlogin\u001b[0;34m(email, url, node, port, password, cache)\u001b[0m\n\u001b[1;32m 913\u001b[0m \u001b[38;5;129m@instrument\u001b[39m\n\u001b[1;32m 914\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mlogin\u001b[39m(\n\u001b[1;32m 915\u001b[0m email: \u001b[38;5;28mstr\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 920\u001b[0m cache: \u001b[38;5;28mbool\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 921\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m SyftClient:\n\u001b[0;32m--> 922\u001b[0m _client \u001b[38;5;241m=\u001b[39m \u001b[43mconnect\u001b[49m\u001b[43m(\u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnode\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mport\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mport\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 924\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(_client, SyftError):\n\u001b[1;32m 925\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m _client\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:864\u001b[0m, in \u001b[0;36mconnect\u001b[0;34m(url, node, port)\u001b[0m\n\u001b[1;32m 861\u001b[0m url\u001b[38;5;241m.\u001b[39mset_port(\u001b[38;5;28mint\u001b[39m(port))\n\u001b[1;32m 862\u001b[0m connection \u001b[38;5;241m=\u001b[39m HTTPConnection(url\u001b[38;5;241m=\u001b[39murl)\n\u001b[0;32m--> 864\u001b[0m client_type \u001b[38;5;241m=\u001b[39m \u001b[43mconnection\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_client_type\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 866\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28misinstance\u001b[39m(client_type, SyftError):\n\u001b[1;32m 867\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m client_type\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:314\u001b[0m, in \u001b[0;36mHTTPConnection.get_client_type\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 311\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01menclave_client\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m EnclaveClient\n\u001b[1;32m 312\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mgateway_client\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m GatewayClient\n\u001b[0;32m--> 314\u001b[0m metadata \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_node_metadata\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mSyftSigningKey\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 315\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m metadata\u001b[38;5;241m.\u001b[39mnode_type \u001b[38;5;241m==\u001b[39m NodeType\u001b[38;5;241m.\u001b[39mDOMAIN\u001b[38;5;241m.\u001b[39mvalue:\n\u001b[1;32m 316\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m DomainClient\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:216\u001b[0m, in \u001b[0;36mHTTPConnection.get_node_metadata\u001b[0;34m(self, credentials)\u001b[0m\n\u001b[1;32m 214\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m response\n\u001b[1;32m 215\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 216\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_make_get\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mroutes\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mROUTE_METADATA\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mvalue\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 217\u001b[0m metadata_json \u001b[38;5;241m=\u001b[39m json\u001b[38;5;241m.\u001b[39mloads(response)\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m NodeMetadataJSON(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mmetadata_json)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:177\u001b[0m, in \u001b[0;36mHTTPConnection._make_get\u001b[0;34m(self, path, params)\u001b[0m\n\u001b[1;32m 173\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msession\u001b[38;5;241m.\u001b[39mget(\n\u001b[1;32m 174\u001b[0m \u001b[38;5;28mstr\u001b[39m(url), verify\u001b[38;5;241m=\u001b[39mverify_tls(), proxies\u001b[38;5;241m=\u001b[39m{}, params\u001b[38;5;241m=\u001b[39mparams\n\u001b[1;32m 175\u001b[0m )\n\u001b[1;32m 176\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m200\u001b[39m:\n\u001b[0;32m--> 177\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m requests\u001b[38;5;241m.\u001b[39mConnectionError(\n\u001b[1;32m 178\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFailed to fetch \u001b[39m\u001b[38;5;132;01m{\u001b[39;00murl\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m. 
Response returned with code \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresponse\u001b[38;5;241m.\u001b[39mstatus_code\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 179\u001b[0m )\n\u001b[1;32m 181\u001b[0m \u001b[38;5;66;03m# upgrade to tls if available\u001b[39;00m\n\u001b[1;32m 182\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39murl \u001b[38;5;241m=\u001b[39m upgrade_tls(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39murl, response)\n", - "\u001b[0;31mConnectionError\u001b[0m: Failed to fetch http://localhost:8080/api/v2/metadata. Response returned with code 502" - ] - } - ], + "outputs": [], "source": [ "client = sy.login(url=\"http://localhost:8080\", email=\"info@openmined.org\", password=\"changethis\")" ] @@ -1519,7 +1502,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "\r", + "\r\n", " 0%| | 0/2 [00:00SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from syft.service.policy.policy import OutputPolicyExecuteCount\n", - "\n", - "# Question 2: What should the UX be for ExecuteOncePerCombination?\n", - "# \n", - "# Right now I have worked on using the first option from the previous question\n", - "# and using on the fly created lists. We can break this question into more specific ones:\n", - "#\n", - "# Sub-Question 1: What should we pass for each argument? Should the list be already on the server?\n", - "# Or can it be defined by the data scientist? \n", - "# Could it be made of data outside the domain?\n", - "#\n", - "# Sub-Question 2: Will anything change if instead of data we talk about files?\n", - "# The final use case actually will iterate for SyftFiles, so can this affect the UX?\n", - "#\n", - "\n", - "@sy.syft_function(input_policy=sy.ExecuteOncePerCombination(\n", - " x=[ptr_1, ptr_2, ptr_3],\n", - " y=[ptr_1, ptr_2, ptr_3],\n", - " z=[ptr_1, ptr_2, ptr_3],\n", - " ),\n", - " output_policy=OutputPolicyExecuteCount(limit=27))\n", - "def func(x, y, z):\n", - " return x, y, z" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "request = client.code.submit(func)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'main_func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'main_func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function_single_use(list=list_ptr)\n", - "def main_func(domain, list):\n", - " jobs = []\n", - " print(\"start\")\n", - " # domain.init_progress(27)\n", - " for x in list:\n", - " for y in list:\n", - " for z in list:\n", - " print(x,y,z)\n", - " # domain.progress()\n", - " batch_job = domain.launch_job(func, x=x, y=y, z=z)\n", - " jobs.append(batch_job)\n", - " \n", - " print(\"done\")\n", - " \n", - " return None" + "client.worker.start_workers(n=3)" ] }, { "cell_type": "code", "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 51fa624adc3d47e7a8dc97886df8dfdc changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 51fa624adc3d47e7a8dc97886df8dfdc changes applied" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.request_code_execution(main_func)\n", - "client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "job = client.code.main_func(list=list_ptr, blocking=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 02936696f4a64aac98af478b04decb3d\n", - " status: JobStatus.CREATED\n", - " has_parent: False\n", - " result: None\n", - " logs:\n", - "\n", - "0 \n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job" - ] - }, - { - "cell_type": "code", - "execution_count": 10, + "id": "c0c8331c", "metadata": {}, "outputs": [], "source": [ - "jobs = client.jobs" + "workers = client.worker.list()" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 8, + "id": "f8fc2e1b", "metadata": {}, "outputs": [ { @@ -266,18 +89,20 @@ "text/html": [ "\n", "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

Job List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job, syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n", - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "7d8a2f95", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "30/11/23 01:18:41 FUNCTION LOG (95af6d0864e446f38d22f36d145056ba): starting batch 2\n", - "30/11/23 01:18:41 FUNCTION LOG (1a529a3e2b5248ce9a00fa79c73885b4): starting batch 1\n" - ] - } - ], - "source": [ - "# client.jobs[0].subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "cc0db669", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "30/11/23 01:18:42 FUNCTION LOG (95af6d0864e446f38d22f36d145056ba): done\n", - "30/11/23 01:18:42 FUNCTION LOG (1a529a3e2b5248ce9a00fa79c73885b4): done\n" - ] - }, - { - "data": { - "text/plain": [ - "Pointer:\n", - "None" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "5bf0974f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "5" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sum([j.wait().get() for j in job.subjobs])" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "948d9162", - "metadata": {}, - "outputs": [], - "source": [ - "node.land()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/helm_audit_without_syft.ipynb b/notebooks/helm/helm-audit-without-syft.ipynb similarity index 98% rename from notebooks/helm/helm_audit_without_syft.ipynb rename to notebooks/helm/helm-audit-without-syft.ipynb index 75170fc5de1..403b2baa440 100644 --- a/notebooks/helm/helm_audit_without_syft.ipynb +++ b/notebooks/helm/helm-audit-without-syft.ipynb @@ -267,10 +267,6 @@ } ], "source": [ - "\n", - "# for key in ngram_index:\n", - "# for gram in ngram_index[key]:\n", - "# print(ngram_index[key][gram])\n", "\n", "stats_key_to_input_ids = []\n", "stats_key_to_reference_ids = []\n", diff --git a/notebooks/helm/helm-syft-mongo.ipynb b/notebooks/helm/helm-syft-mongo.ipynb deleted file mode 100644 index 48ba643b170..00000000000 --- a/notebooks/helm/helm-syft-mongo.ipynb +++ /dev/null 
@@ -1,1487 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import syft as sy" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "# from gevent import monkey" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# monkey.patch_all()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from syft.store.blob_storage import BlobStorageConfig, BlobStorageClientConfig\n", - "from syft.store.blob_storage.seaweedfs import SeaweedFSClient, SeaweedFSClientConfig\n", - "from syft import ActionObject\n", - "from syft.service.action.action_data_empty import ActionFileData\n", - "from syft.service.queue.zmq_queue import ZMQQueueConfig, ZMQClientConfig\n", - "from collections import defaultdict" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from syft.store.mongo_client import MongoStoreClientConfig\n", - "from syft.store.mongo_document_store import MongoStoreConfig" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# from packagesgrid.core.config import settings" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "448cb501cca248c7b9845081ed40744c 448cb501cca288c739845081ed40744cd5e74a874df77f9d949982b68482d445 47f8c8f3db3a30695a28e4a51e44916669ac3d111924cb614181c64b2c3b8323 info@openmined.org Jane Doe\n", - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "INITIALIZING CONSUMER\n", - "ABCDEF\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\",\n", - " dev_mode=True,\n", - " reset=True,\n", - " n_consumers=4,\n", - " create_producer=True)\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "mongo_client_config = MongoStoreClientConfig(\n", - " hostname=\"localhost\",\n", - " port=63455,\n", - " username=\"root\",\n", - " password=\"example\",\n", - ")\n", - "\n", - "mongo_store_config = MongoStoreConfig(client_config=mongo_client_config)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "node.python_node.init_stores(node.python_node.document_store_config,\n", - " mongo_store_config)\n", - "node.python_node._construct_services()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "node.python_node.action_store._collection.drop()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```bash\n", - "docker run --entrypoint /bin/sh -p 8333:8333 -p 8888:8888 chrislusf/seaweedfs -c \"echo 's3.configure -access_key admin -secret_key admin -user iam -actions Read,Write,List,Tagging,Admin -apply' | weed shell > /dev/null 2>&1 & weed server -s3 -s3.port=8333 -master.volumeSizeLimitMB=2048\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "blob_config = BlobStorageConfig(client_type=SeaweedFSClient,\n", - " client_config=SeaweedFSClientConfig(host=\"http://0.0.0.0\",\n", - " port=\"8333\",\n", - " access_key=\"admin\",\n", - " secret_key=\"admin\",\n", - " bucket_name=\"test_bucket\",\n", - " region=\"us-east-1\")\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "node.python_node.init_blob_storage(blob_config)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Inputs" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "```\n", - "```python\n", - "class BlobFile:\n", - " id: str = fa0c8717fed64811a6ada07a2e9f3585\n", - "\n", - "```" - ], - "text/plain": [ - "Pointer:\n", - "syft.types.blob_storage.BlobFile" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sy.ActionObject.from_path(path=\"scenario_data.jsonl\").send(client)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "# TODO: fix way we send list of files\n", - "scenario_obj = ActionObject.from_obj([\n", - " sy.ActionObject.from_path(path=\"scenario_data.jsonl\").send(client).syft_action_data for i in range(2)])\n", - "scenario_files_ptr = scenario_obj.send(client)" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "# TODO: fix way we send list of files\n", - "input_obj = ActionObject.from_obj([\n", - " sy.ActionObject.from_path(\"short_input.jsonl\").send(client).syft_action_data for i in range(2)])\n", - "input_files_ptr = 
input_obj.send(client)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "# for line in input_files_ptr.syft_action_data[0].iter_lines():\n", - "# print(line)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Syft functions" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'compute_document_data_overlap' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'compute_document_data_overlap' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function()\n", - "def compute_document_data_overlap(scenario_file, input_files, n):\n", - " print(\"starting overlap computation\")\n", - "\n", - " from nltk import ngrams\n", - " from collections import defaultdict\n", - " from string import punctuation\n", - " import re, json\n", - "\n", - " r = re.compile(r\"[\\s{}]+\".format(re.escape(punctuation)))\n", - " \n", - " def create_ngram_index(light_scenarios, n_values, stats_key_counts):\n", - " ngram_index = {n:{} for n in n_values}\n", - " for scenario in light_scenarios:\n", - " for n in n_values:\n", - " stats_key = scenario['scenario_key'] + '_' + str(n)\n", - " stats_key_counts[stats_key] = len(scenario['instances'])\n", - " for instance in scenario['instances']:\n", - " id = instance['id'] \n", - " input_tokens = r.split(instance['input'].lower())\n", - " for input_ngram in ngrams(input_tokens, n):\n", - " if input_ngram not in ngram_index[n]:\n", - " ngram_index[n][input_ngram] = set()\n", - " ngram_index[n][input_ngram].add(stats_key + '+' + id + '+' + 'input')\n", - "\n", - " # compute reference ngrams\n", - " for reference in instance['references']:\n", - " reference_unigrams = r.split(reference.lower())\n", - " for reference_ngram in ngrams(reference_unigrams, n):\n", - " if reference_ngram not in ngram_index[n]:\n", - " ngram_index[n][reference_ngram] = set()\n", - " ngram_index[n][reference_ngram].add(stats_key + '+' + id + '+' + 'references')\n", - " return ngram_index\n", - " \n", - " # # SETUP\n", - " print(\"preparing scenarios and creating indexes\")\n", - " light_scenarios = []\n", - " for light_scenario_json in scenario_file.iter_lines():\n", - " light_scenario_dict: dict = json.loads(light_scenario_json)\n", - "\n", - " light_scenario_key_dict: dict = light_scenario_dict[\"scenario_key\"]\n", - " subject_spec = light_scenario_key_dict[\"scenario_spec\"]['args']['subject']\n", - " light_scenario_key = subject_spec + '_' + light_scenario_key_dict[\"split\"]\n", - " light_instances = [\n", - " {\n", - " 'input': instance_dict['input'], \n", - " 'references': instance_dict['references'], \n", - " 'id': instance_dict[\"id\"]\n", - " }\n", - " for instance_dict in light_scenario_dict[\"instances\"]\n", - " ]\n", - " light_scenarios.append({'scenario_key': light_scenario_key, 'instances': light_instances})\n", - " \n", - " stats_key_counts = defaultdict(int)\n", - " \n", - " ngram_index = create_ngram_index(\n", - " light_scenarios=light_scenarios, n_values=[n], stats_key_counts=stats_key_counts\n", - " )\n", - " \n", - " r = re.compile(r\"[\\s{}]+\".format(re.escape(punctuation)))\n", - " stats_key_to_input_ids = defaultdict(set)\n", - " stats_key_to_reference_ids = defaultdict(set)\n", - " print(\"computing overlap\")\n", - " \n", - " for input_file in input_files:\n", - " for line in input_file.iter_lines():\n", - " document = json.loads(line)[\"text\"]\n", - " document_tokens = r.split(document.lower())\n", - " for n in ngram_index.keys():\n", - " for document_ngram in ngrams(document_tokens, n):\n", - " if document_ngram in ngram_index[n]:\n", - " for entry_overlap_key in ngram_index[n][document_ngram]:\n", - " stats_key, id, part = entry_overlap_key.split(\"+\")\n", - " if part == 
\"input\":\n", - " stats_key_to_input_ids[stats_key].add(id)\n", - " elif part == \"references\":\n", - " stats_key_to_reference_ids[stats_key].add(id)\n", - " print(\"done\")\n", - " \n", - " return stats_key_to_input_ids, stats_key_to_reference_ids, stats_key_counts" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.submit(compute_document_data_overlap)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [], - "source": [ - "# client.api.services.action.exists(input_files_ptr.id)" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'main_function' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'main_function' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function_single_use(input_files=input_files_ptr, scenario_files=scenario_files_ptr)\n", - "def main_function(domain, input_files, scenario_files):\n", - " N = [5, 9, 13]\n", - " jobs = []\n", - " for n in N[:1]:\n", - " for scenario_file in scenario_files:\n", - " batch_job = domain.launch_job(\n", - " compute_document_data_overlap,\n", - " scenario_file=scenario_file,\n", - " input_files=input_files,\n", - " n=n\n", - " )\n", - " jobs.append(batch_job)\n", - "\n", - " return None\n" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "# %debug" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 3da44e52c3a843d0be1a9780f3e42784 changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 3da44e52c3a843d0be1a9780f3e42784 changes applied" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.request_code_execution(main_function)\n", - "client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "job = client.code.main_function(input_files=input_files_ptr, scenario_files=scenario_files_ptr, blocking=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Get results" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = ed6b4c52d48b4f278ac0c7b165eb6b1d\n", - " status: created\n", - " has_parent: False\n", - " result: None\n", - " logs:\n", - "\n", - "0 \n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SENDING JOB FROM PRODUCER\n", - "HANDLING MESSAGE IN CONSUMER\n", - "LAUNCHING JOB compute_document_data_overlap\n", - "LAUNCHING JOB compute_document_data_overlap\n", - "done handling message\n", - "SENDING JOB FROM PRODUCER\n", - "SENDING JOB FROM PRODUCER\n", - "HANDLING MESSAGE IN CONSUMER\n", - "HANDLING MESSAGE IN CONSUMER\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "FUNCTION LOG (bdd3001ee39e43ca96ccdf3d0bdef119): starting overlap computation\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): starting overlap computation\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): preparing scenarios and creating indexes\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): computing overlap\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): done\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): preparing scenarios and creating indexes\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): computing overlap\n", - "FUNCTION LOG (68a7f873e7534d108725cc20ee657c7e): done\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "done handling message\n", - "done handling message\n" - ] - }, - { - "data": { - "text/plain": [ - "Pointer:\n", - "None" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
- "            Job List (HTML table markup omitted)\n",
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job, syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting overlap computation\n", - "\n" - ] - } - ], - "source": [ - "job.subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting overlap computation\n", - "preparing scenarios and creating indexes\n", - "computing overlap\n", - "done\n", - "preparing scenarios and creating indexes\n", - "computing overlap\n", - "done\n", - "\n" - ] - } - ], - "source": [ - "job.subjobs[1].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [], - "source": [ - "results = [j.wait().get() for j in job.subjobs]" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "[(defaultdict(<class 'set'>, {'philosophy_test_5': {'id328'}, 'philosophy_valid_5': {'id12'}}), defaultdict(<class 'set'>, {}), defaultdict(<class 'int'>, {'philosophy_train_5': 5, 'philosophy_valid_5': 34, 'philosophy_test_5': 311, 'anatomy_train_5': 5, 'anatomy_valid_5': 14, 'anatomy_test_5': 135})), (defaultdict(<class 'set'>, {'philosophy_test_5': {'id328'}, 'philosophy_valid_5': {'id12'}}), defaultdict(<class 'set'>, {}), defaultdict(<class 'int'>, {'philosophy_train_5': 5, 'philosophy_valid_5': 34, 'philosophy_test_5': 311, 'anatomy_train_5': 5, 'anatomy_valid_5': 14, 'anatomy_test_5': 135}))]" - ], - "text/plain": [ - "[(defaultdict(set,\n", - " {'philosophy_test_5': {'id328'},\n", - " 'philosophy_valid_5': {'id12'}}),\n", - " defaultdict(set, {}),\n", - " defaultdict(int,\n", - " {'philosophy_train_5': 5,\n", - " 'philosophy_valid_5': 34,\n", - " 'philosophy_test_5': 311,\n", - " 'anatomy_train_5': 5,\n", - " 'anatomy_valid_5': 14,\n", - " 'anatomy_test_5': 135})),\n", - " (defaultdict(set,\n", - " {'philosophy_test_5': {'id328'},\n", - " 'philosophy_valid_5': {'id12'}}),\n", - " defaultdict(set, {}),\n", - " defaultdict(int,\n", - " {'philosophy_train_5': 5,\n", - " 'philosophy_valid_5': 34,\n", - " 'philosophy_test_5': 311,\n", - " 'anatomy_train_5': 5,\n", - " 'anatomy_valid_5': 14,\n", - " 'anatomy_test_5': 135}))]" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "#stats_key_to_input_ids, stats_key_to_reference_ids, stats_key_counts\n", - "results" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(defaultdict(set,\n", - " {'philosophy_test_5': {'id328'}, 'philosophy_valid_5': {'id12'}}),\n", - " defaultdict(set, {}),\n", - " defaultdict(int,\n", - " {'philosophy_train_5': 5,\n", - " 'philosophy_valid_5': 34,\n", - " 'philosophy_test_5': 311,\n", - " 'anatomy_train_5': 5,\n", - " 'anatomy_valid_5': 14,\n", - " 'anatomy_test_5': 135}))" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "results[0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Aggregate" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [], - "source": [ - 
"stats_key_to_input_ids, stats_key_to_reference_ids, stats_key_counts = zip(*results)\n", - "\n", - "total_input_ids = defaultdict(set)\n", - "total_reference_ids = defaultdict(set)\n", - "total_stats_key_counts = defaultdict(int)\n", - "\n", - "for d in stats_key_counts:\n", - " for key, val in d.items():\n", - " total_stats_key_counts[key] += val\n", - "\n", - "\n", - "for d in stats_key_to_input_ids:\n", - " for key in d:\n", - " new_set = set()\n", - " if key in total_input_ids:\n", - " new_set = total_input_ids[key]\n", - " new_set = new_set.union(d[key])\n", - " total_input_ids[key] = new_set\n", - "\n", - "for d in stats_key_to_reference_ids:\n", - " for key in d:\n", - " new_set = set()\n", - " if key in total_reference_ids:\n", - " new_set = total_reference_ids[key]\n", - " new_set = total_reference_ids[key].union(d[key])\n", - " total_reference_ids[key] = new_set\n", - "\n", - "all_data_overlap_stats = []\n", - "for stats_key, count in total_stats_key_counts.items():\n", - " data_overlap_stats = {\n", - " 'data_overlap_stats_key': None,\n", - " 'num_instances': count,\n", - " 'instance_ids_with_overlapping_input': sorted(total_input_ids[stats_key]),\n", - " 'instance_ids_with_overlapping_reference': sorted(total_reference_ids[stats_key]),\n", - " }\n", - " subject, split, n_str = stats_key.split('_')\n", - " data_overlap_stats['data_overlap_stats_key'] = {\n", - " 'light_scenario_key': {'subject': subject, 'split': split},\n", - " 'overlap_protocol_spec': {'n': int(n_str)}\n", - " }\n", - " all_data_overlap_stats.append(data_overlap_stats)\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[{'data_overlap_stats_key': {'light_scenario_key': {'split': 'train',\n", - " 'subject': 'philosophy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': [],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 10},\n", - " {'data_overlap_stats_key': {'light_scenario_key': {'split': 'valid',\n", - " 'subject': 'philosophy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': ['id12'],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 68},\n", - " {'data_overlap_stats_key': {'light_scenario_key': {'split': 'test',\n", - " 'subject': 'philosophy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': ['id328'],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 622},\n", - " {'data_overlap_stats_key': {'light_scenario_key': {'split': 'train',\n", - " 'subject': 'anatomy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': [],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 10},\n", - " {'data_overlap_stats_key': {'light_scenario_key': {'split': 'valid',\n", - " 'subject': 'anatomy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': [],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 28},\n", - " {'data_overlap_stats_key': {'light_scenario_key': {'split': 'test',\n", - " 'subject': 'anatomy'},\n", - " 'overlap_protocol_spec': {'n': 5}},\n", - " 'instance_ids_with_overlapping_input': [],\n", - " 'instance_ids_with_overlapping_reference': [],\n", - " 'num_instances': 270}]\n" - ] - } - ], - "source": [ - "from pprint import 
pprint\n", - "pprint(all_data_overlap_stats)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": { - "height": "calc(100% - 180px)", - "left": "10px", - "top": "150px", - "width": "398.22px" - }, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/helm/helm_syft.ipynb b/notebooks/helm/helm-syft.ipynb similarity index 100% rename from notebooks/helm/helm_syft.ipynb rename to notebooks/helm/helm-syft.ipynb diff --git a/notebooks/helm/kill-restart-jobs.ipynb b/notebooks/helm/kill-restart-jobs.ipynb deleted file mode 100644 index 9029c49e313..00000000000 --- a/notebooks/helm/kill-restart-jobs.ipynb +++ /dev/null @@ -1,1672 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "a196017f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft import ActionObject\n", - "from syft import syft_function, syft_function_single_use\n", - "from time import sleep\n", - "import os\n", - "import psutil\n", - "import inspect" - ] - }, - { - "cell_type": "markdown", - "id": "cb2d07de", - "metadata": {}, - "source": [ - "with server" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "21b24a60", - "metadata": {}, - "outputs": [], - "source": [ - "# import pickle\n", - "\n", - "# pickle.dumps(ActionObject.empty())" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "8e9d0fad", - "metadata": {}, - "outputs": [], - "source": [ - "# obj = ActionObject.empty()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "673cdd4f", - "metadata": {}, - "outputs": [], - "source": [ - "# pickle.dumps(obj.syft_action_data)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "9b31c627", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Data Migrated to latest version !!!\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\", dev_mode=True,\n", - " reset=True, \n", - " n_consumers=3,\n", - " create_producer=True,\n", - " queue_port=3322)\n", - " \n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "507740d2", - "metadata": {}, - "outputs": [], - "source": [ - "res = client.register(name=\"a\", email=\"aa@b.org\", password=\"c\", password_verify=\"c\")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "0c33d096", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], - "source": [ - "ds_client = node.login(email=\"aa@b.org\", password=\"c\")" - ] - }, - { - "cell_type": "markdown", - "id": "176addfb", - "metadata": {}, - "source": [ - "setup: compute train-test overlap between a very large train set and a smaller test set. Small test is still to big for memory, so we split it into 54 parts. We keep 1 of those parts in memory. We dont keep the train set in memory, but read and compare with 1/54 parts line by line. Each part takes ~30 hours, but we can run 54 processes in parallel." - ] - }, - { - "cell_type": "markdown", - "id": "a0cea81b", - "metadata": {}, - "source": [ - "# Setup syft functions" - ] - }, - { - "cell_type": "markdown", - "id": "da2b114a", - "metadata": {}, - "source": [ - "## Dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "83307a2f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CALLING THIS\n", - "CALLING \n", - "attrs 448cb501cca248c7b9845081ed40744c 69981d84f2721b8a56271a3b81fe0dd603523d6cf2e870627bc84fd75f941624\n", - "service_method .wrapper at 0x7fb279787d30>\n", - "ALLOCATE METHOD .wrapper at 0x7fb279787d30>\n", - "CALLING THIS 2\n", - "CALLING THIS 3\n", - "service_method .wrapper at 0x7fb279787dc0>\n", - "setting id to 4d772b26035747f9b72f16f07c194f00\n" - ] - } - ], - "source": [ - "x = ActionObject.from_obj([1])\n", - "x_ptr = x.send(ds_client)" - ] - }, - { - "cell_type": "markdown", - "id": "31bbb3ff", - "metadata": {}, - "source": [ - "## Batch function" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "5d2fd248", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function()\n", - "def process_batch(batch):\n", - " # takes 30 hours normally\n", - " print(f\"starting batch {batch}\")\n", - " from time import sleep\n", - " sleep(3)\n", - "# for i in range(30):\n", - "# print(i)\n", - "# sleep(1)\n", - " print(\"done\")\n", - " return batch+1" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "9ba22655", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_client.code.submit(process_batch)" - ] - }, - { - "cell_type": "markdown", - "id": "01319f1f", - "metadata": {}, - "source": [ - "## Main function" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "ca1b95ee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function_single_use(x=x_ptr)\n", - "def process_all(domain, x):\n", - " jobs = []\n", - " print(\"Launching jobs\")\n", - " for elem in x:\n", - " # We inject a domain object in the scope\n", - " batch_job = domain.launch_job(process_batch, batch=elem)\n", - " jobs += [batch_job]\n", - " print(\"starting aggregation\")\n", - " print(\"Done\")\n", - "# results = [x.wait().get() for x in jobs]\n", - " return 3\n", - "# return sum(results)" - ] - }, - { - "cell_type": "markdown", - "id": "1e77c5db", - "metadata": {}, - "source": [ - "# Approve & run" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "0ab572f9", - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request ea678f0fb59d4b43a36b793bf64b2366 changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request ea678f0fb59d4b43a36b793bf64b2366 changes applied" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "r = ds_client.code.request_code_execution(process_all)\n", - "client.requests[-1].approve(approve_nested=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "375ed965", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "service_method .wrapper at 0x7fb2755161f0>\n", - "[1]\n", - "[1]\n", - "service_method .wrapper at 0x7fb2755161f0>\n", - "[1]\n", - "[1]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n", - "28/11/23 13:51:39 FUNCTION LOG (3fb82ef9039f4273a9a70c56d45c75ea): Launching jobs\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1\n", - "1\n", - "1\n", - "1\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "28/11/23 13:51:40 FUNCTION LOG (3fb82ef9039f4273a9a70c56d45c75ea): starting aggregation\n", - "28/11/23 13:51:40 FUNCTION LOG (3fb82ef9039f4273a9a70c56d45c75ea): Done\n", - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "job = ds_client.code.process_all(x=x_ptr, blocking=False)\n", - "sleep(10)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "9468bbee", - "metadata": {}, - "outputs": [], - "source": [ - "# ptr2.syft_blob_storage_entry_id" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "2c7cd8a7", - "metadata": {}, - "outputs": [], - "source": [ - "# sleep(10)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "2d5491de", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1\n", - "1\n", - "entry id1 None\n", - "entry id2 None\n", - "entry id3 None\n", - "CALLING THIS\n", - "CALLING \n", - "attrs 448cb501cca248c7b9845081ed40744c 69981d84f2721b8a56271a3b81fe0dd603523d6cf2e870627bc84fd75f941624\n", - "ALLOCATE METHOD functools.partial(>, syft.service.context.AuthedServiceContext)\n", - "CALLING THIS 2\n", - "CALLING THIS 3\n", - "setting id to 05d0cc34f92c45a8a9d03773def96883\n", - "entry id 05d0cc34f92c45a8a9d03773def96883\n" - ] - } - ], - "source": [ - "subjob = job.subjobs[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "2082a407", - "metadata": {}, - "outputs": [], - "source": [ - "ptr = subjob.wait()" - ] - }, - { - "cell_type": "markdown", - "id": "a9cfb50b", - "metadata": {}, - "source": [ - "its about insertion, not retrieval" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "f01fc8b5", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "service_method .wrapper at 0x7fb279a249d0>\n", - "2\n", - "2\n" - ] - } - ], - "source": [ - "ptr2 = ptr.get()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7a39ddae", - "metadata": {}, - "outputs": [], - "source": [ - "obj = ActionObject.from_big_file(really_big_file)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c841f41e", - "metadata": {}, - "outputs": [], - "source": [ - "obj.stream_upload()" - ] - }, - { - "cell_type": "code", - "execution_count": 
null, - "id": "47d84c79", - "metadata": {}, - "outputs": [], - "source": [ - "obj.reload_cache()" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "ac27c4bb", - "metadata": {}, - "outputs": [], - "source": [ - "# ptr2 = client.api.services.action.get(ptr.id)" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "id": "ea955936", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Detached action object, object exists but is not linked to data in the blob storage SyftError: `uid` must be of type `UID` not `NoneType`\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftError: `uid` must be of type `UID` not `NoneType`

" - ], - "text/plain": [ - "SyftError: `uid` must be of type `UID` not `NoneType`" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr2.reload_cache()" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "5fca0436", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "cc5c80529f30f7b962e0e15867d5995d36b443ed2acf84e85d7aec8f4605869d" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.syft_client_verify_key" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "f93d766e", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.id" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "8d8eaeec", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class ActionDataEmpty:\n", - " id: str = bbe6063631014d6faaf9e0bf387f33cc\n", - "\n", - "```" - ], - "text/plain": [ - "ActionDataEmpty " - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.get()" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "id": "8a540f30", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "```\n", - "2" - ], - "text/plain": [ - "Pointer:\n", - "2" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 35, - "id": "cb38a50f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "47f8c8f3db3a30695a28e4a51e44916669ac3d111924cb614181c64b2c3b8323" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.verify_key" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "id": "813e4c51", - "metadata": {}, - "outputs": [], - "source": [ - "from syft.client.api import APIRegistry\n", - "\n", - "api = APIRegistry.api_for(\n", - " node_uid=ptr.syft_node_location,\n", - " user_verify_key=ptr.syft_client_verify_key,\n", - ")\n", - "res = api.services.action.get(ptr.id)" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "id": "c493dcef", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Detached action object, object exists but is not linked to data in the blob storage SyftError: `uid` must be of type `UID` not `NoneType`\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftError: `uid` must be of type `UID` not `NoneType`

" - ], - "text/plain": [ - "SyftError: `uid` must be of type `UID` not `NoneType`" - ] - }, - "execution_count": 44, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res.reload_cache()" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "id": "6edc8a7e", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class ActionDataEmpty:\n", - " id: str = 04cb2ab44e7545a8ad100be6251cdb82\n", - "\n", - "```" - ], - "text/plain": [ - "ActionDataEmpty " - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res.syft_action_data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "df46aee6", - "metadata": {}, - "outputs": [], - "source": [ - "ptr." - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "id": "072a7007", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class ActionDataEmpty:\n", - " id: str = 04cb2ab44e7545a8ad100be6251cdb82\n", - "\n", - "```" - ], - "text/plain": [ - "ActionDataEmpty " - ] - }, - "execution_count": 33, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.get()" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "541f8e59", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "cbb2b22db3b8496350bcb2045612680cddb017f4e91f39e445fdb8a87fb15d88" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.syft_client_verify_key" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "id": "237fbe23", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.id" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57df66ee", - "metadata": {}, - "outputs": [], - "source": [ - "# subjob.wait().get()" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "7c14ef8d", - "metadata": {}, - "outputs": [], - "source": [ - "# subjob.kill()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "3a08a621", - "metadata": {}, - "outputs": [], - "source": [ - "# subjob.restart()" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "e8816757", - "metadata": {}, - "outputs": [], - "source": [ - "# sleep(3)" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "69fb56cd", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 1\n", - "done\n", - "\n" - ] - } - ], - "source": [ - "subjob.logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "2db04ddd", - "metadata": {}, - "outputs": [], - "source": [ - "# job.subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "c3d71844", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
- "            Job List (HTML table markup omitted)\n",
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "id": "7d8a2f95", - "metadata": {}, - "outputs": [], - "source": [ - "# client.jobs[0].subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "id": "cc0db669", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Pointer:\n", - "None" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "id": "711ce53a", - "metadata": {}, - "outputs": [], - "source": [ - "ptr = job.subjobs[0].wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "id": "69d4dcf9", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "class ActionDataEmpty:\n", - " id: str = 363d0e89a7b44953b44f443378d51204\n", - "\n", - "```" - ], - "text/plain": [ - "ActionDataEmpty " - ] - }, - "execution_count": 42, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ptr.get()" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "id": "5bf0974f", - "metadata": {}, - "outputs": [], - "source": [ - "# sum([j.wait().get() for j in job.subjobs])" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "948d9162", - "metadata": {}, - "outputs": [], - "source": [ - "node.land()" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "6de3096d", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "STOPPING\n", - "STOPPINGSTOPPING\n", - "\n" - ] - } - ], - "source": [ - "sleep(5)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "e61760f5", - "metadata": {}, - "outputs": [], - "source": [ - "import traceback\n", - "import sys" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/local_jobs_execution.ipynb b/notebooks/helm/local_jobs_execution.ipynb deleted file mode 100644 index dfc54054113..00000000000 --- a/notebooks/helm/local_jobs_execution.ipynb +++ /dev/null @@ -1,3345 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "a196017f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft import ActionObject\n", - "from syft import syft_function, syft_function_single_use\n", - "from time import sleep\n", - "import os\n", - "import psutil\n", - 
"import inspect" - ] - }, - { - "cell_type": "markdown", - "id": "cb2d07de", - "metadata": {}, - "source": [ - "with server" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "9b31c627", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\",\n", - " dev_mode=True,\n", - " reset=True, \n", - " n_consumers=3,\n", - " create_producer=True,\n", - " queue_port=3322)\n", - " \n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "507740d2", - "metadata": {}, - "outputs": [], - "source": [ - "res = client.register(name=\"a\", email=\"aa@b.org\", password=\"c\", password_verify=\"c\")" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "0c33d096", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], - "source": [ - "ds_client = node.login(email=\"aa@b.org\", password=\"c\")" - ] - }, - { - "cell_type": "markdown", - "id": "176addfb", - "metadata": {}, - "source": [ - "setup: compute train-test overlap between a very large train set and a smaller test set. Small test is still to big for memory, so we split it into 54 parts. We keep 1 of those parts in memory. We dont keep the train set in memory, but read and compare with 1/54 parts line by line. Each part takes ~30 hours, but we can run 54 processes in parallel." - ] - }, - { - "cell_type": "markdown", - "id": "a0cea81b", - "metadata": {}, - "source": [ - "# Setup syft functions" - ] - }, - { - "cell_type": "markdown", - "id": "da2b114a", - "metadata": {}, - "source": [ - "## Dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "4f1b8f95", - "metadata": {}, - "outputs": [], - "source": [ - "dataset = sy.Dataset(\n", - " name=\"My dataset\",\n", - " asset_list=[\n", - " sy.Asset(\n", - " name=\"input asset\",\n", - " data=ActionObject.from_obj([1, 2]),\n", - " mock=ActionObject.from_obj([1, 2]),\n", - " ),\n", - " ]\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "e419fe18", - "metadata": {}, - "outputs": [], - "source": [ - "# dataset.assets[0].mock.syft_action_data\n", - "\n", - "# dataset.assets[0].data.syft_action_data\n", - "\n", - "# dataset.assets[0].mock.__len__()\n", - "\n", - "# dataset.assets[0].data.__len__()" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "dff061bd", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "[1, 2]" - ], - "text/plain": [ - "[1, 2]" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dataset.assets[0].data.syft_action_data" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "2d135883", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:00<00:00, 10.25it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Uploading: input asset\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Dataset uploaded to 'test-domain-helm2'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`

" - ], - "text/plain": [ - "SyftSuccess: Dataset uploaded to 'test-domain-helm2'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.upload_dataset(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "4c0c3963", - "metadata": {}, - "outputs": [], - "source": [ - "asset = ds_client.datasets[0].assets[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "83307a2f", - "metadata": {}, - "outputs": [], - "source": [ - "# x = ActionObject.from_obj([1, 2])\n", - "# x_ptr = x.send(ds_client)" - ] - }, - { - "cell_type": "markdown", - "id": "31bbb3ff", - "metadata": {}, - "source": [ - "## Batch function" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "5d2fd248", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function()\n", - "def process_batch(batch):\n", - " # takes 30 hours normally\n", - " print(f\"starting batch {batch}\")\n", - " from time import sleep\n", - " sleep(1)\n", - " print(\"done\")\n", - " return batch+1" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "9ba22655", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_client.code.submit(process_batch)" - ] - }, - { - "cell_type": "markdown", - "id": "01319f1f", - "metadata": {}, - "source": [ - "## Main function" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "ca1b95ee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function_single_use(x=asset)\n", - "def process_all(domain, x):\n", - " \n", - " jobs = []\n", - " print(\"Launching jobs\")\n", - " for elem in x:\n", - " # We inject a domain object in the scope\n", - " batch_job = domain.launch_job(process_batch, batch=elem)\n", - " jobs += [batch_job]\n", - " print(\"starting aggregation\")\n", - " print(\"Done\")\n", - " return 3" - ] - }, - { - "cell_type": "markdown", - "id": "1e77c5db", - "metadata": {}, - "source": [ - "# Approve & run" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "0ab572f9", - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 5147062631e1422f8a113120f153afa4 changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 5147062631e1422f8a113120f153afa4 changes applied" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_client.code.request_code_execution(process_all)\n", - "client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "0217416f", - "metadata": {}, - "outputs": [], - "source": [ - "# ds_client.code.process_all(x=asset, blocking=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "5105253b", - "metadata": {}, - "outputs": [], - "source": [ - "mock =asset.mock" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "2d0ab6e2", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "[1, 2]" - ], - "text/plain": [ - "[1, 2]" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mock" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f4ddb2f0", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "97226040", - "metadata": {}, - "outputs": [], - "source": [ - "# \"domain\" in sig.parameters" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "ca89a86a", - "metadata": {}, - "outputs": [], - "source": [ - "from syft.node.worker import Worker" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "4944d0bb", - "metadata": {}, - "outputs": [], - "source": [ - "from syft.node.node import Node" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "07651817", - "metadata": {}, - "outputs": [], - "source": [ - "# process_all\n", - "# process_all(x=[3,4])" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "e3933bbc", - "metadata": {}, - "outputs": [], - "source": [ - "# domain = Node.named(name=\"plan_building\", reset=True, processes=0)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "6fe20837", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "domain = sy.orchestra.launch(name=\"test\",\n", - " dev_mode=True,\n", - " reset=True, \n", - " n_consumers=3,\n", - " create_producer=True,\n", - " queue_port=22221)\n", - " \n", - "temp_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "d4493fd4", - "metadata": {}, - "outputs": [], - "source": [ - "# handle = NodeHandle(\n", - "# node_type=node_type_enum,\n", - "# deployment_type=deployment_type_enum,\n", - "# name=name,\n", - "# python_node=worker,\n", - "# node_side_type=node_side_type,\n", - "# )" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "id": "8aa1cd10", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - ".wrapper(*args, **kwargs)>" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds_client.code.process_all" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "id": "09a80eee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - ">" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.launch_job" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "553ec197", - "metadata": {}, - "outputs": [], - "source": [ - "# from hagrid.hagrid.orchestra" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "98511c00", - "metadata": {}, - "outputs": [], - "source": [ - "# handle.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "f54863a2", - "metadata": {}, - "outputs": [], - "source": [ - "filtered_kwargs = {\"x\": [3,4]}" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "id": "7485f486", - "metadata": {}, - "outputs": [], - "source": [ - "filtered_kwargs[\"domain\"] = temp_client" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "id": "7ad56294", - "metadata": {}, - "outputs": [], - "source": [ - "action_kwargs = {}\n", - "for k, v in filtered_kwargs.items():\n", - " val = ActionObject.from_obj(v)\n", - " action_kwargs[k] = val.send(temp_client)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "id": "2ec774d7", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Launching jobs\n" - ] - }, - { - "ename": "AttributeError", - "evalue": "'SQLiteStorePartition' object has no attribute 'lock'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[38], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mprocess_all\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlocal_function\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43maction_kwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "Cell \u001b[0;32mIn[13], line 8\u001b[0m, in \u001b[0;36mprocess_all\u001b[0;34m(domain, x)\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mLaunching 
jobs\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 6\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m elem \u001b[38;5;129;01min\u001b[39;00m x:\n\u001b[1;32m 7\u001b[0m \u001b[38;5;66;03m# We inject a domain object in the scope\u001b[39;00m\n\u001b[0;32m----> 8\u001b[0m batch_job \u001b[38;5;241m=\u001b[39m \u001b[43mdomain\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlaunch_job\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprocess_batch\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43melem\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 9\u001b[0m jobs \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m [batch_job]\n\u001b[1;32m 10\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstarting aggregation\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/service/action/action_object.py:1400\u001b[0m, in \u001b[0;36mActionObject._syft_wrap_attribute_for_methods..wrapper\u001b[0;34m(_self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1399\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapper\u001b[39m(_self: Any, \u001b[38;5;241m*\u001b[39margs: Any, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any):\n\u001b[0;32m-> 1400\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_base_wrapper\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/service/action/action_object.py:1389\u001b[0m, in \u001b[0;36mActionObject._syft_wrap_attribute_for_methods.._base_wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 1385\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1386\u001b[0m original_args, original_kwargs \u001b[38;5;241m=\u001b[39m debox_args_and_kwargs(\n\u001b[1;32m 1387\u001b[0m pre_hook_args, pre_hook_kwargs\n\u001b[1;32m 1388\u001b[0m )\n\u001b[0;32m-> 1389\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43moriginal_func\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43moriginal_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43moriginal_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1391\u001b[0m post_result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_syft_run_post_hooks__(context, name, result)\n\u001b[1;32m 1392\u001b[0m post_result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_syft_attr_propagate_ids(context, name, post_result)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/domain_client.py:151\u001b[0m, in \u001b[0;36mDomainClient.launch_job\u001b[0;34m(self, func, *args, **kwargs)\u001b[0m\n\u001b[1;32m 149\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mlaunch_job\u001b[39m(\u001b[38;5;28mself\u001b[39m, func, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 150\u001b[0m kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mblocking\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m--> 151\u001b[0m func \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mgetattr\u001b[39m(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcode\u001b[49m, func\u001b[38;5;241m.\u001b[39mfunc_name)\n\u001b[1;32m 152\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/domain_client.py:158\u001b[0m, in \u001b[0;36mDomainClient.code\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 154\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[1;32m 155\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcode\u001b[39m(\u001b[38;5;28mself\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Optional[APIModule]:\n\u001b[1;32m 156\u001b[0m \u001b[38;5;66;03m# if self.api.refresh_api_callback is not None:\u001b[39;00m\n\u001b[1;32m 157\u001b[0m \u001b[38;5;66;03m# self.api.refresh_api_callback()\u001b[39;00m\n\u001b[0;32m--> 158\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapi\u001b[49m\u001b[38;5;241m.\u001b[39mhas_service(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcode\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mapi\u001b[38;5;241m.\u001b[39mservices\u001b[38;5;241m.\u001b[39mcode\n\u001b[1;32m 160\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:586\u001b[0m, in \u001b[0;36mSyftClient.api\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 582\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mapi\u001b[39m(\u001b[38;5;28mself\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m SyftAPI:\n\u001b[1;32m 584\u001b[0m \u001b[38;5;66;03m# invalidate API\u001b[39;00m\n\u001b[1;32m 585\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_api \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mor\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_api\u001b[38;5;241m.\u001b[39msigning_key \u001b[38;5;241m!=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcredentials):\n\u001b[0;32m--> 586\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_fetch_api\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 588\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_api\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:824\u001b[0m, in \u001b[0;36mSyftClient._fetch_api\u001b[0;34m(self, credentials)\u001b[0m\n\u001b[1;32m 823\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_fetch_api\u001b[39m(\u001b[38;5;28mself\u001b[39m, credentials: SyftSigningKey):\n\u001b[0;32m--> 824\u001b[0m _api: SyftAPI \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconnection\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_api\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 825\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 826\u001b[0m \u001b[43m \u001b[49m\u001b[43mcommunication_protocol\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcommunication_protocol\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 827\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 829\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mrefresh_callback\u001b[39m():\n\u001b[1;32m 830\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_fetch_api(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcredentials)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/client.py:371\u001b[0m, in \u001b[0;36mPythonConnection.get_api\u001b[0;34m(self, credentials, communication_protocol)\u001b[0m\n\u001b[1;32m 360\u001b[0m obj \u001b[38;5;241m=\u001b[39m forward_message_to_proxy(\n\u001b[1;32m 361\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmake_call,\n\u001b[1;32m 362\u001b[0m proxy_target_uid\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mproxy_target_uid,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 368\u001b[0m credentials\u001b[38;5;241m=\u001b[39mcredentials,\n\u001b[1;32m 369\u001b[0m )\n\u001b[1;32m 370\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 371\u001b[0m obj \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_api\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 372\u001b[0m \u001b[43m \u001b[49m\u001b[43mfor_user\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mverify_key\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 373\u001b[0m \u001b[43m \u001b[49m\u001b[43mcommunication_protocol\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcommunication_protocol\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 374\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 375\u001b[0m obj\u001b[38;5;241m.\u001b[39mconnection \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\n\u001b[1;32m 376\u001b[0m obj\u001b[38;5;241m.\u001b[39msigning_key \u001b[38;5;241m=\u001b[39m credentials\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/node/node.py:1090\u001b[0m, in \u001b[0;36mNode.get_api\u001b[0;34m(self, for_user, communication_protocol)\u001b[0m\n\u001b[1;32m 1085\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_api\u001b[39m(\n\u001b[1;32m 1086\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 1087\u001b[0m for_user: Optional[SyftVerifyKey] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 1088\u001b[0m communication_protocol: Optional[PROTOCOL_TYPE] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 1089\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m SyftAPI:\n\u001b[0;32m-> 1090\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mSyftAPI\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfor_user\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1091\u001b[0m \u001b[43m \u001b[49m\u001b[43mnode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1092\u001b[0m \u001b[43m 
\u001b[49m\u001b[43muser_verify_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfor_user\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1093\u001b[0m \u001b[43m \u001b[49m\u001b[43mcommunication_protocol\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcommunication_protocol\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1094\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/client/api.py:507\u001b[0m, in \u001b[0;36mSyftAPI.for_user\u001b[0;34m(node, communication_protocol, user_verify_key)\u001b[0m\n\u001b[1;32m 503\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mservice\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcode\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01muser_code_service\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m UserCodeService\n\u001b[1;32m 505\u001b[0m \u001b[38;5;66;03m# find user role by verify_key\u001b[39;00m\n\u001b[1;32m 506\u001b[0m \u001b[38;5;66;03m# TODO: we should probably not allow empty verify keys but instead make user always register\u001b[39;00m\n\u001b[0;32m--> 507\u001b[0m role \u001b[38;5;241m=\u001b[39m \u001b[43mnode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_role_for_credentials\u001b[49m\u001b[43m(\u001b[49m\u001b[43muser_verify_key\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 508\u001b[0m _user_service_config_registry \u001b[38;5;241m=\u001b[39m UserServiceConfigRegistry\u001b[38;5;241m.\u001b[39mfrom_role(role)\n\u001b[1;32m 509\u001b[0m _user_lib_config_registry \u001b[38;5;241m=\u001b[39m UserLibConfigRegistry\u001b[38;5;241m.\u001b[39mfrom_user(user_verify_key)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/node/node.py:921\u001b[0m, in \u001b[0;36mNode.get_role_for_credentials\u001b[0;34m(self, credentials)\u001b[0m\n\u001b[1;32m 920\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_role_for_credentials\u001b[39m(\u001b[38;5;28mself\u001b[39m, credentials: SyftVerifyKey) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ServiceRole:\n\u001b[0;32m--> 921\u001b[0m role \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_service\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43muserservice\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_role_for_credentials\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 922\u001b[0m \u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\n\u001b[1;32m 923\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 924\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m role\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/service/user/user_service.py:138\u001b[0m, in \u001b[0;36mUserService.get_role_for_credentials\u001b[0;34m(self, credentials)\u001b[0m\n\u001b[1;32m 132\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_role_for_credentials\u001b[39m(\n\u001b[1;32m 133\u001b[0m \u001b[38;5;28mself\u001b[39m, credentials: Union[SyftVerifyKey, SyftSigningKey]\n\u001b[1;32m 134\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Union[Optional[ServiceRole], SyftError]:\n\u001b[1;32m 135\u001b[0m \u001b[38;5;66;03m# they could be different\u001b[39;00m\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;28misinstance\u001b[39m(credentials, SyftVerifyKey):\n\u001b[0;32m--> 138\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstash\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_by_verify_key\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 139\u001b[0m \u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mverify_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\n\u001b[1;32m 140\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 141\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 142\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstash\u001b[38;5;241m.\u001b[39mget_by_signing_key(\n\u001b[1;32m 143\u001b[0m credentials\u001b[38;5;241m=\u001b[39mcredentials, signing_key\u001b[38;5;241m=\u001b[39mcredentials\n\u001b[1;32m 144\u001b[0m )\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/service/user/user_stash.py:106\u001b[0m, in \u001b[0;36mUserStash.get_by_verify_key\u001b[0;34m(self, credentials, verify_key)\u001b[0m\n\u001b[1;32m 104\u001b[0m verify_key \u001b[38;5;241m=\u001b[39m SyftVerifyKey\u001b[38;5;241m.\u001b[39mfrom_string(verify_key)\n\u001b[1;32m 105\u001b[0m qks \u001b[38;5;241m=\u001b[39m QueryKeys(qks\u001b[38;5;241m=\u001b[39m[VerifyKeyPartitionKey\u001b[38;5;241m.\u001b[39mwith_obj(verify_key)])\n\u001b[0;32m--> 106\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_one\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mqks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mqks\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/store/document_store.py:636\u001b[0m, in \u001b[0;36mBaseStash.query_one\u001b[0;34m(self, credentials, qks, order_by)\u001b[0m\n\u001b[1;32m 630\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mquery_one\u001b[39m(\n\u001b[1;32m 631\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 632\u001b[0m credentials: SyftVerifyKey,\n\u001b[1;32m 633\u001b[0m qks: Union[QueryKey, QueryKeys],\n\u001b[1;32m 634\u001b[0m order_by: Optional[PartitionKey] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 635\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Result[Optional[BaseStash\u001b[38;5;241m.\u001b[39mobject_type], \u001b[38;5;28mstr\u001b[39m]:\n\u001b[0;32m--> 636\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_all\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 637\u001b[0m \u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mqks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mqks\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43morder_by\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43morder_by\u001b[49m\n\u001b[1;32m 638\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mand_then(first_or_none)\n", - "File 
\u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/store/document_store.py:614\u001b[0m, in \u001b[0;36mBaseStash.query_all\u001b[0;34m(self, credentials, qks, order_by)\u001b[0m\n\u001b[1;32m 611\u001b[0m index_qks \u001b[38;5;241m=\u001b[39m QueryKeys(qks\u001b[38;5;241m=\u001b[39munique_keys)\n\u001b[1;32m 612\u001b[0m search_qks \u001b[38;5;241m=\u001b[39m QueryKeys(qks\u001b[38;5;241m=\u001b[39msearchable_keys)\n\u001b[0;32m--> 614\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpartition\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfind_index_or_search_keys\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 615\u001b[0m \u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 616\u001b[0m \u001b[43m \u001b[49m\u001b[43mindex_qks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mindex_qks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 617\u001b[0m \u001b[43m \u001b[49m\u001b[43msearch_qks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msearch_qks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 618\u001b[0m \u001b[43m \u001b[49m\u001b[43morder_by\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43morder_by\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 619\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/store/document_store.py:401\u001b[0m, in \u001b[0;36mStorePartition.find_index_or_search_keys\u001b[0;34m(self, credentials, index_qks, search_qks, order_by)\u001b[0m\n\u001b[1;32m 394\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mfind_index_or_search_keys\u001b[39m(\n\u001b[1;32m 395\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 396\u001b[0m credentials: SyftVerifyKey,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 399\u001b[0m order_by: Optional[PartitionKey] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 400\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Result[List[SyftObject], \u001b[38;5;28mstr\u001b[39m]:\n\u001b[0;32m--> 401\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_thread_safe_cbk\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 402\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_find_index_or_search_keys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 403\u001b[0m \u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 404\u001b[0m \u001b[43m \u001b[49m\u001b[43mindex_qks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mindex_qks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 405\u001b[0m \u001b[43m \u001b[49m\u001b[43msearch_qks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msearch_qks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 406\u001b[0m \u001b[43m \u001b[49m\u001b[43morder_by\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43morder_by\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 407\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/store/document_store.py:355\u001b[0m, in \u001b[0;36mStorePartition._thread_safe_cbk\u001b[0;34m(self, cbk, *args, **kwargs)\u001b[0m\n\u001b[1;32m 354\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_thread_safe_cbk\u001b[39m(\u001b[38;5;28mself\u001b[39m, cbk: Callable, 
\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[0;32m--> 355\u001b[0m locked \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlock\u001b[49m\u001b[38;5;241m.\u001b[39macquire(blocking\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 356\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m locked:\n\u001b[1;32m 357\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFAILED TO LOCK\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", - "\u001b[0;31mAttributeError\u001b[0m: 'SQLiteStorePartition' object has no attribute 'lock'" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Idle worker expired: b'5C5B-F4BE'\n", - "Idle worker expired: b'D140-D0EF'\n", - "Idle worker expired: b'4A71-B0A6'\n", - "Idle worker expired: b'4999-97E5'\n", - "Idle worker expired: b'897B-20D9'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - 
"[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or 
directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n", - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[... same lock error repeated many times ...]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "[... same lock error repeated many times ...]\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Exception in thread Thread-6:\n", - "Traceback (most recent call last):\n", - "  File \"/opt/anaconda3/envs/syft/lib/python3.9/threading.py\", line 980, in _bootstrap_inner\n", - "    self.run()\n", - "  File \"/opt/anaconda3/envs/syft/lib/python3.9/threading.py\", line 917, in run\n", - "    self._target(*self._args, **self._kwargs)\n", - "  File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/queue/zmq_queue.py\", line 113, in read_items\n", - "    for item in items:\n", - "TypeError: 'NoneType' object is not iterable\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "Timeout elapsed after 30 seconds while trying to acquiring lock.\n", - "FAILED TO LOCK\n", - "[Errno 2] No such file or directory: 
'/tmp/sherlock/QueueItem.lock'\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Exception in thread Thread-12:\n", - "Traceback (most recent call last):\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/threading.py\", line 980, in _bootstrap_inner\n", - " self.run()\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/threading.py\", line 917, in run\n", - " self._target(*self._args, **self._kwargs)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/queue/zmq_queue.py\", line 113, in read_items\n", - " for item in items:\n", - "TypeError: 'NoneType' object is not iterable\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[Errno 2] No such file or directory: '/tmp/sherlock/QueueItem.lock'\n", - "Timeout elapsed after 30 seconds while trying to acquiring lock.\n", - "FAILED TO LOCK\n", - "Idle worker expired: b'5C5B-F4BE'\n", - "Idle worker expired: b'C78E-56CA'\n", - "Idle worker expired: b'D140-D0EF'\n", - "Idle worker expired: b'897B-20D9'\n", - "Idle worker expired: b'4A71-B0A6'\n", - "Idle worker expired: b'4999-97E5'\n", - "Idle worker expired: b'D140-D0EF'\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Heartbeat failure, worker can't reach queue, reconnecting in 1s\n", - "Idle worker expired: b'897B-20D9'\n", - "Idle worker expired: b'4A71-B0A6'\n", - "Idle worker expired: b'4999-97E5'\n", - "Idle worker expired: b'5C5B-F4BE'\n", - "Idle worker expired: b'C78E-56CA'\n", - "Idle worker expired: b'C416-92ED'\n", - "Idle worker expired: b'60D7-BC5C'\n", - "Idle worker expired: b'9425-66ED'\n", - "Idle worker expired: b'CB72-46F6'\n", - "Idle worker expired: b'9F8A-71A1'\n", - "Idle worker expired: b'F6CB-C950'\n" - ] - } - ], - "source": [ - "process_all.local_function(**action_kwargs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cba5afd4", - "metadata": {}, - "outputs": [], - "source": [ - "# args, _, _, values = inspect.getargvalues(process_all.local_function)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "124c7d94", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "e2df0312", - "metadata": {}, - "outputs": [ - { - "ename": "TypeError", - "evalue": "process_all() missing 1 required positional argument: 'domain'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[11], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mprocess_all\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m3\u001b[39;49m\u001b[43m,\u001b[49m\u001b[38;5;241;43m4\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/service/code/user_code.py:593\u001b[0m, in \u001b[0;36mSubmitUserCode.__call__\u001b[0;34m(self, *args, 
**kwargs)\u001b[0m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m on_mock_data:\n\u001b[1;32m 592\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWarning: The result you see is computed on MOCK data.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 593\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlocal_function\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mfiltered_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 594\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 595\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mNotImplementedError\u001b[39;00m\n", - "\u001b[0;31mTypeError\u001b[0m: process_all() missing 1 required positional argument: 'domain'" - ] - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4f7c191e", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d5e2e567", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "720e84cd", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5d505c08", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "14f969e1", - "metadata": {}, - "source": [ - "Usecase 1: What if you have a syft function thats nested with jobs?\n", - "\n", - "Subproblem 1.1: We need to code before submitting\n", - "\n", - "- solution 1: remove the job aspect from local testing\n", - "- solution 2: you mimick the domain architecture locally\n", - "- solution 3: you run this on the low side on mock data\n", - "\n", - "\n", - "Subproblem 1.2: We need to be able to run this on the high side\n", - "\n", - "Solution\n", - "- move requests / code / inputs from low side to high side\n", - "- run job on the high side\n", - "- get results from job and load them into the low side\n", - "\n", - "Usecase 2: What if you have a syft function that takes days to run on the high side?\n", - "\n", - "See solution for 1.2\n", - "\n", - "Solutions based on example\n", - "\n", - "- we have an optional mock sample in asset creation\n", - "- we split submission and code_request on the DS side\n", - "- DS can call code that is submitted\n", - "\n", - "- what is the UX for running on the low side as a test vs a code request\n", - "\n", - "ux for running as a test locally\n", - "my_function(asset.mock_sample)\n", - "\n", - "ux for running as a test on the low side\n", - "\n", - "ds_client.code.my_function(asset.mock)\n", - "-> Should print info: runs on mock data\n", - "\n", - "ux for running the code on real data\n", - "\n", - "ds_client.code.my_function(asset)\n", - "-> Should print info: runs on real data\n", - "\n", - "the ux for a code request\n", - "\n", - "ds_client.make_code_request(\n", - "\n", - "ds_client.submit(code2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "43f75da1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2d8b884f", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0e8644a1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "db018fe7", - 
"metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "c6016213", - "metadata": {}, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "'DomainClient' object has no attribute 'submit'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[17], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mds_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msubmit\u001b[49m(code1)\n", - "\u001b[0;31mAttributeError\u001b[0m: 'DomainClient' object has no attribute 'submit'" - ] - } - ], - "source": [ - "ds_client.submit(code1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b8917983", - "metadata": {}, - "outputs": [], - "source": [ - "ds_client.request(code1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c462106e", - "metadata": {}, - "outputs": [], - "source": [ - "do_client.code" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "05231920", - "metadata": {}, - "outputs": [], - "source": [ - "request = do_client.requests[0]" - ] - }, - { - "cell_type": "markdown", - "id": "5093a607", - "metadata": {}, - "source": [ - "code1:\n", - " blbalblab\n", - " code()\n", - "\n", - "code2:\n", - " blballbbla" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "04ca62ac", - "metadata": {}, - "outputs": [], - "source": [ - "request.code2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24667fed", - "metadata": {}, - "outputs": [], - "source": [ - "do_client.all_code.code2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6f196e29", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0e6ba0c3", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "b625d8e4", - "metadata": {}, - "source": [ - "When doing local execution **without** a domain" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f57288dd", - "metadata": {}, - "outputs": [], - "source": [ - "my_syft_function(x, y)" - ] - }, - { - "cell_type": "markdown", - "id": "0063f077", - "metadata": {}, - "source": [ - "When doing local execution **with** a domain" - ] - }, - { - "cell_type": "markdown", - "id": "89bc4c16", - "metadata": {}, - "source": [ - "- is it relevant to think about whether we need to configure the domain (e.g. consumsers)\n", - "- do we create the domain explicitly or does it happen on the fly?\n", - "- how do we indicate in the function call that we want to create a domain?" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "430ed596", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "9a5c81e7", - "metadata": {}, - "source": [ - "When doing **high side** execution **without** a domain" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c773ffa7", - "metadata": {}, - "outputs": [], - "source": [ - "users_function = func.unsafe_function\n", - "real_result = users_function(trade_data=pvt_data)" - ] - }, - { - "cell_type": "markdown", - "id": "7412c16f", - "metadata": {}, - "source": [ - "When doing **high side** exeuction **with** a domain" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "375ed965", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "16/11/23 19:29:10 FUNCTION LOG (6470a06b282445ab8b912302c8c33a52): Launching jobs\n", - "16/11/23 19:29:10 FUNCTION LOG (6470a06b282445ab8b912302c8c33a52): starting aggregation\n", - "16/11/23 19:29:10 FUNCTION LOG (6470a06b282445ab8b912302c8c33a52): Done\n", - "16/11/23 19:29:11 FUNCTION LOG (511d17d79d0b450ab0f4d410bbbbe688): starting batch 1\n", - "16/11/23 19:29:11 EXCEPTION LOG (511d17d79d0b450ab0f4d410bbbbe688):\n", - "\n", - "Encountered while executing process_batch:\n", - "Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/code/user_code.py\", line 1087, in execute_byte_code\n", - " result = eval(evil_string, _globals, _locals) # nosec\n", - " File \"\", line 1, in \n", - " File \"\", line 10, in user_func_process_batch_563f3f1899d092d0501158fbb086f3fd2c79a010c4e85bfe4fe12e0e1c01abde_3fa7f836b5a65455ef117b87f10f6a3f711d956132679e32522b6ae4c1440ec5\n", - " File \"\", line 6, in process_batch\n", - "IndexError: list index out of range\n", - "\n", - " 3 print(f'starting batch {batch}')\n", - " 4 from time import sleep\n", - "--> 5 [1, 2, 3, 4][5]\n", - " 6 sleep(1)\n", - "16/11/23 19:29:11 FUNCTION LOG (1f4245154b5a4e8ab20f7b3907203542): starting batch 2\n", - "16/11/23 19:29:11 EXCEPTION LOG (1f4245154b5a4e8ab20f7b3907203542):\n", - "\n", - "Encountered while executing process_batch:\n", - "Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/code/user_code.py\", line 1087, in execute_byte_code\n", - " result = eval(evil_string, _globals, _locals) # nosec\n", - " File \"\", line 1, in \n", - " File \"\", line 10, in user_func_process_batch_563f3f1899d092d0501158fbb086f3fd2c79a010c4e85bfe4fe12e0e1c01abde_3fa7f836b5a65455ef117b87f10f6a3f711d956132679e32522b6ae4c1440ec5\n", - " File \"\", line 6, in process_batch\n", - "IndexError: list index out of range\n", - "\n", - " 3 print(f'starting batch {batch}')\n", - " 4 from time import sleep\n", - "--> 5 [1, 2, 3, 4][5]\n", - " 6 sleep(1)\n" - ] - } - ], - "source": [ - "job = ds_client.code.process_all(x=x_ptr, blocking=False)\n", - "sleep(5)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "0f8e12c0", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 1\n", - "\n", - "Exception encountered while running process_batch, please contact the Node Admin for more info.\n" - ] - } - ], - "source": [ - "job.subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "6d97d2a4", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

Job List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job, syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "afbe026b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 1\n", - "\n", - "\n", - "Encountered while executing process_batch:\n", - "Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/code/user_code.py\", line 1087, in execute_byte_code\n", - " result = eval(evil_string, _globals, _locals) # nosec\n", - " File \"\", line 1, in \n", - " File \"\", line 10, in user_func_process_batch_563f3f1899d092d0501158fbb086f3fd2c79a010c4e85bfe4fe12e0e1c01abde_3fa7f836b5a65455ef117b87f10f6a3f711d956132679e32522b6ae4c1440ec5\n", - " File \"\", line 6, in process_batch\n", - "IndexError: list index out of range\n", - "\n", - " 3 print(f'starting batch {batch}')\n", - " 4 from time import sleep\n", - "--> 5 [1, 2, 3, 4][5]\n", - " 6 sleep(1)\n" - ] - } - ], - "source": [ - "client.jobs[0].subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "81c23727", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

Job List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job, syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "a9fde08d", - "metadata": {}, - "outputs": [ - { - "ename": "IndexError", - "evalue": "list index out of range", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mjob\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msubjobs\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241m.\u001b[39mlogs(stderr\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n", - "\u001b[0;31mIndexError\u001b[0m: list index out of range" - ] - } - ], - "source": [ - "job.subjobs[0].logs(stderr=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "78bd08dc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 2\n", - "\n", - "\n", - "Encountered while executing process_batch:\n", - "Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/code/user_code.py\", line 1072, in execute_byte_code\n", - " result = eval(evil_string, _globals, _locals) # nosec\n", - " File \"\", line 1, in \n", - " File \"\", line 10, in user_func_process_batch_47f8c8f3db3a30695a28e4a51e44916669ac3d111924cb614181c64b2c3b8323_3fa7f836b5a65455ef117b87f10f6a3f711d956132679e32522b6ae4c1440ec5\n", - " File \"\", line 6, in process_batch\n", - "IndexError: list index out of range\n", - "\n", - " 3 print(f'starting batch {batch}')\n", - " 4 from time import sleep\n", - "--> 5 [1, 2, 3, 4][5]\n", - " 6 sleep(1)\n" - ] - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "cc0db669", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Pointer:\n", - "None" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "5bf0974f", - "metadata": {}, - "outputs": [ - { - "ename": "TypeError", - "evalue": "unsupported operand type(s) for +: 'int' and 'Err'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[11], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;43msum\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[43mj\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwait\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mj\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mjob\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msubjobs\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mTypeError\u001b[0m: unsupported operand type(s) for +: 'int' and 'Err'" - ] - } 
- ], - "source": [ - "sum([j.wait().get() for j in job.subjobs])" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "55892031", - "metadata": {}, - "outputs": [], - "source": [ - "# import sys" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "b0800f1d", - "metadata": {}, - "outputs": [], - "source": [ - "# print(\"D\")\n", - "# print(\"A\")\n", - "\n", - "# try:\n", - "# raise ValueError()\n", - "# except Exception as e:\n", - "# tb = e.__traceback__\n", - "# # print(len([tb.tb_next while tb is not None])\n", - "# # trace = []\n", - "# # while tb is not None:\n", - "# # trace.append({\n", - "# # \"filename\": tb.tb_frame.f_code.co_filename,\n", - "# # \"name\": tb.tb_frame.f_code.co_name,\n", - "# # \"lineno\": tb.tb_lineno\n", - "# # })\n", - "# # tb = tb.tb_next\n", - "\n", - "\n", - "# # print(sys.exc_info()[-1].tb_lineno)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "6c8fd04c", - "metadata": {}, - "outputs": [], - "source": [ - "# n = 0\n", - "# while tb is not None:\n", - "# tb = tb.tb_next\n", - "# n+=1" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "0c9368c9", - "metadata": {}, - "outputs": [], - "source": [ - "# n" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "1d73ef1e", - "metadata": {}, - "outputs": [], - "source": [ - "# trace" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "e61760f5", - "metadata": {}, - "outputs": [], - "source": [ - "import traceback\n", - "import sys" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "f5f2c781", - "metadata": {}, - "outputs": [], - "source": [ - "# try:\n", - "# raise ValueError()\n", - "# except Exception as e:\n", - "# print(sys.exception())\n", - "# # print()\n", - "\n", - "\n", - "# # # traceback.print_stack()\n", - "\n", - "\n", - "# # # tb = e.__traceback__\n", - "# # # traceback.print_tb(type(e), e, tb)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ead1e368", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "e956f350", - "metadata": {}, - "outputs": [], - "source": [ - "# print(\"\\n\".join(['def user_func_process_batch_47f8c8f3db3a30695a28e4a51e44916669ac3d111924cb614181c64b2c3b8323_bd42b6607712b09a0b187e3c67030f772c5dcd3c4152e4abebcb3a9f0a8259e4(batch):',\n", - "# '', ' def process_batch(batch):', \" print(f'starting batch {batch}')\",\n", - "# ' from time import sleep', ' sleep(1)', ' [1, 2, 3][5]',\n", - "# \" print('done')\", ' return batch + 1', ' result = process_batch(batch=batch)',\n", - "# ' return result']))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/nested_requests.ipynb b/notebooks/helm/nested-requests.ipynb similarity index 99% rename from 
notebooks/helm/nested_requests.ipynb rename to notebooks/helm/nested-requests.ipynb index 2dcd088254f..56472460644 100644 --- a/notebooks/helm/nested_requests.ipynb +++ b/notebooks/helm/nested-requests.ipynb @@ -23,29 +23,6 @@ "import inspect" ] }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: 133 Protocol Updates Staged to dev

" - ], - "text/plain": [ - "SyftSuccess: 133 Protocol Updates Staged to dev" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sy.stage_protocol_changes()" - ] - }, { "cell_type": "code", "execution_count": 3, diff --git a/notebooks/helm/nested_syft_functions.ipynb b/notebooks/helm/nested-syft-functions.ipynb similarity index 99% rename from notebooks/helm/nested_syft_functions.ipynb rename to notebooks/helm/nested-syft-functions.ipynb index fd528db9c35..a7d17165464 100644 --- a/notebooks/helm/nested_syft_functions.ipynb +++ b/notebooks/helm/nested-syft-functions.ipynb @@ -234,9 +234,7 @@ " jobs += [batch_job]\n", " print(\"starting aggregation\")\n", " print(\"Done\")\n", - "# results = [x.wait().get() for x in jobs]\n", - " return 3\n", - "# return sum(results)" + " return None" ] }, { diff --git a/notebooks/helm/producer node.ipynb b/notebooks/helm/producer node.ipynb deleted file mode 100644 index e6206aa83be..00000000000 --- a/notebooks/helm/producer node.ipynb +++ /dev/null @@ -1,502 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "a196017f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft import ActionObject\n", - "from syft import syft_function, syft_function_single_use\n", - "from time import sleep\n", - "from syft.service.queue.zmq_queue import ZMQQueueConfig, ZMQClientConfig" - ] - }, - { - "cell_type": "markdown", - "id": "cb2d07de", - "metadata": {}, - "source": [ - "with server" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "4cbee0b1", - "metadata": {}, - "outputs": [], - "source": [ - "# client = sy.login(port=8800, email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "markdown", - "id": "6c9ce5a0", - "metadata": {}, - "source": [ - "Without server" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "9b31c627", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\", dev_mode=True,\n", - " reset=True,\n", - " n_consumers=0,\n", - " create_producer=True,\n", - " queue_port=62249\n", - " )\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "31df1d8f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'tcp://localhost:62249'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "node.python_node.queue_manager.producers[\"api_call\"].address" - ] - }, - { - "cell_type": "markdown", - "id": "176addfb", - "metadata": {}, - "source": [ - "setup: compute train-test overlap between a very large train set and a smaller test set. Small test is still to big for memory, so we split it into 54 parts. We keep 1 of those parts in memory. We dont keep the train set in memory, but read and compare with 1/54 parts line by line. Each part takes ~30 hours, but we can run 54 processes in parallel." - ] - }, - { - "cell_type": "markdown", - "id": "a0cea81b", - "metadata": {}, - "source": [ - "# Setup syft functions" - ] - }, - { - "cell_type": "markdown", - "id": "da2b114a", - "metadata": {}, - "source": [ - "## Dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "83307a2f", - "metadata": {}, - "outputs": [], - "source": [ - "x = ActionObject.from_obj([1,2])\n", - "x_ptr = x.send(client)" - ] - }, - { - "cell_type": "markdown", - "id": "31bbb3ff", - "metadata": {}, - "source": [ - "## Batch function" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "5d2fd248", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function()\n", - "def process_batch(batch):\n", - " # takes 30 hours normally\n", - " print(f\"starting batch {batch}\")\n", - " from time import sleep\n", - " sleep(1)\n", - " print(\"done\")\n", - " return batch+1" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "9ba22655", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.submit(process_batch)" - ] - }, - { - "cell_type": "markdown", - "id": "01319f1f", - "metadata": {}, - "source": [ - "## Main function" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "ca1b95ee", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@syft_function_single_use(x=x_ptr)\n", - "def process_all(domain, x):\n", - " jobs = []\n", - " print(\"Launching jobs\")\n", - " for elem in x:\n", - " # We inject a domain object in the scope\n", - " batch_job = domain.launch_job(process_batch, batch=elem)\n", - " jobs += [batch_job]\n", - " print(\"starting aggregation\")\n", - " print(\"Done\")\n", - "# results = [x.wait().get() for x in jobs]\n", - " return 1\n", - "# return sum(results)" - ] - }, - { - "cell_type": "markdown", - "id": "1e77c5db", - "metadata": {}, - "source": [ - "# Approve & run" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "0ab572f9", - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-domain-helm2\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 60267d2c91664370b8a116d93c7ac8d6 changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 60267d2c91664370b8a116d93c7ac8d6 changes applied" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.request_code_execution(process_all)\n", - "client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "375ed965", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Idle worker expired: b'B37B-150D'\n", - "Idle worker expired: b'8486-F974'\n" - ] - } - ], - "source": [ - "job = client.code.process_all(x=x_ptr, blocking=False)\n", - "sleep(5)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "e7a94e1c", - "metadata": {}, - "outputs": [], - "source": [ - "# print_open_files()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "e5af2d1d", - "metadata": {}, - "outputs": [], - "source": [ - "# job.subjobs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7b8b2a2e", - "metadata": {}, - "outputs": [], - "source": [ - "res = job.wait().get()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3cba7a2a", - "metadata": {}, - "outputs": [], - "source": [ - "res" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "22ac31a6", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 1\n", - "\n" - ] - } - ], - "source": [ - "job.subjobs[1].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "9cb2e2bf", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 2\n", - "done\n", - "done\n", - "\n" - ] - } - ], - "source": [ - "job.subjobs[0].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "7fb84ccd", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "starting batch 1\n", - "\n" - ] - } - ], - "source": [ - "job.subjobs[1].logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "9ec222a7", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "3" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.subjobs[0].wait().get()" - ] - }, - { - "cell_type": "markdown", - "id": "3d141e3b", - "metadata": {}, - "source": [ - "##### Next steps" - ] - }, - { - "cell_type": "markdown", - "id": "4e856437", - "metadata": {}, - "source": [ - "\n", - "\n", - "- [x] seaweedfs\n", - "- - [wip] merge into normal code\n", - "- [x] Multiple consumers (queues)\n", - "- - [x] implement messagequeue\n", - "- - [x] integrate messagequeue with the rest of the code\n", - "- [x] test flow with async main function\n", - "- [x] up datastructures for jobs and logs\n", - "- - [x] set up reprs\n", - "- [wip] integrate helm code\n", - "- [ ] test on large machine" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5ab56b04", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 
1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/syft with dynamic docker workers.ipynb b/notebooks/helm/syft with dynamic docker workers.ipynb deleted file mode 100644 index 2558f33365f..00000000000 --- a/notebooks/helm/syft with dynamic docker workers.ipynb +++ /dev/null @@ -1,1519 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "b27d69a2", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft/notebooks\n" - ] - } - ], - "source": [ - "import syft as sy\n", - "from syft.store.blob_storage import BlobStorageConfig, BlobStorageClientConfig\n", - "from syft.store.blob_storage.seaweedfs import SeaweedFSClient, SeaweedFSClientConfig\n", - "from syft import ActionObject\n", - "from syft.service.action.action_data_empty import ActionFileData\n", - "from syft.service.queue.zmq_queue import ZMQQueueConfig, ZMQClientConfig\n", - "from collections import defaultdict" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "dcad6636", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-domain-helm2\", dev_mode=True,\n", - " reset=True,\n", - " n_consumers=0,\n", - " create_producer=True)\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "a9ea17d8", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\u001b[0m(52)\u001b[0;36mget_default_env_vars\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 50 \u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mpdb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 51 \u001b[0;31m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m---> 52 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mruns_in_docker\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 53 \u001b[0;31m \u001b[0;31m# get env vars from current environment\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 54 \u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0menviron\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n", - "ipdb> c\n", - "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\u001b[0m(52)\u001b[0;36mget_default_env_vars\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 50 \u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mpdb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 51 \u001b[0;31m \u001b[0mpdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m---> 52 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mruns_in_docker\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 53 \u001b[0;31m \u001b[0;31m# get env vars from current environment\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 54 \u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0menviron\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n", - "ipdb> q\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftError: Exception calling worker.start_workers. Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/node/node.py\", line 929, in handle_api_call_with_unsigned_result\n", - " result = method(context, *api_call.args, **api_call.kwargs)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py\", line 339, in _decorator\n", - " result = func(self, *args, **kwargs)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 152, in start_workers\n", - " res = start_worker_container(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 125, in start_worker_container\n", - " env_vars = get_env_vars(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 84, in get_env_vars\n", - " default_env_vars = get_default_env_vars(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 52, in get_default_env_vars\n", - " if context.node.runs_in_docker:\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 52, in get_default_env_vars\n", - " if context.node.runs_in_docker:\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/bdb.py\", line 88, in trace_dispatch\n", - " return self.dispatch_line(frame)\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/bdb.py\", line 113, in dispatch_line\n", - " if self.quitting: raise BdbQuit\n", - "bdb.BdbQuit\n", - "

" - ], - "text/plain": [ - "SyftError: Exception calling worker.start_workers. Traceback (most recent call last):\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/node/node.py\", line 929, in handle_api_call_with_unsigned_result\n", - " result = method(context, *api_call.args, **api_call.kwargs)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/service.py\", line 339, in _decorator\n", - " result = func(self, *args, **kwargs)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 152, in start_workers\n", - " res = start_worker_container(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 125, in start_worker_container\n", - " env_vars = get_env_vars(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 84, in get_env_vars\n", - " default_env_vars = get_default_env_vars(context)\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 52, in get_default_env_vars\n", - " if context.node.runs_in_docker:\n", - " File \"/Users/koen/workspace/PySyft/packages/syft/src/syft/service/worker/worker_service.py\", line 52, in get_default_env_vars\n", - " if context.node.runs_in_docker:\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/bdb.py\", line 88, in trace_dispatch\n", - " return self.dispatch_line(frame)\n", - " File \"/opt/anaconda3/envs/syft/lib/python3.9/bdb.py\", line 113, in dispatch_line\n", - " if self.quitting: raise BdbQuit\n", - "bdb.BdbQuit" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.worker.start_workers(n=3)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "c0c8331c", - "metadata": {}, - "outputs": [], - "source": [ - "workers = client.worker.list()" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "f8fc2e1b", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

DockerWorker List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n" - ], - "text/plain": [ - "[syft.service.worker.worker_service.DockerWorker,\n", - " syft.service.worker.worker_service.DockerWorker,\n", - " syft.service.worker.worker_service.DockerWorker]" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "workers" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "28c5e351", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: 2 workers stopped

" - ], - "text/plain": [ - "SyftSuccess: 2 workers stopped" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.worker.stop(workers)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "542ec911", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2ac0c822", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f1ce246", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "210a3bbd", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4e95077d", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "87ca6d8a", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "659434fe", - "metadata": {}, - "source": [ - "## DEV" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38d0fa4a", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d2cc165", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "09c2d6ac", - "metadata": {}, - "outputs": [], - "source": [ - "import docker" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "6934ec8f", - "metadata": {}, - "outputs": [], - "source": [ - "image = \"openmined/grid-backend\"\n", - "client = docker.from_env()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "c7bb1389", - "metadata": {}, - "outputs": [], - "source": [ - "PYSYFT_DIR = f\"/Users/koen/workspace/pysyft\"" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "a742f934", - "metadata": {}, - "outputs": [], - "source": [ - "volumes = {\n", - " f'{PYSYFT_DIR}/packages/syft': {'bind': '/app/syft', 'mode': 'rw'},\n", - " f'{PYSYFT_DIR}/packages/grid/backend/grid': {'bind': '/app/grid', 'mode': 'rw'},\n", - " \"/var/run/docker.sock\": {\"bind\": \"/var/run/docker.sock\"}\n", - "\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "09b15930", - "metadata": {}, - "outputs": [], - "source": [ - "def get_default_env_vars():\n", - " env_path = f\"{PYSYFT_DIR}/packages/grid/.env\"\n", - "\n", - " with open(env_path, \"r\") as f:\n", - " lines = f.read().splitlines()\n", - "\n", - " default_env_vars = dict()\n", - " for l in lines:\n", - " if \"=\" in l:\n", - " try:\n", - " var_name, value = l.split(\"=\", 1)\n", - " \n", - " def remove_redundant_quotes(value):\n", - " for s in ['\"', \"'\"]:\n", - " if len(value) !=0:\n", - " if value[0] == s:\n", - " value = value[1:]\n", - " if value[-1] == s:\n", - " value = value[:-1]\n", - " \n", - " value = remove_redundant_quotes(value)\n", - " default_env_vars[var_name] = value\n", - " except Exception as e:\n", - " print(\"error parsing env file\", e)\n", - " return default_env_vars" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "85a5ed92", - "metadata": {}, - "outputs": [], - "source": [ - "def get_env_vars():\n", - " default_env_vars = get_default_env_vars()\n", - " import secrets\n", - " worker_tag = \"\".join([str(secrets.choice(list(range(10)))) for i in range(10)])\n", - "\n", - " extra_env_vars = {\n", - " \"SERVICE_NAME\": \"backend\",\n", - " 
\"CREATE_PRODUCER\": \"false\",\n", - " \"N_CONSUMERS\": \"1\",\n", - " \"DEV_MODE\": \"True\",\n", - " \"DEFAULT_ROOT_USERNAME\": f\"worker-{worker_tag}\",\n", - " \"PORT\": \"8003\",\n", - " \"HTTP_PORT\": \"88\",\n", - " \"HTTPS_PORT\": \"446\",\n", - " \"DEFAULT_ROOT_EMAIL\": f\"{worker_tag}@openmined.org\"\n", - " \n", - " }\n", - "\n", - " return {**default_env_vars, **extra_env_vars}\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "4a92d374", - "metadata": {}, - "outputs": [], - "source": [ - "import random" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "1aa550f7", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "220" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "random.randint(0, 1000)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "a0450b60", - "metadata": {}, - "outputs": [], - "source": [ - "env_vars = get_env_vars()" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "17f547d4", - "metadata": {}, - "outputs": [], - "source": [ - "from pprint import pprint" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "bc445c39", - "metadata": {}, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'grid'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[21], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mgrid\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mbackend\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcore\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mconfig\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m settings\n", - "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'grid'" - ] - } - ], - "source": [ - "from grid.backend.core.config import settings" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "1835d02b", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'/Users/koen/workspace/pysyft'" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from pathlib import Path\n", - "str(Path(\"/Users/koen/workspace/pysyft/packages/grid\").parent.parent)" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "id": "13cc6c67", - "metadata": {}, - "outputs": [], - "source": [ - "# pprint(env_vars)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "f58bd1eb", - "metadata": {}, - "outputs": [], - "source": [ - "# env_vars" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "4ce97360", - "metadata": {}, - "outputs": [], - "source": [ - "# client.containers" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "id": "d7b0f47b", - "metadata": {}, - "outputs": [], - "source": [ - "# client.containers.run('alpine', 'echo hello world', )" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "8eb7cb8b", - "metadata": {}, - "outputs": [], - "source": [ - "res = client.containers.run(image=image, volumes=volumes, environment=env_vars,\n", - " detach=True, network_mode=\"container:grid-proxy-1\")" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "id": "d1608704", - "metadata": {}, - 
"outputs": [ - { - "data": { - "text/plain": [ - "'b7a8e3c2a0b8295d22bebdcd004d7a2f14b05c187c4b3cc20a37e914938aee63'" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res.id" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "id": "b5d9e7b7", - "metadata": {}, - "outputs": [], - "source": [ - "for x in client.containers.list(filters={\"id\": res.id}):\n", - " x.stop()" - ] - }, - { - "cell_type": "code", - "execution_count": 123, - "id": "92d85752", - "metadata": {}, - "outputs": [], - "source": [ - "x = res.stop()" - ] - }, - { - "cell_type": "code", - "execution_count": 121, - "id": "9dd4e6ca", - "metadata": {}, - "outputs": [], - "source": [ - "x" - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "id": "72f128d9", - "metadata": {}, - "outputs": [], - "source": [ - "# print(res.logs().decode())" - ] - }, - { - "cell_type": "code", - "execution_count": 62, - "id": "a918bd10", - "metadata": {}, - "outputs": [], - "source": [ - "# env_vars[\"BACKEND_CORS_ORIGINS\"].replace(\"\\'\", \"'\")" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "id": "197c1277", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['http://localhost',\n", - " 'http://localhost:4200',\n", - " 'http://localhost:3000',\n", - " 'http://localhost:8080',\n", - " 'https://localhost',\n", - " 'https://localhost:4200',\n", - " 'https://localhost:3000',\n", - " 'https://localhost:8080',\n", - " 'http://dev.grid.openmined.org',\n", - " 'https://stag.grid.openmined.org',\n", - " 'https://grid.openmined.org']" - ] - }, - "execution_count": 56, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[\"http://localhost\",\"http://localhost:4200\",\"http://localhost:3000\",\"http://localhost:8080\",\"https://localhost\",\"https://localhost:4200\",\"https://localhost:3000\",\"https://localhost:8080\",\"http://dev.grid.openmined.org\",\"https://stag.grid.openmined.org\",\"https://grid.openmined.org\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "id": "ec5208ac", - "metadata": {}, - "outputs": [], - "source": [ - "# for k, v in env_vars.items():\n", - "# print(f\"{k}={v}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e40cd44b", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2a4d5566", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "af7db453", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "164513fd", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "id": "2adb65af", - "metadata": {}, - "source": [ - "\n", - " \"Mounts\": [\n", - " {\n", - " \"Type\": \"bind\",\n", - " \"Source\": \"/Users/koen/workspace/pysyft/packages/grid/data/package-cache\",\n", - " \"Destination\": \"/root/.cache\",\n", - " \"Mode\": \"rw\",\n", - " \"RW\": true,\n", - " \"Propagation\": \"rprivate\"\n", - " },\n", - " {\n", - " \"Type\": \"bind\",\n", - " \"Source\": \"/Users/koen/workspace/pysyft/packages/grid/backend/grid\",\n", - " \"Destination\": \"/app/grid\",\n", - " \"Mode\": \"rw\",\n", - " \"RW\": true,\n", - " \"Propagation\": \"rprivate\"\n", - " },\n", - " {\n", - " \"Type\": \"bind\",\n", - " \"Source\": \"/Users/koen/workspace/pysyft/packages/syft\",\n", - " \"Destination\": \"/app/syft\",\n", - " \"Mode\": 
\"rw\",\n", - " \"RW\": true,\n", - " \"Propagation\": \"rprivate\"\n", - " },\n", - " {\n", - " \"Type\": \"volume\",\n", - " \"Name\": \"grid_credentials-data-worker\",\n", - " \"Source\": \"/var/lib/docker/volumes/grid_credentials-data-worker/_data\",\n", - " \"Destination\": \"/storage\",\n", - " \"Driver\": \"local\",\n", - " \"Mode\": \"z\",\n", - " \"RW\": true,\n", - " \"Propagation\": \"\"\n", - " }\n", - " ],\n", - " \"Config\": {\n", - " \"Hostname\": \"high-side\",\n", - " \"Domainname\": \"\",\n", - " \"User\": \"\",\n", - " \"AttachStdin\": false,\n", - " \"AttachStdout\": true,\n", - " \"AttachStderr\": true,\n", - " \"Tty\": true,\n", - " \"OpenStdin\": true,\n", - " \"StdinOnce\": false,\n", - " \"Env\": [\n", - " \"TRAEFIK_TLS_CERTS=./traefik/certs\",\n", - " \"S3_ROOT_PWD=admin\",\n", - " \"REDIS_STORE_DB_ID=0\",\n", - " \"ASSOCIATION_TIMEOUT=10\",\n", - " \"REDIS_HOST=redis\",\n", - " \"DOCKER_IMAGE_WORKER=openmined/grid-enclave\",\n", - " \"QUEUE_PORT=5556\",\n", - " \"FRONTEND_TARGET=grid-ui-production\",\n", - " \"DOCKER_IMAGE_HEADSCALE=openmined/grid-vpn-headscale\",\n", - " \"DOCKER_IMAGE_TRAEFIK=traefik\",\n", - " \"TRAEFIK_TAG=2b0a5be942b68dde2b00b8bca4514e4dde7f312d0d5cd397a9733bb1520f5b47\",\n", - " \"SMTP_TLS=True\",\n", - " \"S3_ENDPOINT=seaweedfs\",\n", - " \"HTTPS_PORT=445\",\n", - " \"OBLV_KEY_PATH=~/.oblv\",\n", - " \"ENABLE_WARNINGS=True\",\n", - " \"USE_BLOB_STORAGE=True\",\n", - " \"TRAEFIK_TLS_CONF=./traefik/dynamic-configurations\",\n", - " \"ENABLE_OBLV=false\",\n", - " \"JAEGER_HOST=localhost\",\n", - " \"DOCKER_BUILDKIT=1\",\n", - " \"S3_PRESIGNED_TIMEOUT_SECS=1800\",\n", - " \"RELEASE=production\",\n", - " \"STACK_NAME=grid-openmined-org\",\n", - " \"USERS_OPEN_REGISTRATION=False\",\n", - " \"EMAILS_FROM_EMAIL=info@openmined.org\",\n", - " \"PROJECT_NAME=grid\",\n", - " \"SERVER_HOST=https://localhost\",\n", - " \"HTTP_PORT=87\",\n", - " \"ENABLE_SIGNUP=False\",\n", - " \"TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL=False\",\n", - " \"VERSION=0.8.2-beta.6\",\n", - " \"TRAEFIK_VERSION=v2.8.1\",\n", - " \"S3_ROOT_USER=admin\",\n", - " \"VERSION_HASH=dockerhub\",\n", - " \"MONGO_HOST=mongo\",\n", - " \"DOCKER_IMAGE_BACKEND=openmined/grid-backend\",\n", - " \"IGNORE_TLS_ERRORS=False\",\n", - " \"HEADSCALE_PORT=8080\",\n", - " \"CONTAINER_HOST=docker\",\n", - " \"SMTP_USER=\",\n", - " \"VITE_PUBLIC_API_BASE_URL=/api/v2\",\n", - " \"DEV_MODE=True\",\n", - " \"CREATE_PRODUCER=false\",\n", - " \"SECRET_KEY=changethis\",\n", - " \"STACK_API_KEY=zXuYOLdamjg70sB5IoVQCM5gdxGK9R79Otu9L9ZnaDZCqyeV\",\n", - " \"TRACE=False\",\n", - " \"SERVICE_NAME=backend\",\n", - " \"MONGO_IMAGE=mongo\",\n", - " \"USE_NEW_SERVICE=False\",\n", - " \"JAX_ENABLE_X64=True\",\n", - " \"REDIS_PORT=6379\",\n", - " \"MONGO_VERSION=7.0.0\",\n", - " \"TRAEFIK_PUBLIC_NETWORK=traefik-public\",\n", - " \"DEFAULT_ROOT_PASSWORD=changethis\",\n", - " \"NETWORK_NAME=omnet\",\n", - " \"SEAWEEDFS_VERSION=3.55\",\n", - " \"COMPOSE_DOCKER_CLI_BUILD=1\",\n", - " \"TRAEFIK_PUBLIC_TAG=traefik-public\",\n", - " \"SMTP_PASSWORD=\",\n", - " \"RABBITMQ_VERSION=3\",\n", - " \"REDIS_VERSION=6.2\",\n", - " \"NETWORK_CHECK_INTERVAL=60\",\n", - " \"DOMAIN_CHECK_INTERVAL=60\",\n", - " \"DOCKER_IMAGE_SEAWEEDFS=chrislusf/seaweedfs\",\n", - " \"DEFAULT_ROOT_EMAIL=info2@openmined.org\",\n", - " \"NODE_SIDE_TYPE=high\",\n", - " \"S3_VOLUME_SIZE_MB=1024\",\n", - " \"DOCKER_IMAGE_TAILSCALE=openmined/grid-vpn-tailscale\",\n", - " \"S3_PORT=8333\",\n", - " \"NODE_TYPE=domain\",\n", - " 
\"DOCKER_IMAGE_FRONTEND=openmined/grid-frontend\",\n", - " \"SMTP_HOST=\",\n", - " \"SMTP_PORT=587\",\n", - " \"MONGO_USERNAME=root\",\n", - " \"SYFT_TUTORIAL_MODE=False\",\n", - " \"RELATIVE_PATH=\",\n", - " \"JAEGER_PORT=14268\",\n", - " \"BACKEND_STORAGE_PATH=credentials-data\",\n", - " \"DOMAIN_CONNECTION_PORT=3030\",\n", - " \"REDIS_LEDGER_DB_ID=1\",\n", - " \"N_CONSUMERS=1\",\n", - " \"BACKEND_CORS_ORIGINS=[\\\"http://localhost\\\",\\\"http://localhost:4200\\\",\\\"http://localhost:3000\\\",\\\"http://localhost:8080\\\",\\\"https://localhost\\\",\\\"https://localhost:4200\\\",\\\"https://localhost:3000\\\",\\\"https://localhost:8080\\\",\\\"http://dev.grid.openmined.org\\\",\\\"https://stag.grid.openmined.org\\\",\\\"https://grid.openmined.org\\\"]\",\n", - " \"DEFAULT_ROOT_USERNAME=worker1\",\n", - " \"NODE_NAME=high-side\",\n", - " \"S3_REGION=us-east-1\",\n", - " \"MONGO_PASSWORD=example\",\n", - " \"PORT=8002\",\n", - " \"DOCKER_IMAGE_VPN_IPTABLES=openmined/grid-vpn-iptables\",\n", - " \"DOMAIN=localhost\",\n", - " \"MONGO_PORT=27017\",\n", - " \"DOCKER_IMAGE_SVELTE=openmined/grid-svelte\",\n", - " \"PATH=/root/.local/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\",\n", - " \"LANG=C.UTF-8\",\n", - " \"GPG_KEY=A035C8C19219BA821ECEA86B64E628F8D684696D\",\n", - " \"PYTHON_VERSION=3.11.4\",\n", - " \"PYTHON_PIP_VERSION=23.1.2\",\n", - " \"PYTHON_SETUPTOOLS_VERSION=65.5.1\",\n", - " \"PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/0d8570dc44796f4369b652222cf176b3db6ac70e/public/get-pip.py\",\n", - " \"PYTHON_GET_PIP_SHA256=96461deced5c2a487ddc65207ec5a9cffeca0d34e7af7ea1afc470ff0d746207\",\n", - " \"PYTHONPATH=/app\"\n", - " ],\n", - " \"Cmd\": [\n", - " \"/app/grid/start.sh\"\n", - " ],\n", - " \"Image\": \"openmined/grid-backend:0.8.2-beta.6\",\n", - " \"Volumes\": null,\n", - " \"WorkingDir\": \"/app\",\n", - " \"Entrypoint\": null,\n", - " \"OnBuild\": null,\n", - " \"Labels\": {\n", - " \"com.docker.compose.config-hash\": \"c14a5d85da73c7c973ad7274199b22309e5a5e2d4ac1e6ac18e52b10457af660\",\n", - " \"com.docker.compose.container-number\": \"1\",\n", - " \"com.docker.compose.depends_on\": \"proxy:service_started:false,backend:service_started:false\",\n", - " \"com.docker.compose.image\": \"sha256:95c41545ecfa8667a2353d825420be76da65eb85c0dc1e85da65d1b1961a019c\",\n", - " \"com.docker.compose.oneoff\": \"False\",\n", - " \"com.docker.compose.project\": \"grid\",\n", - " \"com.docker.compose.project.config_files\": \"/Users/koen/workspace/pysyft/packages/grid/docker-compose.multinode.yml,/Users/koen/workspace/pysyft/packages/grid/docker-compose.dev.yml\",\n", - " \"com.docker.compose.project.working_dir\": \"/Users/koen/workspace/pysyft/packages/grid\",\n", - " \"com.docker.compose.service\": \"backendworker\",\n", - " \"com.docker.compose.version\": \"2.19.1\",\n", - " \"orgs.openmined.syft\": \"this is a syft backend container\"\n", - " }\n", - " },\n", - " \"NetworkSettings\": {\n", - " \"Bridge\": \"\",\n", - " \"SandboxID\": \"\",\n", - " \"HairpinMode\": false,\n", - " \"LinkLocalIPv6Address\": \"\",\n", - " \"LinkLocalIPv6PrefixLen\": 0,\n", - " \"Ports\": {},\n", - " \"SandboxKey\": \"\",\n", - " \"SecondaryIPAddresses\": null,\n", - " \"SecondaryIPv6Addresses\": null,\n", - " \"EndpointID\": \"\",\n", - " \"Gateway\": \"\",\n", - " \"GlobalIPv6Address\": \"\",\n", - " \"GlobalIPv6PrefixLen\": 0,\n", - " \"IPAddress\": \"\",\n", - " \"IPPrefixLen\": 0,\n", - " \"IPv6Gateway\": \"\",\n", - " \"MacAddress\": \"\",\n", - " \"Networks\": 
{}\n", - " }\n", - " }\n", - "]" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/helm/test_composing_jobs.ipynb b/notebooks/helm/test_composing_jobs.ipynb deleted file mode 100644 index 54f2ce73632..00000000000 --- a/notebooks/helm/test_composing_jobs.ipynb +++ /dev/null @@ -1,327 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /Users/koen/workspace/pysyft\n" - ] - } - ], - "source": [ - "import syft as sy\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Data Migrated to latest version !!!\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.
" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"test-jobs\", dev_mode=True, reset=True, create_producer=True, queue_port=5556, n_consumers=3)\n", - "client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "x = sy.ActionObject.from_obj([1,2,3])\n", - "x_ptr = x.send(client)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_batch' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function()\n", - "def process_batch(batch):\n", - " # takes 30 hours normally\n", - " print(f\"starting batch {batch}\")\n", - " from time import sleep\n", - " import random\n", - " sleep(random.random()*5)\n", - " print(\"done\")\n", - " return batch + 1" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted
" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.submit(process_batch)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'aggregate_job' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'aggregate_job' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function()\n", - "def aggregate_job(job_results):\n", - " print([(res, type(res))for res in job_results])\n", - " results = [res for res in job_results]\n", - " return sum(results)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted
" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.submit(aggregate_job)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'process_all' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function_single_use(x=x_ptr)\n", - "def process_all(domain, x):\n", - " job_results = []\n", - " print(\"Launching jobs\")\n", - " for elem in x:\n", - " # We inject a domain object in the scope\n", - " batch_job = domain.launch_job(process_batch, batch=elem)\n", - " job_results.append(batch_job.result)\n", - " print(\"starting aggregation\")\n", - " print(\"Done\")\n", - " reduce_job = domain.launch_job(aggregate_job, job_results=job_results)\n", - " return reduce_job.result" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Request approved for domain test-jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request ee21e794e1e64d56b5f881108a47436a changes applied
" - ], - "text/plain": [ - "SyftSuccess: Request ee21e794e1e64d56b5f881108a47436a changes applied" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.code.request_code_execution(process_all)\n", - "client.requests[-1].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "24/11/23 12:34:14 FUNCTION LOG (2574b19651f14d0993194fcee98761e5): Launching jobs\n", - "24/11/23 12:34:15 FUNCTION LOG (2574b19651f14d0993194fcee98761e5): starting aggregation\n", - "24/11/23 12:34:15 FUNCTION LOG (2574b19651f14d0993194fcee98761e5): Done\n", - "24/11/23 12:34:16 FUNCTION LOG (46b45ee456b944dea6f7cb79d270ee41): starting batch 1\n", - "24/11/23 12:34:16 FUNCTION LOG (b0d88895c5a74a9aaed6a1b77bc88985): starting batch 2\n", - "24/11/23 12:34:17 FUNCTION LOG (8c73ffcb58644a538a085cbc7800c92e): starting batch 3\n", - "24/11/23 12:34:18 FUNCTION LOG (46b45ee456b944dea6f7cb79d270ee41): done\n", - "24/11/23 12:34:18 FUNCTION LOG (b0d88895c5a74a9aaed6a1b77bc88985): done\n", - "24/11/23 12:34:22 FUNCTION LOG (8c73ffcb58644a538a085cbc7800c92e): done\n", - "24/11/23 12:34:23 FUNCTION LOG (4a3edb89a7ec41628cea08f62d0fff99): [(2, ), (3, ), (4, )]\n" - ] - } - ], - "source": [ - "from time import sleep\n", - "job = client.code.process_all(x=x_ptr, blocking=False)\n", - "sleep(10)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "result = job.resolve.get().get()" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "result == 9" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 2 -}
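For reference, the nested-job pattern exercised by the deleted `test_composing_jobs.ipynb` boils down to the calls below. This is a minimal sketch based on the syft 0.8.x API used in the notebook (`sy.orchestra.launch`, `syft_function`, `domain.launch_job`); it is a condensed restatement of the deleted cells, not an independent implementation.

```python
# Minimal sketch of the nested-job pattern from the deleted test_composing_jobs.ipynb
# (assumes the syft 0.8.x API shown in that notebook).
import syft as sy

# Launch an in-process node with a queue producer and three consumers.
node = sy.orchestra.launch(
    name="test-jobs", dev_mode=True, reset=True,
    create_producer=True, queue_port=5556, n_consumers=3,
)
client = node.login(email="info@openmined.org", password="changethis")

# Upload the input data as an action object and keep a pointer to it.
x_ptr = sy.ActionObject.from_obj([1, 2, 3]).send(client)

@sy.syft_function()
def process_batch(batch):
    # Per-batch work, executed as its own job.
    return batch + 1

@sy.syft_function()
def aggregate_job(job_results):
    # Reduce step over the per-batch results.
    return sum(res for res in job_results)

client.code.submit(process_batch)
client.code.submit(aggregate_job)

@sy.syft_function_single_use(x=x_ptr)
def process_all(domain, x):
    # The injected `domain` object launches nested jobs on the node.
    job_results = [domain.launch_job(process_batch, batch=elem).result for elem in x]
    reduce_job = domain.launch_job(aggregate_job, job_results=job_results)
    return reduce_job.result

client.code.request_code_execution(process_all)
client.requests[-1].approve()

# Run asynchronously and resolve the nested result, as in the notebook.
job = client.code.process_all(x=x_ptr, blocking=False)
result = job.resolve.get().get()
assert result == 9  # (1+1) + (2+1) + (3+1)
```

The key point of the pattern is that the single-use outer function receives an injected `domain` object, fans work out via `domain.launch_job`, and feeds the per-batch results into a final reduce job whose result is returned to the caller.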