diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index c76f4ad2cf5..e05b5405d9c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.8.7-beta.9
+current_version = 0.8.7-beta.10
tag = False
tag_name = {new_version}
commit = True
diff --git a/VERSION b/VERSION
index 53c9ccb0023..61a3b991302 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.7-beta.9"
+__version__ = "0.8.7-beta.10"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/packages/grid/VERSION b/packages/grid/VERSION
index 53c9ccb0023..61a3b991302 100644
--- a/packages/grid/VERSION
+++ b/packages/grid/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.7-beta.9"
+__version__ = "0.8.7-beta.10"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile
index a700d274754..f1b6207ce90 100644
--- a/packages/grid/backend/grid/images/worker_cpu.dockerfile
+++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile
@@ -9,7 +9,7 @@
# Later we'd want to uninstall old python, and then install a new python runtime...
# ... but pre-built syft deps may break!
-ARG SYFT_VERSION_TAG="0.8.7-beta.9"
+ARG SYFT_VERSION_TAG="0.8.7-beta.10"
FROM openmined/grid-backend:${SYFT_VERSION_TAG}
ARG PYTHON_VERSION="3.12"
diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml
index b4dce1826de..f34e1494419 100644
--- a/packages/grid/devspace.yaml
+++ b/packages/grid/devspace.yaml
@@ -27,7 +27,7 @@ vars:
DOCKER_IMAGE_SEAWEEDFS: openmined/grid-seaweedfs
DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/grid-enclave-attestation
CONTAINER_REGISTRY: "docker.io"
- VERSION: "0.8.7-beta.9"
+ VERSION: "0.8.7-beta.10"
PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64")
# This is a list of `images` that DevSpace can build for this project
diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json
index 34edb36848f..f5115c53976 100644
--- a/packages/grid/frontend/package.json
+++ b/packages/grid/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "pygrid-ui",
- "version": "0.8.7-beta.9",
+ "version": "0.8.7-beta.10",
"private": true,
"scripts": {
"dev": "pnpm i && vite dev --host --port 80",
diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml
index f96a36439ab..4c88b674ff0 100644
--- a/packages/grid/helm/repo/index.yaml
+++ b/packages/grid/helm/repo/index.yaml
@@ -1,9 +1,22 @@
apiVersion: v1
entries:
syft:
+ - apiVersion: v2
+ appVersion: 0.8.7-beta.10
+ created: "2024-06-03T13:45:21.377002407Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.7-beta.10.tgz
+ version: 0.8.7-beta.10
- apiVersion: v2
appVersion: 0.8.7-beta.9
- created: "2024-05-28T06:38:21.789929376Z"
+ created: "2024-06-03T13:45:21.382840443Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e
@@ -16,7 +29,7 @@ entries:
version: 0.8.7-beta.9
- apiVersion: v2
appVersion: 0.8.7-beta.8
- created: "2024-05-28T06:38:21.789209632Z"
+ created: "2024-06-03T13:45:21.382193467Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d
@@ -29,7 +42,7 @@ entries:
version: 0.8.7-beta.8
- apiVersion: v2
appVersion: 0.8.7-beta.7
- created: "2024-05-28T06:38:21.7885552Z"
+ created: "2024-06-03T13:45:21.381537725Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a
@@ -42,7 +55,7 @@ entries:
version: 0.8.7-beta.7
- apiVersion: v2
appVersion: 0.8.7-beta.6
- created: "2024-05-28T06:38:21.787925885Z"
+ created: "2024-06-03T13:45:21.380874049Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8
@@ -55,7 +68,7 @@ entries:
version: 0.8.7-beta.6
- apiVersion: v2
appVersion: 0.8.7-beta.5
- created: "2024-05-28T06:38:21.787289887Z"
+ created: "2024-06-03T13:45:21.380230309Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a
@@ -68,7 +81,7 @@ entries:
version: 0.8.7-beta.5
- apiVersion: v2
appVersion: 0.8.7-beta.4
- created: "2024-05-28T06:38:21.786635214Z"
+ created: "2024-06-03T13:45:21.379600085Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2
@@ -81,7 +94,7 @@ entries:
version: 0.8.7-beta.4
- apiVersion: v2
appVersion: 0.8.7-beta.3
- created: "2024-05-28T06:38:21.785700417Z"
+ created: "2024-06-03T13:45:21.378911612Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67
@@ -94,7 +107,7 @@ entries:
version: 0.8.7-beta.3
- apiVersion: v2
appVersion: 0.8.7-beta.2
- created: "2024-05-28T06:38:21.784490317Z"
+ created: "2024-06-03T13:45:21.377596593Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7
@@ -107,7 +120,7 @@ entries:
version: 0.8.7-beta.2
- apiVersion: v2
appVersion: 0.8.7-beta.1
- created: "2024-05-28T06:38:21.783898482Z"
+ created: "2024-06-03T13:45:21.37632278Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e
@@ -120,7 +133,7 @@ entries:
version: 0.8.7-beta.1
- apiVersion: v2
appVersion: 0.8.6
- created: "2024-05-28T06:38:21.783378981Z"
+ created: "2024-06-03T13:45:21.375759532Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756
@@ -133,7 +146,7 @@ entries:
version: 0.8.6
- apiVersion: v2
appVersion: 0.8.6-beta.1
- created: "2024-05-28T06:38:21.782827792Z"
+ created: "2024-06-03T13:45:21.375198047Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61
@@ -146,7 +159,7 @@ entries:
version: 0.8.6-beta.1
- apiVersion: v2
appVersion: 0.8.5
- created: "2024-05-28T06:38:21.782209838Z"
+ created: "2024-06-03T13:45:21.37461952Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a
@@ -159,7 +172,7 @@ entries:
version: 0.8.5
- apiVersion: v2
appVersion: 0.8.5-post.2
- created: "2024-05-28T06:38:21.781401218Z"
+ created: "2024-06-03T13:45:21.3738741Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165
@@ -172,7 +185,7 @@ entries:
version: 0.8.5-post.2
- apiVersion: v2
appVersion: 0.8.5-post.1
- created: "2024-05-28T06:38:21.780853575Z"
+ created: "2024-06-03T13:45:21.373337562Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02
@@ -185,7 +198,7 @@ entries:
version: 0.8.5-post.1
- apiVersion: v2
appVersion: 0.8.5-beta.10
- created: "2024-05-28T06:38:21.772964061Z"
+ created: "2024-06-03T13:45:21.365760974Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49
@@ -198,7 +211,7 @@ entries:
version: 0.8.5-beta.10
- apiVersion: v2
appVersion: 0.8.5-beta.9
- created: "2024-05-28T06:38:21.780097252Z"
+ created: "2024-06-03T13:45:21.372587593Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b
@@ -211,7 +224,7 @@ entries:
version: 0.8.5-beta.9
- apiVersion: v2
appVersion: 0.8.5-beta.8
- created: "2024-05-28T06:38:21.779336602Z"
+ created: "2024-06-03T13:45:21.37183012Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298
@@ -224,7 +237,7 @@ entries:
version: 0.8.5-beta.8
- apiVersion: v2
appVersion: 0.8.5-beta.7
- created: "2024-05-28T06:38:21.778542709Z"
+ created: "2024-06-03T13:45:21.371012243Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd
@@ -237,7 +250,7 @@ entries:
version: 0.8.5-beta.7
- apiVersion: v2
appVersion: 0.8.5-beta.6
- created: "2024-05-28T06:38:21.776864395Z"
+ created: "2024-06-03T13:45:21.369543119Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0
@@ -250,7 +263,7 @@ entries:
version: 0.8.5-beta.6
- apiVersion: v2
appVersion: 0.8.5-beta.5
- created: "2024-05-28T06:38:21.776076954Z"
+ created: "2024-06-03T13:45:21.368795474Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d
@@ -263,7 +276,7 @@ entries:
version: 0.8.5-beta.5
- apiVersion: v2
appVersion: 0.8.5-beta.4
- created: "2024-05-28T06:38:21.775324299Z"
+ created: "2024-06-03T13:45:21.368047108Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab
@@ -276,7 +289,7 @@ entries:
version: 0.8.5-beta.4
- apiVersion: v2
appVersion: 0.8.5-beta.3
- created: "2024-05-28T06:38:21.774531648Z"
+ created: "2024-06-03T13:45:21.367286349Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054
@@ -289,7 +302,7 @@ entries:
version: 0.8.5-beta.3
- apiVersion: v2
appVersion: 0.8.5-beta.2
- created: "2024-05-28T06:38:21.773768854Z"
+ created: "2024-06-03T13:45:21.36650959Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8
@@ -302,7 +315,7 @@ entries:
version: 0.8.5-beta.2
- apiVersion: v2
appVersion: 0.8.5-beta.1
- created: "2024-05-28T06:38:21.772200725Z"
+ created: "2024-06-03T13:45:21.364910646Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9
@@ -314,7 +327,7 @@ entries:
version: 0.8.5-beta.1
- apiVersion: v2
appVersion: 0.8.4
- created: "2024-05-28T06:38:21.771807Z"
+ created: "2024-06-03T13:45:21.364529059Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0
@@ -326,7 +339,7 @@ entries:
version: 0.8.4
- apiVersion: v2
appVersion: 0.8.4-beta.31
- created: "2024-05-28T06:38:21.768438589Z"
+ created: "2024-06-03T13:45:21.361126791Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc
@@ -338,7 +351,7 @@ entries:
version: 0.8.4-beta.31
- apiVersion: v2
appVersion: 0.8.4-beta.30
- created: "2024-05-28T06:38:21.768033172Z"
+ created: "2024-06-03T13:45:21.360708906Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad
@@ -350,7 +363,7 @@ entries:
version: 0.8.4-beta.30
- apiVersion: v2
appVersion: 0.8.4-beta.29
- created: "2024-05-28T06:38:21.767270548Z"
+ created: "2024-06-03T13:45:21.359963866Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971
@@ -362,7 +375,7 @@ entries:
version: 0.8.4-beta.29
- apiVersion: v2
appVersion: 0.8.4-beta.28
- created: "2024-05-28T06:38:21.766864299Z"
+ created: "2024-06-03T13:45:21.359560718Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c
@@ -374,7 +387,7 @@ entries:
version: 0.8.4-beta.28
- apiVersion: v2
appVersion: 0.8.4-beta.27
- created: "2024-05-28T06:38:21.766456207Z"
+ created: "2024-06-03T13:45:21.35915197Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba
@@ -386,7 +399,7 @@ entries:
version: 0.8.4-beta.27
- apiVersion: v2
appVersion: 0.8.4-beta.26
- created: "2024-05-28T06:38:21.766046351Z"
+ created: "2024-06-03T13:45:21.358714849Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a
@@ -398,7 +411,7 @@ entries:
version: 0.8.4-beta.26
- apiVersion: v2
appVersion: 0.8.4-beta.25
- created: "2024-05-28T06:38:21.76562777Z"
+ created: "2024-06-03T13:45:21.358309608Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f
@@ -410,7 +423,7 @@ entries:
version: 0.8.4-beta.25
- apiVersion: v2
appVersion: 0.8.4-beta.24
- created: "2024-05-28T06:38:21.765169163Z"
+ created: "2024-06-03T13:45:21.357903745Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e
@@ -422,7 +435,7 @@ entries:
version: 0.8.4-beta.24
- apiVersion: v2
appVersion: 0.8.4-beta.23
- created: "2024-05-28T06:38:21.764692493Z"
+ created: "2024-06-03T13:45:21.357499837Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c
@@ -434,7 +447,7 @@ entries:
version: 0.8.4-beta.23
- apiVersion: v2
appVersion: 0.8.4-beta.22
- created: "2024-05-28T06:38:21.764270976Z"
+ created: "2024-06-03T13:45:21.357090377Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414
@@ -446,7 +459,7 @@ entries:
version: 0.8.4-beta.22
- apiVersion: v2
appVersion: 0.8.4-beta.21
- created: "2024-05-28T06:38:21.763638294Z"
+ created: "2024-06-03T13:45:21.356672332Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683
@@ -458,7 +471,7 @@ entries:
version: 0.8.4-beta.21
- apiVersion: v2
appVersion: 0.8.4-beta.20
- created: "2024-05-28T06:38:21.762637034Z"
+ created: "2024-06-03T13:45:21.356230252Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28
@@ -470,7 +483,7 @@ entries:
version: 0.8.4-beta.20
- apiVersion: v2
appVersion: 0.8.4-beta.19
- created: "2024-05-28T06:38:21.761635915Z"
+ created: "2024-06-03T13:45:21.354339461Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687
@@ -482,7 +495,7 @@ entries:
version: 0.8.4-beta.19
- apiVersion: v2
appVersion: 0.8.4-beta.18
- created: "2024-05-28T06:38:21.761200121Z"
+ created: "2024-06-03T13:45:21.353946513Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7
@@ -494,7 +507,7 @@ entries:
version: 0.8.4-beta.18
- apiVersion: v2
appVersion: 0.8.4-beta.17
- created: "2024-05-28T06:38:21.760796607Z"
+ created: "2024-06-03T13:45:21.35354575Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498
@@ -506,7 +519,7 @@ entries:
version: 0.8.4-beta.17
- apiVersion: v2
appVersion: 0.8.4-beta.16
- created: "2024-05-28T06:38:21.760389156Z"
+ created: "2024-06-03T13:45:21.353142151Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0
@@ -518,7 +531,7 @@ entries:
version: 0.8.4-beta.16
- apiVersion: v2
appVersion: 0.8.4-beta.15
- created: "2024-05-28T06:38:21.759981455Z"
+ created: "2024-06-03T13:45:21.352732211Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb
@@ -530,7 +543,7 @@ entries:
version: 0.8.4-beta.15
- apiVersion: v2
appVersion: 0.8.4-beta.14
- created: "2024-05-28T06:38:21.759568965Z"
+ created: "2024-06-03T13:45:21.352334795Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6
@@ -542,7 +555,7 @@ entries:
version: 0.8.4-beta.14
- apiVersion: v2
appVersion: 0.8.4-beta.13
- created: "2024-05-28T06:38:21.759210485Z"
+ created: "2024-06-03T13:45:21.351989927Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83
@@ -554,7 +567,7 @@ entries:
version: 0.8.4-beta.13
- apiVersion: v2
appVersion: 0.8.4-beta.12
- created: "2024-05-28T06:38:21.758857365Z"
+ created: "2024-06-03T13:45:21.351643917Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c
@@ -566,7 +579,7 @@ entries:
version: 0.8.4-beta.12
- apiVersion: v2
appVersion: 0.8.4-beta.11
- created: "2024-05-28T06:38:21.758511459Z"
+ created: "2024-06-03T13:45:21.35129452Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3
@@ -578,7 +591,7 @@ entries:
version: 0.8.4-beta.11
- apiVersion: v2
appVersion: 0.8.4-beta.10
- created: "2024-05-28T06:38:21.758162598Z"
+ created: "2024-06-03T13:45:21.350902194Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388
@@ -590,7 +603,7 @@ entries:
version: 0.8.4-beta.10
- apiVersion: v2
appVersion: 0.8.4-beta.9
- created: "2024-05-28T06:38:21.771380954Z"
+ created: "2024-06-03T13:45:21.364116334Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26
@@ -602,7 +615,7 @@ entries:
version: 0.8.4-beta.9
- apiVersion: v2
appVersion: 0.8.4-beta.8
- created: "2024-05-28T06:38:21.771028917Z"
+ created: "2024-06-03T13:45:21.36376841Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999
@@ -614,7 +627,7 @@ entries:
version: 0.8.4-beta.8
- apiVersion: v2
appVersion: 0.8.4-beta.7
- created: "2024-05-28T06:38:21.770328058Z"
+ created: "2024-06-03T13:45:21.363407161Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a
@@ -626,7 +639,7 @@ entries:
version: 0.8.4-beta.7
- apiVersion: v2
appVersion: 0.8.4-beta.6
- created: "2024-05-28T06:38:21.769503799Z"
+ created: "2024-06-03T13:45:21.362698854Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337
@@ -638,7 +651,7 @@ entries:
version: 0.8.4-beta.6
- apiVersion: v2
appVersion: 0.8.4-beta.5
- created: "2024-05-28T06:38:21.769117798Z"
+ created: "2024-06-03T13:45:21.361807359Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b
@@ -650,7 +663,7 @@ entries:
version: 0.8.4-beta.5
- apiVersion: v2
appVersion: 0.8.4-beta.4
- created: "2024-05-28T06:38:21.768777282Z"
+ created: "2024-06-03T13:45:21.361471147Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e
@@ -662,7 +675,7 @@ entries:
version: 0.8.4-beta.4
- apiVersion: v2
appVersion: 0.8.4-beta.3
- created: "2024-05-28T06:38:21.767624118Z"
+ created: "2024-06-03T13:45:21.360307792Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54
@@ -674,7 +687,7 @@ entries:
version: 0.8.4-beta.3
- apiVersion: v2
appVersion: 0.8.4-beta.2
- created: "2024-05-28T06:38:21.762205498Z"
+ created: "2024-06-03T13:45:21.355072969Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -690,7 +703,7 @@ entries:
version: 0.8.4-beta.2
- apiVersion: v2
appVersion: 0.8.4-beta.1
- created: "2024-05-28T06:38:21.757792166Z"
+ created: "2024-06-03T13:45:21.350539272Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -706,7 +719,7 @@ entries:
version: 0.8.4-beta.1
- apiVersion: v2
appVersion: 0.8.3
- created: "2024-05-28T06:38:21.756852152Z"
+ created: "2024-06-03T13:45:21.349972637Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -722,7 +735,7 @@ entries:
version: 0.8.3
- apiVersion: v2
appVersion: 0.8.3-beta.6
- created: "2024-05-28T06:38:21.755645198Z"
+ created: "2024-06-03T13:45:21.349141275Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -738,7 +751,7 @@ entries:
version: 0.8.3-beta.6
- apiVersion: v2
appVersion: 0.8.3-beta.5
- created: "2024-05-28T06:38:21.755054685Z"
+ created: "2024-06-03T13:45:21.347981356Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -754,7 +767,7 @@ entries:
version: 0.8.3-beta.5
- apiVersion: v2
appVersion: 0.8.3-beta.4
- created: "2024-05-28T06:38:21.754478419Z"
+ created: "2024-06-03T13:45:21.347397349Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -770,7 +783,7 @@ entries:
version: 0.8.3-beta.4
- apiVersion: v2
appVersion: 0.8.3-beta.3
- created: "2024-05-28T06:38:21.753821873Z"
+ created: "2024-06-03T13:45:21.346709046Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -786,7 +799,7 @@ entries:
version: 0.8.3-beta.3
- apiVersion: v2
appVersion: 0.8.3-beta.2
- created: "2024-05-28T06:38:21.753228675Z"
+ created: "2024-06-03T13:45:21.346158131Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -802,7 +815,7 @@ entries:
version: 0.8.3-beta.2
- apiVersion: v2
appVersion: 0.8.3-beta.1
- created: "2024-05-28T06:38:21.752671094Z"
+ created: "2024-06-03T13:45:21.345552283Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -818,7 +831,7 @@ entries:
version: 0.8.3-beta.1
- apiVersion: v2
appVersion: 0.8.2
- created: "2024-05-28T06:38:21.752092894Z"
+ created: "2024-06-03T13:45:21.344859932Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -834,7 +847,7 @@ entries:
version: 0.8.2
- apiVersion: v2
appVersion: 0.8.2-beta.60
- created: "2024-05-28T06:38:21.751450545Z"
+ created: "2024-06-03T13:45:21.344215902Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -850,7 +863,7 @@ entries:
version: 0.8.2-beta.60
- apiVersion: v2
appVersion: 0.8.2-beta.59
- created: "2024-05-28T06:38:21.750791644Z"
+ created: "2024-06-03T13:45:21.343556895Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -866,7 +879,7 @@ entries:
version: 0.8.2-beta.59
- apiVersion: v2
appVersion: 0.8.2-beta.58
- created: "2024-05-28T06:38:21.749496924Z"
+ created: "2024-06-03T13:45:21.342837122Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -882,7 +895,7 @@ entries:
version: 0.8.2-beta.58
- apiVersion: v2
appVersion: 0.8.2-beta.57
- created: "2024-05-28T06:38:21.748587706Z"
+ created: "2024-06-03T13:45:21.341469051Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -898,7 +911,7 @@ entries:
version: 0.8.2-beta.57
- apiVersion: v2
appVersion: 0.8.2-beta.56
- created: "2024-05-28T06:38:21.747954453Z"
+ created: "2024-06-03T13:45:21.340829309Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -914,7 +927,7 @@ entries:
version: 0.8.2-beta.56
- apiVersion: v2
appVersion: 0.8.2-beta.53
- created: "2024-05-28T06:38:21.747306664Z"
+ created: "2024-06-03T13:45:21.340191661Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -930,7 +943,7 @@ entries:
version: 0.8.2-beta.53
- apiVersion: v2
appVersion: 0.8.2-beta.52
- created: "2024-05-28T06:38:21.746672379Z"
+ created: "2024-06-03T13:45:21.339543493Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -946,7 +959,7 @@ entries:
version: 0.8.2-beta.52
- apiVersion: v2
appVersion: 0.8.2-beta.51
- created: "2024-05-28T06:38:21.746031432Z"
+ created: "2024-06-03T13:45:21.338845252Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -962,7 +975,7 @@ entries:
version: 0.8.2-beta.51
- apiVersion: v2
appVersion: 0.8.2-beta.50
- created: "2024-05-28T06:38:21.745343748Z"
+ created: "2024-06-03T13:45:21.338148503Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -978,7 +991,7 @@ entries:
version: 0.8.2-beta.50
- apiVersion: v2
appVersion: 0.8.2-beta.49
- created: "2024-05-28T06:38:21.744698944Z"
+ created: "2024-06-03T13:45:21.337497129Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -994,7 +1007,7 @@ entries:
version: 0.8.2-beta.49
- apiVersion: v2
appVersion: 0.8.2-beta.48
- created: "2024-05-28T06:38:21.74402222Z"
+ created: "2024-06-03T13:45:21.336820537Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1010,7 +1023,7 @@ entries:
version: 0.8.2-beta.48
- apiVersion: v2
appVersion: 0.8.2-beta.47
- created: "2024-05-28T06:38:21.742703206Z"
+ created: "2024-06-03T13:45:21.335899539Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1026,7 +1039,7 @@ entries:
version: 0.8.2-beta.47
- apiVersion: v2
appVersion: 0.8.2-beta.46
- created: "2024-05-28T06:38:21.742145034Z"
+ created: "2024-06-03T13:45:21.334762583Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1042,7 +1055,7 @@ entries:
version: 0.8.2-beta.46
- apiVersion: v2
appVersion: 0.8.2-beta.45
- created: "2024-05-28T06:38:21.741579417Z"
+ created: "2024-06-03T13:45:21.334005641Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1058,7 +1071,7 @@ entries:
version: 0.8.2-beta.45
- apiVersion: v2
appVersion: 0.8.2-beta.44
- created: "2024-05-28T06:38:21.741002991Z"
+ created: "2024-06-03T13:45:21.333450688Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1074,7 +1087,7 @@ entries:
version: 0.8.2-beta.44
- apiVersion: v2
appVersion: 0.8.2-beta.43
- created: "2024-05-28T06:38:21.740442514Z"
+ created: "2024-06-03T13:45:21.332882671Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1090,7 +1103,7 @@ entries:
version: 0.8.2-beta.43
- apiVersion: v2
appVersion: 0.8.2-beta.41
- created: "2024-05-28T06:38:21.739788402Z"
+ created: "2024-06-03T13:45:21.332237018Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1106,7 +1119,7 @@ entries:
version: 0.8.2-beta.41
- apiVersion: v2
appVersion: 0.8.2-beta.40
- created: "2024-05-28T06:38:21.73916057Z"
+ created: "2024-06-03T13:45:21.331568342Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1122,7 +1135,7 @@ entries:
version: 0.8.2-beta.40
- apiVersion: v2
appVersion: 0.8.2-beta.39
- created: "2024-05-28T06:38:21.738587209Z"
+ created: "2024-06-03T13:45:21.330974116Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1138,7 +1151,7 @@ entries:
version: 0.8.2-beta.39
- apiVersion: v2
appVersion: 0.8.2-beta.38
- created: "2024-05-28T06:38:21.738029828Z"
+ created: "2024-06-03T13:45:21.330415346Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1154,7 +1167,7 @@ entries:
version: 0.8.2-beta.38
- apiVersion: v2
appVersion: 0.8.2-beta.37
- created: "2024-05-28T06:38:21.737397788Z"
+ created: "2024-06-03T13:45:21.329830537Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1170,7 +1183,7 @@ entries:
version: 0.8.2-beta.37
- apiVersion: v2
appVersion: 0.8.1
- created: "2024-05-28T06:38:21.736515572Z"
+ created: "2024-06-03T13:45:21.329215361Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -1184,4 +1197,4 @@ entries:
urls:
- https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz
version: 0.8.1
-generated: "2024-05-28T06:38:21.735496128Z"
+generated: "2024-06-03T13:45:21.328440806Z"
diff --git a/packages/grid/helm/repo/syft-0.8.7-beta.10.tgz b/packages/grid/helm/repo/syft-0.8.7-beta.10.tgz
new file mode 100644
index 00000000000..084dd1efbf9
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.7-beta.10.tgz differ
diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml
index b9b63a2966d..4b925cc05c1 100644
--- a/packages/grid/helm/syft/Chart.yaml
+++ b/packages/grid/helm/syft/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
name: syft
description: Perform numpy-like analysis on data that remains in someone elses server
type: application
-version: "0.8.7-beta.9"
-appVersion: "0.8.7-beta.9"
+version: "0.8.7-beta.10"
+appVersion: "0.8.7-beta.10"
home: https://github.com/OpenMined/PySyft/
icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
diff --git a/packages/grid/helm/syft/templates/NOTES.txt b/packages/grid/helm/syft/templates/NOTES.txt
index 8f12e0ccacf..b412c4ff833 100644
--- a/packages/grid/helm/syft/templates/NOTES.txt
+++ b/packages/grid/helm/syft/templates/NOTES.txt
@@ -10,17 +10,65 @@
Following class versions are either added/removed.
{
- "NodeSettingsUpdate": {
+ "NodeMetadata": {
+ "5": {
+ "version": 5,
+ "hash": "70197b4725dbdea0560ed8388e4d20b76808bee988f3630c5f916ee8f48761f8",
+ "action": "add"
+ }
+ },
+ "SyftAPI": {
"3": {
"version": 3,
- "hash": "0f812fdd5aecc3e3aa1a7c953bbf7f8d8b03a77c5cdbb37e981fa91c8134c9f4",
+ "hash": "b1b9d131a4f204ef2d56dc91bab3b945d5581080565232ede864f32015c0882a",
"action": "add"
}
},
- "NodeSettings": {
+ "HTMLObject": {
+ "1": {
+ "version": 1,
+ "hash": "010d9aaca95f3fdfc8d1f97d01c1bd66483da774a59275b310c08d6912f7f863",
+ "action": "add"
+ }
+ },
+ "NodeSettingsUpdate": {
+ "2": {
+ "version": 2,
+ "hash": "e1dc9d2f30c4aae1f7359eb3fd44de5537788cd3c69be5f30c36fb019f07c261",
+ "action": "remove"
+ },
"4": {
"version": 4,
- "hash": "318e578f8a9af213a6af0cc2c567b62196b0ff81769d808afff4dd1eb7c372b8",
+ "hash": "ec783a7cd097e2bc4273a519d11023c796aebb9e3710c1d8332c0e46966d4ae0",
+ "action": "add"
+ },
+ "5": {
+ "version": 5,
+ "hash": "fd89638bb3d6dda9095905aab7ed2883f0b3dd5245900e8e141eec87921c2c9e",
+ "action": "add"
+ }
+ },
+ "NodeSettings": {
+ "3": {
+ "version": 3,
+ "hash": "2d5f6e79f074f75b5cfc2357eac7cf635b8f083421009a513240b4dbbd5a0fc1",
+ "action": "remove"
+ },
+ "5": {
+ "version": 5,
+ "hash": "cde18eb23fdffcfba47bc0e85efdbba1d59f1f5d6baa9c9690e1af14b35eb74e",
+ "action": "add"
+ },
+ "6": {
+ "version": 6,
+ "hash": "986d201418035e59b12787dfaf60aa2af17817c1894ce42ab4b982ed73127403",
+ "action": "add"
+ }
+ },
+ "BlobRetrievalByURL": {
+ "5": {
+ "version": 5,
+ "hash": "4934bf72bb10ac0a670c87ab735175088274e090819436563543473e64cf15e3",
"action": "add"
}
},
@@ -67,6 +115,13 @@
"action": "add"
}
},
+ "CreateCustomImageChange": {
+ "3": {
+ "version": 3,
+ "hash": "e5f099940a7623f145f51f3e15b97a910a1d7fda1f67739420fed3035d1f2995",
+ "action": "add"
+ }
+ },
"TwinAPIContextView": {
"1": {
"version": 1,
@@ -168,6 +223,13 @@
"action": "add"
}
},
+ "NodeMetadataUpdate": {
+ "2": {
+ "version": 2,
+ "hash": "520ae8ffc0c057ffa827cb7b267a19fb6b92e3cf3c0a3666ac34e271b6dd0aed",
+ "action": "remove"
+ }
+ },
"SyncStateItem": {
"1": {
"version": 1,
@@ -182,17 +244,10 @@
"action": "remove"
}
},
- "BlobRetrievalByURL": {
- "5": {
- "version": 5,
- "hash": "4934bf72bb10ac0a670c87ab735175088274e090819436563543473e64cf15e3",
- "action": "add"
- }
- },
- "CreateCustomImageChange": {
- "3": {
- "version": 3,
- "hash": "e5f099940a7623f145f51f3e15b97a910a1d7fda1f67739420fed3035d1f2995",
+ "NodePeerUpdate": {
+ "1": {
+ "version": 1,
+ "hash": "9e7cd39f6a9f90e8c595452865525e0989df1688236acfd1a665ed047ba47de9",
"action": "add"
}
}
diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml
index 5a361cc2e39..2644eac26e4 100644
--- a/packages/grid/helm/syft/values.yaml
+++ b/packages/grid/helm/syft/values.yaml
@@ -1,7 +1,7 @@
global:
# Affects only backend, frontend, and seaweedfs containers
registry: docker.io
- version: 0.8.7-beta.9
+ version: 0.8.7-beta.10
# Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION
randomizedSecrets: true
diff --git a/packages/syft/PYPI.md b/packages/syft/PYPI.md
index bf355d8fa81..0a2b08b1495 100644
--- a/packages/syft/PYPI.md
+++ b/packages/syft/PYPI.md
@@ -70,6 +70,10 @@ domain_client = sy.login(
## Deploy Kubernetes Helm Chart
+#### 0. Deploy Kubernetes with 8+ Cores and 16GB RAM
+
+If you're using Docker Desktop to deploy your Kubernetes, you may need to go into Settings > Resources and increase CPUs and Memory.
+
**Note**: Assuming we have a Kubernetes cluster already setup.
#### 1. Add and update Helm repo for Syft
@@ -406,12 +410,6 @@ OpenMined and Syft appreciates all contributors, if you would like to fix a bug
-# Open Collective
-
-`OpenMined` is a fiscally sponsored `501(c)(3)` in the USA. We are funded by our generous supporters on Open Collective.
-
-
-
# Disclaimer
Syft is under active development and is not yet ready for pilots on private data without our assistance. As early access participants, please contact us via [Slack](https://slack.openmined.org/) or email if you would like to ask a question or have a use case that you would like to discuss.
diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg
index 02e5c6870a3..59bfee973ea 100644
--- a/packages/syft/setup.cfg
+++ b/packages/syft/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = syft
-version = attr: "0.8.7-beta.9"
+version = attr: "0.8.7-beta.10"
description = Perform numpy-like analysis on data that remains in someone elses server
author = OpenMined
author_email = info@openmined.org
diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION
index 53c9ccb0023..61a3b991302 100644
--- a/packages/syft/src/syft/VERSION
+++ b/packages/syft/src/syft/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.7-beta.9"
+__version__ = "0.8.7-beta.10"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py
index 38cb26cb9e0..d7183898935 100644
--- a/packages/syft/src/syft/__init__.py
+++ b/packages/syft/src/syft/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.8.7-beta.9"
+__version__ = "0.8.7-beta.10"
# stdlib
from collections.abc import Callable
diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py
index 5a1f99a41eb..ee57b642f53 100644
--- a/packages/syft/src/syft/client/registry.py
+++ b/packages/syft/src/syft/client/registry.py
@@ -13,7 +13,8 @@
# relative
from ..service.metadata.node_metadata import NodeMetadataJSON
-from ..service.network.network_service import NodePeer
+from ..service.network.node_peer import NodePeer
+from ..service.network.node_peer import NodePeerConnectionStatus
from ..service.response import SyftException
from ..types.grid_url import GridURL
from ..util.constants import DEFAULT_TIMEOUT
@@ -120,13 +121,40 @@ def _repr_html_(self) -> str:
on = self.online_networks
if len(on) == 0:
return "(no gateways online - try syft.gateways.all_networks to see offline gateways)"
- return pd.DataFrame(on)._repr_html_() # type: ignore
+ df = pd.DataFrame(on)
+ total_df = pd.DataFrame(
+ [
+ [
+ f"{len(on)} / {len(self.all_networks)} (online networks / all networks)"
+ ]
+ + [""] * (len(df.columns) - 1)
+ ],
+ columns=df.columns,
+ index=["Total"],
+ )
+ df = pd.concat([df, total_df])
+ return df._repr_html_() # type: ignore
def __repr__(self) -> str:
on = self.online_networks
if len(on) == 0:
return "(no gateways online - try syft.gateways.all_networks to see offline gateways)"
- return pd.DataFrame(on).to_string()
+ df = pd.DataFrame(on)
+ total_df = pd.DataFrame(
+ [
+ [
+ f"{len(on)} / {len(self.all_networks)} (online networks / all networks)"
+ ]
+ + [""] * (len(df.columns) - 1)
+ ],
+ columns=df.columns,
+ index=["Total"],
+ )
+ df = pd.concat([df, total_df])
+ return df.to_string()
+
+ def __len__(self) -> int:
+ return len(self.all_networks)
@staticmethod
def create_client(network: dict[str, Any]) -> Client:
@@ -228,32 +256,25 @@ def check_network(network: dict) -> dict[Any, Any] | None:
@property
def online_domains(self) -> list[tuple[NodePeer, NodeMetadataJSON | None]]:
- def check_domain(
- peer: NodePeer,
- ) -> tuple[NodePeer, NodeMetadataJSON | None] | None:
- try:
- guest_client = peer.guest_client
- metadata = guest_client.metadata
- return peer, metadata
- except Exception as e: # nosec
- print(f"Error in checking domain with exception {e}")
- return None
-
networks = self.online_networks
- # We can use a with statement to ensure threads are cleaned up promptly
- with futures.ThreadPoolExecutor(max_workers=20) as executor:
- # map
- _all_online_domains = []
- for network in networks:
+ _all_online_domains = []
+ for network in networks:
+ try:
network_client = NetworkRegistry.create_client(network)
- domains: list[NodePeer] = network_client.domains.retrieve_nodes()
- for domain in domains:
- self.all_domains[str(domain.id)] = domain
- _online_domains = list(
- executor.map(lambda domain: check_domain(domain), domains)
- )
- _all_online_domains += _online_domains
+ except Exception as e:
+ print(f"Error in creating network client with exception {e}")
+ continue
+
+ domains: list[NodePeer] = network_client.domains.retrieve_nodes()
+ for domain in domains:
+ self.all_domains[str(domain.id)] = domain
+
+ _all_online_domains += [
+ (domain, domain.guest_client.metadata)
+ for domain in domains
+ if domain.ping_status == NodePeerConnectionStatus.ACTIVE
+ ]
return [domain for domain in _all_online_domains if domain is not None]
@@ -281,13 +302,33 @@ def _repr_html_(self) -> str:
on: list[dict[str, Any]] = self.__make_dict__()
if len(on) == 0:
return "(no domains online - try syft.domains.all_domains to see offline domains)"
- return pd.DataFrame(on)._repr_html_() # type: ignore
+ df = pd.DataFrame(on)
+ total_df = pd.DataFrame(
+ [
+ [f"{len(on)} / {len(self.all_domains)} (online domains / all domains)"]
+ + [""] * (len(df.columns) - 1)
+ ],
+ columns=df.columns,
+ index=["Total"],
+ )
+ df = pd.concat([df, total_df])
+ return df._repr_html_() # type: ignore
def __repr__(self) -> str:
on: list[dict[str, Any]] = self.__make_dict__()
if len(on) == 0:
return "(no domains online - try syft.domains.all_domains to see offline domains)"
- return pd.DataFrame(on).to_string()
+ df = pd.DataFrame(on)
+ total_df = pd.DataFrame(
+ [
+ [f"{len(on)} / {len(self.all_domains)} (online domains / all domains)"]
+ + [""] * (len(df.columns) - 1)
+ ],
+ columns=df.columns,
+ index=["Total"],
+ )
+ df = pd.concat([df, total_df])
+ return df._repr_html_() # type: ignore
def create_client(self, peer: NodePeer) -> Client:
try:
diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json
index faf2c0de850..5f52d682c21 100644
--- a/packages/syft/src/syft/protocol/protocol_version.json
+++ b/packages/syft/src/syft/protocol/protocol_version.json
@@ -253,6 +253,13 @@
"hash": "e982f2ebcdc6fe23a65a014109e33ba7c487bb7ca5623723cf5ec7642f86828c",
"action": "add"
}
+ },
+ "NodePeerUpdate": {
+ "1": {
+ "version": 1,
+ "hash": "9e7cd39f6a9f90e8c595452865525e0989df1688236acfd1a665ed047ba47de9",
+ "action": "add"
+ }
}
}
}
diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py
index 410b996eeca..b38d822c7f4 100644
--- a/packages/syft/src/syft/service/network/network_service.py
+++ b/packages/syft/src/syft/service/network/network_service.py
@@ -5,7 +5,7 @@
from typing import Any
# third party
-from result import Err
+from loguru import logger
from result import Result
# relative
@@ -50,6 +50,7 @@
from ..warnings import CRUDWarning
from .association_request import AssociationRequestChange
from .node_peer import NodePeer
+from .node_peer import NodePeerUpdate
from .routes import HTTPNodeRoute
from .routes import NodeRoute
from .routes import NodeRouteType
@@ -87,13 +88,13 @@ def get_by_name(
def update(
self,
credentials: SyftVerifyKey,
- peer: NodePeer,
+ peer_update: NodePeerUpdate,
has_permission: bool = False,
) -> Result[NodePeer, str]:
- valid = self.check_type(peer, NodePeer)
+ valid = self.check_type(peer_update, NodePeerUpdate)
if valid.is_err():
- return Err(SyftError(message=valid.err()))
- return super().update(credentials, peer)
+ return SyftError(message=valid.err())
+ return super().update(credentials, peer_update, has_permission=has_permission)
def create_or_update_peer(
self, credentials: SyftVerifyKey, peer: NodePeer
@@ -113,13 +114,15 @@ def create_or_update_peer(
valid = self.check_type(peer, NodePeer)
if valid.is_err():
return SyftError(message=valid.err())
- existing: Result | NodePeer = self.get_by_uid(
- credentials=credentials, uid=peer.id
- )
- if existing.is_ok() and existing.ok():
- existing = existing.ok()
- existing.update_routes(peer.node_routes)
- result = self.update(credentials, existing)
+
+ existing = self.get_by_uid(credentials=credentials, uid=peer.id)
+ if existing.is_ok() and existing.ok() is not None:
+ existing_peer: NodePeer = existing.ok()
+ existing_peer.update_routes(peer.node_routes)
+ peer_update = NodePeerUpdate(
+ id=peer.id, node_routes=existing_peer.node_routes
+ )
+ result = self.update(credentials, peer_update)
return result
else:
result = self.set(credentials, peer)
@@ -150,8 +153,6 @@ def __init__(self, store: DocumentStore) -> None:
self.store = store
self.stash = NetworkStash(store=store)
- # TODO: Check with MADHAVA, can we even allow guest user to introduce routes to
- # domain nodes?
@service_method(
path="network.exchange_credentials_with",
name="exchange_credentials_with",
@@ -191,26 +192,21 @@ def exchange_credentials_with(
existing_peer_result.is_ok()
and (existing_peer := existing_peer_result.ok()) is not None
):
- msg = [
- (
- f"{existing_peer.node_type} peer '{existing_peer.name}' already exist for "
- f"{self_node_peer.node_type} '{self_node_peer.name}'."
- )
- ]
+ logger.info(
+ f"{remote_node_peer.node_type} '{remote_node_peer.name}' already exist as a peer for "
+ f"{self_node_peer.node_type} '{self_node_peer.name}'."
+ )
+
if existing_peer != remote_node_peer:
result = self.stash.create_or_update_peer(
context.node.verify_key,
remote_node_peer,
)
- msg.append(
- f"{existing_peer.node_type} peer '{existing_peer.name}' information change detected."
- )
if result.is_err():
- msg.append(
- f"Attempt to update peer '{existing_peer.name}' information failed."
+ return SyftError(
+ message=f"Failed to update peer: {remote_node_peer.name} information."
)
- return SyftError(message="\n".join(msg))
- msg.append(
+ logger.info(
f"{existing_peer.node_type} peer '{existing_peer.name}' information successfully updated."
)
@@ -219,28 +215,32 @@ def exchange_credentials_with(
name=self_node_peer.name
)
if isinstance(remote_self_node_peer, NodePeer):
- msg.append(
+ logger.info(
f"{self_node_peer.node_type} '{self_node_peer.name}' already exist "
f"as a peer for {remote_node_peer.node_type} '{remote_node_peer.name}'."
)
if remote_self_node_peer != self_node_peer:
+ updated_peer = NodePeerUpdate(
+ id=self_node_peer.id, node_routes=self_node_peer.node_routes
+ )
result = remote_client.api.services.network.update_peer(
- peer=self_node_peer,
+ peer_update=updated_peer
)
- msg.append(
+ logger.info(
f"{self_node_peer.node_type} peer '{self_node_peer.name}' information change detected."
)
if isinstance(result, SyftError):
- msg.apnpend(
+ logger.error(
f"Attempt to remotely update {self_node_peer.node_type} peer "
- f"'{self_node_peer.name}' information remotely failed."
+ f"'{self_node_peer.name}' information remotely failed. Error: {result.message}"
)
- return SyftError(message="\n".join(msg))
- msg.append(
+ return SyftError(message="Failed to update peer information.")
+
+ logger.info(
f"{self_node_peer.node_type} peer '{self_node_peer.name}' "
f"information successfully updated."
)
- msg.append(
+ msg = (
f"Routes between {remote_node_peer.node_type} '{remote_node_peer.name}' and "
f"{self_node_peer.node_type} '{self_node_peer.name}' already exchanged."
)
@@ -465,20 +465,24 @@ def get_peers_by_type(
return result.ok() or []
@service_method(
- path="network.update_peer", name="update_peer", roles=GUEST_ROLE_LEVEL
+ path="network.update_peer",
+ name="update_peer",
+ roles=GUEST_ROLE_LEVEL,
)
def update_peer(
self,
context: AuthedServiceContext,
- peer: NodePeer,
+ peer_update: NodePeerUpdate,
) -> SyftSuccess | SyftError:
+ # try setting all fields of NodePeerUpdate according to NodePeer
+
result = self.stash.update(
credentials=context.node.verify_key,
- peer=peer,
+ peer_update=peer_update,
)
if result.is_err():
return SyftError(
- message=f"Failed to update peer '{peer.name}'. Error: {result.err()}"
+ message=f"Failed to update peer '{peer_update.name}'. Error: {result.err()}"
)
return SyftSuccess(
message=f"Peer '{result.ok().name}' information successfully updated."
@@ -589,9 +593,12 @@ def add_route(
f"peer '{remote_node_peer.name}' with id '{existed_route.id}'."
)
# update the peer in the store with the updated routes
+ peer_update = NodePeerUpdate(
+ id=remote_node_peer.id, node_routes=remote_node_peer.node_routes
+ )
result = self.stash.update(
credentials=context.node.verify_key,
- peer=remote_node_peer,
+ peer_update=peer_update,
)
if result.is_err():
return SyftError(message=str(result.err()))
@@ -747,8 +754,11 @@ def delete_route(
)
else:
# update the peer with the route removed
+ peer_update = NodePeerUpdate(
+ id=remote_node_peer.id, node_routes=remote_node_peer.node_routes
+ )
result = self.stash.update(
- credentials=context.node.verify_key, peer=remote_node_peer
+ credentials=context.node.verify_key, peer_update=peer_update
)
if result.is_err():
return SyftError(message=str(result.err()))
@@ -846,7 +856,10 @@ def update_route_priority(
return updated_node_route
new_priority: int = updated_node_route.priority
# update the peer in the store
- result = self.stash.update(context.node.verify_key, remote_node_peer)
+ peer_update = NodePeerUpdate(
+ id=remote_node_peer.id, node_routes=remote_node_peer.node_routes
+ )
+ result = self.stash.update(context.node.verify_key, peer_update)
if result.is_err():
return SyftError(message=str(result.err()))
diff --git a/packages/syft/src/syft/service/network/node_peer.py b/packages/syft/src/syft/service/network/node_peer.py
index 4a5447e293d..c2db506ba23 100644
--- a/packages/syft/src/syft/service/network/node_peer.py
+++ b/packages/syft/src/syft/service/network/node_peer.py
@@ -17,6 +17,8 @@
from ...service.response import SyftError
from ...types.datetime import DateTime
from ...types.syft_migration import migrate
+from ...types.syft_object import PartialSyftObject
+from ...types.syft_object import SYFT_OBJECT_VERSION_1
from ...types.syft_object import SYFT_OBJECT_VERSION_2
from ...types.syft_object import SYFT_OBJECT_VERSION_3
from ...types.syft_object import SyftObject
@@ -71,7 +73,7 @@ class NodePeer(SyftObject):
"name",
"node_type",
"admin_email",
- "ping_status.value",
+ "ping_status",
"ping_status_message",
"pinged_timestamp",
]
@@ -117,9 +119,9 @@ def existed_route(
return (False, None)
- def assign_highest_priority(self, route: NodeRoute) -> NodeRoute:
+ def update_route_priority(self, route: NodeRoute) -> NodeRoute:
"""
- Assign the new_route's to have the highest priority
+ Assign the new_route's priority to be current max + 1
Args:
route (NodeRoute): The new route whose priority is to be updated.
@@ -131,15 +133,39 @@ def assign_highest_priority(self, route: NodeRoute) -> NodeRoute:
route.priority = current_max_priority + 1
return route
+ def pick_highest_priority_route(self, oldest: bool = True) -> NodeRoute:
+ """
+ Picks the route with the highest priority from the list of node routes.
+
+ Args:
+ oldest (bool):
+ If True, picks the oldest route to have the highest priority,
+ meaning the route with min priority value.
+ If False, picks the most recent route with the highest priority,
+ meaning the route with max priority value.
+
+ Returns:
+ NodeRoute: The route with the highest priority.
+
+ """
+ highest_priority_route: NodeRoute = self.node_routes[-1]
+ for route in self.node_routes[:-1]:
+ if oldest:
+ if route.priority < highest_priority_route.priority:
+ highest_priority_route = route
+ else:
+ if route.priority > highest_priority_route.priority:
+ highest_priority_route = route
+ return highest_priority_route
+
def update_route(self, route: NodeRoute) -> NodeRoute | None:
"""
Update the route for the node.
If the route already exists, return it.
- If the route is new, assign it to have the highest priority
- before appending it to the peer's list of node routes.
+ If the route is new, assign it to have the priority of (current_max + 1)
Args:
- route (NodeRoute): The new route to be added to the peer.
+ route (NodeRoute): The new route to be added to the peer's node route list
Returns:
NodeRoute | None: if the route already exists, return it, else returns None
@@ -148,7 +174,7 @@ def update_route(self, route: NodeRoute) -> NodeRoute | None:
if existed:
return route
else:
- new_route = self.assign_highest_priority(route)
+ new_route = self.update_route_priority(route)
self.node_routes.append(new_route)
return None
@@ -199,7 +225,7 @@ def update_existed_route_priority(
if priority is not None:
self.node_routes[index].priority = priority
else:
- self.node_routes[index].priority = self.assign_highest_priority(
+ self.node_routes[index].priority = self.update_route_priority(
route
).priority
@@ -223,7 +249,7 @@ def client_with_context(
if len(self.node_routes) < 1:
raise ValueError(f"No routes to peer: {self}")
- # select the highest priority route (i.e. added or updated the latest)
+ # select the route with highest priority to connect to the peer
final_route: NodeRoute = self.pick_highest_priority_route()
connection: NodeConnection = route_to_connection(route=final_route)
try:
@@ -244,7 +270,7 @@ def client_with_context(
def client_with_key(self, credentials: SyftSigningKey) -> SyftClient | SyftError:
if len(self.node_routes) < 1:
raise ValueError(f"No routes to peer: {self}")
- # select the latest added route
+
final_route: NodeRoute = self.pick_highest_priority_route()
connection = route_to_connection(route=final_route)
@@ -262,13 +288,6 @@ def guest_client(self) -> SyftClient:
def proxy_from(self, client: SyftClient) -> SyftClient:
return client.proxy_to(self)
- def pick_highest_priority_route(self) -> NodeRoute:
- highest_priority_route: NodeRoute = self.node_routes[-1]
- for route in self.node_routes:
- if route.priority > highest_priority_route.priority:
- highest_priority_route = route
- return highest_priority_route
-
def delete_route(
self, route: NodeRouteType | None = None, route_id: UID | None = None
) -> SyftError | None:
@@ -302,6 +321,20 @@ def delete_route(
return None
+@serializable()
+class NodePeerUpdate(PartialSyftObject):
+ __canonical_name__ = "NodePeerUpdate"
+ __version__ = SYFT_OBJECT_VERSION_1
+
+ id: UID
+ name: str
+ node_routes: list[NodeRouteType]
+ admin_email: str
+ ping_status: NodePeerConnectionStatus
+ ping_status_message: str
+ pinged_timestamp: DateTime
+
+
def drop_veilid_route() -> Callable:
def _drop_veilid_route(context: TransformContext) -> TransformContext:
if context.output:
diff --git a/packages/syft/src/syft/service/network/utils.py b/packages/syft/src/syft/service/network/utils.py
index c9e98da6179..b03bc589d15 100644
--- a/packages/syft/src/syft/service/network/utils.py
+++ b/packages/syft/src/syft/service/network/utils.py
@@ -15,6 +15,7 @@
from .network_service import NodePeerAssociationStatus
from .node_peer import NodePeer
from .node_peer import NodePeerConnectionStatus
+from .node_peer import NodePeerUpdate
@serializable(without=["thread"])
@@ -51,20 +52,22 @@ def peer_route_heathcheck(self, context: AuthedServiceContext) -> SyftError | No
all_peers: list[NodePeer] = result.ok()
for peer in all_peers:
- peer.pinged_timestamp = DateTime.now()
+ peer_update = NodePeerUpdate(id=peer.id)
+ peer_update.pinged_timestamp = DateTime.now()
try:
peer_client = peer.client_with_context(context=context)
if peer_client.is_err():
logger.error(
f"Failed to create client for peer: {peer}: {peer_client.err()}"
)
- peer.ping_status = NodePeerConnectionStatus.TIMEOUT
+ peer_update.ping_status = NodePeerConnectionStatus.TIMEOUT
peer_client = None
except Exception as e:
logger.error(
f"Failed to create client for peer: {peer} with exception {e}"
)
- peer.ping_status = NodePeerConnectionStatus.TIMEOUT
+
+ peer_update.ping_status = NodePeerConnectionStatus.TIMEOUT
peer_client = None
if peer_client is not None:
@@ -72,21 +75,24 @@ def peer_route_heathcheck(self, context: AuthedServiceContext) -> SyftError | No
peer_status = peer_client.api.services.network.check_peer_association(
peer_id=context.node.id
)
- peer.ping_status = (
+ peer_update.ping_status = (
NodePeerConnectionStatus.ACTIVE
if peer_status == NodePeerAssociationStatus.PEER_ASSOCIATED
else NodePeerConnectionStatus.INACTIVE
)
if isinstance(peer_status, SyftError):
- peer.ping_status_message = (
+ peer_update.ping_status_message = (
f"Error `{peer_status.message}` when pinging peer '{peer.name}'"
)
else:
- peer.ping_status_message = f"Peer '{peer.name}''s ping status: {peer.ping_status.value.lower()}"
+ peer_update.ping_status_message = (
+ f"Peer '{peer.name}''s ping status: "
+ f"{peer_update.ping_status.value.lower()}"
+ )
result = network_stash.update(
credentials=context.node.verify_key,
- peer=peer,
+ peer_update=peer_update,
has_permission=True,
)
diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py
index c5fda8e0f3e..5bf5739d6fc 100644
--- a/packages/syft/src/syft/service/policy/policy.py
+++ b/packages/syft/src/syft/service/policy/policy.py
@@ -17,6 +17,7 @@
# third party
from RestrictedPython import compile_restricted
+import requests
from result import Err
from result import Ok
from result import Result
@@ -151,15 +152,20 @@ def partition_by_node(kwargs: dict[str, Any]) -> dict[NodeIdentity, dict[str, UI
_obj_exists = False
for api in api_list:
- if api.services.action.exists(uid):
- node_identity = NodeIdentity.from_api(api)
- if node_identity not in output_kwargs:
- output_kwargs[node_identity] = {k: uid}
- else:
- output_kwargs[node_identity].update({k: uid})
-
- _obj_exists = True
- break
+ try:
+ if api.services.action.exists(uid):
+ node_identity = NodeIdentity.from_api(api)
+ if node_identity not in output_kwargs:
+ output_kwargs[node_identity] = {k: uid}
+ else:
+ output_kwargs[node_identity].update({k: uid})
+
+ _obj_exists = True
+ break
+ except requests.exceptions.ConnectionError:
+ # To handle cases where there are stale api objects in
+ # the APIRegistry
+ continue
if not _obj_exists:
raise Exception(f"Input data {k}:{uid} does not belong to any Domain")
diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py
index 55f2c1f4b5d..cda115cb8b4 100644
--- a/packages/syft/src/syft/service/service.py
+++ b/packages/syft/src/syft/service/service.py
@@ -289,7 +289,7 @@ def reconstruct_args_kwargs(
else:
raise Exception(f"Missing {param_key} not in kwargs.")
- if "context":
+ if "context" in kwargs:
final_kwargs["context"] = kwargs["context"]
return (args, final_kwargs)
diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py
index fa38d6c1ba8..59d6799c2bb 100644
--- a/packages/syft/src/syft/store/mongo_document_store.py
+++ b/packages/syft/src/syft/store/mongo_document_store.py
@@ -357,17 +357,17 @@ def _update(
if has_permission or self.has_permission(
ActionObjectWRITE(uid=prev_obj.id, credentials=credentials)
):
- # we don't want to overwrite Mongo's "id_" or Syft's "id" on update
- obj_id = obj["id"]
+ for key, value in obj.to_dict(exclude_empty=True).items():
+ # we don't want to overwrite Mongo's "id_" or Syft's "id" on update
+ if key == "id":
+ # protected field
+ continue
- # Set ID to the updated object value
- obj.id = prev_obj["id"]
+ # Overwrite the value if the key is already present
+ setattr(prev_obj, key, value)
# Create the Mongo object
- storage_obj = obj.to(self.storage_type)
-
- # revert the ID
- obj.id = obj_id
+ storage_obj = prev_obj.to(self.storage_type)
try:
collection.update_one(
diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py
index f07df398dd5..8ef1b2803a8 100644
--- a/packages/syft/src/syft/store/sqlite_document_store.py
+++ b/packages/syft/src/syft/store/sqlite_document_store.py
@@ -351,9 +351,8 @@ def __iter__(self) -> Any:
def __del__(self) -> None:
try:
self._close()
- except BaseException:
- print("Could not close connection")
- pass
+ except Exception as e:
+ print(f"Could not close connection. Error: {e}")
@serializable()
diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml
index 9aeaf0e2624..a9e200f513d 100644
--- a/packages/syftcli/manifest.yml
+++ b/packages/syftcli/manifest.yml
@@ -1,11 +1,11 @@
manifestVersion: 1.0
-syftVersion: 0.8.7-beta.9
-dockerTag: 0.8.7-beta.9
+syftVersion: 0.8.7-beta.10
+dockerTag: 0.8.7-beta.10
images:
- - docker.io/openmined/grid-frontend:0.8.7-beta.9
- - docker.io/openmined/grid-backend:0.8.7-beta.9
+ - docker.io/openmined/grid-frontend:0.8.7-beta.10
+ - docker.io/openmined/grid-backend:0.8.7-beta.10
- docker.io/library/mongo:7.0.4
- docker.io/traefik:v2.11.0
diff --git a/tests/integration/local/gateway_local_test.py b/tests/integration/local/gateway_local_test.py
index a26e6ad35bb..ef71e3faf0e 100644
--- a/tests/integration/local/gateway_local_test.py
+++ b/tests/integration/local/gateway_local_test.py
@@ -132,6 +132,7 @@ def test_create_gateway(
set_network_json_env_var, gateway_webserver, domain_webserver, domain_2_webserver
):
assert isinstance(sy.gateways, sy.NetworkRegistry)
+ assert len(sy.gateways) == 1
assert len(sy.gateways.all_networks) == 1
assert sy.gateways.all_networks[0]["name"] == gateway_webserver.name
assert len(sy.gateways.online_networks) == 1
@@ -157,14 +158,15 @@ def test_create_gateway(
result = domain_client_2.connect_to_gateway(handle=gateway_webserver)
assert isinstance(result, SyftSuccess)
- time.sleep(PeerHealthCheckTask.repeat_time + 1)
+ time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1)
assert len(sy.domains.all_domains) == 2
assert len(sy.domains.online_domains) == 2
+ # check for peer connection status
+ for peer in gateway_client.api.services.network.get_all_peers():
+ assert peer.ping_status == NodePeerConnectionStatus.ACTIVE
-
-@pytest.mark.local_node
-def test_create_gateway_client(gateway):
- client = gateway.client
+ # check the guest client
+ client = gateway_webserver.client
assert isinstance(client, GatewayClient)
assert client.metadata.node_type == NodeType.GATEWAY.value
@@ -244,7 +246,6 @@ def test_domain_connect_to_gateway(gateway_association_request_auto_approval, do
def test_domain_connect_to_gateway_routes_priority(gateway, domain, domain_2) -> None:
"""
A test for routes' priority (PythonNodeRoute)
- TODO: Add a similar test for HTTPNodeRoute
"""
gateway_client: GatewayClient = gateway.login(
email="info@openmined.org",
@@ -375,8 +376,3 @@ def test_repeated_association_requests_peers_health_check(
)
assert isinstance(res, NodePeerAssociationStatus)
assert res.value == "PEER_ASSOCIATED"
-
- # check for peer connection status
- time.sleep(PeerHealthCheckTask.repeat_time + 1)
- domain_peer = gateway_client.api.services.network.get_all_peers()[0]
- assert domain_peer.ping_status == NodePeerConnectionStatus.ACTIVE
diff --git a/tests/integration/network/gateway_test.py b/tests/integration/network/gateway_test.py
index be72aae81e6..9c42a9e9687 100644
--- a/tests/integration/network/gateway_test.py
+++ b/tests/integration/network/gateway_test.py
@@ -84,6 +84,7 @@ def test_network_registry_from_url() -> None:
assert len(sy.gateways.all_networks) == len(sy.gateways.online_networks) == 1
+@pytest.mark.network
def test_network_registry_env_var(set_env_var) -> None:
assert isinstance(sy.gateways, NetworkRegistry)
assert len(sy.gateways.all_networks) == len(sy.gateways.online_networks) == 1
@@ -91,6 +92,7 @@ def test_network_registry_env_var(set_env_var) -> None:
assert isinstance(sy.gateways[0].connection, HTTPConnection)
+@pytest.mark.network
def test_domain_connect_to_gateway(
set_env_var, domain_1_port: int, gateway_port: int
) -> None:
@@ -106,6 +108,10 @@ def test_domain_connect_to_gateway(
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # Try removing existing peers just to make sure
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
# connecting the domain to the gateway
result = domain_client.connect_to_gateway(gateway_client)
assert isinstance(result, Request)
@@ -122,6 +128,7 @@ def test_domain_connect_to_gateway(
assert len(gateway_client.peers) == 1
+ time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1)
# check that the domain is online on the network
assert len(sy.domains.all_domains) == 1
assert len(sy.domains.online_domains) == 1
@@ -163,6 +170,7 @@ def test_domain_connect_to_gateway(
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_dataset_search(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Scenario: Connecting a domain node to a gateway node. The domain
@@ -177,14 +185,16 @@ def test_dataset_search(set_env_var, gateway_port: int, domain_1_port: int) -> N
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # Try removing existing peers just to make sure
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
res = gateway_client.settings.allow_association_request_auto_approval(enable=True)
assert isinstance(res, SyftSuccess)
# connect the domain to the gateway
result = domain_client.connect_to_gateway(gateway_client)
assert isinstance(result, SyftSuccess)
- assert len(sy.gateways.all_networks) == len(sy.gateways.online_networks) == 1
- assert len(sy.domains.all_domains) == len(sy.domains.online_domains) == 1
# the domain client uploads a dataset
input_data = np.array([1, 2, 3])
@@ -196,6 +206,9 @@ def test_dataset_search(set_env_var, gateway_port: int, domain_1_port: int) -> N
dataset_res = domain_client.upload_dataset(dataset)
assert isinstance(dataset_res, SyftSuccess)
+ # since dataset search only checks the online domains,
+ # we need to wait until the peers' health check has run
+ time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1)
# test if the dataset can be searched by the syft network
right_search = sy.search(dataset_name)
assert isinstance(right_search, SearchResults)
@@ -218,6 +231,8 @@ def test_dataset_search(set_env_var, gateway_port: int, domain_1_port: int) -> N
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.skip(reason="Possible bug")
+@pytest.mark.network
def test_domain_gateway_user_code(
set_env_var, domain_1_port: int, gateway_port: int
) -> None:
@@ -229,6 +244,10 @@ def test_domain_gateway_user_code(
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # Try removing existing peers just to make sure
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
# the domain client uploads a dataset
input_data = np.array([1, 2, 3])
mock_data = np.array([4, 5, 6])
@@ -294,6 +313,7 @@ def mock_function(asset):
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_deleting_peers(set_env_var, domain_1_port: int, gateway_port: int) -> None:
# login to the domain and gateway
gateway_client: GatewayClient = sy.login(
@@ -303,6 +323,10 @@ def test_deleting_peers(set_env_var, domain_1_port: int, gateway_port: int) -> N
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # clean up before test
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
# Enable automatic acceptance of association requests
res = gateway_client.settings.allow_association_request_auto_approval(enable=True)
assert isinstance(res, SyftSuccess)
@@ -312,50 +336,41 @@ def test_deleting_peers(set_env_var, domain_1_port: int, gateway_port: int) -> N
assert isinstance(result, SyftSuccess)
assert len(domain_client.peers) == 1
assert len(gateway_client.peers) == 1
- # check that the domain is online on the network
- assert len(sy.domains.all_domains) == 1
- assert len(sy.domains.online_domains) == 1
# Remove existing peers
assert isinstance(_remove_existing_peers(domain_client), SyftSuccess)
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
# check that removing peers work as expected
+ assert len(domain_client.peers) == 0
+ assert len(gateway_client.peers) == 0
+
+ # check that the online domains and gateways are updated
+ time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1)
assert len(sy.gateways.all_networks) == 1
assert len(sy.domains.all_domains) == 0
- assert len(sy.domains.all_domains) == 0
assert len(sy.domains.online_domains) == 0
- assert len(domain_client.peers) == 0
- assert len(gateway_client.peers) == 0
# reconnect the domain to the gateway
result = domain_client.connect_to_gateway(gateway_client)
assert isinstance(result, SyftSuccess)
assert len(domain_client.peers) == 1
assert len(gateway_client.peers) == 1
- # check that the domain
- assert len(sy.domains.all_domains) == 1
- assert len(sy.domains.online_domains) == 1
# Remove existing peers
assert isinstance(_remove_existing_peers(domain_client), SyftSuccess)
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
# check that removing peers work as expected
- assert len(sy.domains.all_domains) == 0
- assert len(sy.domains.all_domains) == 0
- assert len(sy.domains.online_domains) == 0
assert len(domain_client.peers) == 0
assert len(gateway_client.peers) == 0
-def test_add_update_route_priority(
- set_env_var, gateway_port: int, domain_1_port: int
-) -> None:
+@pytest.mark.network
+def test_add_route(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Test the network service's `add_route` functionalities to add routes directly
for a self domain.
Scenario: Connect a domain to a gateway. The gateway adds 2 new routes to the domain
- and check their priorities.
- Then update an existed route's priority and check if its priority gets updated.
+ and check their priorities get updated.
Check for the gateway if the proxy client to connect to the domain uses the
route with the highest priority.
"""
@@ -414,21 +429,8 @@ def test_add_update_route_priority(
assert domain_peer.node_routes[0].priority == 1
# getting the proxy client using the current highest priority route should
- # give back an error since it is a route with a random port (10001)
- proxy_domain_client = gateway_client.peers[0]
- assert isinstance(proxy_domain_client, SyftError)
- assert "Failed to establish a connection with" in proxy_domain_client.message
-
- # update the valid route to have the highest priority
- res = gateway_client.api.services.network.update_route_priority(
- peer_verify_key=domain_peer.verify_key, route=domain_peer.node_routes[0]
- )
- assert isinstance(res, SyftSuccess)
- domain_peer = gateway_client.api.services.network.get_all_peers()[0]
- assert len(domain_peer.node_routes) == 3
- assert domain_peer.node_routes[0].priority == 4
-
- # proxying should success now
+ # be successful since now we pick the oldest route (port 9082 with priority 1)
+ # to have the highest priority by default
proxy_domain_client = gateway_client.peers[0]
assert isinstance(proxy_domain_client, DomainClient)
@@ -441,6 +443,7 @@ def test_add_update_route_priority(
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_delete_route(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Scenario:
@@ -455,6 +458,10 @@ def test_delete_route(set_env_var, gateway_port: int, domain_1_port: int) -> Non
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # Try removing existing peers just to make sure
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
# Enable automatic acceptance of association requests
res = gateway_client.settings.allow_association_request_auto_approval(enable=True)
assert isinstance(res, SyftSuccess)
@@ -490,14 +497,12 @@ def test_delete_route(set_env_var, gateway_port: int, domain_1_port: int) -> Non
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
-def test_add_update_route_priority_on_peer(
- set_env_var, gateway_port: int, domain_1_port: int
-) -> None:
+@pytest.mark.network
+def test_add_route_on_peer(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Test the `add_route_on_peer` of network service.
Connect a domain to a gateway.
- The gateway adds 2 new routes for the domain and check their priorities.
- The gateway updates the route priority for the domain remotely.
+    The gateway adds 2 new routes for itself remotely on the domain and checks their priorities.
Then the domain adds a route to itself for the gateway.
"""
# login to the domain and gateway
@@ -532,7 +537,7 @@ def test_add_update_route_priority_on_peer(
peer=domain_peer, route=new_route
)
assert isinstance(res, SyftSuccess)
- gateway_peer = domain_client.peers[0]
+ gateway_peer = domain_client.api.services.network.get_all_peers()[0]
assert len(gateway_peer.node_routes) == 2
assert gateway_peer.node_routes[-1].port == new_route.port
assert gateway_peer.node_routes[-1].priority == 2
@@ -543,18 +548,11 @@ def test_add_update_route_priority_on_peer(
peer=domain_peer, route=new_route2
)
assert isinstance(res, SyftSuccess)
- gateway_peer = domain_client.peers[0]
+ gateway_peer = domain_client.api.services.network.get_all_peers()[0]
assert len(gateway_peer.node_routes) == 3
assert gateway_peer.node_routes[-1].port == new_route2.port
assert gateway_peer.node_routes[-1].priority == 3
- # update the route priority remotely on the domain
- first_route = gateway_peer.node_routes[0]
- res = gateway_client.api.services.network.update_route_priority_on_peer(
- peer=domain_peer, route=first_route
- )
- assert isinstance(res, SyftSuccess)
-
# the domain calls `add_route_on_peer` to to add a route to itself for the gateway
assert len(domain_peer.node_routes) == 1
res = domain_client.api.services.network.add_route_on_peer(
@@ -570,6 +568,7 @@ def test_add_update_route_priority_on_peer(
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_delete_route_on_peer(
set_env_var, gateway_port: int, domain_1_port: int
) -> None:
@@ -585,6 +584,10 @@ def test_delete_route_on_peer(
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ # Remove existing peers
+ _remove_existing_peers(domain_client)
+ _remove_existing_peers(gateway_client)
+
# Enable automatic acceptance of association requests
res = gateway_client.settings.allow_association_request_auto_approval(enable=True)
assert isinstance(res, SyftSuccess)
@@ -637,6 +640,7 @@ def test_delete_route_on_peer(
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_update_route_priority(
set_env_var, gateway_port: int, domain_1_port: int
) -> None:
@@ -693,6 +697,8 @@ def test_update_route_priority(
}
assert routes_port_priority[new_route.port] == 5
+    # if we don't specify `priority`, the route is automatically updated
+    # to have the highest priority value among all existing routes
res = gateway_client.api.services.network.update_route_priority(
peer_verify_key=domain_peer.verify_key, route=new_route2
)
@@ -708,6 +714,7 @@ def test_update_route_priority(
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_update_route_priority_on_peer(
set_env_var, gateway_port: int, domain_1_port: int
) -> None:
@@ -731,7 +738,7 @@ def test_update_route_priority_on_peer(
result = domain_client.connect_to_gateway(gateway_client)
assert isinstance(result, SyftSuccess)
- # gateway adds 2 new routes for the domain to itself
+ # gateway adds 2 new routes to itself remotely on the domain node
domain_peer: NodePeer = gateway_client.api.services.network.get_all_peers()[0]
new_route = HTTPNodeRoute(host_or_ip="localhost", port=10000)
res = gateway_client.api.services.network.add_route_on_peer(
@@ -760,7 +767,7 @@ def test_update_route_priority_on_peer(
)
assert isinstance(res, SyftSuccess)
res = gateway_client.api.services.network.update_route_priority_on_peer(
- peer=domain_peer, route=gateway_peer.node_routes[0]
+ peer=domain_peer, route=new_route2
)
assert isinstance(res, SyftSuccess)
@@ -769,13 +776,14 @@ def test_update_route_priority_on_peer(
route.port: route.priority for route in gateway_peer.node_routes
}
assert routes_port_priority[new_route.port] == 5
- assert routes_port_priority[gateway_port] == 6
+ assert routes_port_priority[new_route2.port] == 6
# Remove existing peers
assert isinstance(_remove_existing_peers(domain_client), SyftSuccess)
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_dataset_stream(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Scenario: Connecting a domain node to a gateway node. The domain
@@ -800,8 +808,6 @@ def test_dataset_stream(set_env_var, gateway_port: int, domain_1_port: int) -> N
# connect the domain to the gateway
result = domain_client.connect_to_gateway(gateway_client)
assert isinstance(result, SyftSuccess)
- assert len(sy.gateways.all_networks) == len(sy.gateways.online_networks) == 1
- assert len(sy.domains.all_domains) == len(sy.domains.online_domains) == 1
# the domain client uploads a dataset
input_data = np.array([1, 2, 3])
@@ -833,15 +839,12 @@ def test_dataset_stream(set_env_var, gateway_port: int, domain_1_port: int) -> N
assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess)
+@pytest.mark.network
def test_peer_health_check(set_env_var, gateway_port: int, domain_1_port: int) -> None:
"""
Scenario: Connecting a domain node to a gateway node.
The gateway client approves the association request.
The gateway client checks that the domain peer is associated
- TODO: check for peer connection status through NodePeer.pingstatus
- TODO: check that the domain is online with `DomainRegistry.online_domains`
- Then make the domain go offline, which should be reflected when calling
- `DomainRegistry.online_domains`
"""
# login to the domain and gateway
gateway_client: GatewayClient = sy.login(
@@ -902,7 +905,7 @@ def test_peer_health_check(set_env_var, gateway_port: int, domain_1_port: int) -
assert isinstance(res, NodePeerAssociationStatus)
assert res.value == "PEER_ASSOCIATED"
- time.sleep(PeerHealthCheckTask.repeat_time + 1)
+ time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1)
domain_peer = gateway_client.api.services.network.get_all_peers()[0]
assert domain_peer.ping_status == NodePeerConnectionStatus.ACTIVE
diff --git a/tox.ini b/tox.ini
index 2b94b8785c3..3f8d1601474 100644
--- a/tox.ini
+++ b/tox.ini
@@ -395,7 +395,7 @@ allowlist_externals =
setenv =
PYTEST_MODULES = {env:PYTEST_MODULES:local_node}
ASSOCIATION_REQUEST_AUTO_APPROVAL = {env:ASSOCIATION_REQUEST_AUTO_APPROVAL:true}
- PYTEST_FLAGS = {env:PYTEST_FLAGS:--ignore=tests/integration/local/gateway_local_test.py --ignore=tests/integration/local/job_test.py}
+ PYTEST_FLAGS = {env:PYTEST_FLAGS:--ignore=tests/integration/local/job_test.py}
commands =
python -c 'import syft as sy; sy.stage_protocol_changes()'